Diffstat (limited to 'src')
646 files changed, 15266 insertions, 9348 deletions
diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 6ce4c6d62fa..2965174b45b 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -105,7 +105,7 @@ build/ debuginfo/ ... - # Bootstrap host tools (which are always compiled with the stage0 compiler) + # Host tools (which are always compiled with the stage0 compiler) # are stored here. bootstrap-tools/ diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index d8c6be78247..40e08361a0f 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -8,6 +8,7 @@ import re import shutil import subprocess import sys +import sysconfig import tarfile import tempfile @@ -333,7 +334,11 @@ def default_build_triple(verbose): if ostype == "Android": kernel = "linux-android" else: - kernel = "unknown-linux-gnu" + python_soabi = sysconfig.get_config_var("SOABI") + if python_soabi is not None and "musl" in python_soabi: + kernel = "unknown-linux-musl" + else: + kernel = "unknown-linux-gnu" elif kernel == "SunOS": kernel = "pc-solaris" # On Solaris, uname -m will return a machine classification instead diff --git a/src/bootstrap/defaults/bootstrap.tools.toml b/src/bootstrap/defaults/bootstrap.tools.toml index 57c2706f60a..5abe636bd96 100644 --- a/src/bootstrap/defaults/bootstrap.tools.toml +++ b/src/bootstrap/defaults/bootstrap.tools.toml @@ -14,6 +14,8 @@ test-stage = 2 doc-stage = 2 # Contributors working on tools will probably expect compiler docs to be generated, so they can figure out how to use the API. compiler-docs = true +# Contributors working on tools are the most likely to change non-rust programs. +tidy-extra-checks = "auto:js,auto:py,auto:cpp,auto:spellcheck" [llvm] # Will download LLVM from CI if available on your platform. diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index 3278b55305c..f6653ed899b 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -4,13 +4,16 @@ use crate::core::build_steps::compile::{ add_to_sysroot, run_cargo, rustc_cargo, rustc_cargo_env, std_cargo, std_crates_for_run_make, }; use crate::core::build_steps::tool; -use crate::core::build_steps::tool::{COMPILETEST_ALLOW_FEATURES, SourceType, prepare_tool_cargo}; +use crate::core::build_steps::tool::{ + COMPILETEST_ALLOW_FEATURES, SourceType, ToolTargetBuildMode, get_tool_target_compiler, + prepare_tool_cargo, +}; use crate::core::builder::{ self, Alias, Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata, crate_description, }; use crate::core::config::TargetSelection; use crate::utils::build_stamp::{self, BuildStamp}; -use crate::{Compiler, Mode, Subcommand}; +use crate::{CodegenBackendKind, Compiler, Mode, Subcommand}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Std { @@ -252,8 +255,10 @@ fn prepare_compiler_for_check( mode: Mode, ) -> Compiler { let host = builder.host_target; + match mode { Mode::ToolBootstrap => builder.compiler(0, host), + Mode::ToolTarget => get_tool_target_compiler(builder, ToolTargetBuildMode::Build(target)), Mode::ToolStd => { if builder.config.compile_time_deps { // When --compile-time-deps is passed, we can't use any rustc @@ -307,7 +312,7 @@ fn prepare_compiler_for_check( pub struct CodegenBackend { pub build_compiler: Compiler, pub target: TargetSelection, - pub backend: &'static str, + pub backend: CodegenBackendKind, } impl Step for CodegenBackend { @@ -322,14 +327,14 @@ impl Step for CodegenBackend { fn make_run(run: RunConfig<'_>) { // FIXME: only 
check the backend(s) that were actually selected in run.paths let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::Codegen); - for &backend in &["cranelift", "gcc"] { + for backend in [CodegenBackendKind::Cranelift, CodegenBackendKind::Gcc] { run.builder.ensure(CodegenBackend { build_compiler, target: run.target, backend }); } } fn run(self, builder: &Builder<'_>) { // FIXME: remove once https://github.com/rust-lang/rust/issues/112393 is resolved - if builder.build.config.vendor && self.backend == "gcc" { + if builder.build.config.vendor && self.backend.is_gcc() { println!("Skipping checking of `rustc_codegen_gcc` with vendoring enabled."); return; } @@ -349,138 +354,23 @@ impl Step for CodegenBackend { cargo .arg("--manifest-path") - .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); - rustc_cargo_env(builder, &mut cargo, target, build_compiler.stage); + .arg(builder.src.join(format!("compiler/{}/Cargo.toml", backend.crate_name()))); + rustc_cargo_env(builder, &mut cargo, target); - let _guard = builder.msg_check(format!("rustc_codegen_{backend}"), target, None); + let _guard = builder.msg_check(backend.crate_name(), target, None); - let stamp = build_stamp::codegen_backend_stamp(builder, build_compiler, target, backend) + let stamp = build_stamp::codegen_backend_stamp(builder, build_compiler, target, &backend) .with_prefix("check"); run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false); } fn metadata(&self) -> Option<StepMetadata> { - Some(StepMetadata::check(self.backend, self.target).built_by(self.build_compiler)) - } -} - -/// Checks Rust analyzer that links to .rmetas from a checked rustc. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct RustAnalyzer { - pub build_compiler: Compiler, - pub target: TargetSelection, -} - -impl Step for RustAnalyzer { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = true; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - run.path("src/tools/rust-analyzer").default_condition( - builder - .config - .tools - .as_ref() - .is_none_or(|tools| tools.iter().any(|tool| tool == "rust-analyzer")), + Some( + StepMetadata::check(&self.backend.crate_name(), self.target) + .built_by(self.build_compiler), ) } - - fn make_run(run: RunConfig<'_>) { - let build_compiler = prepare_compiler_for_check(run.builder, run.target, Mode::ToolRustc); - run.builder.ensure(RustAnalyzer { build_compiler, target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let build_compiler = self.build_compiler; - let target = self.target; - - let mut cargo = prepare_tool_cargo( - builder, - build_compiler, - Mode::ToolRustc, - target, - builder.kind, - "src/tools/rust-analyzer", - SourceType::InTree, - &["in-rust-tree".to_owned()], - ); - - cargo.allow_features(crate::core::build_steps::tool::RustAnalyzer::ALLOW_FEATURES); - - cargo.arg("--bins"); - cargo.arg("--tests"); - cargo.arg("--benches"); - - // Cargo's output path in a given stage, compiled by a particular - // compiler for the specified target. 
- let stamp = BuildStamp::new(&builder.cargo_out(build_compiler, Mode::ToolRustc, target)) - .with_prefix("rust-analyzer-check"); - - let _guard = builder.msg_check("rust-analyzer artifacts", target, None); - run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false); - } - - fn metadata(&self) -> Option<StepMetadata> { - Some(StepMetadata::check("rust-analyzer", self.target).built_by(self.build_compiler)) - } -} - -/// Compiletest is implicitly "checked" when it gets built in order to run tests, -/// so this is mainly for people working on compiletest to run locally. -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Compiletest { - pub target: TargetSelection, -} - -impl Step for Compiletest { - type Output = (); - const ONLY_HOSTS: bool = true; - const DEFAULT: bool = false; - - fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/tools/compiletest") - } - - fn make_run(run: RunConfig<'_>) { - run.builder.ensure(Compiletest { target: run.target }); - } - - fn run(self, builder: &Builder<'_>) { - let mode = if builder.config.compiletest_use_stage0_libtest { - Mode::ToolBootstrap - } else { - Mode::ToolStd - }; - let build_compiler = prepare_compiler_for_check(builder, self.target, mode); - - let mut cargo = prepare_tool_cargo( - builder, - build_compiler, - mode, - self.target, - builder.kind, - "src/tools/compiletest", - SourceType::InTree, - &[], - ); - - cargo.allow_features(COMPILETEST_ALLOW_FEATURES); - - cargo.arg("--all-targets"); - - let stamp = BuildStamp::new(&builder.cargo_out(build_compiler, mode, self.target)) - .with_prefix("compiletest-check"); - - let _guard = builder.msg_check("compiletest artifacts", self.target, None); - run_cargo(builder, cargo, builder.config.free_args.clone(), &stamp, vec![], true, false); - } - - fn metadata(&self) -> Option<StepMetadata> { - Some(StepMetadata::check("compiletest", self.target)) - } } macro_rules! tool_check_step { @@ -489,8 +379,12 @@ macro_rules! tool_check_step { // The part of this path after the final '/' is also used as a display name. path: $path:literal $(, alt_path: $alt_path:literal )* - , mode: $mode:path + // Closure that returns `Mode` based on the passed `&Builder<'_>` + , mode: $mode:expr + // Subset of nightly features that are allowed to be used when checking $(, allow_features: $allow_features:expr )? + // Features that should be enabled when checking + $(, enable_features: [$($enable_features:expr),*] )? $(, default: $default:literal )? $( , )? } @@ -513,10 +407,13 @@ macro_rules! tool_check_step { fn make_run(run: RunConfig<'_>) { let target = run.target; - let build_compiler = prepare_compiler_for_check(run.builder, target, $mode); + let builder = run.builder; + let mode = $mode(builder); + + let build_compiler = prepare_compiler_for_check(run.builder, target, mode); // It doesn't make sense to cross-check bootstrap tools - if $mode == Mode::ToolBootstrap && target != run.builder.host_target { + if mode == Mode::ToolBootstrap && target != run.builder.host_target { println!("WARNING: not checking bootstrap tool {} for target {target} as it is a bootstrap (host-only) tool", stringify!($path)); return; }; @@ -531,7 +428,9 @@ macro_rules! tool_check_step { $( _value = $allow_features; )? 
_value }; - run_tool_check_step(builder, build_compiler, target, $path, $mode, allow_features); + let extra_features: &[&str] = &[$($($enable_features),*)?]; + let mode = $mode(builder); + run_tool_check_step(builder, build_compiler, target, $path, mode, allow_features, extra_features); } fn metadata(&self) -> Option<StepMetadata> { @@ -549,9 +448,11 @@ fn run_tool_check_step( path: &str, mode: Mode, allow_features: &str, + extra_features: &[&str], ) { let display_name = path.rsplit('/').next().unwrap(); + let extra_features = extra_features.iter().map(|f| f.to_string()).collect::<Vec<String>>(); let mut cargo = prepare_tool_cargo( builder, build_compiler, @@ -564,12 +465,19 @@ fn run_tool_check_step( // steps should probably be marked non-default so that the default // checks aren't affected by toolstate being broken. SourceType::InTree, - &[], + &extra_features, ); cargo.allow_features(allow_features); - // FIXME: check bootstrap doesn't currently work with --all-targets - cargo.arg("--all-targets"); + // FIXME: check bootstrap doesn't currently work when multiple targets are checked + // FIXME: rust-analyzer does not work with --all-targets + if display_name == "rust-analyzer" { + cargo.arg("--bins"); + cargo.arg("--tests"); + cargo.arg("--benches"); + } else { + cargo.arg("--all-targets"); + } let stamp = BuildStamp::new(&builder.cargo_out(build_compiler, mode, target)) .with_prefix(&format!("{display_name}-check")); @@ -588,43 +496,72 @@ fn run_tool_check_step( tool_check_step!(Rustdoc { path: "src/tools/rustdoc", alt_path: "src/librustdoc", - mode: Mode::ToolRustc + mode: |_builder| Mode::ToolRustc }); // Clippy, miri and Rustfmt are hybrids. They are external tools, but use a git subtree instead // of a submodule. Since the SourceType only drives the deny-warnings // behavior, treat it as in-tree so that any new warnings in clippy will be // rejected. 
-tool_check_step!(Clippy { path: "src/tools/clippy", mode: Mode::ToolRustc }); -tool_check_step!(Miri { path: "src/tools/miri", mode: Mode::ToolRustc }); -tool_check_step!(CargoMiri { path: "src/tools/miri/cargo-miri", mode: Mode::ToolRustc }); -tool_check_step!(Rustfmt { path: "src/tools/rustfmt", mode: Mode::ToolRustc }); +tool_check_step!(Clippy { path: "src/tools/clippy", mode: |_builder| Mode::ToolRustc }); +tool_check_step!(Miri { path: "src/tools/miri", mode: |_builder| Mode::ToolRustc }); +tool_check_step!(CargoMiri { path: "src/tools/miri/cargo-miri", mode: |_builder| Mode::ToolRustc }); +tool_check_step!(Rustfmt { path: "src/tools/rustfmt", mode: |_builder| Mode::ToolRustc }); +tool_check_step!(RustAnalyzer { + path: "src/tools/rust-analyzer", + mode: |_builder| Mode::ToolRustc, + allow_features: tool::RustAnalyzer::ALLOW_FEATURES, + enable_features: ["in-rust-tree"], +}); tool_check_step!(MiroptTestTools { path: "src/tools/miropt-test-tools", - mode: Mode::ToolBootstrap + mode: |_builder| Mode::ToolBootstrap }); // We want to test the local std tool_check_step!(TestFloatParse { path: "src/tools/test-float-parse", - mode: Mode::ToolStd, + mode: |_builder| Mode::ToolStd, allow_features: tool::TestFloatParse::ALLOW_FEATURES }); tool_check_step!(FeaturesStatusDump { path: "src/tools/features-status-dump", - mode: Mode::ToolBootstrap + mode: |_builder| Mode::ToolBootstrap }); -tool_check_step!(Bootstrap { path: "src/bootstrap", mode: Mode::ToolBootstrap, default: false }); +tool_check_step!(Bootstrap { + path: "src/bootstrap", + mode: |_builder| Mode::ToolBootstrap, + default: false +}); // `run-make-support` will be built as part of suitable run-make compiletest test steps, but support // check to make it easier to work on. tool_check_step!(RunMakeSupport { path: "src/tools/run-make-support", - mode: Mode::ToolBootstrap, + mode: |_builder| Mode::ToolBootstrap, default: false }); tool_check_step!(CoverageDump { path: "src/tools/coverage-dump", - mode: Mode::ToolBootstrap, + mode: |_builder| Mode::ToolBootstrap, + default: false +}); + +// Compiletest is implicitly "checked" when it gets built in order to run tests, +// so this is mainly for people working on compiletest to run locally. 
+tool_check_step!(Compiletest { + path: "src/tools/compiletest", + mode: |builder: &Builder<'_>| if builder.config.compiletest_use_stage0_libtest { + Mode::ToolBootstrap + } else { + Mode::ToolStd + }, + allow_features: COMPILETEST_ALLOW_FEATURES, + default: false, +}); + +tool_check_step!(Linkchecker { + path: "src/tools/linkchecker", + mode: |_builder| Mode::ToolBootstrap, default: false }); diff --git a/src/bootstrap/src/core/build_steps/clippy.rs b/src/bootstrap/src/core/build_steps/clippy.rs index a0371eb7155..b119f2dc3ce 100644 --- a/src/bootstrap/src/core/build_steps/clippy.rs +++ b/src/bootstrap/src/core/build_steps/clippy.rs @@ -19,6 +19,7 @@ const IGNORED_RULES_FOR_STD_AND_RUSTC: &[&str] = &[ "too_many_arguments", "needless_lifetimes", // people want to keep the lifetimes "wrong_self_convention", + "approx_constant", // libcore is what defines those ]; fn lint_args(builder: &Builder<'_>, config: &LintConfig, ignored_rules: &[&str]) -> Vec<String> { diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index 09bb2e35bda..59541bf12de 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -19,7 +19,7 @@ use serde_derive::Deserialize; use tracing::{instrument, span}; use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; -use crate::core::build_steps::tool::SourceType; +use crate::core::build_steps::tool::{SourceType, copy_lld_artifacts}; use crate::core::build_steps::{dist, llvm}; use crate::core::builder; use crate::core::builder::{ @@ -33,7 +33,10 @@ use crate::utils::exec::command; use crate::utils::helpers::{ exe, get_clang_cl_resource_dir, is_debug_info, is_dylib, symlink_dir, t, up_to_date, }; -use crate::{CLang, Compiler, DependencyType, FileType, GitRepo, LLVM_TOOLS, Mode, debug, trace}; +use crate::{ + CLang, CodegenBackendKind, Compiler, DependencyType, FileType, GitRepo, LLVM_TOOLS, Mode, + debug, trace, +}; /// Build a standard library for the given `target` using the given `compiler`. #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -597,11 +600,6 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car let mut features = String::new(); - if stage != 0 && builder.config.default_codegen_backend(target).as_deref() == Some("cranelift") - { - features += "compiler-builtins-no-f16-f128 "; - } - if builder.no_std(target) == Some(true) { features += " compiler-builtins-mem"; if !target.starts_with("bpf") { @@ -1316,15 +1314,10 @@ pub fn rustc_cargo( cargo.env("RUSTC_WRAPPER", ccache); } - rustc_cargo_env(builder, cargo, target, build_compiler.stage); + rustc_cargo_env(builder, cargo, target); } -pub fn rustc_cargo_env( - builder: &Builder<'_>, - cargo: &mut Cargo, - target: TargetSelection, - build_stage: u32, -) { +pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) { // Set some configuration variables picked up by build scripts and // the compiler alike cargo @@ -1340,7 +1333,7 @@ pub fn rustc_cargo_env( } if let Some(backend) = builder.config.default_codegen_backend(target) { - cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", backend); + cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", backend.name()); } let libdir_relative = builder.config.libdir_relative().unwrap_or_else(|| Path::new("lib")); @@ -1379,18 +1372,24 @@ pub fn rustc_cargo_env( cargo.rustflag("--cfg=llvm_enzyme"); } - // Note that this is disabled if LLVM itself is disabled or we're in a check - // build. 
If we are in a check build we still go ahead here presuming we've - // detected that LLVM is already built and good to go which helps prevent - // busting caches (e.g. like #71152). + // These conditionals represent a tension between three forces: + // - For non-check builds, we need to define some LLVM-related environment + // variables, requiring LLVM to have been built. + // - For check builds, we want to avoid building LLVM if possible. + // - Check builds and non-check builds should have the same environment if + // possible, to avoid unnecessary rebuilds due to cache-busting. + // + // Therefore we try to avoid building LLVM for check builds, but only if + // building LLVM would be expensive. If "building" LLVM is cheap + // (i.e. it's already built or is downloadable), we prefer to maintain a + // consistent environment between check and non-check builds. if builder.config.llvm_enabled(target) { - let building_is_expensive = + let building_llvm_is_expensive = crate::core::build_steps::llvm::prebuilt_llvm_config(builder, target, false) .should_build(); - // `top_stage == stage` might be false for `check --stage 1`, if we are building the stage 1 compiler - let can_skip_build = builder.kind == Kind::Check && builder.top_stage == build_stage; - let should_skip_build = building_is_expensive && can_skip_build; - if !should_skip_build { + + let skip_llvm = (builder.kind == Kind::Check) && building_llvm_is_expensive; + if !skip_llvm { rustc_llvm_env(builder, cargo, target) } } @@ -1407,6 +1406,9 @@ pub fn rustc_cargo_env( /// Pass down configuration from the LLVM build into the build of /// rustc_llvm and rustc_codegen_llvm. +/// +/// Note that this has the side-effect of _building LLVM_, which is sometimes +/// unwanted (e.g. for check builds). fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) { if builder.config.is_rust_llvm(target) { cargo.env("LLVM_RUSTLLVM", "1"); @@ -1544,7 +1546,7 @@ impl Step for RustcLink { pub struct CodegenBackend { pub target: TargetSelection, pub compiler: Compiler, - pub backend: String, + pub backend: CodegenBackendKind, } fn needs_codegen_config(run: &RunConfig<'_>) -> bool { @@ -1569,7 +1571,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool { if path.contains(CODEGEN_BACKEND_PREFIX) { let mut needs_codegen_backend_config = true; for backend in run.builder.config.codegen_backends(run.target) { - if path.ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + backend)) { + if path.ends_with(&(CODEGEN_BACKEND_PREFIX.to_owned() + backend.name())) { needs_codegen_backend_config = false; } } @@ -1603,7 +1605,7 @@ impl Step for CodegenBackend { } for backend in run.builder.config.codegen_backends(run.target) { - if backend == "llvm" { + if backend.is_llvm() { continue; // Already built as part of rustc } @@ -1664,20 +1666,21 @@ impl Step for CodegenBackend { ); cargo .arg("--manifest-path") - .arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml"))); - rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + .arg(builder.src.join(format!("compiler/{}/Cargo.toml", backend.crate_name()))); + rustc_cargo_env(builder, &mut cargo, target); // Ideally, we'd have a separate step for the individual codegen backends, // like we have in tests (test::CodegenGCC) but that would require a lot of restructuring. // If the logic gets more complicated, it should probably be done. 
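The `CodegenBackendKind` type used throughout these hunks is introduced elsewhere in this change, not in the part of the diff shown here. A rough, minimal sketch of the shape these call sites assume (`name`, `crate_name`, and the `is_*` predicates); the real definition may differ, for example by supporting custom backend names:

    #[derive(Debug, Clone, Hash, PartialEq, Eq)]
    pub enum CodegenBackendKind {
        Llvm,
        Cranelift,
        Gcc,
    }

    impl CodegenBackendKind {
        /// Short name used in configuration, e.g. "cranelift".
        pub fn name(&self) -> &'static str {
            match self {
                Self::Llvm => "llvm",
                Self::Cranelift => "cranelift",
                Self::Gcc => "gcc",
            }
        }

        /// Crate name of the backend, e.g. "rustc_codegen_cranelift".
        pub fn crate_name(&self) -> String {
            format!("rustc_codegen_{}", self.name())
        }

        pub fn is_llvm(&self) -> bool { matches!(self, Self::Llvm) }
        pub fn is_gcc(&self) -> bool { matches!(self, Self::Gcc) }
        pub fn is_cranelift(&self) -> bool { matches!(self, Self::Cranelift) }
    }

Replacing the old string-based backend names with an enum is what lets the call sites in this diff write `backend.is_llvm()` or `backend.crate_name()` instead of stringly-typed comparisons such as `backend == "llvm"` and ad-hoc `format!("rustc_codegen_{backend}")` calls.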
- if backend == "gcc" { + if backend.is_gcc() { let gcc = builder.ensure(Gcc { target }); add_cg_gcc_cargo_flags(&mut cargo, &gcc); } let tmp_stamp = BuildStamp::new(&out_dir).with_prefix("tmp"); - let _guard = builder.msg_build(compiler, format_args!("codegen backend {backend}"), target); + let _guard = + builder.msg_build(compiler, format_args!("codegen backend {}", backend.name()), target); let files = run_cargo(builder, cargo, vec![], &tmp_stamp, vec![], false, false); if builder.config.dry_run() { return; @@ -1732,7 +1735,7 @@ fn copy_codegen_backends_to_sysroot( } for backend in builder.config.codegen_backends(target) { - if backend == "llvm" { + if backend.is_llvm() { continue; // Already built as part of rustc } @@ -2050,19 +2053,20 @@ impl Step for Assemble { } } - let maybe_install_llvm_bitcode_linker = |compiler| { + let maybe_install_llvm_bitcode_linker = || { if builder.config.llvm_bitcode_linker_enabled { trace!("llvm-bitcode-linker enabled, installing"); - let llvm_bitcode_linker = - builder.ensure(crate::core::build_steps::tool::LlvmBitcodeLinker { - build_compiler: compiler, - target: target_compiler.host, - }); + let llvm_bitcode_linker = builder.ensure( + crate::core::build_steps::tool::LlvmBitcodeLinker::from_target_compiler( + builder, + target_compiler, + ), + ); // Copy the llvm-bitcode-linker to the self-contained binary directory let bindir_self_contained = builder - .sysroot(compiler) - .join(format!("lib/rustlib/{}/bin/self-contained", compiler.host)); + .sysroot(target_compiler) + .join(format!("lib/rustlib/{}/bin/self-contained", target_compiler.host)); let tool_exe = exe("llvm-bitcode-linker", target_compiler.host); t!(fs::create_dir_all(&bindir_self_contained)); @@ -2089,9 +2093,9 @@ impl Step for Assemble { builder.info(&format!("Creating a sysroot for stage{stage} compiler (use `rustup toolchain link 'name' build/host/stage{stage}`)", stage = target_compiler.stage)); } - let mut precompiled_compiler = target_compiler; - precompiled_compiler.forced_compiler(true); - maybe_install_llvm_bitcode_linker(precompiled_compiler); + // FIXME: this is incomplete, we do not copy a bunch of other stuff to the downloaded + // sysroot... + maybe_install_llvm_bitcode_linker(); return target_compiler; } @@ -2161,7 +2165,7 @@ impl Step for Assemble { let _codegen_backend_span = span!(tracing::Level::DEBUG, "building requested codegen backends").entered(); for backend in builder.config.codegen_backends(target_compiler.host) { - if backend == "llvm" { + if backend.is_llvm() { debug!("llvm codegen backend is already built as part of rustc"); continue; // Already built as part of rustc } @@ -2256,10 +2260,12 @@ impl Step for Assemble { copy_codegen_backends_to_sysroot(builder, build_compiler, target_compiler); if builder.config.lld_enabled { - builder.ensure(crate::core::build_steps::tool::LldWrapper { - build_compiler, - target_compiler, - }); + let lld_wrapper = + builder.ensure(crate::core::build_steps::tool::LldWrapper::for_use_by_compiler( + builder, + target_compiler, + )); + copy_lld_artifacts(builder, lld_wrapper, target_compiler); } if builder.config.llvm_enabled(target_compiler.host) && builder.config.llvm_tools_enabled { @@ -2284,15 +2290,14 @@ impl Step for Assemble { } // In addition to `rust-lld` also install `wasm-component-ld` when - // LLD is enabled. This is a relatively small binary that primarily - // delegates to the `rust-lld` binary for linking and then runs - // logic to create the final binary. This is used by the - // `wasm32-wasip2` target of Rust. 
+ // is enabled. This is used by the `wasm32-wasip2` target of Rust. if builder.tool_enabled("wasm-component-ld") { - let wasm_component = builder.ensure(crate::core::build_steps::tool::WasmComponentLd { - compiler: build_compiler, - target: target_compiler.host, - }); + let wasm_component = builder.ensure( + crate::core::build_steps::tool::WasmComponentLd::for_use_by_compiler( + builder, + target_compiler, + ), + ); builder.copy_link( &wasm_component.tool_path, &libdir_bin.join(wasm_component.tool_path.file_name().unwrap()), @@ -2300,7 +2305,7 @@ impl Step for Assemble { ); } - maybe_install_llvm_bitcode_linker(target_compiler); + maybe_install_llvm_bitcode_linker(); // Ensure that `libLLVM.so` ends up in the newly build compiler directory, // so that it can be found when the newly built `rustc` is run. diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 8b2d65ace50..4699813abf4 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -32,7 +32,7 @@ use crate::utils::helpers::{ exe, is_dylib, move_file, t, target_supports_cranelift_backend, timeit, }; use crate::utils::tarball::{GeneratedTarball, OverlayKind, Tarball}; -use crate::{Compiler, DependencyType, FileType, LLVM_TOOLS, Mode, trace}; +use crate::{CodegenBackendKind, Compiler, DependencyType, FileType, LLVM_TOOLS, Mode, trace}; pub fn pkgname(builder: &Builder<'_>, component: &str) -> String { format!("{}-{}", component, builder.rust_package_vers()) @@ -1068,6 +1068,8 @@ impl Step for PlainSourceTarball { "bootstrap.example.toml", "configure", "license-metadata.json", + "package-lock.json", + "package.json", "x", "x.ps1", "x.py", @@ -1370,10 +1372,10 @@ impl Step for Miri { } } -#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct CodegenBackend { pub compiler: Compiler, - pub backend: String, + pub backend: CodegenBackendKind, } impl Step for CodegenBackend { @@ -1387,7 +1389,7 @@ impl Step for CodegenBackend { fn make_run(run: RunConfig<'_>) { for backend in run.builder.config.codegen_backends(run.target) { - if backend == "llvm" { + if backend.is_llvm() { continue; // Already built as part of rustc } @@ -1410,12 +1412,11 @@ impl Step for CodegenBackend { return None; } - if !builder.config.codegen_backends(self.compiler.host).contains(&self.backend.to_string()) - { + if !builder.config.codegen_backends(self.compiler.host).contains(&self.backend) { return None; } - if self.backend == "cranelift" && !target_supports_cranelift_backend(self.compiler.host) { + if self.backend.is_cranelift() && !target_supports_cranelift_backend(self.compiler.host) { builder.info("target not supported by rustc_codegen_cranelift. 
skipping"); return None; } @@ -1423,15 +1424,18 @@ impl Step for CodegenBackend { let compiler = self.compiler; let backend = self.backend; - let mut tarball = - Tarball::new(builder, &format!("rustc-codegen-{backend}"), &compiler.host.triple); - if backend == "cranelift" { + let mut tarball = Tarball::new( + builder, + &format!("rustc-codegen-{}", backend.name()), + &compiler.host.triple, + ); + if backend.is_cranelift() { tarball.set_overlay(OverlayKind::RustcCodegenCranelift); } else { - panic!("Unknown backend rustc_codegen_{backend}"); + panic!("Unknown codegen backend {}", backend.name()); } tarball.is_preview(true); - tarball.add_legal_and_readme_to(format!("share/doc/rustc_codegen_{backend}")); + tarball.add_legal_and_readme_to(format!("share/doc/{}", backend.crate_name())); let src = builder.sysroot(compiler); let backends_src = builder.sysroot_codegen_backends(compiler); @@ -1443,7 +1447,7 @@ impl Step for CodegenBackend { // Don't use custom libdir here because ^lib/ will be resolved again with installer let backends_dst = PathBuf::from("lib").join(backends_rel); - let backend_name = format!("rustc_codegen_{backend}"); + let backend_name = backend.crate_name(); let mut found_backend = false; for backend in fs::read_dir(&backends_src).unwrap() { let file_name = backend.unwrap().file_name(); @@ -1573,9 +1577,12 @@ impl Step for Extended { add_component!("analysis" => Analysis { compiler, target }); add_component!("rustc-codegen-cranelift" => CodegenBackend { compiler: builder.compiler(stage, target), - backend: "cranelift".to_string(), + backend: CodegenBackendKind::Cranelift, + }); + add_component!("llvm-bitcode-linker" => LlvmBitcodeLinker { + build_compiler: compiler, + target }); - add_component!("llvm-bitcode-linker" => LlvmBitcodeLinker {compiler, target}); let etc = builder.src.join("src/etc/installer"); @@ -2341,9 +2348,13 @@ impl Step for LlvmTools { } } +/// Distributes the `llvm-bitcode-linker` tool so that it can be used by a compiler whose host +/// is `target`. #[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct LlvmBitcodeLinker { - pub compiler: Compiler, + /// The linker will be compiled by this compiler. + pub build_compiler: Compiler, + /// The linker will by usable by rustc on this host. 
pub target: TargetSelection, } @@ -2359,9 +2370,8 @@ impl Step for LlvmBitcodeLinker { fn make_run(run: RunConfig<'_>) { run.builder.ensure(LlvmBitcodeLinker { - compiler: run.builder.compiler_for( - run.builder.top_stage, - run.builder.config.host_target, + build_compiler: tool::LlvmBitcodeLinker::get_build_compiler_for_target( + run.builder, run.target, ), target: run.target, @@ -2369,13 +2379,10 @@ impl Step for LlvmBitcodeLinker { } fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> { - let compiler = self.compiler; let target = self.target; - builder.ensure(compile::Rustc::new(compiler, target)); - - let llbc_linker = - builder.ensure(tool::LlvmBitcodeLinker { build_compiler: compiler, target }); + let llbc_linker = builder + .ensure(tool::LlvmBitcodeLinker::from_build_compiler(self.build_compiler, target)); let self_contained_bin_dir = format!("lib/rustlib/{}/bin/self-contained", target.triple); diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs index 4434d6658eb..4513a138e19 100644 --- a/src/bootstrap/src/core/build_steps/install.rs +++ b/src/bootstrap/src/core/build_steps/install.rs @@ -12,7 +12,7 @@ use crate::core::config::{Config, TargetSelection}; use crate::utils::exec::command; use crate::utils::helpers::t; use crate::utils::tarball::GeneratedTarball; -use crate::{Compiler, Kind}; +use crate::{CodegenBackendKind, Compiler, Kind}; #[cfg(target_os = "illumos")] const SHELL: &str = "bash"; @@ -276,7 +276,7 @@ install!((self, builder, _config), RustcCodegenCranelift, alias = "rustc-codegen-cranelift", Self::should_build(_config), only_hosts: true, { if let Some(tarball) = builder.ensure(dist::CodegenBackend { compiler: self.compiler, - backend: "cranelift".to_string(), + backend: CodegenBackendKind::Cranelift, }) { install_sh(builder, "rustc-codegen-cranelift", self.compiler.stage, Some(self.target), &tarball); } else { @@ -287,7 +287,7 @@ install!((self, builder, _config), } }; LlvmBitcodeLinker, alias = "llvm-bitcode-linker", Self::should_build(_config), only_hosts: true, { - if let Some(tarball) = builder.ensure(dist::LlvmBitcodeLinker { compiler: self.compiler, target: self.target }) { + if let Some(tarball) = builder.ensure(dist::LlvmBitcodeLinker { build_compiler: self.compiler, target: self.target }) { install_sh(builder, "llvm-bitcode-linker", self.compiler.stage, Some(self.target), &tarball); } else { builder.info( diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 9e7ea5c115f..119fa4237bc 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -8,6 +8,9 @@ use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use std::{env, fs, iter}; +#[cfg(feature = "tracing")] +use tracing::instrument; + use crate::core::build_steps::compile::{Std, run_cargo}; use crate::core::build_steps::doc::DocumentationFormat; use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; @@ -30,7 +33,7 @@ use crate::utils::helpers::{ linker_flags, t, target_supports_cranelift_backend, up_to_date, }; use crate::utils::render_tests::{add_flags_and_try_run_tests, try_run_tests}; -use crate::{CLang, DocTests, GitRepo, Mode, PathSet, envify}; +use crate::{CLang, CodegenBackendKind, DocTests, GitRepo, Mode, PathSet, debug, envify}; const ADB_TEST_DIR: &str = "/data/local/tmp/work"; @@ -713,9 +716,23 @@ impl Step for CompiletestTest { } /// Runs `cargo test` for compiletest. 
+ #[cfg_attr( + feature = "tracing", + instrument(level = "debug", name = "CompiletestTest::run", skip_all) + )] fn run(self, builder: &Builder<'_>) { let host = self.host; + + if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 { + eprintln!("\ +ERROR: `--stage 0` runs compiletest self-tests against the stage0 (precompiled) compiler, not the in-tree compiler, and will almost always cause tests to fail +NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`." + ); + crate::exit!(1); + } + let compiler = builder.compiler(builder.top_stage, host); + debug!(?compiler); // We need `ToolStd` for the locally-built sysroot because // compiletest uses unstable features of the `test` crate. @@ -723,8 +740,8 @@ impl Step for CompiletestTest { let mut cargo = tool::prepare_tool_cargo( builder, compiler, - // compiletest uses libtest internals; make it use the in-tree std to make sure it never breaks - // when std sources change. + // compiletest uses libtest internals; make it use the in-tree std to make sure it never + // breaks when std sources change. Mode::ToolStd, host, Kind::Test, @@ -739,7 +756,6 @@ impl Step for CompiletestTest { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Clippy { - stage: u32, host: TargetSelection, } @@ -753,33 +769,23 @@ impl Step for Clippy { } fn make_run(run: RunConfig<'_>) { - // If stage is explicitly set or not lower than 2, keep it. Otherwise, make sure it's at least 2 - // as tests for this step don't work with a lower stage. - let stage = if run.builder.config.is_explicit_stage() || run.builder.top_stage >= 2 { - run.builder.top_stage - } else { - 2 - }; - - run.builder.ensure(Clippy { stage, host: run.target }); + run.builder.ensure(Clippy { host: run.target }); } /// Runs `cargo test` for clippy. fn run(self, builder: &Builder<'_>) { - let stage = self.stage; + let stage = builder.top_stage; let host = self.host; - let compiler = builder.compiler(stage, host); - - if stage < 2 { - eprintln!("WARNING: clippy tests on stage {stage} may not behave well."); - eprintln!("HELP: consider using stage 2"); - } + // We need to carefully distinguish the compiler that builds clippy, and the compiler + // that is linked into the clippy being tested. `target_compiler` is the latter, + // and it must also be used by clippy's test runner to build tests and their dependencies. + let target_compiler = builder.compiler(stage, host); - let tool_result = builder.ensure(tool::Clippy { compiler, target: self.host }); - let compiler = tool_result.build_compiler; + let tool_result = builder.ensure(tool::Clippy { compiler: target_compiler, target: host }); + let tool_compiler = tool_result.build_compiler; let mut cargo = tool::prepare_tool_cargo( builder, - compiler, + tool_compiler, Mode::ToolRustc, host, Kind::Test, @@ -788,11 +794,17 @@ impl Step for Clippy { &[], ); - cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); - cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); - let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir()); + cargo.env("RUSTC_TEST_SUITE", builder.rustc(tool_compiler)); + cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(tool_compiler)); + let host_libs = builder.stage_out(tool_compiler, Mode::ToolRustc).join(builder.cargo_dir()); cargo.env("HOST_LIBS", host_libs); + // Build the standard library that the tests can use. 
+ builder.std(target_compiler, host); + cargo.env("TEST_SYSROOT", builder.sysroot(target_compiler)); + cargo.env("TEST_RUSTC", builder.rustc(target_compiler)); + cargo.env("TEST_RUSTC_LIB", builder.rustc_libdir(target_compiler)); + // Collect paths of tests to run 'partially_test: { let paths = &builder.config.paths[..]; @@ -813,7 +825,8 @@ impl Step for Clippy { cargo.add_rustc_lib_path(builder); let cargo = prepare_cargo_test(cargo, &[], &[], host, builder); - let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "clippy", host, host); + let _guard = + builder.msg_sysroot_tool(Kind::Test, tool_compiler.stage, "clippy", host, host); // Clippy reports errors if it blessed the outputs if cargo.allow_failure().run(builder) { @@ -1117,6 +1130,12 @@ impl Step for Tidy { 8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 }); cmd.arg(jobs.to_string()); + // pass the path to the npm command used for installing js deps. + if let Some(npm) = &builder.config.npm { + cmd.arg(npm); + } else { + cmd.arg("npm"); + } if builder.is_verbose() { cmd.arg("--verbose"); } @@ -1346,7 +1365,12 @@ test!(Ui { path: "tests/ui", mode: "ui", suite: "ui", default: true }); test!(Crashes { path: "tests/crashes", mode: "crashes", suite: "crashes", default: true }); -test!(Codegen { path: "tests/codegen", mode: "codegen", suite: "codegen", default: true }); +test!(CodegenLlvm { + path: "tests/codegen-llvm", + mode: "codegen", + suite: "codegen-llvm", + default: true +}); test!(CodegenUnits { path: "tests/codegen-units", @@ -1411,7 +1435,12 @@ test!(Pretty { test!(RunMake { path: "tests/run-make", mode: "run-make", suite: "run-make", default: true }); -test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly", default: true }); +test!(AssemblyLlvm { + path: "tests/assembly-llvm", + mode: "assembly", + suite: "assembly-llvm", + default: true +}); /// Runs the coverage test suite at `tests/coverage` in some or all of the /// coverage test modes. @@ -1600,12 +1629,11 @@ impl Step for Compiletest { return; } - if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() { + if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 { eprintln!("\ ERROR: `--stage 0` runs compiletest on the stage0 (precompiled) compiler, not your local changes, and will almost always cause tests to fail -HELP: to test the compiler, use `--stage 1` instead -HELP: to test the standard library, use `--stage 0 library/std` instead -NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`." +HELP: to test the compiler or standard library, omit the stage or explicitly use `--stage 1` instead +NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`." ); crate::exit!(1); } @@ -1619,7 +1647,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the let suite_path = self.path; // Skip codegen tests if they aren't enabled in configuration. - if !builder.config.codegen_tests && suite == "codegen" { + if !builder.config.codegen_tests && mode == "codegen" { return; } @@ -1757,6 +1785,12 @@ NOTE: if you're sure you want to do this, please open an issue as to why. 
In the cmd.arg("--host").arg(&*compiler.host.triple); cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.host_target)); + if let Some(codegen_backend) = builder.config.default_codegen_backend(compiler.host) { + // Tells compiletest which codegen backend is used by default by the compiler. + // It is used to e.g. ignore tests that don't support that codegen backend. + cmd.arg("--codegen-backend").arg(codegen_backend.name()); + } + if builder.build.config.llvm_enzyme { cmd.arg("--has-enzyme"); } @@ -1810,7 +1844,24 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the } let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] }; - flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests)); + flags.push(format!( + "-Cdebuginfo={}", + if mode == "codegen" { + // codegen tests typically check LLVM IR and are sensitive to additional debuginfo. + // So do not apply `rust.debuginfo-level-tests` for codegen tests. + if builder.config.rust_debuginfo_level_tests + != crate::core::config::DebuginfoLevel::None + { + println!( + "NOTE: ignoring `rust.debuginfo-level-tests={}` for codegen tests", + builder.config.rust_debuginfo_level_tests + ); + } + crate::core::config::DebuginfoLevel::None + } else { + builder.config.rust_debuginfo_level_tests + } + )); flags.extend(builder.config.cmd.compiletest_rustc_args().iter().map(|s| s.to_string())); if suite != "mir-opt" { @@ -2945,7 +2996,8 @@ impl Step for RemoteCopyLibs { builder.info(&format!("REMOTE copy libs to emulator ({target})")); - let remote_test_server = builder.ensure(tool::RemoteTestServer { compiler, target }); + let remote_test_server = + builder.ensure(tool::RemoteTestServer { build_compiler: compiler, target }); // Spawn the emulator and wait for it to come online let tool = builder.tool_exe(Tool::RemoteTestClient); @@ -3098,7 +3150,11 @@ impl Step for Bootstrap { } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/bootstrap") + // Bootstrap tests might not be perfectly self-contained and can depend on the external + // environment, submodules that are checked out, etc. + // Therefore we only run them by default on CI. + let runs_on_ci = run.builder.config.is_running_on_ci; + run.path("src/bootstrap").default_condition(runs_on_ci) } fn make_run(run: RunConfig<'_>) { @@ -3352,7 +3408,7 @@ impl Step for CodegenCranelift { return; } - if !builder.config.codegen_backends(run.target).contains(&"cranelift".to_owned()) { + if !builder.config.codegen_backends(run.target).contains(&CodegenBackendKind::Cranelift) { builder.info("cranelift not in rust.codegen-backends. skipping"); return; } @@ -3386,7 +3442,7 @@ impl Step for CodegenCranelift { cargo .arg("--manifest-path") .arg(builder.src.join("compiler/rustc_codegen_cranelift/build_system/Cargo.toml")); - compile::rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + compile::rustc_cargo_env(builder, &mut cargo, target); // Avoid incremental cache issues when changing rustc cargo.env("CARGO_BUILD_INCREMENTAL", "false"); @@ -3479,7 +3535,7 @@ impl Step for CodegenGCC { return; } - if !builder.config.codegen_backends(run.target).contains(&"gcc".to_owned()) { + if !builder.config.codegen_backends(run.target).contains(&CodegenBackendKind::Gcc) { builder.info("gcc not in rust.codegen-backends. 
skipping"); return; } @@ -3518,7 +3574,7 @@ impl Step for CodegenGCC { cargo .arg("--manifest-path") .arg(builder.src.join("compiler/rustc_codegen_gcc/build_system/Cargo.toml")); - compile::rustc_cargo_env(builder, &mut cargo, target, compiler.stage); + compile::rustc_cargo_env(builder, &mut cargo, target); add_cg_gcc_cargo_flags(&mut cargo, &gcc); // Avoid incremental cache issues when changing rustc diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index 1c994b0ccfc..f5fa33b98f3 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -42,7 +42,8 @@ pub enum ToolArtifactKind { #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct ToolBuild { - compiler: Compiler, + /// Compiler that will build this tool. + build_compiler: Compiler, target: TargetSelection, tool: &'static str, path: &'static str, @@ -112,34 +113,34 @@ impl Step for ToolBuild { let mut tool = self.tool; let path = self.path; - let target_compiler = self.compiler; - self.compiler = if self.mode == Mode::ToolRustc { - get_tool_rustc_compiler(builder, self.compiler) + let target_compiler = self.build_compiler; + self.build_compiler = if self.mode == Mode::ToolRustc { + get_tool_rustc_compiler(builder, self.build_compiler) } else { - self.compiler + self.build_compiler }; match self.mode { Mode::ToolRustc => { // If compiler was forced, its artifacts should have been prepared earlier. - if !self.compiler.is_forced_compiler() { - builder.std(self.compiler, self.compiler.host); - builder.ensure(compile::Rustc::new(self.compiler, target)); + if !self.build_compiler.is_forced_compiler() { + builder.std(self.build_compiler, self.build_compiler.host); + builder.ensure(compile::Rustc::new(self.build_compiler, target)); } } Mode::ToolStd => { // If compiler was forced, its artifacts should have been prepared earlier. - if !self.compiler.is_forced_compiler() { - builder.std(self.compiler, target) + if !self.build_compiler.is_forced_compiler() { + builder.std(self.build_compiler, target); } } - Mode::ToolBootstrap => {} // uses downloaded stage0 compiler libs + Mode::ToolBootstrap | Mode::ToolTarget => {} // uses downloaded stage0 compiler libs _ => panic!("unexpected Mode for tool build"), } let mut cargo = prepare_tool_cargo( builder, - self.compiler, + self.build_compiler, self.mode, target, Kind::Build, @@ -161,7 +162,7 @@ impl Step for ToolBuild { // Rustc tools (miri, clippy, cargo, rustfmt, rust-analyzer) // could use the additional optimizations. - if self.mode == Mode::ToolRustc && is_lto_stage(&self.compiler) { + if self.mode == Mode::ToolRustc && is_lto_stage(&self.build_compiler) { let lto = match builder.config.rust_lto { RustcLto::Off => Some("off"), RustcLto::Thin => Some("thin"), @@ -183,8 +184,9 @@ impl Step for ToolBuild { Kind::Build, self.mode, self.tool, - self.compiler.stage, - &self.compiler.host, + // A stage N tool is built with the stage N-1 compiler. 
+ self.build_compiler.stage + 1, + &self.build_compiler.host, &self.target, ); @@ -207,14 +209,14 @@ impl Step for ToolBuild { } let tool_path = match self.artifact_kind { ToolArtifactKind::Binary => { - copy_link_tool_bin(builder, self.compiler, self.target, self.mode, tool) + copy_link_tool_bin(builder, self.build_compiler, self.target, self.mode, tool) } ToolArtifactKind::Library => builder - .cargo_out(self.compiler, self.mode, self.target) + .cargo_out(self.build_compiler, self.mode, self.target) .join(format!("lib{tool}.rlib")), }; - ToolBuildResult { tool_path, build_compiler: self.compiler, target_compiler } + ToolBuildResult { tool_path, build_compiler: self.build_compiler, target_compiler } } } } @@ -365,6 +367,47 @@ pub(crate) fn get_tool_rustc_compiler( builder.compiler(target_compiler.stage.saturating_sub(1), builder.config.host_target) } +/// Determines how to build a `ToolTarget`, i.e. which compiler should be used to compile it. +/// The compiler stage is automatically bumped if we need to cross-compile a stage 1 tool. +pub enum ToolTargetBuildMode { + /// Build the tool using rustc that corresponds to the selected CLI stage. + Build(TargetSelection), + /// Build the tool so that it can be attached to the sysroot of the passed compiler. + /// Since we always dist stage 2+, the compiler that builds the tool in this case has to be + /// stage 1+. + Dist(Compiler), +} + +/// Returns compiler that is able to compile a `ToolTarget` tool with the given `mode`. +pub(crate) fn get_tool_target_compiler( + builder: &Builder<'_>, + mode: ToolTargetBuildMode, +) -> Compiler { + let (target, build_compiler_stage) = match mode { + ToolTargetBuildMode::Build(target) => { + assert!(builder.top_stage > 0); + // If we want to build a stage N tool, we need to compile it with stage N-1 rustc + (target, builder.top_stage - 1) + } + ToolTargetBuildMode::Dist(target_compiler) => { + assert!(target_compiler.stage > 0); + // If we want to dist a stage N rustc, we want to attach stage N tool to it. + // And to build that tool, we need to compile it with stage N-1 rustc + (target_compiler.host, target_compiler.stage - 1) + } + }; + + let compiler = if builder.host_target == target { + builder.compiler(build_compiler_stage, builder.host_target) + } else { + // If we are cross-compiling a stage 1 tool, we cannot do that with a stage 0 compiler, + // so we auto-bump the tool's stage to 2, which means we need a stage 1 compiler. + builder.compiler(build_compiler_stage.max(1), builder.host_target) + }; + builder.std(compiler, target); + compiler +} + /// Links a built tool binary with the given `name` from the build directory to the /// tools directory. fn copy_link_tool_bin( @@ -451,7 +494,7 @@ macro_rules! bootstrap_tool { let compiletest_wants_stage0 = $tool_name == "compiletest" && builder.config.compiletest_use_stage0_libtest; builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target, tool: $tool_name, mode: if is_unstable && !compiletest_wants_stage0 { @@ -521,7 +564,6 @@ bootstrap_tool!( // rustdoc-gui-test has a crate dependency on compiletest, so it needs the same unstable features. 
RustdocGUITest, "src/tools/rustdoc-gui-test", "rustdoc-gui-test", is_unstable_tool = true, allow_features = COMPILETEST_ALLOW_FEATURES; CoverageDump, "src/tools/coverage-dump", "coverage-dump"; - WasmComponentLd, "src/tools/wasm-component-ld", "wasm-component-ld", is_unstable_tool = true, allow_features = "min_specialization"; UnicodeTableGenerator, "src/tools/unicode-table-generator", "unicode-table-generator"; FeaturesStatusDump, "src/tools/features-status-dump", "features-status-dump"; OptimizedDist, "src/tools/opt-dist", "opt-dist", submodules = &["src/tools/rustc-perf"]; @@ -560,7 +602,7 @@ impl Step for RustcPerf { builder.require_submodule("src/tools/rustc-perf", None); let tool = ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target, tool: "collector", mode: Mode::ToolBootstrap, @@ -576,7 +618,7 @@ impl Step for RustcPerf { let res = builder.ensure(tool.clone()); // We also need to symlink the `rustc-fake` binary to the corresponding directory, // because `collector` expects it in the same directory. - copy_link_tool_bin(builder, tool.compiler, tool.target, tool.mode, "rustc-fake"); + copy_link_tool_bin(builder, tool.build_compiler, tool.target, tool.mode, "rustc-fake"); res } @@ -620,7 +662,7 @@ impl Step for ErrorIndex { fn run(self, builder: &Builder<'_>) -> ToolBuildResult { builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.compiler.host, tool: "error_index_generator", mode: Mode::ToolRustc, @@ -636,7 +678,7 @@ impl Step for ErrorIndex { #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RemoteTestServer { - pub compiler: Compiler, + pub build_compiler: Compiler, pub target: TargetSelection, } @@ -649,17 +691,20 @@ impl Step for RemoteTestServer { fn make_run(run: RunConfig<'_>) { run.builder.ensure(RemoteTestServer { - compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.host_target), + build_compiler: get_tool_target_compiler( + run.builder, + ToolTargetBuildMode::Build(run.target), + ), target: run.target, }); } fn run(self, builder: &Builder<'_>) -> ToolBuildResult { builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.build_compiler, target: self.target, tool: "remote-test-server", - mode: Mode::ToolStd, + mode: Mode::ToolTarget, path: "src/tools/remote-test-server", source_type: SourceType::InTree, extra_features: Vec::new(), @@ -668,6 +713,10 @@ impl Step for RemoteTestServer { artifact_kind: ToolArtifactKind::Binary, }) } + + fn metadata(&self) -> Option<StepMetadata> { + Some(StepMetadata::build("remote-test-server", self.target).built_by(self.build_compiler)) + } } #[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] @@ -757,7 +806,7 @@ impl Step for Rustdoc { let ToolBuildResult { tool_path, build_compiler, target_compiler } = builder.ensure(ToolBuild { - compiler: target_compiler, + build_compiler: target_compiler, target, // Cargo adds a number of paths to the dylib search path on windows, which results in // the wrong rustdoc being executed. To avoid the conflicting rustdocs, we name the "tool" @@ -825,7 +874,7 @@ impl Step for Cargo { builder.build.require_submodule("src/tools/cargo", None); builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target, tool: "cargo", mode: Mode::ToolRustc, @@ -839,17 +888,50 @@ impl Step for Cargo { } } +/// Represents a built LldWrapper, the `lld-wrapper` tool itself, and a directory +/// containing a build of LLD. 
+#[derive(Clone)] +pub struct BuiltLldWrapper { + tool: ToolBuildResult, + lld_dir: PathBuf, +} + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct LldWrapper { pub build_compiler: Compiler, - pub target_compiler: Compiler, + pub target: TargetSelection, +} + +impl LldWrapper { + /// Returns `LldWrapper` that should be **used** by the passed compiler. + pub fn for_use_by_compiler(builder: &Builder<'_>, target_compiler: Compiler) -> Self { + Self { + build_compiler: get_tool_target_compiler( + builder, + ToolTargetBuildMode::Dist(target_compiler), + ), + target: target_compiler.host, + } + } } impl Step for LldWrapper { - type Output = ToolBuildResult; + type Output = BuiltLldWrapper; + + const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.never() + run.path("src/tools/lld-wrapper") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(LldWrapper { + build_compiler: get_tool_target_compiler( + run.builder, + ToolTargetBuildMode::Build(run.target), + ), + target: run.target, + }); } #[cfg_attr( @@ -858,25 +940,16 @@ impl Step for LldWrapper { level = "debug", name = "LldWrapper::run", skip_all, - fields(build_compiler = ?self.build_compiler, target_compiler = ?self.target_compiler), + fields(build_compiler = ?self.build_compiler), ), )] - fn run(self, builder: &Builder<'_>) -> ToolBuildResult { - if builder.config.dry_run() { - return ToolBuildResult { - tool_path: Default::default(), - build_compiler: self.build_compiler, - target_compiler: self.target_compiler, - }; - } - - let target = self.target_compiler.host; - - let tool_result = builder.ensure(ToolBuild { - compiler: self.build_compiler, - target, + fn run(self, builder: &Builder<'_>) -> Self::Output { + let lld_dir = builder.ensure(llvm::Lld { target: self.target }); + let tool = builder.ensure(ToolBuild { + build_compiler: self.build_compiler, + target: self.target, tool: "lld-wrapper", - mode: Mode::ToolStd, + mode: Mode::ToolTarget, path: "src/tools/lld-wrapper", source_type: SourceType::InTree, extra_features: Vec::new(), @@ -884,38 +957,110 @@ impl Step for LldWrapper { cargo_args: Vec::new(), artifact_kind: ToolArtifactKind::Binary, }); + BuiltLldWrapper { tool, lld_dir } + } - let libdir_bin = builder.sysroot_target_bindir(self.target_compiler, target); - t!(fs::create_dir_all(&libdir_bin)); + fn metadata(&self) -> Option<StepMetadata> { + Some(StepMetadata::build("LldWrapper", self.target).built_by(self.build_compiler)) + } +} - let lld_install = builder.ensure(llvm::Lld { target }); - let src_exe = exe("lld", target); - let dst_exe = exe("rust-lld", target); +pub(crate) fn copy_lld_artifacts( + builder: &Builder<'_>, + lld_wrapper: BuiltLldWrapper, + target_compiler: Compiler, +) { + let target = target_compiler.host; + let libdir_bin = builder.sysroot_target_bindir(target_compiler, target); + t!(fs::create_dir_all(&libdir_bin)); + + let src_exe = exe("lld", target); + let dst_exe = exe("rust-lld", target); + + builder.copy_link( + &lld_wrapper.lld_dir.join("bin").join(src_exe), + &libdir_bin.join(dst_exe), + FileType::Executable, + ); + let self_contained_lld_dir = libdir_bin.join("gcc-ld"); + t!(fs::create_dir_all(&self_contained_lld_dir)); + + for name in crate::LLD_FILE_NAMES { builder.copy_link( - &lld_install.join("bin").join(src_exe), - &libdir_bin.join(dst_exe), + &lld_wrapper.tool.tool_path, + &self_contained_lld_dir.join(exe(name, target)), FileType::Executable, ); - let self_contained_lld_dir = libdir_bin.join("gcc-ld"); - 
t!(fs::create_dir_all(&self_contained_lld_dir)); - - for name in crate::LLD_FILE_NAMES { - builder.copy_link( - &tool_result.tool_path, - &self_contained_lld_dir.join(exe(name, target)), - FileType::Executable, - ); + } +} + +/// Builds the `wasm-component-ld` linker wrapper, which is shipped with rustc to be executed on the +/// host platform where rustc runs. +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub struct WasmComponentLd { + build_compiler: Compiler, + target: TargetSelection, +} + +impl WasmComponentLd { + /// Returns `WasmComponentLd` that should be **used** by the passed compiler. + pub fn for_use_by_compiler(builder: &Builder<'_>, target_compiler: Compiler) -> Self { + Self { + build_compiler: get_tool_target_compiler( + builder, + ToolTargetBuildMode::Dist(target_compiler), + ), + target: target_compiler.host, } + } +} + +impl Step for WasmComponentLd { + type Output = ToolBuildResult; + + const ONLY_HOSTS: bool = true; + + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.path("src/tools/wasm-component-ld") + } + + fn make_run(run: RunConfig<'_>) { + run.builder.ensure(WasmComponentLd { + build_compiler: get_tool_target_compiler( + run.builder, + ToolTargetBuildMode::Build(run.target), + ), + target: run.target, + }); + } - tool_result + #[cfg_attr( + feature = "tracing", + instrument( + level = "debug", + name = "WasmComponentLd::run", + skip_all, + fields(build_compiler = ?self.build_compiler), + ), + )] + fn run(self, builder: &Builder<'_>) -> ToolBuildResult { + builder.ensure(ToolBuild { + build_compiler: self.build_compiler, + target: self.target, + tool: "wasm-component-ld", + mode: Mode::ToolTarget, + path: "src/tools/wasm-component-ld", + source_type: SourceType::InTree, + extra_features: vec![], + allow_features: "", + cargo_args: vec![], + artifact_kind: ToolArtifactKind::Binary, + }) } fn metadata(&self) -> Option<StepMetadata> { - Some( - StepMetadata::build("LldWrapper", self.target_compiler.host) - .built_by(self.build_compiler), - ) + Some(StepMetadata::build("WasmComponentLd", self.target).built_by(self.build_compiler)) } } @@ -948,7 +1093,7 @@ impl Step for RustAnalyzer { fn run(self, builder: &Builder<'_>) -> ToolBuildResult { builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target, tool: "rust-analyzer", mode: Mode::ToolRustc, @@ -993,7 +1138,7 @@ impl Step for RustAnalyzerProcMacroSrv { fn run(self, builder: &Builder<'_>) -> Option<ToolBuildResult> { let tool_result = builder.ensure(ToolBuild { - compiler: self.compiler, + build_compiler: self.compiler, target: self.target, tool: "rust-analyzer-proc-macro-srv", mode: Mode::ToolRustc, @@ -1021,8 +1166,35 @@ impl Step for RustAnalyzerProcMacroSrv { #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct LlvmBitcodeLinker { - pub build_compiler: Compiler, - pub target: TargetSelection, + build_compiler: Compiler, + target: TargetSelection, +} + +impl LlvmBitcodeLinker { + /// Returns `LlvmBitcodeLinker` that will be **compiled** by the passed compiler, for the given + /// `target`. + pub fn from_build_compiler(build_compiler: Compiler, target: TargetSelection) -> Self { + Self { build_compiler, target } + } + + /// Returns `LlvmBitcodeLinker` that should be **used** by the passed compiler. 
+ pub fn from_target_compiler(builder: &Builder<'_>, target_compiler: Compiler) -> Self { + Self { + build_compiler: get_tool_target_compiler( + builder, + ToolTargetBuildMode::Dist(target_compiler), + ), + target: target_compiler.host, + } + } + + /// Return a compiler that is able to build this tool for the given `target`. + pub fn get_build_compiler_for_target( + builder: &Builder<'_>, + target: TargetSelection, + ) -> Compiler { + get_tool_target_compiler(builder, ToolTargetBuildMode::Build(target)) + } } impl Step for LlvmBitcodeLinker { @@ -1038,9 +1210,7 @@ impl Step for LlvmBitcodeLinker { fn make_run(run: RunConfig<'_>) { run.builder.ensure(LlvmBitcodeLinker { - build_compiler: run - .builder - .compiler(run.builder.top_stage, run.builder.config.host_target), + build_compiler: Self::get_build_compiler_for_target(run.builder, run.target), target: run.target, }); } @@ -1051,10 +1221,10 @@ impl Step for LlvmBitcodeLinker { )] fn run(self, builder: &Builder<'_>) -> ToolBuildResult { builder.ensure(ToolBuild { - compiler: self.build_compiler, + build_compiler: self.build_compiler, target: self.target, tool: "llvm-bitcode-linker", - mode: Mode::ToolRustc, + mode: Mode::ToolTarget, path: "src/tools/llvm-bitcode-linker", source_type: SourceType::InTree, extra_features: vec![], @@ -1239,7 +1409,7 @@ fn run_tool_build_step( let ToolBuildResult { tool_path, build_compiler, target_compiler } = builder.ensure(ToolBuild { - compiler, + build_compiler: compiler, target, tool: tool_name, mode: Mode::ToolRustc, @@ -1338,7 +1508,7 @@ impl Step for TestFloatParse { let compiler = builder.compiler(builder.top_stage, bootstrap_host); builder.ensure(ToolBuild { - compiler, + build_compiler: compiler, target: bootstrap_host, tool: "test-float-parse", mode: Mode::ToolStd, diff --git a/src/bootstrap/src/core/builder/cargo.rs b/src/bootstrap/src/core/builder/cargo.rs index a3b471ca56e..6b3236ef47e 100644 --- a/src/bootstrap/src/core/builder/cargo.rs +++ b/src/bootstrap/src/core/builder/cargo.rs @@ -537,7 +537,7 @@ impl Builder<'_> { } } - let stage = if compiler.stage == 0 && self.local_rebuild { + let build_compiler_stage = if compiler.stage == 0 && self.local_rebuild { // Assume the local-rebuild rustc already has stage1 features. 1 } else { @@ -545,15 +545,17 @@ impl Builder<'_> { }; // We synthetically interpret a stage0 compiler used to build tools as a - // "raw" compiler in that it's the exact snapshot we download. Normally - // the stage0 build means it uses libraries build by the stage0 - // compiler, but for tools we just use the precompiled libraries that - // we've downloaded - let use_snapshot = mode == Mode::ToolBootstrap; - assert!(!use_snapshot || stage == 0 || self.local_rebuild); - - let maybe_sysroot = self.sysroot(compiler); - let sysroot = if use_snapshot { self.rustc_snapshot_sysroot() } else { &maybe_sysroot }; + // "raw" compiler in that it's the exact snapshot we download. For things like + // ToolRustc, we would have to use the artificial stage0-sysroot compiler instead. 
+ let use_snapshot = + mode == Mode::ToolBootstrap || (mode == Mode::ToolTarget && build_compiler_stage == 0); + assert!(!use_snapshot || build_compiler_stage == 0 || self.local_rebuild); + + let sysroot = if use_snapshot { + self.rustc_snapshot_sysroot().to_path_buf() + } else { + self.sysroot(compiler) + }; let libdir = self.rustc_libdir(compiler); let sysroot_str = sysroot.as_os_str().to_str().expect("sysroot should be UTF-8"); @@ -562,7 +564,7 @@ impl Builder<'_> { } let mut rustflags = Rustflags::new(target); - if stage != 0 { + if build_compiler_stage != 0 { if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") { cargo.args(s.split_whitespace()); } @@ -604,7 +606,7 @@ impl Builder<'_> { // sysroot. Passing this cfg enables raw-dylib support instead, which makes the native // library unnecessary. This can be removed when windows-rs enables raw-dylib // unconditionally. - if let Mode::Rustc | Mode::ToolRustc | Mode::ToolBootstrap = mode { + if let Mode::Rustc | Mode::ToolRustc | Mode::ToolBootstrap | Mode::ToolTarget = mode { rustflags.arg("--cfg=windows_raw_dylib"); } @@ -657,7 +659,7 @@ impl Builder<'_> { // FIXME(rust-lang/cargo#5754) we shouldn't be using special command arguments // to the host invocation here, but rather Cargo should know what flags to pass rustc // itself. - if stage == 0 { + if build_compiler_stage == 0 { hostflags.arg("--cfg=bootstrap"); } @@ -666,7 +668,7 @@ impl Builder<'_> { // #71458. let mut rustdocflags = rustflags.clone(); rustdocflags.propagate_cargo_env("RUSTDOCFLAGS"); - if stage == 0 { + if build_compiler_stage == 0 { rustdocflags.env("RUSTDOCFLAGS_BOOTSTRAP"); } else { rustdocflags.env("RUSTDOCFLAGS_NOT_BOOTSTRAP"); @@ -677,7 +679,7 @@ impl Builder<'_> { } match mode { - Mode::Std | Mode::ToolBootstrap | Mode::ToolStd => {} + Mode::Std | Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolTarget => {} Mode::Rustc | Mode::Codegen | Mode::ToolRustc => { // Build proc macros both for the host and the target unless proc-macros are not // supported by the target. @@ -719,7 +721,7 @@ impl Builder<'_> { // feature on the rustc side. cargo.arg("-Zbinary-dep-depinfo"); let allow_features = match mode { - Mode::ToolBootstrap | Mode::ToolStd => { + Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolTarget => { // Restrict the allowed features so we don't depend on nightly // accidentally. 
// @@ -833,7 +835,7 @@ impl Builder<'_> { cargo .env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) .env("RUSTC_REAL", self.rustc(compiler)) - .env("RUSTC_STAGE", stage.to_string()) + .env("RUSTC_STAGE", build_compiler_stage.to_string()) .env("RUSTC_SYSROOT", sysroot) .env("RUSTC_LIBDIR", libdir) .env("RUSTDOC", self.bootstrap_out.join("rustdoc")) @@ -878,7 +880,7 @@ impl Builder<'_> { let debuginfo_level = match mode { Mode::Rustc | Mode::Codegen => self.config.rust_debuginfo_level_rustc, Mode::Std => self.config.rust_debuginfo_level_std, - Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolRustc => { + Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolRustc | Mode::ToolTarget => { self.config.rust_debuginfo_level_tools } }; @@ -890,11 +892,10 @@ impl Builder<'_> { profile_var("DEBUG_ASSERTIONS"), match mode { Mode::Std => self.config.std_debug_assertions, - Mode::Rustc => self.config.rustc_debug_assertions, - Mode::Codegen => self.config.rustc_debug_assertions, - Mode::ToolBootstrap => self.config.tools_debug_assertions, - Mode::ToolStd => self.config.tools_debug_assertions, - Mode::ToolRustc => self.config.tools_debug_assertions, + Mode::Rustc | Mode::Codegen => self.config.rustc_debug_assertions, + Mode::ToolBootstrap | Mode::ToolStd | Mode::ToolRustc | Mode::ToolTarget => { + self.config.tools_debug_assertions + } } .to_string(), ); @@ -965,7 +966,11 @@ impl Builder<'_> { cargo.env("CFG_VIRTUAL_RUSTC_DEV_SOURCE_BASE_DIR", map_to); } } - Mode::Std | Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd => { + Mode::Std + | Mode::ToolBootstrap + | Mode::ToolRustc + | Mode::ToolStd + | Mode::ToolTarget => { if let Some(ref map_to) = self.build.debuginfo_map_to(GitRepo::Rustc, RemapScheme::NonCompiler) { @@ -1280,8 +1285,8 @@ impl Builder<'_> { }; if let Some(limit) = limit - && (stage == 0 - || self.config.default_codegen_backend(target).unwrap_or_default() == "llvm") + && (build_compiler_stage == 0 + || self.config.default_codegen_backend(target).unwrap_or_default().is_llvm()) { rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}")); } diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index 1b75d00b30e..96289a63785 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -141,7 +141,7 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { #[allow(unused)] #[derive(Debug, PartialEq, Eq)] pub struct StepMetadata { - name: &'static str, + name: String, kind: Kind, target: TargetSelection, built_by: Option<Compiler>, @@ -151,28 +151,28 @@ pub struct StepMetadata { } impl StepMetadata { - pub fn build(name: &'static str, target: TargetSelection) -> Self { + pub fn build(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Build) } - pub fn check(name: &'static str, target: TargetSelection) -> Self { + pub fn check(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Check) } - pub fn doc(name: &'static str, target: TargetSelection) -> Self { + pub fn doc(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Doc) } - pub fn dist(name: &'static str, target: TargetSelection) -> Self { + pub fn dist(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Dist) } - pub fn test(name: &'static str, target: TargetSelection) -> Self { + pub fn test(name: &str, target: TargetSelection) -> Self { Self::new(name, target, Kind::Test) } - fn new(name: &'static str, target: TargetSelection, kind: Kind) -> Self { 
- Self { name, kind, target, built_by: None, stage: None, metadata: None } + fn new(name: &str, target: TargetSelection, kind: Kind) -> Self { + Self { name: name.to_string(), kind, target, built_by: None, stage: None, metadata: None } } pub fn built_by(mut self, compiler: Compiler) -> Self { @@ -411,8 +411,8 @@ const PATH_REMAP: &[(&str, &[&str])] = &[ "tests", &[ // tidy-alphabetical-start - "tests/assembly", - "tests/codegen", + "tests/assembly-llvm", + "tests/codegen-llvm", "tests/codegen-units", "tests/coverage", "tests/coverage-run-rustdoc", @@ -963,6 +963,7 @@ impl<'a> Builder<'a> { tool::RemoteTestServer, tool::RemoteTestClient, tool::RustInstaller, + tool::FeaturesStatusDump, tool::Cargo, tool::RustAnalyzer, tool::RustAnalyzerProcMacroSrv, @@ -984,6 +985,8 @@ impl<'a> Builder<'a> { tool::CoverageDump, tool::LlvmBitcodeLinker, tool::RustcPerf, + tool::WasmComponentLd, + tool::LldWrapper ), Kind::Clippy => describe!( clippy::Std, @@ -1030,6 +1033,7 @@ impl<'a> Builder<'a> { check::Compiletest, check::FeaturesStatusDump, check::CoverageDump, + check::Linkchecker, // This has special staging logic, it may run on stage 1 while others run on stage 0. // It takes quite some time to build stage 1, so put this at the end. // @@ -1046,9 +1050,9 @@ impl<'a> Builder<'a> { test::Crashes, test::Coverage, test::MirOpt, - test::Codegen, + test::CodegenLlvm, test::CodegenUnits, - test::Assembly, + test::AssemblyLlvm, test::Incremental, test::Debuginfo, test::UiFullDeps, diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index 51a90649692..f012645b7ef 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -642,6 +642,7 @@ mod snapshot { }; use crate::core::builder::{Builder, Kind, StepDescription, StepMetadata}; use crate::core::config::TargetSelection; + use crate::core::config::toml::rust::with_lld_opt_in_targets; use crate::utils::cache::Cache; use crate::utils::helpers::get_host_target; use crate::utils::tests::{ConfigBuilder, TestCtx}; @@ -712,7 +713,11 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> "); + } + #[test] + fn build_rustc_no_explicit_stage() { + let ctx = TestCtx::new(); insta::assert_snapshot!( ctx.config("build") .path("rustc") @@ -769,11 +774,11 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> [build] rustc 0 <host> -> LldWrapper 1 <host> - [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> + [build] rustc 0 <host> -> LlvmBitcodeLinker 1 <host> [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustc 1 <host> -> LldWrapper 2 <host> - [build] rustc 2 <host> -> LlvmBitcodeLinker 3 <host> + [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> [build] rustc 2 <host> -> std 2 <host> [build] rustdoc 1 <host> " @@ -793,17 +798,17 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> [build] rustc 0 <host> -> LldWrapper 1 <host> - [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> + [build] rustc 0 <host> -> LlvmBitcodeLinker 1 <host> [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustc 1 <host> -> LldWrapper 2 <host> - [build] rustc 2 <host> -> LlvmBitcodeLinker 3 <host> + [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> [build] rustc 1 <host> -> std 1 <target1> [build] rustc 2 <host> -> std 2 <target1> [build] llvm <target1> [build] rustc 1 <host> -> rustc 2 <target1> [build] rustc 1 <host> -> LldWrapper 2 <target1> - [build] 
rustc 2 <target1> -> LlvmBitcodeLinker 3 <target1> + [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <target1> [build] rustdoc 1 <target1> " ); @@ -994,7 +999,7 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> [build] rustdoc 0 <host> - [doc] std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] "); } @@ -1043,7 +1048,7 @@ mod snapshot { [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> LintDocs 1 <host> [build] rustc 0 <host> -> RustInstaller 1 <host> @@ -1062,20 +1067,30 @@ mod snapshot { fn dist_extended() { let ctx = TestCtx::new(); insta::assert_snapshot!( - ctx - .config("dist") - .args(&["--set", "build.extended=true"]) - .render_steps(), @r" + ctx.config("dist") + .args(&[ + "--set", + "build.extended=true", + "--set", + "rust.llvm-bitcode-linker=true", + "--set", + "rust.lld=true", + ]) + .render_steps(), @r" [build] rustc 0 <host> -> UnstableBookGen 1 <host> [build] rustc 0 <host> -> Rustbook 1 <host> [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> + [build] rustc 0 <host> -> LldWrapper 1 <host> [build] rustc 0 <host> -> WasmComponentLd 1 <host> + [build] rustc 0 <host> -> LlvmBitcodeLinker 1 <host> [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> + [build] rustc 1 <host> -> LldWrapper 2 <host> [build] rustc 1 <host> -> WasmComponentLd 2 <host> + [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> LintDocs 1 <host> [build] rustc 0 <host> -> RustInstaller 1 <host> @@ -1092,7 +1107,6 @@ mod snapshot { [build] rustc 0 <host> -> cargo-clippy 1 <host> [build] rustc 0 <host> -> miri 1 <host> [build] rustc 0 <host> -> cargo-miri 1 <host> - [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host> "); } @@ -1112,8 +1126,8 @@ mod snapshot { [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] - [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] + [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> LintDocs 1 <host> [build] rustc 0 <host> -> RustInstaller 1 <host> @@ -1149,7 +1163,7 @@ mod snapshot { [build] rustc 1 
<host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> LintDocs 1 <host> [build] rustc 1 <host> -> std 1 <target1> @@ -1186,8 +1200,8 @@ mod snapshot { [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] - [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] + [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> LintDocs 1 <host> [build] rustc 1 <host> -> std 1 <target1> @@ -1228,7 +1242,7 @@ mod snapshot { [build] rustc 1 <host> -> std 1 <host> [build] rustc 1 <host> -> rustc 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 0 <host> -> RustInstaller 1 <host> [dist] docs <target1> @@ -1260,7 +1274,7 @@ mod snapshot { [build] rustc 1 <host> -> rustc 2 <host> [build] rustc 1 <host> -> WasmComponentLd 2 <host> [build] rustdoc 1 <host> - [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] [build] rustc 2 <host> -> std 2 <host> [build] rustc 1 <host> -> std 1 <target1> [build] rustc 2 <host> -> std 2 <target1> @@ -1294,17 +1308,19 @@ mod snapshot { ctx.config("check") .path("compiler") .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> - [check] rustc 0 <host> -> cranelift 1 <host> - [check] rustc 0 <host> -> gcc 1 <host> + [check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host> + [check] rustc 0 <host> -> rustc_codegen_gcc 1 <host> "); + } + #[test] + fn check_rustc_no_explicit_stage() { + let ctx = TestCtx::new(); insta::assert_snapshot!( ctx.config("check") .path("rustc") .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> "); } @@ -1324,10 +1340,9 @@ mod snapshot { .path("compiler") .stage(1) .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> - [check] rustc 0 <host> -> cranelift 1 <host> - [check] rustc 0 <host> -> gcc 1 <host> + [check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host> + [check] rustc 0 <host> -> rustc_codegen_gcc 1 <host> "); } @@ -1343,8 +1358,8 @@ mod snapshot { [build] rustc 0 <host> -> rustc 1 <host> [build] rustc 1 <host> -> std 1 <host> [check] rustc 1 <host> -> rustc 2 <host> - [check] rustc 1 <host> -> cranelift 
2 <host> - [check] rustc 1 <host> -> gcc 2 <host> + [check] rustc 1 <host> -> rustc_codegen_cranelift 2 <host> + [check] rustc 1 <host> -> rustc_codegen_gcc 2 <host> "); } @@ -1362,13 +1377,13 @@ mod snapshot { [build] rustc 1 <host> -> std 1 <target1> [check] rustc 1 <host> -> rustc 2 <target1> [check] rustc 1 <host> -> Rustdoc 2 <target1> - [check] rustc 1 <host> -> cranelift 2 <target1> - [check] rustc 1 <host> -> gcc 2 <target1> + [check] rustc 1 <host> -> rustc_codegen_cranelift 2 <target1> + [check] rustc 1 <host> -> rustc_codegen_gcc 2 <target1> [check] rustc 1 <host> -> Clippy 2 <target1> [check] rustc 1 <host> -> Miri 2 <target1> [check] rustc 1 <host> -> CargoMiri 2 <target1> [check] rustc 1 <host> -> Rustfmt 2 <target1> - [check] rustc 1 <host> -> rust-analyzer 2 <target1> + [check] rustc 1 <host> -> RustAnalyzer 2 <target1> [check] rustc 1 <host> -> TestFloatParse 2 <target1> [check] rustc 1 <host> -> std 1 <target1> "); @@ -1456,10 +1471,9 @@ mod snapshot { .paths(&["library", "compiler"]) .args(&args) .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> - [check] rustc 0 <host> -> cranelift 1 <host> - [check] rustc 0 <host> -> gcc 1 <host> + [check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host> + [check] rustc 0 <host> -> rustc_codegen_gcc 1 <host> "); } @@ -1470,7 +1484,6 @@ mod snapshot { ctx.config("check") .path("miri") .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> [check] rustc 0 <host> -> Miri 1 <host> "); @@ -1491,7 +1504,6 @@ mod snapshot { .path("miri") .stage(1) .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> [check] rustc 0 <host> -> Miri 1 <host> "); @@ -1519,7 +1531,7 @@ mod snapshot { insta::assert_snapshot!( ctx.config("check") .path("compiletest") - .render_steps(), @"[check] compiletest <host>"); + .render_steps(), @"[check] rustc 0 <host> -> Compiletest 1 <host>"); } #[test] @@ -1533,7 +1545,7 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> [build] rustc 1 <host> -> std 1 <host> - [check] compiletest <host> + [check] rustc 1 <host> -> Compiletest 2 <host> "); } @@ -1544,10 +1556,9 @@ mod snapshot { ctx.config("check") .path("rustc_codegen_cranelift") .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> - [check] rustc 0 <host> -> cranelift 1 <host> - [check] rustc 0 <host> -> gcc 1 <host> + [check] rustc 0 <host> -> rustc_codegen_cranelift 1 <host> + [check] rustc 0 <host> -> rustc_codegen_gcc 1 <host> "); } @@ -1558,9 +1569,8 @@ mod snapshot { ctx.config("check") .path("rust-analyzer") .render_steps(), @r" - [build] llvm <host> [check] rustc 0 <host> -> rustc 1 <host> - [check] rustc 0 <host> -> rust-analyzer 1 <host> + [check] rustc 0 <host> -> RustAnalyzer 1 <host> "); } @@ -1610,7 +1620,7 @@ mod snapshot { [build] llvm <host> [build] rustc 0 <host> -> rustc 1 <host> [build] rustdoc 0 <host> - [doc] std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,std,sysroot,test,unwind] + [doc] std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind] "); } @@ -1644,6 +1654,21 @@ mod snapshot { } #[test] + fn test_lld_opt_in() { + with_lld_opt_in_targets(vec![host_target()], || { + let ctx = TestCtx::new(); + insta::assert_snapshot!( + ctx.config("build") + .path("compiler") + .render_steps(), @r" + [build] llvm <host> + [build] rustc 0 <host> -> rustc 1 <host> + [build] rustc 
0 <host> -> LldWrapper 1 <host> + "); + }); + } + + #[test] fn doc_library_no_std_target() { let ctx = TestCtx::new(); insta::assert_snapshot!( diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 22a75183404..6055876c475 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -45,11 +45,13 @@ use crate::core::config::{ DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt, RustcLto, SplitDebuginfo, StringOrBool, set, threads_from_config, }; -use crate::core::download::is_download_ci_available; +use crate::core::download::{ + DownloadContext, download_beta_toolchain, is_download_ci_available, maybe_download_rustfmt, +}; use crate::utils::channel; use crate::utils::exec::{ExecutionContext, command}; use crate::utils::helpers::{exe, get_host_target}; -use crate::{GitInfo, OnceLock, TargetSelection, check_ci_llvm, helpers, t}; +use crate::{CodegenBackendKind, GitInfo, OnceLock, TargetSelection, check_ci_llvm, helpers, t}; /// Each path in this list is considered "allowed" in the `download-rustc="if-unchanged"` logic. /// This means they can be modified and changes to these paths should never trigger a compiler build @@ -206,7 +208,7 @@ pub struct Config { pub rustc_default_linker: Option<String>, pub rust_optimize_tests: bool, pub rust_dist_src: bool, - pub rust_codegen_backends: Vec<String>, + pub rust_codegen_backends: Vec<CodegenBackendKind>, pub rust_verify_llvm_ir: bool, pub rust_thin_lto_import_instr_limit: Option<u32>, pub rust_randomize_layout: bool, @@ -296,8 +298,16 @@ pub struct Config { /// Command for visual diff display, e.g. `diff-tool --color=always`. pub compiletest_diff_tool: Option<String>, + /// Whether to allow running both `compiletest` self-tests and `compiletest`-managed test suites + /// against the stage 0 (rustc, std). + /// + /// This is only intended to be used when the stage 0 compiler is actually built from in-tree + /// sources. + pub compiletest_allow_stage0: bool, + /// Whether to use the precompiled stage0 libtest with compiletest. 
pub compiletest_use_stage0_libtest: bool, + /// Default value for `--extra-checks` pub tidy_extra_checks: Option<String>, pub is_running_on_ci: bool, @@ -340,7 +350,7 @@ impl Config { channel: "dev".to_string(), codegen_tests: true, rust_dist_src: true, - rust_codegen_backends: vec!["llvm".to_owned()], + rust_codegen_backends: vec![CodegenBackendKind::Llvm], deny_warnings: true, bindir: "bin".into(), dist_include_mingw_linker: true, @@ -747,6 +757,7 @@ impl Config { optimized_compiler_builtins, jobs, compiletest_diff_tool, + compiletest_allow_stage0, compiletest_use_stage0_libtest, tidy_extra_checks, ccache, @@ -795,13 +806,19 @@ impl Config { ); } + config.patch_binaries_for_nix = patch_binaries_for_nix; + config.bootstrap_cache_path = bootstrap_cache_path; + config.llvm_assertions = + toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); + config.initial_rustc = if let Some(rustc) = rustc { if !flags_skip_stage0_validation { config.check_stage0_version(&rustc, "rustc"); } rustc } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config .out .join(config.host_target) @@ -827,7 +844,8 @@ impl Config { } cargo } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config.initial_sysroot.join("bin").join(exe("cargo", config.host_target)) }; @@ -863,7 +881,6 @@ impl Config { config.reuse = reuse.map(PathBuf::from); config.submodules = submodules; config.android_ndk = android_ndk; - config.bootstrap_cache_path = bootstrap_cache_path; set(&mut config.low_priority, low_priority); set(&mut config.compiler_docs, compiler_docs); set(&mut config.library_docs_private_items, library_docs_private_items); @@ -882,7 +899,6 @@ impl Config { set(&mut config.local_rebuild, local_rebuild); set(&mut config.print_step_timings, print_step_timings); set(&mut config.print_step_rusage, print_step_rusage); - config.patch_binaries_for_nix = patch_binaries_for_nix; config.verbose = cmp::max(config.verbose, flags_verbose as usize); @@ -891,9 +907,6 @@ impl Config { config.apply_install_config(toml.install); - config.llvm_assertions = - toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); - let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel"))); let ci_channel = file_content.trim_end(); @@ -942,6 +955,7 @@ impl Config { config.rust_profile_use = flags_rust_profile_use; config.rust_profile_generate = flags_rust_profile_generate; + config.apply_target_config(toml.target); config.apply_rust_config(toml.rust, flags_warnings); config.reproducible_artifacts = flags_reproducible_artifact; @@ -967,8 +981,6 @@ impl Config { config.apply_gcc_config(toml.gcc); - config.apply_target_config(toml.target); - match ccache { Some(StringOrBool::String(ref s)) => config.ccache = Some(s.to_string()), Some(StringOrBool::Bool(true)) => { @@ -995,8 +1007,12 @@ impl Config { config.apply_dist_config(toml.dist); - config.initial_rustfmt = - if let Some(r) = rustfmt { Some(r) } else { config.maybe_download_rustfmt() }; + config.initial_rustfmt = if let Some(r) = rustfmt { + Some(r) + } else { + let dwn_ctx = DownloadContext::from(&config); + maybe_download_rustfmt(dwn_ctx) + }; if matches!(config.lld_mode, LldMode::SelfContained) && !config.lld_enabled @@ -1013,8 +1029,12 @@ impl Config { config.optimized_compiler_builtins = optimized_compiler_builtins.unwrap_or(config.channel != "dev"); + config.compiletest_diff_tool = 
compiletest_diff_tool; + + config.compiletest_allow_stage0 = compiletest_allow_stage0.unwrap_or(false); config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true); + config.tidy_extra_checks = tidy_extra_checks; let download_rustc = config.download_rustc_commit.is_some(); @@ -1727,7 +1747,7 @@ impl Config { .unwrap_or(self.profiler) } - pub fn codegen_backends(&self, target: TargetSelection) -> &[String] { + pub fn codegen_backends(&self, target: TargetSelection) -> &[CodegenBackendKind] { self.target_config .get(&target) .and_then(|cfg| cfg.codegen_backends.as_deref()) @@ -1738,7 +1758,7 @@ impl Config { self.target_config.get(&target).and_then(|cfg| cfg.jemalloc).unwrap_or(self.jemalloc) } - pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<String> { + pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<CodegenBackendKind> { self.codegen_backends(target).first().cloned() } @@ -1754,7 +1774,7 @@ impl Config { } pub fn llvm_enabled(&self, target: TargetSelection) -> bool { - self.codegen_backends(target).contains(&"llvm".to_owned()) + self.codegen_backends(target).contains(&CodegenBackendKind::Llvm) } pub fn llvm_libunwind(&self, target: TargetSelection) -> LlvmLibunwind { diff --git a/src/bootstrap/src/core/config/flags.rs b/src/bootstrap/src/core/config/flags.rs index 1547ca44494..31a427f9ffa 100644 --- a/src/bootstrap/src/core/config/flags.rs +++ b/src/bootstrap/src/core/config/flags.rs @@ -386,7 +386,7 @@ pub enum Subcommand { bless: bool, #[arg(long)] /// comma-separated list of other files types to check (accepts py, py:lint, - /// py:fmt, shell, shell:lint, cpp, cpp:fmt, spellcheck) + /// py:fmt, shell, cpp, cpp:fmt, js, js:lint, js:typecheck, spellcheck) /// /// Any argument can be prefixed with "auto:" to only run if /// relevant files are modified (eg. "auto:py"). diff --git a/src/bootstrap/src/core/config/toml/build.rs b/src/bootstrap/src/core/config/toml/build.rs index 4d29691f38b..728367b3972 100644 --- a/src/bootstrap/src/core/config/toml/build.rs +++ b/src/bootstrap/src/core/config/toml/build.rs @@ -68,6 +68,7 @@ define_config! { optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins", jobs: Option<u32> = "jobs", compiletest_diff_tool: Option<String> = "compiletest-diff-tool", + compiletest_allow_stage0: Option<bool> = "compiletest-allow-stage0", compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest", tidy_extra_checks: Option<String> = "tidy-extra-checks", ccache: Option<StringOrBool> = "ccache", diff --git a/src/bootstrap/src/core/config/toml/rust.rs b/src/bootstrap/src/core/config/toml/rust.rs index 71fab0e6ae6..03da993a17d 100644 --- a/src/bootstrap/src/core/config/toml/rust.rs +++ b/src/bootstrap/src/core/config/toml/rust.rs @@ -11,7 +11,9 @@ use crate::core::config::{ DebuginfoLevel, Merge, ReplaceOpt, RustcLto, StringOrBool, set, threads_from_config, }; use crate::flags::Warnings; -use crate::{BTreeSet, Config, HashSet, PathBuf, TargetSelection, define_config, exit}; +use crate::{ + BTreeSet, CodegenBackendKind, Config, HashSet, PathBuf, TargetSelection, define_config, exit, +}; define_config! { /// TOML representation of how the Rust build is configured. 
@@ -389,9 +391,13 @@ pub fn check_incompatible_options_for_ci_rustc( Ok(()) } -pub(crate) const VALID_CODEGEN_BACKENDS: &[&str] = &["llvm", "cranelift", "gcc"]; +pub(crate) const BUILTIN_CODEGEN_BACKENDS: &[&str] = &["llvm", "cranelift", "gcc"]; -pub(crate) fn validate_codegen_backends(backends: Vec<String>, section: &str) -> Vec<String> { +pub(crate) fn parse_codegen_backends( + backends: Vec<String>, + section: &str, +) -> Vec<CodegenBackendKind> { + let mut found_backends = vec![]; for backend in &backends { if let Some(stripped) = backend.strip_prefix(CODEGEN_BACKEND_PREFIX) { panic!( @@ -400,14 +406,46 @@ pub(crate) fn validate_codegen_backends(backends: Vec<String>, section: &str) -> Please, use '{stripped}' instead." ) } - if !VALID_CODEGEN_BACKENDS.contains(&backend.as_str()) { + if !BUILTIN_CODEGEN_BACKENDS.contains(&backend.as_str()) { println!( "HELP: '{backend}' for '{section}.codegen-backends' might fail. \ - List of known good values: {VALID_CODEGEN_BACKENDS:?}" + List of known codegen backends: {BUILTIN_CODEGEN_BACKENDS:?}" ); } + let backend = match backend.as_str() { + "llvm" => CodegenBackendKind::Llvm, + "cranelift" => CodegenBackendKind::Cranelift, + "gcc" => CodegenBackendKind::Gcc, + backend => CodegenBackendKind::Custom(backend.to_string()), + }; + found_backends.push(backend); } - backends + found_backends +} + +#[cfg(not(test))] +fn default_lld_opt_in_targets() -> Vec<String> { + vec!["x86_64-unknown-linux-gnu".to_string()] +} + +#[cfg(test)] +thread_local! { + static TEST_LLD_OPT_IN_TARGETS: std::cell::RefCell<Option<Vec<String>>> = std::cell::RefCell::new(None); +} + +#[cfg(test)] +fn default_lld_opt_in_targets() -> Vec<String> { + TEST_LLD_OPT_IN_TARGETS.with(|cell| cell.borrow().clone()).unwrap_or_default() +} + +#[cfg(test)] +pub fn with_lld_opt_in_targets<R>(targets: Vec<String>, f: impl FnOnce() -> R) -> R { + TEST_LLD_OPT_IN_TARGETS.with(|cell| { + let prev = cell.replace(Some(targets)); + let result = f(); + cell.replace(prev); + result + }) } impl Config { @@ -584,7 +622,7 @@ impl Config { llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind")); set( &mut self.rust_codegen_backends, - codegen_backends.map(|backends| validate_codegen_backends(backends, "rust")), + codegen_backends.map(|backends| parse_codegen_backends(backends, "rust")), ); self.rust_codegen_units = codegen_units.map(threads_from_config); @@ -617,12 +655,13 @@ impl Config { // thus, disabled // - similarly, lld will not be built nor used by default when explicitly asked not to, e.g. // when the config sets `rust.lld = false` - if self.host_target.triple == "x86_64-unknown-linux-gnu" && self.hosts == [self.host_target] + if default_lld_opt_in_targets().contains(&self.host_target.triple.to_string()) + && self.hosts == [self.host_target] { let no_llvm_config = self .target_config .get(&self.host_target) - .is_some_and(|target_config| target_config.llvm_config.is_none()); + .is_none_or(|target_config| target_config.llvm_config.is_none()); let enable_lld = self.llvm_from_ci || no_llvm_config; // Prefer the config setting in case an explicit opt-out is needed. 
self.lld_enabled = lld_enabled.unwrap_or(enable_lld); diff --git a/src/bootstrap/src/core/config/toml/target.rs b/src/bootstrap/src/core/config/toml/target.rs index 337276948b3..9dedadff3a1 100644 --- a/src/bootstrap/src/core/config/toml/target.rs +++ b/src/bootstrap/src/core/config/toml/target.rs @@ -16,9 +16,9 @@ use std::collections::HashMap; use serde::{Deserialize, Deserializer}; -use crate::core::config::toml::rust::validate_codegen_backends; +use crate::core::config::toml::rust::parse_codegen_backends; use crate::core::config::{LlvmLibunwind, Merge, ReplaceOpt, SplitDebuginfo, StringOrBool}; -use crate::{Config, HashSet, PathBuf, TargetSelection, define_config, exit}; +use crate::{CodegenBackendKind, Config, HashSet, PathBuf, TargetSelection, define_config, exit}; define_config! { /// TOML representation of how each build target is configured. @@ -76,7 +76,7 @@ pub struct Target { pub qemu_rootfs: Option<PathBuf>, pub runner: Option<String>, pub no_std: bool, - pub codegen_backends: Option<Vec<String>>, + pub codegen_backends: Option<Vec<CodegenBackendKind>>, pub optimized_compiler_builtins: Option<bool>, pub jemalloc: Option<bool>, } @@ -144,7 +144,7 @@ impl Config { target.jemalloc = cfg.jemalloc; if let Some(backends) = cfg.codegen_backends { target.codegen_backends = - Some(validate_codegen_backends(backends, &format!("target.{triple}"))) + Some(parse_codegen_backends(backends, &format!("target.{triple}"))) } target.split_debuginfo = cfg.split_debuginfo.as_ref().map(|v| { diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index d7c6d8dbcc3..7ec6c62a07d 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -7,9 +7,9 @@ use std::sync::OnceLock; use xz2::bufread::XzDecoder; -use crate::core::config::BUILDER_CONFIG_FILENAME; +use crate::core::config::{BUILDER_CONFIG_FILENAME, TargetSelection}; use crate::utils::build_stamp::BuildStamp; -use crate::utils::exec::command; +use crate::utils::exec::{ExecutionContext, command}; use crate::utils::helpers::{exe, hex_encode, move_file}; use crate::{Config, t}; @@ -24,17 +24,6 @@ fn extract_curl_version(out: String) -> semver::Version { .unwrap_or(semver::Version::new(1, 0, 0)) } -fn curl_version(config: &Config) -> semver::Version { - let mut curl = command("curl"); - curl.arg("-V"); - let curl = curl.run_capture_stdout(config); - if curl.is_failure() { - return semver::Version::new(1, 0, 0); - } - let output = curl.stdout(); - extract_curl_version(output) -} - /// Generic helpers that are useful anywhere in bootstrap. impl Config { pub fn is_verbose(&self) -> bool { @@ -49,10 +38,7 @@ impl Config { } pub(crate) fn remove(&self, f: &Path) { - if self.dry_run() { - return; - } - fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); + remove(&self.exec_ctx, f); } /// Create a temporary directory in `out` and return its path. @@ -68,49 +54,7 @@ impl Config { /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true /// on NixOS fn should_fix_bins_and_dylibs(&self) -> bool { - let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { - let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(self); - if uname.is_failure() { - return false; - } - let output = uname.stdout(); - if !output.starts_with("Linux") { - return false; - } - // If the user has asked binaries to be patched for Nix, then - // don't check for NixOS or `/lib`. 
- // NOTE: this intentionally comes after the Linux check: - // - patchelf only works with ELF files, so no need to run it on Mac or Windows - // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. - if let Some(explicit_value) = self.patch_binaries_for_nix { - return explicit_value; - } - - // Use `/etc/os-release` instead of `/etc/NIXOS`. - // The latter one does not exist on NixOS when using tmpfs as root. - let is_nixos = match File::open("/etc/os-release") { - Err(e) if e.kind() == ErrorKind::NotFound => false, - Err(e) => panic!("failed to access /etc/os-release: {e}"), - Ok(os_release) => BufReader::new(os_release).lines().any(|l| { - let l = l.expect("reading /etc/os-release"); - matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") - }), - }; - if !is_nixos { - let in_nix_shell = env::var("IN_NIX_SHELL"); - if let Ok(in_nix_shell) = in_nix_shell { - eprintln!( - "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ - you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" - ); - } - } - is_nixos - }); - if val { - eprintln!("INFO: You seem to be using Nix."); - } - val + should_fix_bins_and_dylibs(self.patch_binaries_for_nix, &self.exec_ctx) } /// Modifies the interpreter section of 'fname' to fix the dynamic linker, @@ -121,259 +65,22 @@ impl Config { /// /// Please see <https://nixos.org/patchelf.html> for more information fn fix_bin_or_dylib(&self, fname: &Path) { - assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); - println!("attempting to patch {}", fname.display()); - - // Only build `.nix-deps` once. - static NIX_DEPS_DIR: OnceLock<PathBuf> = OnceLock::new(); - let mut nix_build_succeeded = true; - let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { - // Run `nix-build` to "build" each dependency (which will likely reuse - // the existing `/nix/store` copy, or at most download a pre-built copy). - // - // Importantly, we create a gc-root called `.nix-deps` in the `build/` - // directory, but still reference the actual `/nix/store` path in the rpath - // as it makes it significantly more robust against changes to the location of - // the `.nix-deps` location. - // - // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). - // zlib: Needed as a system dependency of `libLLVM-*.so`. - // patchelf: Needed for patching ELF binaries (see doc comment above). 
- let nix_deps_dir = self.out.join(".nix-deps"); - const NIX_EXPR: &str = " - with (import <nixpkgs> {}); - symlinkJoin { - name = \"rust-stage0-dependencies\"; - paths = [ - zlib - patchelf - stdenv.cc.bintools - ]; - } - "; - nix_build_succeeded = command("nix-build") - .allow_failure() - .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) - .run_capture_stdout(self) - .is_success(); - nix_deps_dir - }); - if !nix_build_succeeded { - return; - } - - let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); - patchelf.args(&[ - OsString::from("--add-rpath"), - OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), - ]); - if !path_is_dylib(fname) { - // Finally, set the correct .interp for binaries - let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); - let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); - patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); - } - patchelf.arg(fname); - let _ = patchelf.allow_failure().run_capture_stdout(self); + fix_bin_or_dylib(&self.out, fname, &self.exec_ctx); } fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { - self.verbose(|| println!("download {url}")); - // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. - let tempfile = self.tempdir().join(dest_path.file_name().unwrap()); - // While bootstrap itself only supports http and https downloads, downstream forks might - // need to download components from other protocols. The match allows them adding more - // protocols without worrying about merge conflicts if we change the HTTP implementation. - match url.split_once("://").map(|(proto, _)| proto) { - Some("http") | Some("https") => { - self.download_http_with_retries(&tempfile, url, help_on_error) - } - Some(other) => panic!("unsupported protocol {other} in {url}"), - None => panic!("no protocol in {url}"), - } - t!( - move_file(&tempfile, dest_path), - format!("failed to rename {tempfile:?} to {dest_path:?}") - ); - } - - fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) { - println!("downloading {url}"); - // Try curl. If that fails and we are on windows, fallback to PowerShell. - // options should be kept in sync with - // src/bootstrap/src/core/download.rs - // for consistency - let mut curl = command("curl").allow_failure(); - curl.args([ - // follow redirect - "--location", - // timeout if speed is < 10 bytes/sec for > 30 seconds - "--speed-time", - "30", - "--speed-limit", - "10", - // timeout if cannot connect within 30 seconds - "--connect-timeout", - "30", - // output file - "--output", - tempfile.to_str().unwrap(), - // if there is an error, don't restart the download, - // instead continue where it left off. - "--continue-at", - "-", - // retry up to 3 times. note that this means a maximum of 4 - // attempts will be made, since the first attempt isn't a *re*try. - "--retry", - "3", - // show errors, even if --silent is specified - "--show-error", - // set timestamp of downloaded file to that of the server - "--remote-time", - // fail on non-ok http status - "--fail", - ]); - // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. - if self.is_running_on_ci { - curl.arg("--silent"); - } else { - curl.arg("--progress-bar"); - } - // --retry-all-errors was added in 7.71.0, don't use it if curl is old. 
- if curl_version(self) >= semver::Version::new(7, 71, 0) { - curl.arg("--retry-all-errors"); - } - curl.arg(url); - if !curl.run(self) { - if self.host_target.contains("windows-msvc") { - eprintln!("Fallback to PowerShell"); - for _ in 0..3 { - let powershell = command("PowerShell.exe").allow_failure().args([ - "/nologo", - "-Command", - "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", - &format!( - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", - url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), - ), - ]).run_capture_stdout(self); - - if powershell.is_failure() { - return; - } - - eprintln!("\nspurious failure, trying again"); - } - } - if !help_on_error.is_empty() { - eprintln!("{help_on_error}"); - } - crate::exit!(1); - } + let dwn_ctx: DownloadContext<'_> = self.into(); + download_file(dwn_ctx, url, dest_path, help_on_error); } fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) { - eprintln!("extracting {} to {}", tarball.display(), dst.display()); - if !dst.exists() { - t!(fs::create_dir_all(dst)); - } - - // `tarball` ends with `.tar.xz`; strip that suffix - // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` - let uncompressed_filename = - Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); - let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); - - // decompress the file - let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); - let decompressor = XzDecoder::new(BufReader::new(data)); - - let mut tar = tar::Archive::new(decompressor); - - let is_ci_rustc = dst.ends_with("ci-rustc"); - let is_ci_llvm = dst.ends_with("ci-llvm"); - - // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding - // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. - // Cache the entries when we extract it so we only have to read it once. - let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; - - for member in t!(tar.entries()) { - let mut member = t!(member); - let original_path = t!(member.path()).into_owned(); - // skip the top-level directory - if original_path == directory_prefix { - continue; - } - let mut short_path = t!(original_path.strip_prefix(directory_prefix)); - let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); - - if !(short_path.starts_with(pattern) - || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) - { - continue; - } - short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); - let dst_path = dst.join(short_path); - self.verbose(|| { - println!("extracting {} to {}", original_path.display(), dst.display()) - }); - if !t!(member.unpack_in(dst)) { - panic!("path traversal attack ??"); - } - if let Some(record) = &mut recorded_entries { - t!(writeln!(record, "{}", short_path.to_str().unwrap())); - } - let src_path = dst.join(original_path); - if src_path.is_dir() && dst_path.exists() { - continue; - } - t!(move_file(src_path, dst_path)); - } - let dst_dir = dst.join(directory_prefix); - if dst_dir.exists() { - t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); - } + unpack(&self.exec_ctx, tarball, dst, pattern); } /// Returns whether the SHA256 checksum of `path` matches `expected`. 
+ #[cfg(test)] pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool { - use sha2::Digest; - - self.verbose(|| println!("verifying {}", path.display())); - - if self.dry_run() { - return false; - } - - let mut hasher = sha2::Sha256::new(); - - let file = t!(File::open(path)); - let mut reader = BufReader::new(file); - - loop { - let buffer = t!(reader.fill_buf()); - let l = buffer.len(); - // break if EOF - if l == 0 { - break; - } - hasher.update(buffer); - reader.consume(l); - } - - let checksum = hex_encode(hasher.finalize().as_slice()); - let verified = checksum == expected; - - if !verified { - println!( - "invalid checksum: \n\ - found: {checksum}\n\ - expected: {expected}", - ); - } - - verified + verify(&self.exec_ctx, path, expected) } } @@ -388,6 +95,7 @@ fn recorded_entries(dst: &Path, pattern: &str) -> Option<BufWriter<File>> { Some(BufWriter::new(t!(File::create(dst.join(name))))) } +#[derive(Clone)] enum DownloadSource { CI, Dist, @@ -420,63 +128,6 @@ impl Config { cargo_clippy } - #[cfg(test)] - pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> { - Some(PathBuf::new()) - } - - /// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't - /// reuse target directories or artifacts - #[cfg(not(test))] - pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> { - use build_helper::stage0_parser::VersionMetadata; - - if self.dry_run() { - return Some(PathBuf::new()); - } - - let VersionMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?; - let channel = format!("{version}-{date}"); - - let host = self.host_target; - let bin_root = self.out.join(host).join("rustfmt"); - let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); - let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel); - if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() { - return Some(rustfmt_path); - } - - self.download_component( - DownloadSource::Dist, - format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), - "rustfmt-preview", - date, - "rustfmt", - ); - self.download_component( - DownloadSource::Dist, - format!("rustc-{version}-{build}.tar.xz", build = host.triple), - "rustc", - date, - "rustfmt", - ); - - if self.should_fix_bins_and_dylibs() { - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if path_is_dylib(&lib.path()) { - self.fix_bin_or_dylib(&lib.path()); - } - } - } - - t!(rustfmt_stamp.write()); - Some(rustfmt_path) - } - pub(crate) fn ci_rust_std_contents(&self) -> Vec<String> { self.ci_component_contents(".rust-std-contents") } @@ -514,30 +165,6 @@ impl Config { ); } - #[cfg(test)] - pub(crate) fn download_beta_toolchain(&self) {} - - #[cfg(not(test))] - pub(crate) fn download_beta_toolchain(&self) { - self.verbose(|| println!("downloading stage0 beta artifacts")); - - let date = &self.stage0_metadata.compiler.date; - let version = &self.stage0_metadata.compiler.version; - let extra_components = ["cargo"]; - - let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| { - config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0") - }; - - self.download_toolchain( - version, - "stage0", - date, - &extra_components, - download_beta_component, - ); - } - fn download_toolchain( &self, version: &str, @@ 
-607,91 +234,8 @@ impl Config { key: &str, destination: &str, ) { - if self.dry_run() { - return; - } - - let cache_dst = - self.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| self.out.join("cache")); - - let cache_dir = cache_dst.join(key); - if !cache_dir.exists() { - t!(fs::create_dir_all(&cache_dir)); - } - - let bin_root = self.out.join(self.host_target).join(destination); - let tarball = cache_dir.join(&filename); - let (base_url, url, should_verify) = match mode { - DownloadSource::CI => { - let dist_server = if self.llvm_assertions { - self.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() - } else { - self.stage0_metadata.config.artifacts_server.clone() - }; - let url = format!( - "{}/{filename}", - key.strip_suffix(&format!("-{}", self.llvm_assertions)).unwrap() - ); - (dist_server, url, false) - } - DownloadSource::Dist => { - let dist_server = env::var("RUSTUP_DIST_SERVER") - .unwrap_or(self.stage0_metadata.config.dist_server.to_string()); - // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 - (dist_server, format!("dist/{key}/{filename}"), true) - } - }; - - // For the stage0 compiler, put special effort into ensuring the checksums are valid. - let checksum = if should_verify { - let error = format!( - "src/stage0 doesn't contain a checksum for {url}. \ - Pre-built artifacts might not be available for this \ - target at this time, see https://doc.rust-lang.org/nightly\ - /rustc/platform-support.html for more information." - ); - let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error); - if tarball.exists() { - if self.verify(&tarball, sha256) { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - self.verbose(|| { - println!( - "ignoring cached file {} due to failed verification", - tarball.display() - ) - }); - self.remove(&tarball); - } - } - Some(sha256) - } else if tarball.exists() { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - None - }; - - let mut help_on_error = ""; - if destination == "ci-rustc" { - help_on_error = "ERROR: failed to download pre-built rustc from CI - -NOTE: old builds get deleted after a certain time -HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: - -[rust] -download-rustc = false -"; - } - self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error); - if let Some(sha256) = checksum - && !self.verify(&tarball, sha256) - { - panic!("failed to verify {}", tarball.display()); - } - - self.unpack(&tarball, &bin_root, prefix); + let dwn_ctx: DownloadContext<'_> = self.into(); + download_component(dwn_ctx, mode, filename, prefix, key, destination); } #[cfg(test)] @@ -852,6 +396,39 @@ download-rustc = false } } +/// Only should be used for pre config initialization downloads. 
+pub(crate) struct DownloadContext<'a> { + host_target: TargetSelection, + out: &'a Path, + patch_binaries_for_nix: Option<bool>, + exec_ctx: &'a ExecutionContext, + stage0_metadata: &'a build_helper::stage0_parser::Stage0, + llvm_assertions: bool, + bootstrap_cache_path: &'a Option<PathBuf>, + is_running_on_ci: bool, +} + +impl<'a> AsRef<DownloadContext<'a>> for DownloadContext<'a> { + fn as_ref(&self) -> &DownloadContext<'a> { + self + } +} + +impl<'a> From<&'a Config> for DownloadContext<'a> { + fn from(value: &'a Config) -> Self { + DownloadContext { + host_target: value.host_target, + out: &value.out, + patch_binaries_for_nix: value.patch_binaries_for_nix, + exec_ctx: &value.exec_ctx, + stage0_metadata: &value.stage0_metadata, + llvm_assertions: value.llvm_assertions, + bootstrap_cache_path: &value.bootstrap_cache_path, + is_running_on_ci: value.is_running_on_ci, + } + } +} + fn path_is_dylib(path: &Path) -> bool { // The .so is not necessarily the extension, it might be libLLVM.so.18.1 path.to_str().is_some_and(|path| path.contains(".so")) @@ -875,6 +452,7 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo "powerpc-unknown-linux-gnu", "powerpc64-unknown-linux-gnu", "powerpc64le-unknown-linux-gnu", + "powerpc64le-unknown-linux-musl", "riscv64gc-unknown-linux-gnu", "s390x-unknown-linux-gnu", "x86_64-apple-darwin", @@ -896,3 +474,596 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo SUPPORTED_PLATFORMS.contains(&target_triple) } } + +#[cfg(test)] +pub(crate) fn maybe_download_rustfmt<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, +) -> Option<PathBuf> { + Some(PathBuf::new()) +} + +/// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't +/// reuse target directories or artifacts +#[cfg(not(test))] +pub(crate) fn maybe_download_rustfmt<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, +) -> Option<PathBuf> { + use build_helper::stage0_parser::VersionMetadata; + + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.exec_ctx.dry_run() { + return Some(PathBuf::new()); + } + + let VersionMetadata { date, version } = dwn_ctx.stage0_metadata.rustfmt.as_ref()?; + let channel = format!("{version}-{date}"); + + let host = dwn_ctx.host_target; + let bin_root = dwn_ctx.out.join(host).join("rustfmt"); + let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); + let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel); + if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() { + return Some(rustfmt_path); + } + + download_component( + dwn_ctx, + DownloadSource::Dist, + format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), + "rustfmt-preview", + date, + "rustfmt", + ); + + download_component( + dwn_ctx, + DownloadSource::Dist, + format!("rustc-{version}-{build}.tar.xz", build = host.triple), + "rustc", + date, + "rustfmt", + ); + + if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) { + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustfmt"), dwn_ctx.exec_ctx); + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("cargo-fmt"), dwn_ctx.exec_ctx); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if path_is_dylib(&lib.path()) { + fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx); + } + } + } + + t!(rustfmt_stamp.write()); + Some(rustfmt_path) +} + +#[cfg(test)] +pub(crate) fn 
download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) {} + +#[cfg(not(test))] +pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) { + let dwn_ctx = dwn_ctx.as_ref(); + dwn_ctx.exec_ctx.verbose(|| { + println!("downloading stage0 beta artifacts"); + }); + + let date = dwn_ctx.stage0_metadata.compiler.date.clone(); + let version = dwn_ctx.stage0_metadata.compiler.version.clone(); + let extra_components = ["cargo"]; + let sysroot = "stage0"; + download_toolchain( + dwn_ctx, + &version, + sysroot, + &date, + &extra_components, + "stage0", + DownloadSource::Dist, + ); +} + +fn download_toolchain<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + version: &str, + sysroot: &str, + stamp_key: &str, + extra_components: &[&str], + destination: &str, + mode: DownloadSource, +) { + let dwn_ctx = dwn_ctx.as_ref(); + let host = dwn_ctx.host_target.triple; + let bin_root = dwn_ctx.out.join(host).join(sysroot); + let rustc_stamp = BuildStamp::new(&bin_root).with_prefix("rustc").add_stamp(stamp_key); + + if !bin_root.join("bin").join(exe("rustc", dwn_ctx.host_target)).exists() + || !rustc_stamp.is_up_to_date() + { + if bin_root.exists() { + t!(fs::remove_dir_all(&bin_root)); + } + let filename = format!("rust-std-{version}-{host}.tar.xz"); + let pattern = format!("rust-std-{host}"); + download_component(dwn_ctx, mode.clone(), filename, &pattern, stamp_key, destination); + let filename = format!("rustc-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, "rustc", stamp_key, destination); + + for component in extra_components { + let filename = format!("{component}-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, component, stamp_key, destination); + } + + if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) { + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustdoc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib( + dwn_ctx.out, + &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), + dwn_ctx.exec_ctx, + ); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if path_is_dylib(&lib.path()) { + fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx); + } + } + } + + t!(rustc_stamp.write()); + } +} + +pub(crate) fn remove(exec_ctx: &ExecutionContext, f: &Path) { + if exec_ctx.dry_run() { + return; + } + fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); +} + +fn fix_bin_or_dylib(out: &Path, fname: &Path, exec_ctx: &ExecutionContext) { + assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); + println!("attempting to patch {}", fname.display()); + + // Only build `.nix-deps` once. + static NIX_DEPS_DIR: OnceLock<PathBuf> = OnceLock::new(); + let mut nix_build_succeeded = true; + let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { + // Run `nix-build` to "build" each dependency (which will likely reuse + // the existing `/nix/store` copy, or at most download a pre-built copy). + // + // Importantly, we create a gc-root called `.nix-deps` in the `build/` + // directory, but still reference the actual `/nix/store` path in the rpath + // as it makes it significantly more robust against changes to the location of + // the `.nix-deps` location. + // + // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). 
+ // zlib: Needed as a system dependency of `libLLVM-*.so`. + // patchelf: Needed for patching ELF binaries (see doc comment above). + let nix_deps_dir = out.join(".nix-deps"); + const NIX_EXPR: &str = " + with (import <nixpkgs> {}); + symlinkJoin { + name = \"rust-stage0-dependencies\"; + paths = [ + zlib + patchelf + stdenv.cc.bintools + ]; + } + "; + nix_build_succeeded = command("nix-build") + .allow_failure() + .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) + .run_capture_stdout(exec_ctx) + .is_success(); + nix_deps_dir + }); + if !nix_build_succeeded { + return; + } + + let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); + patchelf.args(&[ + OsString::from("--add-rpath"), + OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), + ]); + if !path_is_dylib(fname) { + // Finally, set the correct .interp for binaries + let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); + let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); + patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); + } + patchelf.arg(fname); + let _ = patchelf.allow_failure().run_capture_stdout(exec_ctx); +} + +fn should_fix_bins_and_dylibs( + patch_binaries_for_nix: Option<bool>, + exec_ctx: &ExecutionContext, +) -> bool { + let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { + let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(exec_ctx); + if uname.is_failure() { + return false; + } + let output = uname.stdout(); + if !output.starts_with("Linux") { + return false; + } + // If the user has asked binaries to be patched for Nix, then + // don't check for NixOS or `/lib`. + // NOTE: this intentionally comes after the Linux check: + // - patchelf only works with ELF files, so no need to run it on Mac or Windows + // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. + if let Some(explicit_value) = patch_binaries_for_nix { + return explicit_value; + } + + // Use `/etc/os-release` instead of `/etc/NIXOS`. + // The latter one does not exist on NixOS when using tmpfs as root. 
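+        // A line of the form `ID=nixos` (optionally quoted) identifies the distribution as NixOS.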
+ let is_nixos = match File::open("/etc/os-release") { + Err(e) if e.kind() == ErrorKind::NotFound => false, + Err(e) => panic!("failed to access /etc/os-release: {e}"), + Ok(os_release) => BufReader::new(os_release).lines().any(|l| { + let l = l.expect("reading /etc/os-release"); + matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") + }), + }; + if !is_nixos { + let in_nix_shell = env::var("IN_NIX_SHELL"); + if let Ok(in_nix_shell) = in_nix_shell { + eprintln!( + "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ + you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" + ); + } + } + is_nixos + }); + if val { + eprintln!("INFO: You seem to be using Nix."); + } + val +} + +fn download_component<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + mode: DownloadSource, + filename: String, + prefix: &str, + key: &str, + destination: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.exec_ctx.dry_run() { + return; + } + + let cache_dst = + dwn_ctx.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| dwn_ctx.out.join("cache")); + + let cache_dir = cache_dst.join(key); + if !cache_dir.exists() { + t!(fs::create_dir_all(&cache_dir)); + } + + let bin_root = dwn_ctx.out.join(dwn_ctx.host_target).join(destination); + let tarball = cache_dir.join(&filename); + let (base_url, url, should_verify) = match mode { + DownloadSource::CI => { + let dist_server = if dwn_ctx.llvm_assertions { + dwn_ctx.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() + } else { + dwn_ctx.stage0_metadata.config.artifacts_server.clone() + }; + let url = format!( + "{}/{filename}", + key.strip_suffix(&format!("-{}", dwn_ctx.llvm_assertions)).unwrap() + ); + (dist_server, url, false) + } + DownloadSource::Dist => { + let dist_server = env::var("RUSTUP_DIST_SERVER") + .unwrap_or(dwn_ctx.stage0_metadata.config.dist_server.to_string()); + // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 + (dist_server, format!("dist/{key}/{filename}"), true) + } + }; + + // For the stage0 compiler, put special effort into ensuring the checksums are valid. + let checksum = if should_verify { + let error = format!( + "src/stage0 doesn't contain a checksum for {url}. \ + Pre-built artifacts might not be available for this \ + target at this time, see https://doc.rust-lang.org/nightly\ + /rustc/platform-support.html for more information." 
+ ); + let sha256 = dwn_ctx.stage0_metadata.checksums_sha256.get(&url).expect(&error); + if tarball.exists() { + if verify(dwn_ctx.exec_ctx, &tarball, sha256) { + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); + return; + } else { + dwn_ctx.exec_ctx.verbose(|| { + println!( + "ignoring cached file {} due to failed verification", + tarball.display() + ) + }); + remove(dwn_ctx.exec_ctx, &tarball); + } + } + Some(sha256) + } else if tarball.exists() { + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); + return; + } else { + None + }; + + let mut help_on_error = ""; + if destination == "ci-rustc" { + help_on_error = "ERROR: failed to download pre-built rustc from CI + +NOTE: old builds get deleted after a certain time +HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: + +[rust] +download-rustc = false +"; + } + download_file(dwn_ctx, &format!("{base_url}/{url}"), &tarball, help_on_error); + if let Some(sha256) = checksum + && !verify(dwn_ctx.exec_ctx, &tarball, sha256) + { + panic!("failed to verify {}", tarball.display()); + } + + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); +} + +pub(crate) fn verify(exec_ctx: &ExecutionContext, path: &Path, expected: &str) -> bool { + use sha2::Digest; + + exec_ctx.verbose(|| { + println!("verifying {}", path.display()); + }); + + if exec_ctx.dry_run() { + return false; + } + + let mut hasher = sha2::Sha256::new(); + + let file = t!(File::open(path)); + let mut reader = BufReader::new(file); + + loop { + let buffer = t!(reader.fill_buf()); + let l = buffer.len(); + // break if EOF + if l == 0 { + break; + } + hasher.update(buffer); + reader.consume(l); + } + + let checksum = hex_encode(hasher.finalize().as_slice()); + let verified = checksum == expected; + + if !verified { + println!( + "invalid checksum: \n\ + found: {checksum}\n\ + expected: {expected}", + ); + } + + verified +} + +fn unpack(exec_ctx: &ExecutionContext, tarball: &Path, dst: &Path, pattern: &str) { + eprintln!("extracting {} to {}", tarball.display(), dst.display()); + if !dst.exists() { + t!(fs::create_dir_all(dst)); + } + + // `tarball` ends with `.tar.xz`; strip that suffix + // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` + let uncompressed_filename = + Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); + let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); + + // decompress the file + let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); + let decompressor = XzDecoder::new(BufReader::new(data)); + + let mut tar = tar::Archive::new(decompressor); + + let is_ci_rustc = dst.ends_with("ci-rustc"); + let is_ci_llvm = dst.ends_with("ci-llvm"); + + // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding + // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. + // Cache the entries when we extract it so we only have to read it once. 
+ let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; + + for member in t!(tar.entries()) { + let mut member = t!(member); + let original_path = t!(member.path()).into_owned(); + // skip the top-level directory + if original_path == directory_prefix { + continue; + } + let mut short_path = t!(original_path.strip_prefix(directory_prefix)); + let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); + + if !(short_path.starts_with(pattern) || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) + { + continue; + } + short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); + let dst_path = dst.join(short_path); + + exec_ctx.verbose(|| { + println!("extracting {} to {}", original_path.display(), dst.display()); + }); + + if !t!(member.unpack_in(dst)) { + panic!("path traversal attack ??"); + } + if let Some(record) = &mut recorded_entries { + t!(writeln!(record, "{}", short_path.to_str().unwrap())); + } + let src_path = dst.join(original_path); + if src_path.is_dir() && dst_path.exists() { + continue; + } + t!(move_file(src_path, dst_path)); + } + let dst_dir = dst.join(directory_prefix); + if dst_dir.exists() { + t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); + } +} + +fn download_file<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + url: &str, + dest_path: &Path, + help_on_error: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + dwn_ctx.exec_ctx.verbose(|| { + println!("download {url}"); + }); + // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. + let tempfile = tempdir(dwn_ctx.out).join(dest_path.file_name().unwrap()); + // While bootstrap itself only supports http and https downloads, downstream forks might + // need to download components from other protocols. The match allows them adding more + // protocols without worrying about merge conflicts if we change the HTTP implementation. + match url.split_once("://").map(|(proto, _)| proto) { + Some("http") | Some("https") => download_http_with_retries( + dwn_ctx.host_target, + dwn_ctx.is_running_on_ci, + dwn_ctx.exec_ctx, + &tempfile, + url, + help_on_error, + ), + Some(other) => panic!("unsupported protocol {other} in {url}"), + None => panic!("no protocol in {url}"), + } + t!(move_file(&tempfile, dest_path), format!("failed to rename {tempfile:?} to {dest_path:?}")); +} + +/// Create a temporary directory in `out` and return its path. +/// +/// NOTE: this temporary directory is shared between all steps; +/// if you need an empty directory, create a new subdirectory inside it. +pub(crate) fn tempdir(out: &Path) -> PathBuf { + let tmp = out.join("tmp"); + t!(fs::create_dir_all(&tmp)); + tmp +} + +fn download_http_with_retries( + host_target: TargetSelection, + is_running_on_ci: bool, + exec_ctx: &ExecutionContext, + tempfile: &Path, + url: &str, + help_on_error: &str, +) { + println!("downloading {url}"); + // Try curl. If that fails and we are on windows, fallback to PowerShell. 
+ // options should be kept in sync with + // src/bootstrap/src/core/download.rs + // for consistency + let mut curl = command("curl").allow_failure(); + curl.args([ + // follow redirect + "--location", + // timeout if speed is < 10 bytes/sec for > 30 seconds + "--speed-time", + "30", + "--speed-limit", + "10", + // timeout if cannot connect within 30 seconds + "--connect-timeout", + "30", + // output file + "--output", + tempfile.to_str().unwrap(), + // if there is an error, don't restart the download, + // instead continue where it left off. + "--continue-at", + "-", + // retry up to 3 times. note that this means a maximum of 4 + // attempts will be made, since the first attempt isn't a *re*try. + "--retry", + "3", + // show errors, even if --silent is specified + "--show-error", + // set timestamp of downloaded file to that of the server + "--remote-time", + // fail on non-ok http status + "--fail", + ]); + // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. + if is_running_on_ci { + curl.arg("--silent"); + } else { + curl.arg("--progress-bar"); + } + // --retry-all-errors was added in 7.71.0, don't use it if curl is old. + if curl_version(exec_ctx) >= semver::Version::new(7, 71, 0) { + curl.arg("--retry-all-errors"); + } + curl.arg(url); + if !curl.run(exec_ctx) { + if host_target.contains("windows-msvc") { + eprintln!("Fallback to PowerShell"); + for _ in 0..3 { + let powershell = command("PowerShell.exe").allow_failure().args([ + "/nologo", + "-Command", + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", + &format!( + "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", + url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), + ), + ]).run_capture_stdout(exec_ctx); + + if powershell.is_failure() { + return; + } + + eprintln!("\nspurious failure, trying again"); + } + } + if !help_on_error.is_empty() { + eprintln!("{help_on_error}"); + } + crate::exit!(1); + } +} + +fn curl_version(exec_ctx: &ExecutionContext) -> semver::Version { + let mut curl = command("curl"); + curl.arg("-V"); + let curl = curl.run_capture_stdout(exec_ctx); + if curl.is_failure() { + return semver::Version::new(1, 0, 0); + } + let output = curl.stdout(); + extract_curl_version(output) +} diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs index b39d464493e..15e04f59129 100644 --- a/src/bootstrap/src/core/sanity.rs +++ b/src/bootstrap/src/core/sanity.rs @@ -338,12 +338,6 @@ than building it. // Make sure musl-root is valid. if target.contains("musl") && !target.contains("unikraft") { - // If this is a native target (host is also musl) and no musl-root is given, - // fall back to the system toolchain in /usr before giving up - if build.musl_root(*target).is_none() && build.config.is_host_target(*target) { - let target = build.config.target_config.entry(*target).or_default(); - target.musl_root = Some("/usr".into()); - } match build.musl_libdir(*target) { Some(libdir) => { if fs::metadata(libdir.join("libc.a")).is_err() { diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 44be51815c7..011b52df97b 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -123,6 +123,46 @@ impl PartialEq for Compiler { } } +/// Represents a codegen backend. 
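+///
+/// The name is the suffix of the backend's crate: e.g. `CodegenBackendKind::Cranelift` maps to
+/// the `rustc_codegen_cranelift` crate, which is exactly what `crate_name()` returns.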
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] +pub enum CodegenBackendKind { + #[default] + Llvm, + Cranelift, + Gcc, + Custom(String), +} + +impl CodegenBackendKind { + /// Name of the codegen backend, as identified in the `compiler` directory + /// (`rustc_codegen_<name>`). + pub fn name(&self) -> &str { + match self { + CodegenBackendKind::Llvm => "llvm", + CodegenBackendKind::Cranelift => "cranelift", + CodegenBackendKind::Gcc => "gcc", + CodegenBackendKind::Custom(name) => name, + } + } + + /// Name of the codegen backend's crate, e.g. `rustc_codegen_cranelift`. + pub fn crate_name(&self) -> String { + format!("rustc_codegen_{}", self.name()) + } + + pub fn is_llvm(&self) -> bool { + matches!(self, Self::Llvm) + } + + pub fn is_cranelift(&self) -> bool { + matches!(self, Self::Cranelift) + } + + pub fn is_gcc(&self) -> bool { + matches!(self, Self::Gcc) + } +} + #[derive(PartialEq, Eq, Copy, Clone, Debug)] pub enum DocTests { /// Run normal tests and doc tests (default). @@ -253,12 +293,24 @@ pub enum Mode { /// These tools are intended to be only executed on the host system that /// invokes bootstrap, and they thus cannot be cross-compiled. /// - /// They are always built using the stage0 compiler, and typically they + /// They are always built using the stage0 compiler, and they /// can be compiled with stable Rust. /// /// These tools also essentially do not participate in staging. ToolBootstrap, + /// Build a cross-compilable helper tool. These tools do not depend on unstable features or + /// compiler internals, but they might be cross-compilable (so we cannot build them using the + /// stage0 compiler, unlike `ToolBootstrap`). + /// + /// Some of these tools are also shipped in our `dist` archives. + /// While we could compile them using the stage0 compiler when not cross-compiling, we instead + /// use the in-tree compiler (and std) to build them, so that we can ship e.g. std security + /// fixes and avoid depending fully on stage0 for the artifacts that we ship. + /// + /// This mode is used e.g. for linkers and linker tools invoked by rustc on its host target. + ToolTarget, + /// Build a tool which uses the locally built std, placing output in the /// "stageN-tools" directory. Its usage is quite rare, mainly used by /// compiletest which needs libtest. @@ -273,11 +325,21 @@ pub enum Mode { impl Mode { pub fn is_tool(&self) -> bool { - matches!(self, Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd) + match self { + Mode::ToolBootstrap | Mode::ToolRustc | Mode::ToolStd | Mode::ToolTarget => true, + Mode::Std | Mode::Codegen | Mode::Rustc => false, + } } pub fn must_support_dlopen(&self) -> bool { - matches!(self, Mode::Std | Mode::Codegen) + match self { + Mode::Std | Mode::Codegen => true, + Mode::ToolBootstrap + | Mode::ToolRustc + | Mode::ToolStd + | Mode::ToolTarget + | Mode::Rustc => false, + } } } @@ -802,17 +864,39 @@ impl Build { /// stage when running with a particular host compiler. /// /// The mode indicates what the root directory is for. 
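+    ///
+    /// For example, std built by a stage 1 compiler goes to `stage1-std`, in-tree tools go to
+    /// `stage<N>-tools`, and bootstrap tools always go to the unstaged `bootstrap-tools`
+    /// directory.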
- fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf { - let suffix = match mode { - Mode::Std => "-std", - Mode::Rustc => "-rustc", - Mode::Codegen => "-codegen", - Mode::ToolBootstrap => { - return self.out.join(compiler.host).join("bootstrap-tools"); + fn stage_out(&self, build_compiler: Compiler, mode: Mode) -> PathBuf { + use std::fmt::Write; + + fn bootstrap_tool() -> (Option<u32>, &'static str) { + (None, "bootstrap-tools") + } + fn staged_tool(build_compiler: Compiler) -> (Option<u32>, &'static str) { + (Some(build_compiler.stage), "tools") + } + + let (stage, suffix) = match mode { + Mode::Std => (Some(build_compiler.stage), "std"), + Mode::Rustc => (Some(build_compiler.stage), "rustc"), + Mode::Codegen => (Some(build_compiler.stage), "codegen"), + Mode::ToolBootstrap => bootstrap_tool(), + Mode::ToolStd | Mode::ToolRustc => (Some(build_compiler.stage), "tools"), + Mode::ToolTarget => { + // If we're not cross-compiling (the common case), share the target directory with + // bootstrap tools to reuse the build cache. + if build_compiler.stage == 0 { + bootstrap_tool() + } else { + staged_tool(build_compiler) + } } - Mode::ToolStd | Mode::ToolRustc => "-tools", }; - self.out.join(compiler.host).join(format!("stage{}{}", compiler.stage, suffix)) + let path = self.out.join(build_compiler.host); + let mut dir_name = String::new(); + if let Some(stage) = stage { + write!(dir_name, "stage{stage}-").unwrap(); + } + dir_name.push_str(suffix); + path.join(dir_name) } /// Returns the root output directory for all Cargo output in a given stage, @@ -1285,23 +1369,33 @@ impl Build { } } - /// Returns the "musl root" for this `target`, if defined + /// Returns the "musl root" for this `target`, if defined. + /// + /// If this is a native target (host is also musl) and no musl-root is given, + /// it falls back to the system toolchain in /usr. fn musl_root(&self, target: TargetSelection) -> Option<&Path> { - self.config + let configured_root = self + .config .target_config .get(&target) .and_then(|t| t.musl_root.as_ref()) .or(self.config.musl_root.as_ref()) - .map(|p| &**p) + .map(|p| &**p); + + if self.config.is_host_target(target) && configured_root.is_none() { + Some(Path::new("/usr")) + } else { + configured_root + } } /// Returns the "musl libdir" for this `target`. 
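+    ///
+    /// Prefers an explicitly configured `musl-libdir` for the target and otherwise falls back
+    /// to `lib` under the musl root.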
fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> { - let t = self.config.target_config.get(&target)?; - if let libdir @ Some(_) = &t.musl_libdir { - return libdir.clone(); - } - self.musl_root(target).map(|root| root.join("lib")) + self.config + .target_config + .get(&target) + .and_then(|t| t.musl_libdir.clone()) + .or_else(|| self.musl_root(target).map(|root| root.join("lib"))) } /// Returns the `lib` directory for the WASI target specified, if diff --git a/src/bootstrap/src/utils/build_stamp.rs b/src/bootstrap/src/utils/build_stamp.rs index f43d860893f..bd4eb790ae5 100644 --- a/src/bootstrap/src/utils/build_stamp.rs +++ b/src/bootstrap/src/utils/build_stamp.rs @@ -10,7 +10,7 @@ use sha2::digest::Digest; use crate::core::builder::Builder; use crate::core::config::TargetSelection; use crate::utils::helpers::{hex_encode, mtime}; -use crate::{Compiler, Mode, helpers, t}; +use crate::{CodegenBackendKind, Compiler, Mode, helpers, t}; #[cfg(test)] mod tests; @@ -129,10 +129,10 @@ pub fn codegen_backend_stamp( builder: &Builder<'_>, compiler: Compiler, target: TargetSelection, - backend: &str, + backend: &CodegenBackendKind, ) -> BuildStamp { BuildStamp::new(&builder.cargo_out(compiler, Mode::Codegen, target)) - .with_prefix(&format!("librustc_codegen_{backend}")) + .with_prefix(&format!("lib{}", backend.crate_name())) } /// Cargo's output path for the standard library in a given stage, compiled diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index f802640a42d..d3331b81587 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -439,7 +439,7 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ ChangeInfo { change_id: 143255, severity: ChangeSeverity::Warning, - summary: "`llvm.lld` is no longer enabled by default for the dist profile.", + summary: "`rust.lld` is no longer enabled by default for the dist profile.", }, ChangeInfo { change_id: 143251, @@ -486,4 +486,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "Removed `rust.description` and `llvm.ccache` as it was deprecated in #137723 and #136941 long time ago.", }, + ChangeInfo { + change_id: 144675, + severity: ChangeSeverity::Warning, + summary: "Added `build.compiletest-allow-stage0` flag instead of `COMPILETEST_FORCE_STAGE0` env var, and reject running `compiletest` self tests against stage 0 rustc unless explicitly allowed.", + }, ]; diff --git a/src/bootstrap/src/utils/proc_macro_deps.rs b/src/bootstrap/src/utils/proc_macro_deps.rs index 21c7fc89d7d..777c8601aa1 100644 --- a/src/bootstrap/src/utils/proc_macro_deps.rs +++ b/src/bootstrap/src/utils/proc_macro_deps.rs @@ -3,6 +3,7 @@ /// See <https://github.com/rust-lang/rust/issues/134863> pub static CRATES: &[&str] = &[ // tidy-alphabetical-start + "allocator-api2", "annotate-snippets", "anstyle", "askama_parser", @@ -16,13 +17,17 @@ pub static CRATES: &[&str] = &[ "darling_core", "derive_builder_core", "digest", + "equivalent", "fluent-bundle", "fluent-langneg", "fluent-syntax", "fnv", + "foldhash", "generic-array", + "hashbrown", "heck", "ident_case", + "indexmap", "intl-memoizer", "intl_pluralrules", "libc", diff --git a/src/bootstrap/src/utils/shared_helpers.rs b/src/bootstrap/src/utils/shared_helpers.rs index 9c6b4a7615d..9428e221f41 100644 --- a/src/bootstrap/src/utils/shared_helpers.rs +++ b/src/bootstrap/src/utils/shared_helpers.rs @@ -1,14 +1,18 @@ //! This module serves two purposes: -//! 1. 
It is part of the `utils` module and used in other parts of bootstrap. -//! 2. It is embedded inside bootstrap shims to avoid a dependency on the bootstrap library. -//! Therefore, this module should never use any other bootstrap module. This reduces binary -//! size and improves compilation time by minimizing linking time. +//! +//! 1. It is part of the `utils` module and used in other parts of bootstrap. +//! 2. It is embedded inside bootstrap shims to avoid a dependency on the bootstrap library. +//! Therefore, this module should never use any other bootstrap module. This reduces binary size +//! and improves compilation time by minimizing linking time. + +// # Note on tests +// +// If we were to declare a tests submodule here, the shim binaries that include this module via +// `#[path]` would fail to find it, which breaks `./x check bootstrap`. So instead the unit tests +// for this module are in `super::tests::shared_helpers_tests`. #![allow(dead_code)] -#[cfg(test)] -mod tests; - use std::env; use std::ffi::OsString; use std::fs::OpenOptions; @@ -16,10 +20,6 @@ use std::io::Write; use std::process::Command; use std::str::FromStr; -// If we were to declare a tests submodule here, the shim binaries that include this -// module via `#[path]` would fail to find it, which breaks `./x check bootstrap`. -// So instead the unit tests for this module are in `super::tests::shared_helpers_tests`. - /// Returns the environment variable which the dynamic library lookup path /// resides in for this platform. pub fn dylib_path_var() -> &'static str { diff --git a/src/bootstrap/src/utils/tests/mod.rs b/src/bootstrap/src/utils/tests/mod.rs index ec87e71e0b6..983680b0385 100644 --- a/src/bootstrap/src/utils/tests/mod.rs +++ b/src/bootstrap/src/utils/tests/mod.rs @@ -12,6 +12,10 @@ use crate::{Build, Config, Flags, t}; pub mod git; +// Note: tests for `shared_helpers` is separate here, as otherwise shim binaries that include the +// `shared_helpers` via `#[path]` would fail to find it, breaking `./x check bootstrap`. +mod shared_helpers_tests; + /// Holds temporary state of a bootstrap test. /// Right now it is only used to redirect the build directory of the bootstrap /// invocation, in the future it would be great if we could actually execute diff --git a/src/bootstrap/src/utils/shared_helpers/tests.rs b/src/bootstrap/src/utils/tests/shared_helpers_tests.rs index 559e9f70abd..c486e65007e 100644 --- a/src/bootstrap/src/utils/shared_helpers/tests.rs +++ b/src/bootstrap/src/utils/tests/shared_helpers_tests.rs @@ -1,3 +1,10 @@ +//! The `shared_helpers` module can't have its own tests submodule, because that would cause +//! problems for the shim binaries that include it via `#[path]`, so instead those unit tests live +//! here. +//! +//! To prevent tidy from complaining about this file not being named `tests.rs`, it lives inside a +//! submodule directory named `tests`. + use crate::utils::shared_helpers::parse_value_from_args; #[test] diff --git a/src/build_helper/src/npm.rs b/src/build_helper/src/npm.rs index dedef40978d..86cf6183bd0 100644 --- a/src/build_helper/src/npm.rs +++ b/src/build_helper/src/npm.rs @@ -3,23 +3,34 @@ use std::path::{Path, PathBuf}; use std::process::Command; use std::{fs, io}; -/// Install an exact package version, and return the path of `node_modules`. -pub fn install_one( - out_dir: &Path, - npm_bin: &Path, - pkg_name: &str, - pkg_version: &str, -) -> Result<PathBuf, io::Error> { +use crate::ci::CiEnv; + +/// Install all the npm deps, and return the path of `node_modules`. 
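+///
+/// Copies `package.json` and `package-lock.json` into `out_dir` so that `node_modules` is created
+/// there, then runs `npm ci` on CI (or `npm install` locally) with auditing, saving and funding
+/// notices disabled.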
+pub fn install(src_root_path: &Path, out_dir: &Path, npm: &Path) -> Result<PathBuf, io::Error> {
     let nm_path = out_dir.join("node_modules");
-    let _ = fs::create_dir(&nm_path);
-    let mut child = Command::new(npm_bin)
-        .arg("install")
-        .arg("--audit=false")
-        .arg("--fund=false")
-        .arg(format!("{pkg_name}@{pkg_version}"))
-        .current_dir(out_dir)
-        .spawn()?;
-    let exit_status = child.wait()?;
+    let copy_to_build = |p| {
+        fs::copy(src_root_path.join(p), out_dir.join(p)).map_err(|e| {
+            eprintln!("unable to copy {p:?} to build directory: {e:?}");
+            e
+        })
+    };
+    // Copy the package manifests into the output directory so that `node_modules` is created there.
+    copy_to_build("package.json")?;
+    copy_to_build("package-lock.json")?;
+
+    let mut cmd = Command::new(npm);
+    if CiEnv::is_ci() {
+        // `npm ci` redownloads every time and thus is too slow for local development.
+        cmd.arg("ci");
+    } else {
+        cmd.arg("install");
+    }
+    // Disable a bunch of things we don't want.
+    // This makes tidy output less noisy, and also significantly improves the runtime
+    // of repeated tidy invocations.
+    cmd.args(&["--audit=false", "--save=false", "--fund=false"]);
+    cmd.current_dir(out_dir);
+    let exit_status = cmd.spawn()?.wait()?;
     if !exit_status.success() {
         eprintln!("npm install did not exit successfully");
         return Err(io::Error::other(Box::<dyn Error + Send + Sync>::from(format!(
diff --git a/src/ci/citool/src/jobs.rs b/src/ci/citool/src/jobs.rs
index 410274227e4..47516cbc1f4 100644
--- a/src/ci/citool/src/jobs.rs
+++ b/src/ci/citool/src/jobs.rs
@@ -1,9 +1,9 @@
 #[cfg(test)]
 mod tests;
 
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashSet};
 
-use anyhow::Context as _;
+use anyhow::{Context as _, anyhow};
 use serde_yaml::Value;
 
 use crate::GitHubContext;
@@ -85,6 +85,10 @@ impl JobDatabase {
             .cloned()
             .collect()
     }
+
+    fn find_auto_job_by_name(&self, job_name: &str) -> Option<&Job> {
+        self.auto_jobs.iter().find(|job| job.name == job_name)
+    }
 }
 
 pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> {
@@ -97,14 +101,118 @@ pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> {
         db.apply_merge().context("failed to apply merge keys")
     };
 
-    // Apply merge twice to handle nested merges
+    // Apply merge twice to handle nested merges up to depth 2.
     apply_merge(&mut db)?;
     apply_merge(&mut db)?;
 
-    let db: JobDatabase = serde_yaml::from_value(db).context("failed to parse job database")?;
+    let mut db: JobDatabase = serde_yaml::from_value(db).context("failed to parse job database")?;
+
+    register_pr_jobs_as_auto_jobs(&mut db)?;
+
+    validate_job_database(&db)?;
+
     Ok(db)
 }
 
+/// Maintain the invariant that PR CI jobs must be a subset of Auto CI jobs, modulo carve-outs.
+///
+/// When PR jobs are auto-registered as Auto jobs, they will have `continue_on_error` overridden to
+/// be `false` to avoid wasting Auto CI resources.
+///
+/// When a job is already both a PR job and an Auto job, we will post-validate their "equivalence
+/// modulo certain carve-outs" in [`validate_job_database`].
+///
+/// This invariant is important to make sure that it's not easily possible (without modifying
+/// `citool`) to have PRs with red PR-only CI jobs merged into `master`, causing all subsequent PR
+/// CI runs to be red until the cause is fixed.
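+///
+/// Concretely: any job that appears under `pr:` but not under `auto:` is cloned into the Auto job
+/// list with `continue_on_error` forced to `false`; jobs present in both sections are left alone
+/// here and checked for equivalence in [`validate_job_database`].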
+fn register_pr_jobs_as_auto_jobs(db: &mut JobDatabase) -> anyhow::Result<()> { + for pr_job in &db.pr_jobs { + // It's acceptable to "override" a PR job in Auto job, for instance, `x86_64-gnu-tools` will + // receive an additional `DEPLOY_TOOLSTATES_JSON: toolstates-linux.json` env when under Auto + // environment versus PR environment. + if db.find_auto_job_by_name(&pr_job.name).is_some() { + continue; + } + + let auto_registered_job = Job { continue_on_error: Some(false), ..pr_job.clone() }; + db.auto_jobs.push(auto_registered_job); + } + + Ok(()) +} + +fn validate_job_database(db: &JobDatabase) -> anyhow::Result<()> { + fn ensure_no_duplicate_job_names(section: &str, jobs: &Vec<Job>) -> anyhow::Result<()> { + let mut job_names = HashSet::new(); + for job in jobs { + let job_name = job.name.as_str(); + if !job_names.insert(job_name) { + return Err(anyhow::anyhow!( + "duplicate job name `{job_name}` in section `{section}`" + )); + } + } + Ok(()) + } + + ensure_no_duplicate_job_names("pr", &db.pr_jobs)?; + ensure_no_duplicate_job_names("auto", &db.auto_jobs)?; + ensure_no_duplicate_job_names("try", &db.try_jobs)?; + ensure_no_duplicate_job_names("optional", &db.optional_jobs)?; + + fn equivalent_modulo_carve_out(pr_job: &Job, auto_job: &Job) -> anyhow::Result<()> { + let Job { + name, + os, + only_on_channel, + free_disk, + doc_url, + codebuild, + + // Carve-out configs allowed to be different. + env: _, + continue_on_error: _, + } = pr_job; + + if *name == auto_job.name + && *os == auto_job.os + && *only_on_channel == auto_job.only_on_channel + && *free_disk == auto_job.free_disk + && *doc_url == auto_job.doc_url + && *codebuild == auto_job.codebuild + { + Ok(()) + } else { + Err(anyhow!( + "PR job `{}` differs from corresponding Auto job `{}` in configuration other than `continue_on_error` and `env`", + pr_job.name, + auto_job.name + )) + } + } + + for pr_job in &db.pr_jobs { + // At this point, any PR job must also be an Auto job, auto-registered or overridden. + let auto_job = db + .find_auto_job_by_name(&pr_job.name) + .expect("PR job must either be auto-registered as Auto job or overridden"); + + equivalent_modulo_carve_out(pr_job, auto_job)?; + } + + // Auto CI jobs must all "fail-fast" to avoid wasting Auto CI resources. For instance, `tidy`. + for auto_job in &db.auto_jobs { + if auto_job.continue_on_error == Some(true) { + return Err(anyhow!( + "Auto job `{}` cannot have `continue_on_error: true`", + auto_job.name + )); + } + } + + Ok(()) +} + /// Representation of a job outputted to a GitHub Actions workflow. 
#[derive(serde::Serialize, Debug)] struct GithubActionsJob { diff --git a/src/ci/citool/src/jobs/tests.rs b/src/ci/citool/src/jobs/tests.rs index 63ac508b632..f1f6274e1ed 100644 --- a/src/ci/citool/src/jobs/tests.rs +++ b/src/ci/citool/src/jobs/tests.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeMap; use std::path::Path; use super::Job; @@ -146,3 +147,222 @@ fn validate_jobs() { panic!("Job validation failed:\n{error_messages}"); } } + +#[test] +fn pr_job_implies_auto_job() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: pr-ci-a + os: ubuntu + env: {} +try: +auto: +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["pr-ci-a"]) +} + +#[test] +fn implied_auto_job_keeps_env_and_fails_fast() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + env: + DEPLOY_TOOLSTATES_JSON: toolstates-linux.json + continue_on_error: true + os: ubuntu +try: +auto: +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "DEPLOY_TOOLSTATES_JSON".to_string(), + serde_yaml::Value::String("toolstates-linux.json".to_string()) + )]) + ); +} + +#[test] +#[should_panic = "duplicate"] +fn duplicate_job_name() { + let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + + +pr: + - name: pr-ci-a + os: ubuntu + env: {} + - name: pr-ci-a + os: ubuntu + env: {} +try: +auto: +optional: +"#, + ) + .unwrap(); +} + +#[test] +fn auto_job_can_override_pr_job_spec() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + os: ubuntu + env: {} +try: +auto: + - name: tidy + env: + DEPLOY_TOOLSTATES_JSON: toolstates-linux.json + continue_on_error: false + os: ubuntu +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "DEPLOY_TOOLSTATES_JSON".to_string(), + serde_yaml::Value::String("toolstates-linux.json".to_string()) + )]) + ); +} + +#[test] +fn compatible_divergence_pr_auto_job() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + continue_on_error: true + env: + ENV_ALLOWED_TO_DIFFER: "hello world" + os: ubuntu +try: +auto: + - name: tidy + continue_on_error: false + env: + ENV_ALLOWED_TO_DIFFER: "goodbye world" + os: ubuntu +optional: +"#, + ) + .unwrap(); + + // `continue_on_error` and `env` are carve-outs *allowed* to diverge between PR and Auto job of + // the same name. Should load successfully. + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "ENV_ALLOWED_TO_DIFFER".to_string(), + serde_yaml::Value::String("goodbye world".to_string()) + )]) + ); +} + +#[test] +#[should_panic = "differs"] +fn incompatible_divergence_pr_auto_job() { + // `os` is not one of the carve-out options allowed to diverge. This should fail. 
+ let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + continue_on_error: true + env: + ENV_ALLOWED_TO_DIFFER: "hello world" + os: ubuntu +try: +auto: + - name: tidy + continue_on_error: false + env: + ENV_ALLOWED_TO_DIFFER: "goodbye world" + os: windows +optional: +"#, + ) + .unwrap(); +} + +#[test] +#[should_panic = "cannot have `continue_on_error: true`"] +fn auto_job_continue_on_error() { + // Auto CI jobs must fail-fast. + let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: +try: +auto: + - name: tidy + continue_on_error: true + os: windows + env: {} +optional: +"#, + ) + .unwrap(); +} diff --git a/src/ci/citool/tests/jobs.rs b/src/ci/citool/tests/jobs.rs index dbaf13d4f42..24e0b85cab2 100644 --- a/src/ci/citool/tests/jobs.rs +++ b/src/ci/citool/tests/jobs.rs @@ -6,7 +6,7 @@ const TEST_JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/tes fn auto_jobs() { let stdout = get_matrix("push", "commit", "refs/heads/auto"); insta::assert_snapshot!(stdout, @r#" - jobs=[{"name":"aarch64-gnu","full_name":"auto - aarch64-gnu","os":"ubuntu-22.04-arm","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"x86_64-gnu-llvm-18-1","full_name":"auto - x86_64-gnu-llvm-18-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","DOCKER_SCRIPT":"stage_2_test_set1.sh","IMAGE":"x86_64-gnu-llvm-18","READ_ONLY_SRC":"0","RUST_BACKTRACE":1,"TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"aarch64-apple","full_name":"auto - aarch64-apple","os":"macos-14","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","MACOSX_DEPLOYMENT_TARGET":11.0,"MACOSX_STD_DEPLOYMENT_TARGET":11.0,"NO_DEBUG_ASSERTIONS":1,"NO_LLVM_ASSERTIONS":1,"NO_OVERFLOW_CHECKS":1,"RUSTC_RETRY_LINKER_ON_SEGFAULT":1,"RUST_CONFIGURE_ARGS":"--enable-sanitizers --enable-profiler --set rust.jemalloc","SCRIPT":"./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin","SELECT_XCODE":"/Applications/Xcode_15.4.app","TOOLSTATE_PUBLISH":1,"USE_XCODE_CLANG":1}},{"name":"dist-i686-msvc","full_name":"auto - dist-i686-msvc","os":"windows-2022","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","CODEGEN_BACKENDS":"llvm,cranelift","DEPLOY_BUCKET":"rust-lang-ci2","DIST_REQUIRE_ALL_TOOLS":1,"RUST_CONFIGURE_ARGS":"--build=i686-pc-windows-msvc --host=i686-pc-windows-msvc --target=i686-pc-windows-msvc,i586-pc-windows-msvc --enable-full-tools --enable-profiler","SCRIPT":"python x.py dist bootstrap --include-default-paths","TOOLSTATE_PUBLISH":1}}] + jobs=[{"name":"aarch64-gnu","full_name":"auto - aarch64-gnu","os":"ubuntu-22.04-arm","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"x86_64-gnu-llvm-18-1","full_name":"auto - 
x86_64-gnu-llvm-18-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","DOCKER_SCRIPT":"stage_2_test_set1.sh","IMAGE":"x86_64-gnu-llvm-18","READ_ONLY_SRC":"0","RUST_BACKTRACE":1,"TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"aarch64-apple","full_name":"auto - aarch64-apple","os":"macos-14","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","MACOSX_DEPLOYMENT_TARGET":11.0,"MACOSX_STD_DEPLOYMENT_TARGET":11.0,"NO_DEBUG_ASSERTIONS":1,"NO_LLVM_ASSERTIONS":1,"NO_OVERFLOW_CHECKS":1,"RUSTC_RETRY_LINKER_ON_SEGFAULT":1,"RUST_CONFIGURE_ARGS":"--enable-sanitizers --enable-profiler --set rust.jemalloc","SCRIPT":"./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin","SELECT_XCODE":"/Applications/Xcode_15.4.app","TOOLSTATE_PUBLISH":1,"USE_XCODE_CLANG":1}},{"name":"dist-i686-msvc","full_name":"auto - dist-i686-msvc","os":"windows-2022","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","CODEGEN_BACKENDS":"llvm,cranelift","DEPLOY_BUCKET":"rust-lang-ci2","DIST_REQUIRE_ALL_TOOLS":1,"RUST_CONFIGURE_ARGS":"--build=i686-pc-windows-msvc --host=i686-pc-windows-msvc --target=i686-pc-windows-msvc,i586-pc-windows-msvc --enable-full-tools --enable-profiler","SCRIPT":"python x.py dist bootstrap --include-default-paths","TOOLSTATE_PUBLISH":1}},{"name":"pr-check-1","full_name":"auto - pr-check-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true},{"name":"pr-check-2","full_name":"auto - pr-check-2","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true},{"name":"tidy","full_name":"auto - tidy","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true,"doc_url":"https://foo.bar"}] run_type=auto "#); } diff --git a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile index e1d83d36087..0855ea222a3 100644 --- a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile @@ -81,9 +81,9 @@ RUN /tmp/build-fuchsia-toolchain.sh COPY host-x86_64/dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/ RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh -RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \ +RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \ tar -xz -ENV WASI_SDK_PATH=/tmp/wasi-sdk-25.0-x86_64-linux +ENV WASI_SDK_PATH=/tmp/wasi-sdk-27.0-x86_64-linux COPY scripts/freebsd-toolchain.sh /tmp/ RUN /tmp/freebsd-toolchain.sh i686 diff --git a/src/ci/docker/host-x86_64/pr-check-1/Dockerfile 
b/src/ci/docker/host-x86_64/pr-check-1/Dockerfile index 8bbcc18e2be..04ac0f33daf 100644 --- a/src/ci/docker/host-x86_64/pr-check-1/Dockerfile +++ b/src/ci/docker/host-x86_64/pr-check-1/Dockerfile @@ -27,10 +27,6 @@ COPY scripts/nodejs.sh /scripts/ RUN sh /scripts/nodejs.sh /node ENV PATH="/node/bin:${PATH}" -# Install es-check -# Pin its version to prevent unrelated CI failures due to future es-check versions. -RUN npm install es-check@6.1.1 eslint@8.6.0 typescript@5.7.3 -g - COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh @@ -44,15 +40,12 @@ COPY host-x86_64/pr-check-1/validate-toolstate.sh /scripts/ # We disable optimized compiler built-ins because that requires a C toolchain for the target. # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs. ENV SCRIPT \ + python3 ../x.py check bootstrap && \ /scripts/check-default-config-profiles.sh && \ python3 ../x.py build src/tools/build-manifest && \ - python3 ../x.py test --stage 0 src/tools/compiletest && \ python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \ python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ python3 ../x.py check --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ /scripts/validate-toolstate.sh && \ reuse --include-submodules lint && \ - python3 ../x.py test collect-license-metadata && \ - # Runs checks to ensure that there are no issues in our JS code. - es-check es2019 ../src/librustdoc/html/static/js/*.js && \ - tsc --project ../src/librustdoc/html/static/js/tsconfig.json + python3 ../x.py test collect-license-metadata diff --git a/src/ci/docker/host-x86_64/pr-check-2/Dockerfile b/src/ci/docker/host-x86_64/pr-check-2/Dockerfile index ce18a181d31..f82e19bcbb4 100644 --- a/src/ci/docker/host-x86_64/pr-check-2/Dockerfile +++ b/src/ci/docker/host-x86_64/pr-check-2/Dockerfile @@ -30,6 +30,7 @@ ENV SCRIPT \ python3 ../x.py check && \ python3 ../x.py clippy ci && \ python3 ../x.py test --stage 1 core alloc std test proc_macro && \ + python3 ../x.py test --stage 1 src/tools/compiletest && \ python3 ../x.py doc --stage 0 bootstrap && \ # Build both public and internal documentation. RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \ @@ -37,6 +38,6 @@ ENV SCRIPT \ mkdir -p /checkout/obj/staging/doc && \ cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \ RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 1 library/test && \ - # The BOOTSTRAP_TRACING flag is added to verify whether the + # The BOOTSTRAP_TRACING flag is added to verify whether the # bootstrap process compiles successfully with this flag enabled. 
BOOTSTRAP_TRACING=1 python3 ../x.py --help diff --git a/src/ci/docker/host-x86_64/test-various/Dockerfile b/src/ci/docker/host-x86_64/test-various/Dockerfile index 8d2e45ae497..82a820c859d 100644 --- a/src/ci/docker/host-x86_64/test-various/Dockerfile +++ b/src/ci/docker/host-x86_64/test-various/Dockerfile @@ -40,9 +40,9 @@ WORKDIR / COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \ +RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \ tar -xz -ENV WASI_SDK_PATH=/wasi-sdk-25.0-x86_64-linux +ENV WASI_SDK_PATH=/wasi-sdk-27.0-x86_64-linux ENV RUST_CONFIGURE_ARGS \ --musl-root-x86_64=/usr/local/x86_64-linux-musl \ @@ -65,21 +65,20 @@ ENV WASM_SCRIPT python3 /checkout/x.py --stage 2 test --host='' --target $WASM_T tests/ui \ tests/mir-opt \ tests/codegen-units \ - tests/codegen \ - tests/assembly \ + tests/codegen-llvm \ + tests/assembly-llvm \ library/core ENV NVPTX_TARGETS=nvptx64-nvidia-cuda ENV NVPTX_SCRIPT python3 /checkout/x.py --stage 2 test --host='' --target $NVPTX_TARGETS \ tests/run-make \ - tests/assembly + tests/assembly-llvm ENV MUSL_TARGETS=x86_64-unknown-linux-musl \ CC_x86_64_unknown_linux_musl=x86_64-linux-musl-gcc \ CXX_x86_64_unknown_linux_musl=x86_64-linux-musl-g++ ENV MUSL_SCRIPT python3 /checkout/x.py --stage 2 test --host='' --target $MUSL_TARGETS -COPY host-x86_64/test-various/uefi_qemu_test /uefi_qemu_test ENV UEFI_TARGETS=aarch64-unknown-uefi,i686-unknown-uefi,x86_64-unknown-uefi \ CC_aarch64_unknown_uefi=clang-11 \ CXX_aarch64_unknown_uefi=clang++-11 \ @@ -88,6 +87,8 @@ ENV UEFI_TARGETS=aarch64-unknown-uefi,i686-unknown-uefi,x86_64-unknown-uefi \ CC_x86_64_unknown_uefi=clang-11 \ CXX_x86_64_unknown_uefi=clang++-11 ENV UEFI_SCRIPT python3 /checkout/x.py --stage 2 build --host='' --target $UEFI_TARGETS && \ - python3 -u /uefi_qemu_test/run.py + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target aarch64-unknown-uefi && \ + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target i686-unknown-uefi && \ + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target x86_64-unknown-uefi ENV SCRIPT $WASM_SCRIPT && $NVPTX_SCRIPT && $MUSL_SCRIPT && $UEFI_SCRIPT diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock deleted file mode 100644 index 8b6a664ad93..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "r-efi" -version = "5.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" - -[[package]] -name = "uefi_qemu_test" -version = "0.0.0" -dependencies = [ - "r-efi", -] diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml deleted file mode 100644 index 1a8d0d94368..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "uefi_qemu_test" -version = "0.0.0" -edition = "2021" - -[workspace] -resolver = "2" - -[dependencies] -r-efi = "5.2.0" diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py deleted file mode 100755 index 4f877389fbc..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env python3 - -import os -import shutil -import subprocess -import sys -import tempfile - -from pathlib import Path - -TARGET_AARCH64 = "aarch64-unknown-uefi" -TARGET_I686 = "i686-unknown-uefi" -TARGET_X86_64 = "x86_64-unknown-uefi" - - -def run(*cmd, capture=False, check=True, env=None, timeout=None): - """Print and run a command, optionally capturing the output.""" - cmd = [str(p) for p in cmd] - print(" ".join(cmd)) - return subprocess.run( - cmd, capture_output=capture, check=check, env=env, text=True, timeout=timeout - ) - - -def build_and_run(tmp_dir, target): - if target == TARGET_AARCH64: - boot_file_name = "bootaa64.efi" - ovmf_dir = Path("/usr/share/AAVMF") - ovmf_code = "AAVMF_CODE.fd" - ovmf_vars = "AAVMF_VARS.fd" - qemu = "qemu-system-aarch64" - machine = "virt" - cpu = "cortex-a72" - elif target == TARGET_I686: - boot_file_name = "bootia32.efi" - ovmf_dir = Path("/usr/share/OVMF") - ovmf_code = "OVMF32_CODE_4M.secboot.fd" - ovmf_vars = "OVMF32_VARS_4M.fd" - # The i686 target intentionally uses 64-bit qemu; the important - # difference is that the OVMF code provides a 32-bit environment. - qemu = "qemu-system-x86_64" - machine = "q35" - cpu = "qemu64" - elif target == TARGET_X86_64: - boot_file_name = "bootx64.efi" - ovmf_dir = Path("/usr/share/OVMF") - ovmf_code = "OVMF_CODE.fd" - ovmf_vars = "OVMF_VARS.fd" - qemu = "qemu-system-x86_64" - machine = "q35" - cpu = "qemu64" - else: - raise KeyError("invalid target") - - host_artifacts = Path("/checkout/obj/build/x86_64-unknown-linux-gnu") - stage0 = host_artifacts / "stage0/bin" - stage2 = host_artifacts / "stage2/bin" - - env = dict(os.environ) - env["PATH"] = "{}:{}:{}".format(stage2, stage0, env["PATH"]) - - # Copy the test create into `tmp_dir`. - test_crate = Path(tmp_dir) / "uefi_qemu_test" - shutil.copytree("/uefi_qemu_test", test_crate) - - # Build the UEFI executable. - run( - "cargo", - "build", - "--manifest-path", - test_crate / "Cargo.toml", - "--target", - target, - env=env, - ) - - # Create a mock EFI System Partition in a subdirectory. - esp = test_crate / "esp" - boot = esp / "efi/boot" - os.makedirs(boot, exist_ok=True) - - # Copy the executable into the ESP. - src_exe_path = test_crate / "target" / target / "debug/uefi_qemu_test.efi" - shutil.copy(src_exe_path, boot / boot_file_name) - print(src_exe_path, boot / boot_file_name) - - # Select the appropriate EDK2 build. - ovmf_code = ovmf_dir / ovmf_code - ovmf_vars = ovmf_dir / ovmf_vars - - # Make a writable copy of the vars file. 
aarch64 doesn't boot - # correctly with read-only vars. - ovmf_rw_vars = Path(tmp_dir) / "vars.fd" - shutil.copy(ovmf_vars, ovmf_rw_vars) - - # Run the executable in QEMU and capture the output. - output = run( - qemu, - "-machine", - machine, - "-cpu", - cpu, - "-display", - "none", - "-serial", - "stdio", - "-drive", - f"if=pflash,format=raw,readonly=on,file={ovmf_code}", - "-drive", - f"if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}", - "-drive", - f"format=raw,file=fat:rw:{esp}", - capture=True, - check=True, - # Set a timeout to kill the VM in case something goes wrong. - timeout=60, - ).stdout - - if "Hello World!" in output: - print("VM produced expected output") - else: - print("unexpected VM output:") - print("---start---") - print(output) - print("---end---") - sys.exit(1) - - -def main(): - targets = [TARGET_AARCH64, TARGET_I686, TARGET_X86_64] - - for target in targets: - # Create a temporary directory so that we have a writeable - # workspace. - with tempfile.TemporaryDirectory() as tmp_dir: - build_and_run(tmp_dir, target) - - -if __name__ == "__main__": - main() diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs deleted file mode 100644 index 89e4393cb5c..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Code is adapted from this hello world example: -// https://doc.rust-lang.org/nightly/rustc/platform-support/unknown-uefi.html - -#![no_main] -#![no_std] - -use core::{panic, ptr}; - -use r_efi::efi::{Char16, Handle, RESET_SHUTDOWN, Status, SystemTable}; - -#[panic_handler] -fn panic_handler(_info: &panic::PanicInfo) -> ! { - loop {} -} - -#[export_name = "efi_main"] -pub extern "C" fn main(_h: Handle, st: *mut SystemTable) -> Status { - let s = [ - 0x0048u16, 0x0065u16, 0x006cu16, 0x006cu16, 0x006fu16, // "Hello" - 0x0020u16, // " " - 0x0057u16, 0x006fu16, 0x0072u16, 0x006cu16, 0x0064u16, // "World" - 0x0021u16, // "!" - 0x000au16, // "\n" - 0x0000u16, // NUL - ]; - - // Print "Hello World!". - let r = unsafe { ((*(*st).con_out).output_string)((*st).con_out, s.as_ptr() as *mut Char16) }; - if r.is_error() { - return r; - } - - // Shut down. - unsafe { - ((*((*st).runtime_services)).reset_system)( - RESET_SHUTDOWN, - Status::SUCCESS, - 0, - ptr::null_mut(), - ); - } - - // This should never be reached because `reset_system` should never - // return, so fail with an error if we get here. - Status::UNSUPPORTED -} diff --git a/src/ci/docker/host-x86_64/tidy/Dockerfile b/src/ci/docker/host-x86_64/tidy/Dockerfile index dbb950cbe0c..ee1ae5410ee 100644 --- a/src/ci/docker/host-x86_64/tidy/Dockerfile +++ b/src/ci/docker/host-x86_64/tidy/Dockerfile @@ -45,4 +45,4 @@ RUN bash -c 'npm install -g eslint@$(cat /tmp/eslint.version)' # NOTE: intentionally uses python2 for x.py so we can test it still works. # validate-toolstate only runs in our CI, so it's ok for it to only support python3. 
ENV SCRIPT TIDY_PRINT_DIFF=1 python2.7 ../x.py test --stage 0 \ - src/tools/tidy tidyselftest --extra-checks=py,cpp + src/tools/tidy tidyselftest --extra-checks=py,cpp,js diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index f0c52fe3d1c..48c570bfa11 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -31,20 +31,11 @@ runners: <<: *base-job - &job-windows - os: windows-2022 - <<: *base-job - - # NOTE: windows-2025 has less disk space available than windows-2022, - # because the D drive is missing. - - &job-windows-25 os: windows-2025 + free_disk: true <<: *base-job - &job-windows-8c - os: windows-2022-8core-32gb - <<: *base-job - - - &job-windows-25-8c os: windows-2025-8core-32gb <<: *base-job @@ -124,9 +115,16 @@ jobs: <<: *job-linux-36c-codebuild -# Jobs that run on each push to a pull request (PR) -# These jobs automatically inherit envs.pr, to avoid repeating -# it in each job definition. +# Jobs that run on each push to a pull request (PR). +# +# These jobs automatically inherit envs.pr, to avoid repeating it in each job +# definition. +# +# PR CI jobs will be automatically registered as Auto CI jobs or overriden. When +# automatically registered, the PR CI job configuration will be copied as an +# Auto CI job but with `continue_on_error` overriden to `false` (to fail-fast). +# When overriden, `citool` will check for equivalence between the PR and CI job +# of the same name modulo `continue_on_error` and `env`. pr: - name: pr-check-1 <<: *job-linux-4c @@ -172,14 +170,20 @@ try: optional: # This job is used just to test optional jobs. # It will be replaced by tier 2 and tier 3 jobs in the future. - - name: optional-mingw-check-1 + - name: optional-pr-check-1 env: - IMAGE: mingw-check-1 + IMAGE: pr-check-1 <<: *job-linux-4c -# Main CI jobs that have to be green to merge a commit into master -# These jobs automatically inherit envs.auto, to avoid repeating -# it in each job definition. +# Main CI jobs that have to be green to merge a commit into master. +# +# These jobs automatically inherit envs.auto, to avoid repeating it in each job +# definition. +# +# Auto jobs may not specify `continue_on_error: true`, and thus will fail-fast. +# +# Unless explicitly overriden, PR CI jobs will be automatically registered as +# Auto CI jobs. auto: ############################# # Linux/Docker builders # @@ -478,7 +482,7 @@ auto: NO_LLVM_ASSERTIONS: 1 NO_DEBUG_ASSERTIONS: 1 NO_OVERFLOW_CHECKS: 1 - <<: *job-macos + <<: *job-macos-m1 - name: x86_64-apple-1 env: @@ -655,7 +659,7 @@ auto: SCRIPT: python x.py build --set rust.debug=true opt-dist && PGO_HOST=x86_64-pc-windows-msvc ./build/x86_64-pc-windows-msvc/stage0-tools-bin/opt-dist windows-ci -- python x.py dist bootstrap --include-default-paths DIST_REQUIRE_ALL_TOOLS: 1 CODEGEN_BACKENDS: llvm,cranelift - <<: *job-windows-25-8c + <<: *job-windows-8c - name: dist-i686-msvc env: diff --git a/src/ci/scripts/free-disk-space-linux.sh b/src/ci/scripts/free-disk-space-linux.sh new file mode 100755 index 00000000000..32649fe0d9b --- /dev/null +++ b/src/ci/scripts/free-disk-space-linux.sh @@ -0,0 +1,265 @@ +#!/bin/bash +set -euo pipefail + +# Free disk space on Linux GitHub action runners +# Script inspired by https://github.com/jlumbroso/free-disk-space + +isX86() { + local arch + arch=$(uname -m) + if [ "$arch" = "x86_64" ]; then + return 0 + else + return 1 + fi +} + +# Check if we're on a GitHub hosted runner. +# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted". 
+isGitHubRunner() { + # `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string". + if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then + return 0 + else + return 1 + fi +} + +# print a line of the specified character +printSeparationLine() { + for ((i = 0; i < 80; i++)); do + printf "%s" "$1" + done + printf "\n" +} + +# compute available space +# REF: https://unix.stackexchange.com/a/42049/60849 +# REF: https://stackoverflow.com/a/450821/408734 +getAvailableSpace() { + df -a | awk 'NR > 1 {avail+=$4} END {print avail}' +} + +# make Kb human readable (assume the input is Kb) +# REF: https://unix.stackexchange.com/a/44087/60849 +formatByteCount() { + numfmt --to=iec-i --suffix=B --padding=7 "${1}000" +} + +# macro to output saved space +printSavedSpace() { + # Disk space before the operation + local before=${1} + local title=${2:-} + + local after + after=$(getAvailableSpace) + local saved=$((after - before)) + + if [ "$saved" -lt 0 ]; then + echo "::warning::Saved space is negative: $saved. Using '0' as saved space." + saved=0 + fi + + echo "" + printSeparationLine "*" + if [ -n "${title}" ]; then + echo "=> ${title}: Saved $(formatByteCount "$saved")" + else + echo "=> Saved $(formatByteCount "$saved")" + fi + printSeparationLine "*" + echo "" +} + +# macro to print output of df with caption +printDF() { + local caption=${1} + + printSeparationLine "=" + echo "${caption}" + echo "" + echo "$ df -h" + echo "" + df -h + printSeparationLine "=" +} + +removeUnusedFilesAndDirs() { + local to_remove=( + "/usr/share/java" + ) + + if isGitHubRunner; then + to_remove+=( + "/usr/local/aws-sam-cli" + "/usr/local/doc/cmake" + "/usr/local/julia"* + "/usr/local/lib/android" + "/usr/local/share/chromedriver-"* + "/usr/local/share/chromium" + "/usr/local/share/cmake-"* + "/usr/local/share/edge_driver" + "/usr/local/share/emacs" + "/usr/local/share/gecko_driver" + "/usr/local/share/icons" + "/usr/local/share/powershell" + "/usr/local/share/vcpkg" + "/usr/local/share/vim" + "/usr/share/apache-maven-"* + "/usr/share/gradle-"* + "/usr/share/kotlinc" + "/usr/share/miniconda" + "/usr/share/php" + "/usr/share/ri" + "/usr/share/swift" + + # binaries + "/usr/local/bin/azcopy" + "/usr/local/bin/bicep" + "/usr/local/bin/ccmake" + "/usr/local/bin/cmake-"* + "/usr/local/bin/cmake" + "/usr/local/bin/cpack" + "/usr/local/bin/ctest" + "/usr/local/bin/helm" + "/usr/local/bin/kind" + "/usr/local/bin/kustomize" + "/usr/local/bin/minikube" + "/usr/local/bin/packer" + "/usr/local/bin/phpunit" + "/usr/local/bin/pulumi-"* + "/usr/local/bin/pulumi" + "/usr/local/bin/stack" + + # Haskell runtime + "/usr/local/.ghcup" + + # Azure + "/opt/az" + "/usr/share/az_"* + ) + + if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then + # Environment variable set by GitHub Actions + to_remove+=( + "${AGENT_TOOLSDIRECTORY}" + ) + else + echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal." + fi + else + # Remove folders and files present in AWS CodeBuild + to_remove+=( + # binaries + "/usr/local/bin/ecs-cli" + "/usr/local/bin/eksctl" + "/usr/local/bin/kubectl" + + "${HOME}/.gradle" + "${HOME}/.dotnet" + "${HOME}/.goenv" + "${HOME}/.phpenv" + + ) + fi + + for element in "${to_remove[@]}"; do + if [ ! -e "$element" ]; then + # The file or directory doesn't exist. + # Maybe it was removed in a newer version of the runner or it's not present in a + # specific architecture (e.g. ARM). + echo "::warning::Directory or file $element does not exist, skipping." 
+ fi + done + + # Remove all files and directories at once to save time. + sudo rm -rf "${to_remove[@]}" +} + +execAndMeasureSpaceChange() { + local operation=${1} # Function to execute + local title=${2} + + local before + before=$(getAvailableSpace) + $operation + + printSavedSpace "$before" "$title" +} + +# Remove large packages +# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh +cleanPackages() { + local packages=( + '^aspnetcore-.*' + '^dotnet-.*' + '^llvm-.*' + '^mongodb-.*' + 'firefox' + 'libgl1-mesa-dri' + 'mono-devel' + 'php.*' + ) + + if isGitHubRunner; then + packages+=( + azure-cli + ) + + if isX86; then + packages+=( + 'google-chrome-stable' + 'google-cloud-cli' + 'google-cloud-sdk' + 'powershell' + ) + fi + else + packages+=( + 'google-chrome-stable' + ) + fi + + sudo apt-get -qq remove -y --fix-missing "${packages[@]}" + + sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed" + sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed" +} + +# Remove Docker images. +# Ubuntu 22 runners have docker images already installed. +# They aren't present in ubuntu 24 runners. +cleanDocker() { + echo "=> Removing the following docker images:" + sudo docker image ls + echo "=> Removing docker images..." + sudo docker image prune --all --force || true +} + +# Remove Swap storage +cleanSwap() { + sudo swapoff -a || true + sudo rm -rf /mnt/swapfile || true + free -h +} + +# Display initial disk space stats + +AVAILABLE_INITIAL=$(getAvailableSpace) + +printDF "BEFORE CLEAN-UP:" +echo "" +execAndMeasureSpaceChange cleanPackages "Unused packages" +execAndMeasureSpaceChange cleanDocker "Docker images" +execAndMeasureSpaceChange cleanSwap "Swap storage" +execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories" + +# Output saved space statistic +echo "" +printDF "AFTER CLEAN-UP:" + +echo "" +echo "" + +printSavedSpace "$AVAILABLE_INITIAL" "Total saved" diff --git a/src/ci/scripts/free-disk-space-windows.ps1 b/src/ci/scripts/free-disk-space-windows.ps1 new file mode 100644 index 00000000000..8a4677bd2ab --- /dev/null +++ b/src/ci/scripts/free-disk-space-windows.ps1 @@ -0,0 +1,35 @@ +# Free disk space on Windows GitHub action runners. 
+ +$ErrorActionPreference = 'Stop' + +Get-Volume | Out-String | Write-Output + +$available = $(Get-Volume C).SizeRemaining + +$dirs = 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Tools\Llvm', +'C:\rtools45', 'C:\ghcup', 'C:\Program Files (x86)\Android', +'C:\Program Files\Google\Chrome', 'C:\Program Files (x86)\Microsoft\Edge', +'C:\Program Files\Mozilla Firefox', 'C:\Program Files\MySQL', 'C:\Julia', +'C:\Program Files\MongoDB', 'C:\Program Files\Azure Cosmos DB Emulator', +'C:\Program Files\PostgreSQL', 'C:\Program Files\Unity Hub', +'C:\Strawberry', 'C:\hostedtoolcache\windows\Java_Temurin-Hotspot_jdk' + +foreach ($dir in $dirs) { + Start-ThreadJob -InputObject $dir { + Remove-Item -Recurse -Force -LiteralPath $input + } | Out-Null +} + +foreach ($job in Get-Job) { + Wait-Job $job | Out-Null + if ($job.Error) { + Write-Output "::warning file=$PSCommandPath::$($job.Error)" + } + Remove-Job $job +} + +Get-Volume | Out-String | Write-Output + +$saved = ($(Get-Volume C).SizeRemaining - $available) / 1gb +$savedRounded = [math]::Round($saved, 3) +Write-Output "total space saved: $savedRounded GB" diff --git a/src/ci/scripts/free-disk-space.sh b/src/ci/scripts/free-disk-space.sh index 173f64858b3..062ad801cd8 100755 --- a/src/ci/scripts/free-disk-space.sh +++ b/src/ci/scripts/free-disk-space.sh @@ -1,266 +1,10 @@ #!/bin/bash set -euo pipefail -# Free disk space on Linux GitHub action runners -# Script inspired by https://github.com/jlumbroso/free-disk-space +script_dir=$(dirname "$0") -isX86() { - local arch - arch=$(uname -m) - if [ "$arch" = "x86_64" ]; then - return 0 - else - return 1 - fi -} - -# Check if we're on a GitHub hosted runner. -# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted". -isGitHubRunner() { - # `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string". - if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then - return 0 - else - return 1 - fi -} - -# print a line of the specified character -printSeparationLine() { - for ((i = 0; i < 80; i++)); do - printf "%s" "$1" - done - printf "\n" -} - -# compute available space -# REF: https://unix.stackexchange.com/a/42049/60849 -# REF: https://stackoverflow.com/a/450821/408734 -getAvailableSpace() { - df -a | awk 'NR > 1 {avail+=$4} END {print avail}' -} - -# make Kb human readable (assume the input is Kb) -# REF: https://unix.stackexchange.com/a/44087/60849 -formatByteCount() { - numfmt --to=iec-i --suffix=B --padding=7 "${1}000" -} - -# macro to output saved space -printSavedSpace() { - # Disk space before the operation - local before=${1} - local title=${2:-} - - local after - after=$(getAvailableSpace) - local saved=$((after - before)) - - if [ "$saved" -lt 0 ]; then - echo "::warning::Saved space is negative: $saved. Using '0' as saved space." 
- saved=0 - fi - - echo "" - printSeparationLine "*" - if [ -n "${title}" ]; then - echo "=> ${title}: Saved $(formatByteCount "$saved")" - else - echo "=> Saved $(formatByteCount "$saved")" - fi - printSeparationLine "*" - echo "" -} - -# macro to print output of df with caption -printDF() { - local caption=${1} - - printSeparationLine "=" - echo "${caption}" - echo "" - echo "$ df -h" - echo "" - df -h - printSeparationLine "=" -} - -removeUnusedFilesAndDirs() { - local to_remove=( - "/usr/share/java" - ) - - if isGitHubRunner; then - to_remove+=( - "/usr/local/aws-sam-cli" - "/usr/local/doc/cmake" - "/usr/local/julia"* - "/usr/local/lib/android" - "/usr/local/share/chromedriver-"* - "/usr/local/share/chromium" - "/usr/local/share/cmake-"* - "/usr/local/share/edge_driver" - "/usr/local/share/emacs" - "/usr/local/share/gecko_driver" - "/usr/local/share/icons" - "/usr/local/share/powershell" - "/usr/local/share/vcpkg" - "/usr/local/share/vim" - "/usr/share/apache-maven-"* - "/usr/share/gradle-"* - "/usr/share/kotlinc" - "/usr/share/miniconda" - "/usr/share/php" - "/usr/share/ri" - "/usr/share/swift" - - # binaries - "/usr/local/bin/azcopy" - "/usr/local/bin/bicep" - "/usr/local/bin/ccmake" - "/usr/local/bin/cmake-"* - "/usr/local/bin/cmake" - "/usr/local/bin/cpack" - "/usr/local/bin/ctest" - "/usr/local/bin/helm" - "/usr/local/bin/kind" - "/usr/local/bin/kustomize" - "/usr/local/bin/minikube" - "/usr/local/bin/packer" - "/usr/local/bin/phpunit" - "/usr/local/bin/pulumi-"* - "/usr/local/bin/pulumi" - "/usr/local/bin/stack" - - # Haskell runtime - "/usr/local/.ghcup" - - # Azure - "/opt/az" - "/usr/share/az_"* - ) - - if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then - # Environment variable set by GitHub Actions - to_remove+=( - "${AGENT_TOOLSDIRECTORY}" - ) - else - echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal." - fi - else - # Remove folders and files present in AWS CodeBuild - to_remove+=( - # binaries - "/usr/local/bin/ecs-cli" - "/usr/local/bin/eksctl" - "/usr/local/bin/kubectl" - - "${HOME}/.gradle" - "${HOME}/.dotnet" - "${HOME}/.goenv" - "${HOME}/.phpenv" - - ) - fi - - for element in "${to_remove[@]}"; do - if [ ! -e "$element" ]; then - # The file or directory doesn't exist. - # Maybe it was removed in a newer version of the runner or it's not present in a - # specific architecture (e.g. ARM). - echo "::warning::Directory or file $element does not exist, skipping." - fi - done - - # Remove all files and directories at once to save time. - sudo rm -rf "${to_remove[@]}" -} - -execAndMeasureSpaceChange() { - local operation=${1} # Function to execute - local title=${2} - - local before - before=$(getAvailableSpace) - $operation - - printSavedSpace "$before" "$title" -} - -# Remove large packages -# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh -cleanPackages() { - local packages=( - '^aspnetcore-.*' - '^dotnet-.*' - '^llvm-.*' - '^mongodb-.*' - 'firefox' - 'libgl1-mesa-dri' - 'mono-devel' - 'php.*' - ) - - if isGitHubRunner; then - packages+=( - azure-cli - ) - - if isX86; then - packages+=( - 'google-chrome-stable' - 'google-cloud-cli' - 'google-cloud-sdk' - 'powershell' - ) - fi - else - packages+=( - 'google-chrome-stable' - ) - fi - - sudo apt-get -qq remove -y --fix-missing "${packages[@]}" - - sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed" - sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed" -} - -# Remove Docker images. 
-# Ubuntu 22 runners have docker images already installed. -# They aren't present in ubuntu 24 runners. -cleanDocker() { - echo "=> Removing the following docker images:" - sudo docker image ls - echo "=> Removing docker images..." - sudo docker image prune --all --force || true -} - -# Remove Swap storage -cleanSwap() { - sudo swapoff -a || true - sudo rm -rf /mnt/swapfile || true - free -h -} - -# Display initial disk space stats - -AVAILABLE_INITIAL=$(getAvailableSpace) - -printDF "BEFORE CLEAN-UP:" -echo "" - -execAndMeasureSpaceChange cleanPackages "Unused packages" -execAndMeasureSpaceChange cleanDocker "Docker images" -execAndMeasureSpaceChange cleanSwap "Swap storage" -execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories" - -# Output saved space statistic -echo "" -printDF "AFTER CLEAN-UP:" - -echo "" -echo "" - -printSavedSpace "$AVAILABLE_INITIAL" "Total saved" +if [[ "${RUNNER_OS:-}" == "Windows" ]]; then + pwsh $script_dir/free-disk-space-windows.ps1 +else + $script_dir/free-disk-space-linux.sh +fi diff --git a/src/ci/scripts/install-mingw.sh b/src/ci/scripts/install-mingw.sh index ad852071f29..ed87628659b 100755 --- a/src/ci/scripts/install-mingw.sh +++ b/src/ci/scripts/install-mingw.sh @@ -43,4 +43,9 @@ if isWindows && isKnownToBeMingwBuild; then curl -o mingw.7z "${MIRRORS_BASE}/${mingw_archive}" 7z x -y mingw.7z > /dev/null ciCommandAddPath "$(cygpath -m "$(pwd)/${mingw_dir}/bin")" + + # Initialize mingw for the user. + # This should be done by github but isn't for some reason. + # (see https://github.com/actions/runner-images/issues/12600) + /c/msys64/usr/bin/bash -lc ' ' fi diff --git a/src/ci/scripts/install-rust.sh b/src/ci/scripts/install-rust.sh deleted file mode 100755 index e4aee98c9fb..00000000000 --- a/src/ci/scripts/install-rust.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# The Arm64 Windows Runner does not have Rust already installed -# https://github.com/actions/partner-runner-images/issues/77 - -set -euo pipefail -IFS=$'\n\t' - -source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" - -if [[ "${CI_JOB_NAME}" = *aarch64* ]] && isWindows; then - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | \ - sh -s -- -y -q --default-host aarch64-pc-windows-msvc - ciCommandAddPath "${USERPROFILE}/.cargo/bin" -fi diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index 3f10132b684..b631041b6bf 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -fd2eb391d032181459773f3498c17b198513e0d0 +2b5e239c6b86cde974b0ef0f8e23754fb08ff3c5 diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md index 651e2925ad5..e3c0d50fcc7 100644 --- a/src/doc/rustc-dev-guide/src/SUMMARY.md +++ b/src/doc/rustc-dev-guide/src/SUMMARY.md @@ -53,7 +53,8 @@ - [Walkthrough: a typical contribution](./walkthrough.md) - [Implementing new language features](./implementing_new_features.md) - [Stability attributes](./stability.md) -- [Stabilizing Features](./stabilization_guide.md) +- [Stabilizing language features](./stabilization_guide.md) + - [Stabilization report template](./stabilization_report_template.md) - [Feature Gates](./feature-gates.md) - [Coding conventions](./conventions.md) - [Procedures for breaking changes](./bug-fix-procedure.md) diff --git a/src/doc/rustc-dev-guide/src/asm.md b/src/doc/rustc-dev-guide/src/asm.md index eec9d448b0c..1bb493e73d5 100644 --- a/src/doc/rustc-dev-guide/src/asm.md +++ 
b/src/doc/rustc-dev-guide/src/asm.md @@ -155,9 +155,9 @@ can't know ahead of time whether a function will require a frame/base pointer. Various tests for inline assembly are available: -- `tests/assembly/asm` +- `tests/assembly-llvm/asm` - `tests/ui/asm` -- `tests/codegen/asm-*` +- `tests/codegen-llvm/asm-*` Every architecture supported by inline assembly must have exhaustive tests in -`tests/assembly/asm` which test all combinations of register classes and types. +`tests/assembly-llvm/asm` which test all combinations of register classes and types. diff --git a/src/doc/rustc-dev-guide/src/autodiff/installation.md b/src/doc/rustc-dev-guide/src/autodiff/installation.md index a550f6d233e..ddbb3a05424 100644 --- a/src/doc/rustc-dev-guide/src/autodiff/installation.md +++ b/src/doc/rustc-dev-guide/src/autodiff/installation.md @@ -25,7 +25,7 @@ rustup toolchain install nightly # enables -Z unstable-options You can then run our test cases: ```bash -./x test --stage 1 tests/codegen/autodiff +./x test --stage 1 tests/codegen-llvm/autodiff ./x test --stage 1 tests/pretty/autodiff ./x test --stage 1 tests/ui/autodiff ./x test --stage 1 tests/ui/feature-gates/feature-gate-autodiff.rs diff --git a/src/doc/rustc-dev-guide/src/building/suggested.md b/src/doc/rustc-dev-guide/src/building/suggested.md index 7f626314f71..c046161e77f 100644 --- a/src/doc/rustc-dev-guide/src/building/suggested.md +++ b/src/doc/rustc-dev-guide/src/building/suggested.md @@ -162,7 +162,7 @@ create a `.vim/coc-settings.json`. The settings can be edited with [`src/etc/rust_analyzer_settings.json`]. Another way is without a plugin, and creating your own logic in your -configuration. The following code will work for any checkout of rust-lang/rust (newer than Febuary 2025): +configuration. The following code will work for any checkout of rust-lang/rust (newer than February 2025): ```lua local function expand_config_variables(option) diff --git a/src/doc/rustc-dev-guide/src/external-repos.md b/src/doc/rustc-dev-guide/src/external-repos.md index ecc65b26ab7..5fb7eeee8e3 100644 --- a/src/doc/rustc-dev-guide/src/external-repos.md +++ b/src/doc/rustc-dev-guide/src/external-repos.md @@ -40,27 +40,24 @@ implement a new tool feature or test, that should happen in one collective rustc * `portable-simd` ([sync script](https://github.com/rust-lang/portable-simd/blob/master/subtree-sync.sh)) * `rustfmt` * `rustc_codegen_cranelift` ([sync script](https://github.com/rust-lang/rustc_codegen_cranelift/blob/113af154d459e41b3dc2c5d7d878e3d3a8f33c69/scripts/rustup.sh#L7)) -* Using the [josh] tool - * `miri` ([sync guide](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo)) - * `rust-analyzer` ([sync script](https://github.com/rust-lang/rust-analyzer/blob/2e13684be123eca7181aa48e043e185d8044a84a/xtask/src/release.rs#L147)) - * `rustc-dev-guide` ([josh sync](#synchronizing-a-josh-subtree)) - * `compiler-builtins` ([josh sync](#synchronizing-a-josh-subtree)) - * `stdarch` ([josh sync](#synchronizing-a-josh-subtree)) +* Using the [josh](#synchronizing-a-josh-subtree) tool + * `miri` + * `rust-analyzer` + * `rustc-dev-guide` + * `compiler-builtins` + * `stdarch` ### Josh subtrees -The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh; you can check out the `miri` or `rust-analyzer` scripts linked above for inspiration. 
We provide a helper [`rustc-josh-sync`][josh-sync] tool to help with the synchronization, described [below](#synchronizing-a-josh-subtree). +The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh. We provide a helper [`rustc-josh-sync`][josh-sync] tool to help with the synchronization, described [below](#synchronizing-a-josh-subtree). ### Synchronizing a Josh subtree We use a dedicated tool called [`rustc-josh-sync`][josh-sync] for performing Josh subtree updates. -Currently, we are migrating Josh repositories to it. So far, it is used in: +The commands below can be used for all our Josh subtrees, although note that `miri` +requires you to perform some [additional steps](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo) during pulls. -- compiler-builtins -- rustc-dev-guide -- stdarch - -To install the tool: +You can install the tool using the following command: ``` cargo install --locked --git https://github.com/rust-lang/josh-sync ``` @@ -80,6 +77,9 @@ switch to its repository checkout directory in your terminal). #### Performing push +> NOTE: +> Before you proceed, look at some guidance related to Git [on josh-sync README]. + 1) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account ``` rustc-josh-sync push <branch-name> <gh-username> @@ -173,3 +173,4 @@ the week leading up to the beta cut. [Toolstate chapter]: https://forge.rust-lang.org/infra/toolstate.html [josh]: https://josh-project.github.io/josh/intro.html [josh-sync]: https://github.com/rust-lang/josh-sync +[on josh-sync README]: https://github.com/rust-lang/josh-sync#git-peculiarities diff --git a/src/doc/rustc-dev-guide/src/hir/ambig-unambig-ty-and-consts.md b/src/doc/rustc-dev-guide/src/hir/ambig-unambig-ty-and-consts.md index 709027883ae..d4f504ad2a9 100644 --- a/src/doc/rustc-dev-guide/src/hir/ambig-unambig-ty-and-consts.md +++ b/src/doc/rustc-dev-guide/src/hir/ambig-unambig-ty-and-consts.md @@ -38,7 +38,7 @@ Note that places 3 and 4 would never actually be possible to encounter as we alw This has a few failure modes: - People may write visitors which check for `GenericArg::Infer` but forget to check for `hir::TyKind/ConstArgKind::Infer`, only handling infers in ambig positions by accident. - People may write visitors which check for `hir::TyKind/ConstArgKind::Infer` but forget to check for `GenericArg::Infer`, only handling infers in unambig positions by accident. -- People may write visitors which check for `GenerArg::Type/Const(TyKind/ConstArgKind::Infer)` and `GenerigArg::Infer`, not realising that we never represent inferred types/consts in ambig positions as a `GenericArg::Type/Const`. +- People may write visitors which check for `GenericArg::Type/Const(TyKind/ConstArgKind::Infer)` and `GenericArg::Infer`, not realising that we never represent inferred types/consts in ambig positions as a `GenericArg::Type/Const`. - People may write visitors which check for *only* `TyKind::Infer` and not `ConstArgKind::Infer` forgetting that there are also inferred const arguments (and vice versa). 
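As a concrete illustration of the second failure mode above, here is a deliberately simplified, self-contained sketch (these enums are stand-ins, not rustc's actual `hir` types): a check that only looks for `TyKind::Infer` never sees inferred arguments in ambiguous positions, because those are lowered to `GenericArg::Infer` and never to `GenericArg::Type(TyKind::Infer)`.

```rust
// Simplified toy model -- NOT rustc's real `hir` types, just stand-ins to
// illustrate the failure mode described above.
enum TyKind {
    Path(&'static str),
    Infer, // `_` written in an unambiguous type position
}

enum GenericArg {
    Type(TyKind),
    Infer, // `_` written in an ambiguous generic-arg position
}

// Buggy "visitor": it only recognises the type-level `Infer`, so it never
// sees inferred args in ambiguous positions (those are `GenericArg::Infer`,
// never `GenericArg::Type(TyKind::Infer)`).
fn count_infers_buggy(args: &[GenericArg]) -> usize {
    args.iter()
        .filter(|a| matches!(a, GenericArg::Type(TyKind::Infer)))
        .count()
}

// Correct version: handles both representations.
fn count_infers(args: &[GenericArg]) -> usize {
    args.iter()
        .filter(|a| matches!(a, GenericArg::Infer | GenericArg::Type(TyKind::Infer)))
        .count()
}

fn main() {
    // Roughly `Vec<_>`: the `_` sits in an ambiguous position.
    let args = [GenericArg::Infer, GenericArg::Type(TyKind::Path("u32"))];
    assert_eq!(count_infers_buggy(&args), 0); // silently misses the infer
    assert_eq!(count_infers(&args), 1);
}
```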
To make writing HIR visitors less error prone when caring about inferred types/consts we have a relatively complex system: @@ -60,4 +60,4 @@ This has a number of benefits: [ambig_arg]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/hir/enum.AmbigArg.html [visit_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/intravisit/trait.Visitor.html#method.visit_ty [visit_const_arg]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/intravisit/trait.Visitor.html#method.visit_const_arg -[visit_infer]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/intravisit/trait.Visitor.html#method.visit_infer \ No newline at end of file +[visit_infer]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_hir/intravisit/trait.Visitor.html#method.visit_infer diff --git a/src/doc/rustc-dev-guide/src/implementing_new_features.md b/src/doc/rustc-dev-guide/src/implementing_new_features.md index 5d0e875cbc1..76cf2386c82 100644 --- a/src/doc/rustc-dev-guide/src/implementing_new_features.md +++ b/src/doc/rustc-dev-guide/src/implementing_new_features.md @@ -2,145 +2,91 @@ <!-- toc --> -When you want to implement a new significant feature in the compiler, -you need to go through this process to make sure everything goes -smoothly. +When you want to implement a new significant feature in the compiler, you need to go through this process to make sure everything goes smoothly. -**NOTE: this section is for *language* features, not *library* features, -which use [a different process].** +**NOTE: This section is for *language* features, not *library* features, which use [a different process].** -See also [the Rust Language Design Team's procedures][lang-propose] for -proposing changes to the language. +See also [the Rust Language Design Team's procedures][lang-propose] for proposing changes to the language. [a different process]: ./stability.md [lang-propose]: https://lang-team.rust-lang.org/how_to/propose.html ## The @rfcbot FCP process -When the change is small and uncontroversial, then it can be done -with just writing a PR and getting an r+ from someone who knows that -part of the code. However, if the change is potentially controversial, -it would be a bad idea to push it without consensus from the rest -of the team (both in the "distributed system" sense to make sure -you don't break anything you don't know about, and in the social -sense to avoid PR fights). - -If such a change seems to be too small to require a full formal RFC process -(e.g., a small standard library addition, a big refactoring of the code, a -"technically-breaking" change, or a "big bugfix" that basically amounts to a -small feature) but is still too controversial or big to get by with a single r+, -you can propose a final comment period (FCP). Or, if you're not on the relevant -team (and thus don't have @rfcbot permissions), ask someone who is to start one; -unless they have a concern themselves, they should. - -Again, the FCP process is only needed if you need consensus – if you -don't think anyone would have a problem with your change, it's OK to -get by with only an r+. For example, it is OK to add or modify -unstable command-line flags or attributes without an FCP for -compiler development or standard library use, as long as you don't -expect them to be in wide use in the nightly ecosystem. 
-Some teams have lighter weight processes that they use in scenarios -like this; for example, the compiler team recommends -filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to -garner support and feedback without requiring full consensus. +When the change is small, uncontroversial, non-breaking, and does not affect the stable language in any user-observable ways or add any new unstable features, then it can be done with just writing a PR and getting an r+ from someone who knows that part of the code. However, if not, more must be done. Even for compiler-internal work, it would be a bad idea to push a controversial change without consensus from the rest of the team (both in the "distributed system" sense to make sure you don't break anything you don't know about, and in the social sense to avoid PR fights). + +For changes that need the consensus of a team, we us the process of proposing a final comment period (FCP). If you're not on the relevant team (and thus don't have @rfcbot permissions), ask someone who is to start one; unless they have a concern themselves, they should. + +The FCP process is only needed if you need consensus – if no processes require consensus for your change and you don't think anyone would have a problem with it, it's OK to rely on only an r+. For example, it is OK to add or modify unstable command-line flags or attributes in the reserved compiler-internal `rustc_` namespace without an FCP for compiler development or standard library use, as long as you don't expect them to be in wide use in the nightly ecosystem. Some teams have lighter weight processes that they use in scenarios like this; for example, the compiler team recommends filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to garner support and feedback without requiring full consensus. [mcp]: https://forge.rust-lang.org/compiler/proposals-and-stabilization.html#how-do-i-submit-an-mcp -You don't need to have the implementation fully ready for r+ to propose an FCP, -but it is generally a good idea to have at least a proof -of concept so that people can see what you are talking about. +You don't need to have the implementation fully ready for r+ to propose an FCP, but it is generally a good idea to have at least a proof of concept so that people can see what you are talking about. -When an FCP is proposed, it requires all members of the team to sign off the -FCP. After they all do so, there's a 10-day-long "final comment period" (hence -the name) where everybody can comment, and if no concerns are raised, the -PR/issue gets FCP approval. +When an FCP is proposed, it requires all members of the team to sign off on the FCP. After they all do so, there's a 10-day-long "final comment period" (hence the name) where everybody can comment, and if no concerns are raised, the PR/issue gets FCP approval. ## The logistics of writing features -There are a few "logistic" hoops you might need to go through in -order to implement a feature in a working way. +There are a few "logistical" hoops you might need to go through in order to implement a feature in a working way. ### Warning Cycles -In some cases, a feature or bugfix might break some existing programs -in some edge cases. In that case, you might want to do a crater run -to assess the impact and possibly add a future-compatibility lint, -similar to those used for -[edition-gated lints](diagnostics.md#edition-gated-lints). +In some cases, a feature or bugfix might break some existing programs in some edge cases. 
In that case, you'll want to do a crater run to assess the impact and possibly add a future-compatibility lint, similar to those used for [edition-gated lints](diagnostics.md#edition-gated-lints). ### Stability -We [value the stability of Rust]. Code that works and runs on stable -should (mostly) not break. Because of that, we don't want to release -a feature to the world with only team consensus and code review - -we want to gain real-world experience on using that feature on nightly, -and we might want to change the feature based on that experience. - -To allow for that, we must make sure users don't accidentally depend -on that new feature - otherwise, especially if experimentation takes -time or is delayed and the feature takes the trains to stable, -it would end up de facto stable and we'll not be able to make changes -in it without breaking people's code. - -The way we do that is that we make sure all new features are feature -gated - they can't be used without enabling a feature gate -(`#[feature(foo)]`), which can't be done in a stable/beta compiler. -See the [stability in code] section for the technical details. - -Eventually, after we gain enough experience using the feature, -make the necessary changes, and are satisfied, we expose it to -the world using the stabilization process described [here]. -Until then, the feature is not set in stone: every part of the -feature can be changed, or the feature might be completely -rewritten or removed. Features are not supposed to gain tenure -by being unstable and unchanged for a year. +We [value the stability of Rust]. Code that works and runs on stable should (mostly) not break. Because of that, we don't want to release a feature to the world with only team consensus and code review - we want to gain real-world experience on using that feature on nightly, and we might want to change the feature based on that experience. + +To allow for that, we must make sure users don't accidentally depend on that new feature - otherwise, especially if experimentation takes time or is delayed and the feature takes the trains to stable, it would end up de facto stable and we'll not be able to make changes in it without breaking people's code. + +The way we do that is that we make sure all new features are feature gated - they can't be used without enabling a feature gate (`#[feature(foo)]`), which can't be done in a stable/beta compiler. See the [stability in code] section for the technical details. + +Eventually, after we gain enough experience using the feature, make the necessary changes, and are satisfied, we expose it to the world using the stabilization process described [here]. Until then, the feature is not set in stone: every part of the feature can be changed, or the feature might be completely rewritten or removed. Features do not gain tenure by being unstable and unchanged for long periods of time. ### Tracking Issues -To keep track of the status of an unstable feature, the -experience we get while using it on nightly, and of the -concerns that block its stabilization, every feature-gate -needs a tracking issue. General discussions about the feature should be done on the tracking issue. +To keep track of the status of an unstable feature, the experience we get while using it on +nightly, and of the concerns that block its stabilization, every feature-gate needs a tracking +issue. 
When creating issues and PRs related to the feature, reference this tracking issue, and when there are updates about the feature's progress, post those to the tracking issue. -For features that have an RFC, you should use the RFC's -tracking issue for the feature. +For features that are part of an accept RFC or approved lang experiment, use the tracking issue for that. -For other features, you'll have to make a tracking issue -for that feature. The issue title should be "Tracking issue -for YOUR FEATURE". Use the ["Tracking Issue" issue template][template]. +For other features, create a tracking issue for that feature. The issue title should be "Tracking issue for YOUR FEATURE". Use the ["Tracking Issue" issue template][template]. [template]: https://github.com/rust-lang/rust/issues/new?template=tracking_issue.md +### Lang experiments + +To land in the compiler, features that have user-visible effects on the language (even unstable ones) must either be part of an accepted RFC or an approved [lang experiment]. + +To propose a new lang experiment, open an issue in `rust-lang/rust` that describes the motivation and the intended solution. If it's accepted, this issue will become the tracking issue for the experiment, so use the tracking issue [template] while also including these other details. Nominate the issue for the lang team and CC `@rust-lang/lang` and `@rust-lang/lang-advisors`. When the experiment is approved, the tracking issue will be marked as `B-experimental`. + +Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature. + +[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html + ## Stability in code -The below steps needs to be followed in order to implement -a new unstable feature: +The below steps needs to be followed in order to implement a new unstable feature: -1. Open a [tracking issue] - - if you have an RFC, you can use the tracking issue for the RFC. +1. Open or identify the [tracking issue]. For features that are part of an accept RFC or approved lang experiment, use the tracking issue for that. - The tracking issue should be labeled with at least `C-tracking-issue`. - For a language feature, a label `F-feature_name` should be added as well. + Label the tracking issue with `C-tracking-issue` and the relevant `F-feature_name` label (adding that label if needed). -1. Pick a name for the feature gate (for RFCs, use the name - in the RFC). +1. Pick a name for the feature gate (for RFCs, use the name in the RFC). 1. Add the feature name to `rustc_span/src/symbol.rs` in the `Symbols {...}` block. Note that this block must be in alphabetical order. -1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable - `declare_features` block. +1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable `declare_features` block. ```rust ignore /// description of feature (unstable, $feature_name, "CURRENT_RUSTC_VERSION", Some($tracking_issue_number)) ``` - If you haven't yet - opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely - to be accepted), you can temporarily use `None` - but make sure to update it before the PR is - merged! + If you haven't yet opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely to be accepted), you can temporarily use `None` - but make sure to update it before the PR is merged! 
For example: @@ -149,9 +95,7 @@ a new unstable feature: (unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None), ``` - Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` - lint] - by setting their type to `incomplete`: + Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` lint] by setting their type to `incomplete`: [`incomplete_features` lint]: https://doc.rust-lang.org/rustc/lints/listing/warn-by-default.html#incomplete-features @@ -160,42 +104,27 @@ a new unstable feature: (incomplete, deref_patterns, "CURRENT_RUSTC_VERSION", Some(87121), None), ``` - To avoid [semantic merge conflicts], please use `CURRENT_RUSTC_VERSION` instead of `1.70` or - another explicit version number. + Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature. + + To avoid [semantic merge conflicts], use `CURRENT_RUSTC_VERSION` instead of `1.70` or another explicit version number. [semantic merge conflicts]: https://bors.tech/essay/2017/02/02/pitch/ -1. Prevent usage of the new feature unless the feature gate is set. - You can check it in most places in the compiler using the - expression `tcx.features().$feature_name()` +1. Prevent usage of the new feature unless the feature gate is set. You can check it in most places in the compiler using the expression `tcx.features().$feature_name()`. + + If the feature gate is not set, you should either maintain the pre-feature behavior or raise an error, depending on what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. For an example of adding an error, see [#81015]. - If the feature gate is not set, you should either maintain - the pre-feature behavior or raise an error, depending on - what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. - For an example of adding an error, see [#81015]. + For features introducing new syntax, pre-expansion gating should be used instead. During parsing, when the new syntax is parsed, the symbol must be inserted to the current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - For features introducing new syntax, pre-expansion gating should be used instead. - During parsing, when the new syntax is parsed, the symbol must be inserted to the - current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - - After being inserted to the gated spans, the span must be checked in the - [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies - features. Exactly how it is gated depends on the exact type of feature, but most - likely will use the `gate_all!()` macro. + After being inserted to the gated spans, the span must be checked in the [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies features. Exactly how it is gated depends on the exact type of feature, but most likely will use the `gate_all!()` macro. -1. Add a test to ensure the feature cannot be used without - a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. - You can generate the corresponding `.stderr` file by running `./x test -tests/ui/feature-gates/ --bless`. +1. Add a test to ensure the feature cannot be used without a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. You can generate the corresponding `.stderr` file by running `./x test tests/ui/feature-gates/ --bless`. -1. 
Add a section to the unstable book, in - `src/doc/unstable-book/src/language-features/$feature_name.md`. +1. Add a section to the unstable book, in `src/doc/unstable-book/src/language-features/$feature_name.md`. -1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. - PRs without tests will not be accepted! +1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. PRs without tests will not be accepted! -1. Get your PR reviewed and land it. You have now successfully - implemented a feature in Rust! +1. Get your PR reviewed and land it. You have now successfully implemented a feature in Rust! [`GatedSpans`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_session/parse/struct.GatedSpans.html [#81015]: https://github.com/rust-lang/rust/pull/81015 @@ -206,3 +135,42 @@ tests/ui/feature-gates/ --bless`. [here]: ./stabilization_guide.md [tracking issue]: #tracking-issues [add-feature-gate]: ./feature-gates.md#adding-a-feature-gate + +## Call for testing + +Once the implementation is complete, the feature will be available to nightly users but not yet part of stable Rust. This is a good time to write a blog post on [the main Rust blog][rust-blog] and issue a "call for testing". + +Some earlier such blog posts include: + +1. [The push for GATs stabilization](https://blog.rust-lang.org/2021/08/03/GATs-stabilization-push/) +2. [Changes to `impl Trait` in Rust 2024](https://blog.rust-lang.org/2024/09/05/impl-trait-capture-rules.html) +3. [Async Closures MVP: Call for Testing!](https://blog.rust-lang.org/inside-rust/2024/08/09/async-closures-call-for-testing/) + +Alternatively, [*This Week in Rust*][twir] has a [section][twir-cft] for this. One example of this having been used is: + +- [Call for testing on boolean literals as cfg predicates](https://github.com/rust-lang/rust/issues/131204#issuecomment-2569314526) + +Which option to choose might depend on how significant the language change is, though note that the [*This Week in Rust*][twir] section might be less visible than a dedicated post on the main Rust blog. + +## Polishing + +Giving users a polished experience means more than just implementing the feature in rustc. We need to think about all of the tools and resources that we ship. This work includes: + +- Documenting the language feature in the [Rust Reference][reference]. +- Extending [`rustfmt`] to format any new syntax (if applicable). +- Extending [`rust-analyzer`] (if applicable). The extent of this work can depend on the nature of the language feature, as some features don't need to be blocked on *full* support. + - When a language feature degrades the user experience simply by existing before support is implemented in [`rust-analyzer`], that may lead the lang team to raise a blocking concern. + - Examples of such might include new syntax that [`rust-analyzer`] can't parse or type inference changes it doesn't understand when those lead to bogus diagnostics. + +## Stabilization + +The final step in the feature lifecycle is [stabilization][stab], which is when the feature becomes available to all Rust users. At this point, backward incompatible changes are generally no longer permitted (see the lang team's [defined semver policies](https://rust-lang.github.io/rfcs/1122-language-semver.html) for details). To learn more about stabilization, see the [stabilization guide][stab]. 
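Putting the earlier "Stability in code" steps together, here is a minimal sketch of the compiler-side gate check. `my_feature` is a hypothetical feature name and the exact `feature_err` signature varies between rustc versions, so treat this as an outline rather than exact code (see [#81015] for a real example):

```rust,ignore
// Sketch only: `my_feature` is a placeholder, and the precise `feature_err`
// signature should be checked against the current `rustc_session::parse` API.
if !tcx.features().my_feature() {
    rustc_session::parse::feature_err(
        tcx.sess,
        sym::my_feature,
        span,
        "`my_feature` is experimental",
    )
    .emit();
}
```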
+ + +[stab]: ./stabilization_guide.md +[rust-blog]: https://github.com/rust-lang/blog.rust-lang.org/ +[twir]: https://github.com/rust-lang/this-week-in-rust +[twir-cft]: https://this-week-in-rust.org/blog/2025/01/22/this-week-in-rust-583/#calls-for-testing +[`rustfmt`]: https://github.com/rust-lang/rustfmt +[`rust-analyzer`]: https://github.com/rust-lang/rust-analyzer +[reference]: https://github.com/rust-lang/reference diff --git a/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md b/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md index 28e0e7a908d..880363b94bf 100644 --- a/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md +++ b/src/doc/rustc-dev-guide/src/llvm-coverage-instrumentation.md @@ -117,7 +117,7 @@ human-readable coverage report. > directive, so they will be skipped if the profiler runtime has not been > [enabled in `bootstrap.toml`](#recommended-configtoml-settings). -Finally, the [`tests/codegen/instrument-coverage/testprog.rs`] test compiles a simple Rust program +Finally, the [`tests/codegen-llvm/instrument-coverage/testprog.rs`] test compiles a simple Rust program with `-C instrument-coverage` and compares the compiled program's LLVM IR to expected LLVM IR instructions and structured data for a coverage-enabled program, including various checks for Coverage Map-related metadata and the LLVM @@ -136,4 +136,4 @@ and `mir-opt` tests can be refreshed by running: [`tests/coverage`]: https://github.com/rust-lang/rust/tree/master/tests/coverage [`src/tools/coverage-dump`]: https://github.com/rust-lang/rust/tree/master/src/tools/coverage-dump [`tests/coverage-run-rustdoc`]: https://github.com/rust-lang/rust/tree/master/tests/coverage-run-rustdoc -[`tests/codegen/instrument-coverage/testprog.rs`]: https://github.com/rust-lang/rust/blob/master/tests/mir-opt/coverage/instrument_coverage.rs +[`tests/codegen-llvm/instrument-coverage/testprog.rs`]: https://github.com/rust-lang/rust/blob/master/tests/mir-opt/coverage/instrument_coverage.rs diff --git a/src/doc/rustc-dev-guide/src/offload/installation.md b/src/doc/rustc-dev-guide/src/offload/installation.md index 1962314c70a..1e792de3c8c 100644 --- a/src/doc/rustc-dev-guide/src/offload/installation.md +++ b/src/doc/rustc-dev-guide/src/offload/installation.md @@ -40,7 +40,7 @@ This gives you a working LLVM build. ## Testing run ``` -./x test --stage 1 tests/codegen/gpu_offload +./x test --stage 1 tests/codegen-llvm/gpu_offload ``` ## Usage diff --git a/src/doc/rustc-dev-guide/src/profile-guided-optimization.md b/src/doc/rustc-dev-guide/src/profile-guided-optimization.md index d279786ac45..2fa81021045 100644 --- a/src/doc/rustc-dev-guide/src/profile-guided-optimization.md +++ b/src/doc/rustc-dev-guide/src/profile-guided-optimization.md @@ -132,7 +132,7 @@ There is also a [codegen test][codegen-test] that checks that some expected instrumentation artifacts show up in LLVM IR. 
[rmake-tests]: https://github.com/rust-lang/rust/tree/master/tests/run-make -[codegen-test]: https://github.com/rust-lang/rust/blob/master/tests/codegen/pgo-instrumentation.rs +[codegen-test]: https://github.com/rust-lang/rust/blob/master/tests/codegen-llvm/pgo-instrumentation.rs ## Additional information diff --git a/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md b/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md index 4f44cf1701c..3ec5ebd799e 100644 --- a/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md +++ b/src/doc/rustc-dev-guide/src/rustdoc-internals/rustdoc-test-suite.md @@ -20,7 +20,7 @@ Internally, [`compiletest`] invokes the supplementary checker script [`htmldocck Directives to HtmlDocCk are assertions that place constraints on the generated HTML. They look similar to those given to `compiletest` in that they take the form of `//@` comments -but ultimately, they are completey distinct and processed by different programs. +but ultimately, they are completely distinct and processed by different programs. [XPath] is used to query parts of the HTML document tree. diff --git a/src/doc/rustc-dev-guide/src/sanitizers.md b/src/doc/rustc-dev-guide/src/sanitizers.md index 664b4feac4f..29d9056c15d 100644 --- a/src/doc/rustc-dev-guide/src/sanitizers.md +++ b/src/doc/rustc-dev-guide/src/sanitizers.md @@ -76,7 +76,7 @@ implementation: ## Testing sanitizers Sanitizers are validated by code generation tests in -[`tests/codegen/sanitize*.rs`][test-cg] and end-to-end functional tests in +[`tests/codegen-llvm/sanitize*.rs`][test-cg] and end-to-end functional tests in [`tests/ui/sanitizer/`][test-ui] directory. Testing sanitizer functionality requires the sanitizer runtimes (built when @@ -85,7 +85,7 @@ sanitizer. When sanitizer is unsupported on given target, sanitizers tests will be ignored. This behaviour is controlled by compiletest `needs-sanitizer-*` directives. -[test-cg]: https://github.com/rust-lang/rust/tree/master/tests/codegen +[test-cg]: https://github.com/rust-lang/rust/tree/master/tests/codegen-llvm [test-ui]: https://github.com/rust-lang/rust/tree/master/tests/ui/sanitizer ## Enabling sanitizer on a new target diff --git a/src/doc/rustc-dev-guide/src/serialization.md b/src/doc/rustc-dev-guide/src/serialization.md index 47667061eda..8eb37bbe20b 100644 --- a/src/doc/rustc-dev-guide/src/serialization.md +++ b/src/doc/rustc-dev-guide/src/serialization.md @@ -75,7 +75,7 @@ impl<D: Decoder> Decodable<D> for MyStruct { rustc has a lot of [arena allocated types]. Deserializing these types isn't possible without access to the arena that they need to be allocated on. -The [`TyDecoder`] and [`TyEncoder`] traits are supertraits of [`Decoder`] and [`Encoder`] that allow access to a [`TyCtxt`]. +The [`TyDecoder`] and [`TyEncoder`] traits are subtraits of [`Decoder`] and [`Encoder`] that allow access to a [`TyCtxt`]. Types which contain `arena` allocated types can then bound the type parameter of their [`Encodable`] and [`Decodable`] implementations with these traits. diff --git a/src/doc/rustc-dev-guide/src/stabilization_guide.md b/src/doc/rustc-dev-guide/src/stabilization_guide.md index f875c68745f..f155272e5a2 100644 --- a/src/doc/rustc-dev-guide/src/stabilization_guide.md +++ b/src/doc/rustc-dev-guide/src/stabilization_guide.md @@ -1,120 +1,66 @@ # Request for stabilization -**NOTE**: this page is about stabilizing *language* features. -For stabilizing *library* features, see [Stabilizing a library feature]. 
+**NOTE**: This page is about stabilizing *language* features. For stabilizing *library* features, see [Stabilizing a library feature]. [Stabilizing a library feature]: ./stability.md#stabilizing-a-library-feature -Once an unstable feature has been well-tested with no outstanding -concern, anyone may push for its stabilization. It involves the -following steps: +Once an unstable feature has been well-tested with no outstanding concerns, anyone may push for its stabilization, though involving the people who have worked on it is prudent. Follow these steps: <!-- toc --> -## Documentation PRs +## Write an RFC, if needed -<a id="updating-documentation"></a> +If the feature was part of a [lang experiment], the lang team generally will want to first accept an RFC before stabilization. -If any documentation for this feature exists, it should be -in the [`Unstable Book`], located at [`src/doc/unstable-book`]. -If it exists, the page for the feature gate should be removed. +[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html + +## Documentation PRs -If there was documentation there, integrating it into the -existing documentation is needed. +<a id="updating-documentation"></a> -If there wasn't documentation there, it needs to be added. +The feature might be documented in the [`Unstable Book`], located at [`src/doc/unstable-book`]. Remove the page for the feature gate if it exists. Integrate any useful parts of that documentation in other places. -Places that may need updated documentation: +Places that may need updated documentation include: -- [The Reference]: This must be updated, in full detail. -- [The Book]: This may or may not need updating, depends. - If you're not sure, please open an issue on this repository - and it can be discussed. -- standard library documentation: As needed. Language features - often don't need this, but if it's a feature that changes - how good examples are written, such as when `?` was added - to the language, updating examples is important. -- [Rust by Example]: As needed. +- [The Reference]: This must be updated, in full detail, and a member of the lang-docs team must review and approve the PR before the stabilization can be merged. +- [The Book]: This is updated as needed. If you're not sure, please open an issue on this repository and it can be discussed. +- Standard library documentation: This is updated as needed. Language features often don't need this, but if it's a feature that changes how idiomatic examples are written, such as when `?` was added to the language, updating these in the library documentation is important. Review also the keyword documentation and ABI documentation in the standard library, as these sometimes needs updates for language changes. +- [Rust by Example]: This is updated as needed. -Prepare PRs to update documentation involving this new feature -for repositories mentioned above. Maintainers of these repositories -will keep these PRs open until the whole stabilization process -has completed. Meanwhile, we can proceed to the next step. +Prepare PRs to update documentation involving this new feature for the repositories mentioned above. Maintainers of these repositories will keep these PRs open until the whole stabilization process has completed. Meanwhile, we can proceed to the next step. ## Write a stabilization report -Find the tracking issue of the feature, and create a short -stabilization report. 
Essentially this would be a brief summary -of the feature plus some links to test cases showing it works -as expected, along with a list of edge cases that came up -and were considered. This is a minimal "due diligence" that -we do before stabilizing. - -The report should contain: +Author a stabilization report using the [template found in this repository][srt]. -- A summary, showing examples (e.g. code snippets) what is - enabled by this feature. -- Links to test cases in our test suite regarding this feature - and describe the feature's behavior on encountering edge cases. -- Links to the documentations (the PRs we have made in the - previous steps). -- Any other relevant information. -- The resolutions of any unresolved questions if the stabilization - is for an RFC. +The stabilization reports summarizes: -Examples of stabilization reports can be found in -[rust-lang/rust#44494][report1] and [rust-lang/rust#28237][report2] (these links -will bring you directly to the comment containing the stabilization report). +- The main design decisions and deviations since the RFC was accepted, including both decisions that were FCP'd or otherwise accepted by the language team as well as those being presented to the lang team for the first time. + - Often, the final stabilized language feature has significant design deviations from the original RFC. That's OK, but these deviations must be highlighted and explained carefully. +- The work that has been done since the RFC was accepted, acknowledging the main contributors that helped drive the language feature forward. -[report1]: https://github.com/rust-lang/rust/issues/44494#issuecomment-360191474 -[report2]: https://github.com/rust-lang/rust/issues/28237#issuecomment-363374130 +The [*Stabilization Template*][srt] includes a series of questions that aim to surface connections between this feature and lang's subteams (e.g. types, opsem, lang-docs, etc.) and to identify items that are commonly overlooked. -## FCP +[srt]: ./stabilization_report_template.md -If any member of the team responsible for tracking this -feature agrees with stabilizing this feature, they will -start the FCP (final-comment-period) process by commenting - -```text -@rfcbot fcp merge -``` - -The rest of the team members will review the proposal. If the final -decision is to stabilize, we proceed to do the actual code modification. +The stabilization report is typically posted as the main comment on the stabilization PR (see the next section). ## Stabilization PR -*This is for stabilizing language features. If you are stabilizing a library -feature, see [the stabilization chapter of the std dev guide][std-guide-stabilization] instead.* - -Once we have decided to stabilize a feature, we need to have -a PR that actually makes that stabilization happen. These kinds -of PRs are a great way to get involved in Rust, as they take -you on a little tour through the source code. +Every feature is different, and some may require steps beyond what this guide discusses. -Here is a general guide to how to stabilize a feature -- -every feature is different, of course, so some features may -require steps beyond what this guide talks about. - -Note: Before we stabilize any feature, it's the rule that it -should appear in the documentation. +Before the stabilization will be considered by the lang team, there must be a complete PR to the Reference describing the feature, and before the stabilization PR will be merged, this PR must have been reviewed and approved by the lang-docs team. 
### Updating the feature-gate listing -There is a central listing of unstable feature-gates in -[`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` -macro. There should be an entry for the feature you are aiming -to stabilize, something like (this example is taken from -[rust-lang/rust#32409]: +There is a central listing of unstable feature-gates in [`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` macro. There should be an entry for the feature you are aiming to stabilize, something like (this example is taken from [rust-lang/rust#32409]): ```rust,ignore // pub(restricted) visibilities (RFC 1422) (unstable, pub_restricted, "CURRENT_RUSTC_VERSION", Some(32409)), ``` -The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. -Entries in the `declare_features!` call are sorted, so find the correct place. -When it is done, it should look like: +The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. Entries in the `declare_features!` call are sorted, so find the correct place. When it is done, it should look like: ```rust,ignore // pub(restricted) visibilities (RFC 1422) @@ -122,54 +68,31 @@ When it is done, it should look like: // note that we changed this ``` -(Even though you will encounter version numbers in the file of past changes, -you should not put the rustc version you expect your stabilization to happen in, -but instead `CURRENT_RUSTC_VERSION`) +(Even though you will encounter version numbers in the file of past changes, you should not put the rustc version you expect your stabilization to happen in, but instead use `CURRENT_RUSTC_VERSION`.) ### Removing existing uses of the feature-gate -Next search for the feature string (in this case, `pub_restricted`) -in the codebase to find where it appears. Change uses of -`#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders -under `library/` and `compiler/` but not the toplevel `tests/` one) to be -`#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate -only for stage0, which is built using the current beta (this is -needed because the feature is still unstable in the current beta). +Next, search for the feature string (in this case, `pub_restricted`) in the codebase to find where it appears. Change uses of `#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders under `library/` and `compiler/` but not the toplevel `tests/` one) to be `#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate only for stage0, which is built using the current beta (this is needed because the feature is still unstable in the current beta). -Also, remove those strings from any tests (e.g. under `tests/`). If there are tests -specifically targeting the feature-gate (i.e., testing that the -feature-gate is required to use the feature, but nothing else), -simply remove the test. +Also, remove those strings from any tests (e.g. under `tests/`). If there are tests specifically targeting the feature-gate (i.e., testing that the feature-gate is required to use the feature, but nothing else), simply remove the test. ### Do not require the feature-gate to use the feature -Most importantly, remove the code which flags an error if the -feature-gate is not present (since the feature is now considered -stable). If the feature can be detected because it employs some -new syntax, then a common place for that code to be is in the -same `compiler/rustc_ast_passes/src/feature_gate.rs`. 
-For example, you might see code like this: +Most importantly, remove the code which flags an error if the feature-gate is not present (since the feature is now considered stable). If the feature can be detected because it employs some new syntax, then a common place for that code to be is in `compiler/rustc_ast_passes/src/feature_gate.rs`. For example, you might see code like this: ```rust,ignore -gate_feature_post!(&self, pub_restricted, span, - "`pub(restricted)` syntax is experimental"); +gate_all!(pub_restricted, "`pub(restricted)` syntax is experimental"); ``` -This `gate_feature_post!` macro prints an error if the -`pub_restricted` feature is not enabled. It is not needed -now that `#[pub_restricted]` is stable. +This `gate_all!` macro prints an error if the `pub_restricted` feature is not enabled. It is not needed now that `pub(restricted)` is stable. For more subtle features, you may find code like this: ```rust,ignore -if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } +if self.tcx.features().async_fn_in_dyn_trait() { /* XXX */ } ``` -This `pub_restricted` field (obviously named after the feature) -would ordinarily be false if the feature flag is not present -and true if it is. So transform the code to assume that the field -is true. In this case, that would mean removing the `if` and -leaving just the `/* XXX */`. +This feature check (named after the feature) would ordinarily be false if the feature flag is not present and true if it is. So transform the code to assume that it is true. In this case, that would mean removing the `if` and leaving just the `/* XXX */`. ```rust,ignore if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } @@ -194,3 +117,40 @@ if something { /* XXX */ } [Rust by Example]: https://github.com/rust-lang/rust-by-example [`Unstable Book`]: https://doc.rust-lang.org/unstable-book/index.html [`src/doc/unstable-book`]: https://github.com/rust-lang/rust/tree/master/src/doc/unstable-book + +## Team nominations + +When opening the stabilization PR, CC the lang team and its advisors (`@rust-lang/lang @rust-lang/lang-advisors`) and any other teams to whom the feature is relevant, e.g.: + +- `@rust-lang/types`, for type system interactions. +- `@rust-lang/opsem`, for interactions with unsafe code. +- `@rust-lang/compiler`, for implementation robustness. +- `@rust-lang/libs-api`, for changes to the standard library API or its guarantees. +- `@rust-lang/lang-docs`, for questions about how this should be documented in the Reference. + +After the stabilization PR is opened with the stabilization report, wait a bit for any immediate comments. When such comments "simmer down" and you feel the PR is ready for consideration by the lang team, [nominate the PR](https://lang-team.rust-lang.org/how_to/nominate.html) to get it on the agenda for consideration in an upcoming lang meeting. + +If you are not a `rust-lang` organization member, you can ask your assigned reviewer to CC the relevant teams on your behalf. + +## Propose FCP on the PR + +After the lang team and other relevant teams review the stabilization, and after you have answered any questions they may have had, a member of one of the teams may propose to accept the stabilization by commenting: + +```text +@rfcbot fcp merge +``` + +Once enough team members have reviewed, the PR will move into a "final comment period" (FCP). If no new concerns are raised, this period will complete and the PR can be merged after implementation review in the usual way. 
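As a rough illustration of the feature-gate removal steps described above (a sketch based on the `pub_restricted` example used throughout this guide, not an excerpt from a real crate), the in-tree crate-attribute change looks roughly like this:

```rust,ignore
// Before stabilization: the feature gate is enabled unconditionally.
#![feature(pub_restricted)]

// After stabilization: only stage0 still needs the gate, because stage0 is
// built with the current beta compiler, where the feature is still unstable.
#![cfg_attr(bootstrap, feature(pub_restricted))]
```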
+ +## Reviewing and merging stabilizations + +On a stabilization, before giving it the `r+`, ensure that the PR: + +- Matches what the team proposed for stabilization and what is documented in the Reference PR. +- Includes any changes the team decided to request along the way in order to resolve or avoid concerns. +- Is otherwise exactly what is described in the stabilization report and in any relevant RFCs or prior lang FCPs. +- Does not expose on stable behaviors other than those specified, accepted for stabilization, and documented in the Reference. +- Has sufficient tests to convincingly demonstrate these things. +- Is accompanied by a PR to the Reference that has been reviewed and approved by a member of lang-docs. + +In particular, when reviewing the PR, keep an eye out for any user-visible details that the lang team failed to consider and specify. If you find one, describe it and nominate the PR for the lang team. diff --git a/src/doc/rustc-dev-guide/src/stabilization_report_template.md b/src/doc/rustc-dev-guide/src/stabilization_report_template.md new file mode 100644 index 00000000000..793f7d7e45c --- /dev/null +++ b/src/doc/rustc-dev-guide/src/stabilization_report_template.md @@ -0,0 +1,277 @@ +# Stabilization report template + +## What is this? + +This is a template for [stabilization reports](./stabilization_guide.md) of **language features**. The questions aim to solicit the details most often needed. These details help reviewers to identify potential problems upfront. Not all parts of the template will apply to every stabilization. If a question doesn't apply, explain briefly why. + +Copy everything after the separator and edit it as Markdown. Replace each *TODO* with your answer. + +--- + +# Stabilization report + +## Summary + +> Remind us what this feature is and what value it provides. Tell the story of what led up to this stabilization. +> +> E.g., see: +> +> - [Stabilize AFIT/RPITIT](https://web.archive.org/web/20250329190642/https://github.com/rust-lang/rust/pull/115822) +> - [Stabilize RTN](https://web.archive.org/web/20250321214601/https://github.com/rust-lang/rust/pull/138424) +> - [Stabilize ATPIT](https://web.archive.org/web/20250124214256/https://github.com/rust-lang/rust/pull/120700) +> - [Stabilize opaque type precise capturing](https://web.archive.org/web/20250312173538/https://github.com/rust-lang/rust/pull/127672) + +*TODO* + +Tracking: + +- *TODO* (Link to tracking issue.) + +Reference PRs: + +- *TODO* (Link to Reference PRs.) + +cc @rust-lang/lang @rust-lang/lang-advisors + +### What is stabilized + +> Describe each behavior being stabilized and give a short example of code that will now be accepted. + +```rust +todo!() +``` + +### What isn't stabilized + +> Describe any parts of the feature not being stabilized. Talk about what we might want to do later and what doors are being left open for that. If what we're not stabilizing might lead to surprises for users, talk about that in particular. + +## Design + +### Reference + +> What updates are needed to the Reference? Link to each PR. If the Reference is missing content needed for describing this feature, discuss that. + +- *TODO* + +### RFC history + +> What RFCs have been accepted for this feature? + +- *TODO* + +### Answers to unresolved questions + +> What questions were left unresolved by the RFC? How have they been answered? Link to any relevant lang decisions. + +*TODO* + +### Post-RFC changes + +> What other user-visible changes have occurred since the RFC was accepted? 
Describe both changes that the lang team accepted (and link to those decisions) as well as changes that are being presented to the team for the first time in this stabilization report. + +*TODO* + +### Key points + +> What decisions have been most difficult and what behaviors to be stabilized have proved most contentious? Summarize the major arguments on all sides and link to earlier documents and discussions. + +*TODO* + +### Nightly extensions + +> Are there extensions to this feature that remain unstable? How do we know that we are not accidentally committing to those? + +*TODO* + +### Doors closed + +> What doors does this stabilization close for later changes to the language? E.g., does this stabilization make any other RFCs, lang experiments, or known in-flight proposals more difficult or impossible to do later? + +## Feedback + +### Call for testing + +> Has a "call for testing" been done? If so, what feedback was received? + +*TODO* + +### Nightly use + +> Do any known nightly users use this feature? Counting instances of `#![feature(FEATURE_NAME)]` on GitHub with grep might be informative. + +*TODO* + +## Implementation + +### Major parts + +> Summarize the major parts of the implementation and provide links into the code and to relevant PRs. +> +> See, e.g., this breakdown of the major parts of async closures: +> +> - <https://rustc-dev-guide.rust-lang.org/coroutine-closures.html> + +*TODO* + +### Coverage + +> Summarize the test coverage of this feature. +> +> Consider what the "edges" of this feature are. We're particularly interested in seeing tests that assure us about exactly what nearby things we're not stabilizing. Tests should of course comprehensively demonstrate that the feature works. Think too about demonstrating the diagnostics seen when common mistakes are made and the feature is used incorrectly. +> +> Within each test, include a comment at the top describing the purpose of the test and what set of invariants it intends to demonstrate. This is a great help to our review. +> +> Describe any known or intentional gaps in test coverage. +> +> Contextualize and link to test folders and individual tests. + +*TODO* + +### Outstanding bugs + +> What outstanding bugs involve this feature? List them. Should any block the stabilization? Discuss why or why not. + +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Outstanding FIXMEs + +> What FIXMEs are still in the code for that feature and why is it OK to leave them there? + +*TODO* + +### Tool changes + +> What changes must be made to our other tools to support this feature. Has this work been done? Link to any relevant PRs and issues. + +- [ ] rustfmt + - *TODO* +- [ ] rust-analyzer + - *TODO* +- [ ] rustdoc (both JSON and HTML) + - *TODO* +- [ ] cargo + - *TODO* +- [ ] clippy + - *TODO* +- [ ] rustup + - *TODO* +- [ ] docs.rs + - *TODO* + +*TODO* + +### Breaking changes + +> If this stabilization represents a known breaking change, link to the crater report, the analysis of the crater report, and to all PRs we've made to ecosystem projects affected by this breakage. Discuss any limitations of what we're able to know about or to fix. + +*TODO* + +Crater report: + +- *TODO* + +Crater analysis: + +- *TODO* + +PRs to affected crates: + +- *TODO* +- *TODO* +- *TODO* + +## Type system, opsem + +### Compile-time checks + +> What compilation-time checks are done that are needed to prevent undefined behavior? +> +> Link to tests demonstrating that these checks are being done. 
+ +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Type system rules + +> What type system rules are enforced for this feature and what is the purpose of each? + +*TODO* + +### Sound by default? + +> Does the feature's implementation need specific checks to prevent UB, or is it sound by default and need specific opt-in to perform the dangerous/unsafe operations? If it is not sound by default, what is the rationale? + +*TODO* + +### Breaks the AM? + +> Can users use this feature to introduce undefined behavior, or use this feature to break the abstraction of Rust and expose the underlying assembly-level implementation? Describe this if so. + +*TODO* + +## Common interactions + +### Temporaries + +> Does this feature introduce new expressions that can produce temporaries? What are the scopes of those temporaries? + +*TODO* + +### Drop order + +> Does this feature raise questions about the order in which we should drop values? Talk about the decisions made here and how they're consistent with our earlier decisions. + +*TODO* + +### Pre-expansion / post-expansion + +> Does this feature raise questions about what should be accepted pre-expansion (e.g. in code covered by `#[cfg(false)]`) versus what should be accepted post-expansion? What decisions were made about this? + +*TODO* + +### Edition hygiene + +> If this feature is gated on an edition, how do we decide, in the context of the edition hygiene of tokens, whether to accept or reject code. E.g., what token do we use to decide? + +*TODO* + +### SemVer implications + +> Does this feature create any new ways in which library authors must take care to prevent breaking downstreams when making minor-version releases? Describe these. Are these new hazards "major" or "minor" according to [RFC 1105](https://rust-lang.github.io/rfcs/1105-api-evolution.html)? + +*TODO* + +### Exposing other features + +> Are there any other unstable features whose behavior may be exposed by this feature in any way? What features present the highest risk of that? + +*TODO* + +## History + +> List issues and PRs that are important for understanding how we got here. + +- *TODO* +- *TODO* +- *TODO* + +## Acknowledgments + +> Summarize contributors to the feature by name for recognition and so that those people are notified about the stabilization. Does anyone who worked on this *not* think it should be stabilized right now? We'd like to hear about that if so. + +*TODO* + +## Open items + +> List any known items that have not yet been completed and that should be before this is stabilized. + +- [ ] *TODO* +- [ ] *TODO* +- [ ] *TODO* diff --git a/src/doc/rustc-dev-guide/src/tests/compiletest.md b/src/doc/rustc-dev-guide/src/tests/compiletest.md index ded30234e70..a108dfdef9b 100644 --- a/src/doc/rustc-dev-guide/src/tests/compiletest.md +++ b/src/doc/rustc-dev-guide/src/tests/compiletest.md @@ -29,7 +29,7 @@ on if or how to run the test, what behavior to expect, and more. See [directives](directives.md) and the test suite documentation below for more details on these annotations. -See the [Adding new tests](adding.md) and [Best practies](best-practices.md) +See the [Adding new tests](adding.md) and [Best practices](best-practices.md) chapters for a tutorial on creating a new test and advice on writing a good test, and the [Running tests](running.md) chapter on how to run the test suite. 
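As a rough illustration of how such directive annotations look in practice (a hypothetical test, not one taken from the actual suite), a minimal UI test exercising the run directives documented in the `ui.md` changes below might be written as:

```rust,ignore
//@ run-fail
//@ check-run-results

// Compilation must succeed, but the produced binary must exit with a regular
// failure code (a panic exits with code 101); the captured run output is then
// compared against blessed `.run.stderr`/`.run.stdout` snapshot files.
fn main() {
    panic!("this test is expected to fail at runtime");
}
```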
@@ -68,7 +68,7 @@ The following test suites are available, with links for more information: | [`pretty`](#pretty-printer-tests) | Check pretty printing | | [`incremental`](#incremental-tests) | Check incremental compilation behavior | | [`debuginfo`](#debuginfo-tests) | Check debuginfo generation running debuggers | -| [`codegen`](#codegen-tests) | Check code generation | +| [`codegen-*`](#codegen-tests) | Check code generation | | [`codegen-units`](#codegen-units-tests) | Check codegen unit partitioning | | [`assembly`](#assembly-tests) | Check assembly output | | [`mir-opt`](#mir-opt-tests) | Check MIR generation and optimizations | @@ -290,7 +290,7 @@ For example, `./x test tests/debuginfo -- --debugger gdb` will only test GDB com ### Codegen tests -The tests in [`tests/codegen`] test LLVM code generation. They compile the test +The tests in [`tests/codegen-llvm`] test LLVM code generation. They compile the test with the `--emit=llvm-ir` flag to emit LLVM IR. They then run the LLVM [FileCheck] tool. The test is annotated with various `// CHECK` comments to check the generated code. See the [FileCheck] documentation for a tutorial and @@ -301,13 +301,13 @@ See also the [assembly tests](#assembly-tests) for a similar set of tests. If you need to work with `#![no_std]` cross-compiling tests, consult the [`minicore` test auxiliary](./minicore.md) chapter. -[`tests/codegen`]: https://github.com/rust-lang/rust/tree/master/tests/codegen +[`tests/codegen-llvm`]: https://github.com/rust-lang/rust/tree/master/tests/codegen-llvm [FileCheck]: https://llvm.org/docs/CommandGuide/FileCheck.html ### Assembly tests -The tests in [`tests/assembly`] test LLVM assembly output. They compile the test +The tests in [`tests/assembly-llvm`] test LLVM assembly output. They compile the test with the `--emit=asm` flag to emit a `.s` file with the assembly output. They then run the LLVM [FileCheck] tool. @@ -324,7 +324,7 @@ See also the [codegen tests](#codegen-tests) for a similar set of tests. If you need to work with `#![no_std]` cross-compiling tests, consult the [`minicore` test auxiliary](./minicore.md) chapter. -[`tests/assembly`]: https://github.com/rust-lang/rust/tree/master/tests/assembly +[`tests/assembly-llvm`]: https://github.com/rust-lang/rust/tree/master/tests/assembly-llvm ### Codegen-units tests diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 63aa08c389c..89e4d3e9b58 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -75,8 +75,10 @@ expectations](ui.md#controlling-passfail-expectations). | `check-fail` | Building (no codegen) should fail | `ui`, `crashes` | N/A | | `build-pass` | Building should pass | `ui`, `crashes`, `codegen`, `incremental` | N/A | | `build-fail` | Building should fail | `ui`, `crashes` | N/A | -| `run-pass` | Running the test binary should pass | `ui`, `crashes`, `incremental` | N/A | -| `run-fail` | Running the test binary should fail | `ui`, `crashes` | N/A | +| `run-pass` | Program must exit with code `0` | `ui`, `crashes`, `incremental` | N/A | +| `run-fail` | Program must exit with code `1..=127` | `ui`, `crashes` | N/A | +| `run-crash` | Program must crash | `ui` | N/A | +| `run-fail-or-crash` | Program must `run-fail` or `run-crash` | `ui` | N/A | | `ignore-pass` | Ignore `--pass` flag | `ui`, `crashes`, `codegen`, `incremental` | N/A | | `dont-check-failure-status` | Don't check exact failure status (i.e. 
`1`) | `ui`, `incremental` | N/A | | `failure-status` | Check | `ui`, `crashes` | Any `u16` | @@ -203,6 +205,8 @@ settings: on `wasm32-unknown-unknown` target because the target does not support the `proc-macro` crate type. - `needs-target-std` — ignores if target platform does not have std support. +- `ignore-backends` — ignores the listed backends, separated by whitespace characters. +- `needs-backends` — only runs the test if the current codegen backend is listed. The following directives will check LLVM support: @@ -289,8 +293,6 @@ See [Pretty-printer](compiletest.md#pretty-printer-tests). - `no-auto-check-cfg` — disable auto check-cfg (only for `--check-cfg` tests) - [`revisions`](compiletest.md#revisions) — compile multiple times -- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) - - suppress tidy checks for mentioning unknown revision names -[`forbid-output`](compiletest.md#incremental-tests) — incremental cfail rejects output pattern - [`should-ice`](compiletest.md#incremental-tests) — incremental cfail should @@ -311,6 +313,17 @@ test suites that use those tools: - `llvm-cov-flags` adds extra flags when running LLVM's `llvm-cov` tool. - Used by [coverage tests](compiletest.md#coverage-tests) in `coverage-run` mode. +### Tidy specific directives + +The following directives control how the [tidy script](../conventions.md#formatting) +verifies tests. + +- `ignore-tidy-target-specific-tests` disables checking that the appropriate + LLVM component is required (via a `needs-llvm-components` directive) when a + test is compiled for a specific target (via the `--target` flag in a + `compile-flags` directive). +- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) - + suppress tidy checks for mentioning unknown revision names. ## Substitutions diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index 4fce5838b6e..782f78d7614 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -309,7 +309,9 @@ fn main((ؼ Use `//~?` to match an error without line information. `//~?` is precise and will not match errors if their line information is available. -It should be preferred to using `error-pattern`, which is imprecise and non-exhaustive. +It should be preferred over `//@ error-pattern` +for tests wishing to match against compiler diagnostics, +due to `//@ error-pattern` being imprecise and non-exhaustive. ```rust,ignore //@ compile-flags: --print yyyy @@ -319,8 +321,8 @@ It should be preferred to using `error-pattern`, which is imprecise and non-exha ### `error-pattern` -The `error-pattern` [directive](directives.md) can be used for runtime messages, which don't -have a specific span, or in exceptional cases, for compile time messages. +The `error-pattern` [directive](directives.md) can be used for runtime messages which don't +have a specific span, or, in exceptional cases, for compile time messages. Let's think about this test: @@ -347,8 +349,6 @@ fn main() { } ``` -Use of `error-pattern` is not recommended in general. - For strict testing of compile time output, try to use the line annotations `//~` as much as possible, including `//~?` annotations for diagnostics without spans. @@ -359,7 +359,8 @@ Some of the compiler messages can stay uncovered by annotations in this mode. For checking runtime output, `//@ check-run-results` may be preferable. -Only use `error-pattern` if none of the above works. 
+Only use `error-pattern` if none of the above works, such as when finding a +specific string pattern in a runtime panic output. Line annotations `//~` and `error-pattern` are compatible and can be used in the same test. @@ -448,7 +449,7 @@ even run the resulting program. Just add one of the following - `//@ build-pass` — compilation and linking should succeed but do not run the resulting binary. - `//@ run-pass` — compilation should succeed and running the resulting - binary should also succeed. + binary should make it exit with code 0 which indicates success. - Fail directives: - `//@ check-fail` — compilation should fail (the codegen phase is skipped). This is the default for UI tests. @@ -457,10 +458,20 @@ even run the resulting program. Just add one of the following - First time is to ensure that the compile succeeds without the codegen phase - Second time is to ensure that the full compile fails - `//@ run-fail` — compilation should succeed, but running the resulting - binary should fail. - -For `run-pass` and `run-fail` tests, by default the output of the program itself -is not checked. + binary should make it exit with a code in the range `1..=127` which + indicates regular failure. On targets without unwind support, crashes + are also accepted. + - `//@ run-crash` — compilation should succeed, but running the resulting + binary should fail with a crash. Crashing is defined as "not exiting with + a code in the range `0..=127`". Example on Linux: Termination by `SIGABRT` + or `SIGSEGV`. Example on Windows: Exiting with the code for + `STATUS_ILLEGAL_INSTRUCTION` (`0xC000001D`). + - `//@ run-fail-or-crash` — compilation should succeed, but running the + resulting binary should either `run-fail` or `run-crash`. Useful if a test + crashes on some targets but just fails on others. + +For `run-pass`, `run-fail`, `run-crash` and `run-fail-or-crash` tests, by +default the output of the program itself is not checked. If you want to check the output of running the program, include the `check-run-results` directive. This will check for a `.run.stderr` and diff --git a/src/doc/rustc-dev-guide/src/ty_module/instantiating_binders.md b/src/doc/rustc-dev-guide/src/ty_module/instantiating_binders.md index e3f091ca45f..0d1108c72e0 100644 --- a/src/doc/rustc-dev-guide/src/ty_module/instantiating_binders.md +++ b/src/doc/rustc-dev-guide/src/ty_module/instantiating_binders.md @@ -77,7 +77,7 @@ This end result is incorrect as we had two separate binders introducing their ow While in theory we could make this work it would be quite involved and more complex than the current setup, we would have to: - "rewrite" bound variables to have a higher `DebruijnIndex` whenever instantiating a `Binder`/`EarlyBinder` with a `Bound` ty/const/region -- When inferring an inference variable to a bound var, if that bound var is from a binder enterred after creating the infer var, we would have to lower the `DebruijnIndex` of the var. +- When inferring an inference variable to a bound var, if that bound var is from a binder entered after creating the infer var, we would have to lower the `DebruijnIndex` of the var. 
- Separately track what binder an inference variable was created inside of, also what the innermost binder it can name parameters from (currently we only have to track the latter) - When resolving inference variables rewrite any bound variables according to the current binder depth of the infcx - Maybe more (while writing this list items kept getting added so it seems naive to think this is exhaustive) diff --git a/src/doc/rustc/src/platform-support/xtensa.md b/src/doc/rustc/src/platform-support/xtensa.md index 994b3adb92e..8592ce7eda9 100644 --- a/src/doc/rustc/src/platform-support/xtensa.md +++ b/src/doc/rustc/src/platform-support/xtensa.md @@ -24,4 +24,4 @@ Xtensa targets that support `std` are documented in the [ESP-IDF platform suppor ## Building the targets -The targets can be built by installing the [Xtensa enabled Rust channel](https://github.com/esp-rs/rust/). See instructions in the [RISC-V and Xtensa Targets section of The Rust on ESP Book](https://docs.esp-rs.org/book/installation/riscv-and-xtensa.html). +The targets can be built by installing the [Xtensa enabled Rust channel](https://github.com/esp-rs/rust/). See instructions in the [RISC-V and Xtensa Targets section of The Rust on ESP Book](https://docs.espressif.com/projects/rust/book/installation/index.html). diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md index 27910ad0ab7..7bd2970eee7 100644 --- a/src/doc/rustdoc/src/unstable-features.md +++ b/src/doc/rustdoc/src/unstable-features.md @@ -395,6 +395,12 @@ flags to control that behavior. When the `--extern-html-root-url` flag is given one of your dependencies, rustdoc use that URL for those docs. Keep in mind that if those docs exist in the output directory, those local docs will still override this flag. +The names in this flag are first matched against the names given in the `--extern name=` flags, +which allows selecting between multiple crates with the same name (e.g. multiple versions of +the same crate). For transitive dependencies that haven't been loaded via an `--extern` flag, matching +falls back to using crate names only, without the ability to distinguish between multiple crates with +the same name. + ## `-Z force-unstable-if-unmarked` Using this flag looks like this: diff --git a/src/doc/unstable-book/src/compiler-flags/offload.md b/src/doc/unstable-book/src/compiler-flags/offload.md new file mode 100644 index 00000000000..4266e8c11a2 --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/offload.md @@ -0,0 +1,8 @@ +# `offload` + +The tracking issue for this feature is: [#131513](https://github.com/rust-lang/rust/issues/131513). + +------------------------ + +This feature will later allow you to run functions on GPUs. It is work in progress. +Set the `-Zoffload=Enable` compiler flag to experiment with it. diff --git a/src/etc/completions/x.fish b/src/etc/completions/x.fish index 28a228d5464..a5e5bb8f09e 100644 --- a/src/etc/completions/x.fish +++ b/src/etc/completions/x.fish @@ -299,7 +299,7 @@ complete -c x -n "__fish_x_using_subcommand doc" -l skip-std-check-if-no-downloa complete -c x -n "__fish_x_using_subcommand doc" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x -n "__fish_x_using_subcommand test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)' -r complete -c x -n "__fish_x_using_subcommand test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r -complete -c x -n "__fish_x_using_subcommand test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, shell:lint, cpp, cpp:fmt, spellcheck)' -r +complete -c x -n "__fish_x_using_subcommand test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, cpp, cpp:fmt, js, js:lint, js:typecheck, spellcheck)' -r complete -c x -n "__fish_x_using_subcommand test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r complete -c x -n "__fish_x_using_subcommand test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r complete -c x -n "__fish_x_using_subcommand test" -l run -d 'whether to execute run-* tests' -r diff --git a/src/etc/completions/x.ps1 b/src/etc/completions/x.ps1 index 0c9b3828273..4fee3bc0a86 100644 --- a/src/etc/completions/x.ps1 +++ b/src/etc/completions/x.ps1 @@ -345,7 +345,7 @@ Register-ArgumentCompleter -Native -CommandName 'x' -ScriptBlock { 'x;test' { [CompletionResult]::new('--test-args', '--test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)') [CompletionResult]::new('--compiletest-rustc-args', '--compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests') - [CompletionResult]::new('--extra-checks', '--extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, shell:lint, cpp, cpp:fmt, spellcheck)') + [CompletionResult]::new('--extra-checks', '--extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, cpp, cpp:fmt, js, js:lint, js:typecheck, spellcheck)') [CompletionResult]::new('--compare-mode', '--compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to') [CompletionResult]::new('--pass', '--pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode') [CompletionResult]::new('--run', '--run', [CompletionResultType]::ParameterName, 'whether to execute run-* tests') diff --git a/src/etc/completions/x.py.fish b/src/etc/completions/x.py.fish index 43ae7424e27..e2e6ae05ee0 100644 --- a/src/etc/completions/x.py.fish +++ b/src/etc/completions/x.py.fish @@ -299,7 +299,7 @@ complete -c x.py -n "__fish_x.py_using_subcommand doc" -l skip-std-check-if-no-d complete -c x.py -n "__fish_x.py_using_subcommand doc" -s h -l help -d 'Print help (see more with \'--help\')' complete -c x.py -n "__fish_x.py_using_subcommand test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)' -r complete -c x.py -n "__fish_x.py_using_subcommand test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r -complete -c x.py -n "__fish_x.py_using_subcommand test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, shell:lint, cpp, cpp:fmt, spellcheck)' -r +complete -c x.py -n "__fish_x.py_using_subcommand test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, cpp, cpp:fmt, js, js:lint, js:typecheck, spellcheck)' -r complete -c x.py -n "__fish_x.py_using_subcommand test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r complete -c x.py -n "__fish_x.py_using_subcommand test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r complete -c x.py -n "__fish_x.py_using_subcommand test" -l run -d 'whether to execute run-* tests' -r diff --git a/src/etc/completions/x.py.ps1 b/src/etc/completions/x.py.ps1 index 4311e383d64..ea3aacc21c7 100644 --- a/src/etc/completions/x.py.ps1 +++ b/src/etc/completions/x.py.ps1 @@ -345,7 +345,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock { 'x.py;test' { [CompletionResult]::new('--test-args', '--test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)') [CompletionResult]::new('--compiletest-rustc-args', '--compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests') - [CompletionResult]::new('--extra-checks', '--extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, shell:lint, cpp, cpp:fmt, spellcheck)') + [CompletionResult]::new('--extra-checks', '--extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell, cpp, cpp:fmt, js, js:lint, js:typecheck, spellcheck)') [CompletionResult]::new('--compare-mode', '--compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to') [CompletionResult]::new('--pass', '--pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode') [CompletionResult]::new('--run', '--run', [CompletionResultType]::ParameterName, 'whether to execute run-* tests') diff --git a/src/etc/completions/x.py.zsh b/src/etc/completions/x.py.zsh index aff35b31e21..32e986ad141 100644 --- a/src/etc/completions/x.py.zsh +++ b/src/etc/completions/x.py.zsh @@ -345,7 +345,7 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. 
libtest, compiletest or rustdoc)]:ARGS:_default' \ '*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS:_default' \ -'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell, shell\:lint, cpp, cpp\:fmt, spellcheck)]:EXTRA_CHECKS:_default' \ +'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell, cpp, cpp\:fmt, js, js\:lint, js\:typecheck, spellcheck)]:EXTRA_CHECKS:_default' \ '--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE:_default' \ '--pass=[force {check,build,run}-pass tests to this mode]:check | build | run:_default' \ '--run=[whether to execute run-* tests]:auto | always | never:_default' \ diff --git a/src/etc/completions/x.zsh b/src/etc/completions/x.zsh index 28ad00f3a0d..65995553276 100644 --- a/src/etc/completions/x.zsh +++ b/src/etc/completions/x.zsh @@ -345,7 +345,7 @@ _arguments "${_arguments_options[@]}" : \ _arguments "${_arguments_options[@]}" : \ '*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS:_default' \ '*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS:_default' \ -'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell, shell\:lint, cpp, cpp\:fmt, spellcheck)]:EXTRA_CHECKS:_default' \ +'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell, cpp, cpp\:fmt, js, js\:lint, js\:typecheck, spellcheck)]:EXTRA_CHECKS:_default' \ '--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE:_default' \ '--pass=[force {check,build,run}-pass tests to this mode]:check | build | run:_default' \ '--run=[whether to execute run-* tests]:auto | always | never:_default' \ diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs index a91ea55bcae..e6ac0270f78 100644 --- a/src/librustdoc/clean/auto_trait.rs +++ b/src/librustdoc/clean/auto_trait.rs @@ -1,6 +1,6 @@ use rustc_data_structures::fx::{FxIndexMap, FxIndexSet, IndexEntry}; use rustc_hir as hir; -use rustc_infer::infer::region_constraints::{Constraint, RegionConstraintData}; +use rustc_infer::infer::region_constraints::{ConstraintKind, RegionConstraintData}; use rustc_middle::bug; use rustc_middle::ty::{self, Region, Ty, fold_regions}; use rustc_span::def_id::DefId; @@ -233,31 +233,35 @@ fn clean_region_outlives_constraints<'tcx>( // Each `RegionTarget` (a `RegionVid` or a `Region`) maps to its smaller and larger regions. // Note that "larger" regions correspond to sub regions in the surface language. // E.g., in `'a: 'b`, `'a` is the larger region. 
- for (constraint, _) in ®ions.constraints { - match *constraint { - Constraint::VarSubVar(vid1, vid2) => { - let deps1 = map.entry(RegionTarget::RegionVid(vid1)).or_default(); - deps1.larger.insert(RegionTarget::RegionVid(vid2)); + for (c, _) in ®ions.constraints { + match c.kind { + ConstraintKind::VarSubVar => { + let sub_vid = c.sub.as_var(); + let sup_vid = c.sup.as_var(); + let deps1 = map.entry(RegionTarget::RegionVid(sub_vid)).or_default(); + deps1.larger.insert(RegionTarget::RegionVid(sup_vid)); - let deps2 = map.entry(RegionTarget::RegionVid(vid2)).or_default(); - deps2.smaller.insert(RegionTarget::RegionVid(vid1)); + let deps2 = map.entry(RegionTarget::RegionVid(sup_vid)).or_default(); + deps2.smaller.insert(RegionTarget::RegionVid(sub_vid)); } - Constraint::RegSubVar(region, vid) => { - let deps = map.entry(RegionTarget::RegionVid(vid)).or_default(); - deps.smaller.insert(RegionTarget::Region(region)); + ConstraintKind::RegSubVar => { + let sup_vid = c.sup.as_var(); + let deps = map.entry(RegionTarget::RegionVid(sup_vid)).or_default(); + deps.smaller.insert(RegionTarget::Region(c.sub)); } - Constraint::VarSubReg(vid, region) => { - let deps = map.entry(RegionTarget::RegionVid(vid)).or_default(); - deps.larger.insert(RegionTarget::Region(region)); + ConstraintKind::VarSubReg => { + let sub_vid = c.sub.as_var(); + let deps = map.entry(RegionTarget::RegionVid(sub_vid)).or_default(); + deps.larger.insert(RegionTarget::Region(c.sup)); } - Constraint::RegSubReg(r1, r2) => { + ConstraintKind::RegSubReg => { // The constraint is already in the form that we want, so we're done with it // The desired order is [larger, smaller], so flip them. - if early_bound_region_name(r1) != early_bound_region_name(r2) { + if early_bound_region_name(c.sub) != early_bound_region_name(c.sup) { outlives_predicates - .entry(early_bound_region_name(r2).expect("no region_name found")) + .entry(early_bound_region_name(c.sup).expect("no region_name found")) .or_default() - .push(r1); + .push(c.sub); } } } diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 9603399f235..8c0f897c992 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -217,7 +217,7 @@ pub(crate) fn try_inline_glob( } pub(crate) fn load_attrs<'hir>(cx: &DocContext<'hir>, did: DefId) -> &'hir [hir::Attribute] { - cx.tcx.get_attrs_unchecked(did) + cx.tcx.get_all_attrs(did) } pub(crate) fn item_relative_path(tcx: TyCtxt<'_>, def_id: DefId) -> Vec<Symbol> { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 1265a39d27b..743ed2b5045 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -39,9 +39,10 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry}; use rustc_errors::codes::*; use rustc_errors::{FatalError, struct_span_code_err}; +use rustc_hir::attrs::AttributeKind; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::{DefId, DefIdMap, DefIdSet, LOCAL_CRATE, LocalDefId}; -use rustc_hir::{LangItem, PredicateOrigin}; +use rustc_hir::{LangItem, PredicateOrigin, find_attr}; use rustc_hir_analysis::hir_ty_lowering::FeedConstTy; use rustc_hir_analysis::{lower_const_arg_for_rustdoc, lower_ty}; use rustc_middle::metadata::Reexport; @@ -987,28 +988,17 @@ fn clean_proc_macro<'tcx>( kind: MacroKind, cx: &mut DocContext<'tcx>, ) -> ItemKind { - let attrs = cx.tcx.hir_attrs(item.hir_id()); - if kind == MacroKind::Derive - && let 
Some(derive_name) = - hir_attr_lists(attrs, sym::proc_macro_derive).find_map(|mi| mi.ident()) - { - *name = derive_name.name; + if kind != MacroKind::Derive { + return ProcMacroItem(ProcMacro { kind, helpers: vec![] }); } + let attrs = cx.tcx.hir_attrs(item.hir_id()); + let Some((trait_name, helper_attrs)) = find_attr!(attrs, AttributeKind::ProcMacroDerive { trait_name, helper_attrs, ..} => (*trait_name, helper_attrs)) + else { + return ProcMacroItem(ProcMacro { kind, helpers: vec![] }); + }; + *name = trait_name; + let helpers = helper_attrs.iter().copied().collect(); - let mut helpers = Vec::new(); - for mi in hir_attr_lists(attrs, sym::proc_macro_derive) { - if !mi.has_name(sym::attributes) { - continue; - } - - if let Some(list) = mi.meta_item_list() { - for inner_mi in list { - if let Some(ident) = inner_mi.ident() { - helpers.push(ident.name); - } - } - } - } ProcMacroItem(ProcMacro { kind, helpers }) } @@ -1021,17 +1011,16 @@ fn clean_fn_or_proc_macro<'tcx>( cx: &mut DocContext<'tcx>, ) -> ItemKind { let attrs = cx.tcx.hir_attrs(item.hir_id()); - let macro_kind = attrs.iter().find_map(|a| { - if a.has_name(sym::proc_macro) { - Some(MacroKind::Bang) - } else if a.has_name(sym::proc_macro_derive) { - Some(MacroKind::Derive) - } else if a.has_name(sym::proc_macro_attribute) { - Some(MacroKind::Attr) - } else { - None - } - }); + let macro_kind = if find_attr!(attrs, AttributeKind::ProcMacro(..)) { + Some(MacroKind::Bang) + } else if find_attr!(attrs, AttributeKind::ProcMacroDerive { .. }) { + Some(MacroKind::Derive) + } else if find_attr!(attrs, AttributeKind::ProcMacroAttribute(..)) { + Some(MacroKind::Attr) + } else { + None + }; + match macro_kind { Some(kind) => clean_proc_macro(item, name, kind, cx), None => { diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 09647492d93..782311e593b 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -6,14 +6,12 @@ use std::{fmt, iter}; use arrayvec::ArrayVec; use itertools::Either; use rustc_abi::{ExternAbi, VariantIdx}; -use rustc_attr_data_structures::{ - AttributeKind, ConstStability, Deprecation, Stability, StableSince, find_attr, -}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_hir::attrs::{AttributeKind, DeprecatedSince, Deprecation}; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId}; use rustc_hir::lang_items::LangItem; -use rustc_hir::{BodyId, Mutability}; +use rustc_hir::{BodyId, ConstStability, Mutability, Stability, StableSince, find_attr}; use rustc_index::IndexVec; use rustc_metadata::rendered_const; use rustc_middle::span_bug; @@ -387,13 +385,13 @@ impl Item { // versions; the paths that are exposed through it are "deprecated" because they // were never supposed to work at all. let stab = self.stability(tcx)?; - if let rustc_attr_data_structures::StabilityLevel::Stable { + if let rustc_hir::StabilityLevel::Stable { allowed_through_unstable_modules: Some(note), .. 
} = stab.level { Some(Deprecation { - since: rustc_attr_data_structures::DeprecatedSince::Unspecified, + since: DeprecatedSince::Unspecified, note: Some(note), suggestion: None, }) @@ -404,10 +402,7 @@ impl Item { } pub(crate) fn inner_docs(&self, tcx: TyCtxt<'_>) -> bool { - self.item_id - .as_def_id() - .map(|did| inner_docs(tcx.get_attrs_unchecked(did))) - .unwrap_or(false) + self.item_id.as_def_id().map(|did| inner_docs(tcx.get_all_attrs(did))).unwrap_or(false) } pub(crate) fn span(&self, tcx: TyCtxt<'_>) -> Option<Span> { @@ -452,7 +447,7 @@ impl Item { kind: ItemKind, cx: &mut DocContext<'_>, ) -> Item { - let hir_attrs = cx.tcx.get_attrs_unchecked(def_id); + let hir_attrs = cx.tcx.get_all_attrs(def_id); Self::from_def_id_and_attrs_and_parts( def_id, @@ -768,13 +763,13 @@ impl Item { .iter() .filter_map(|attr| match attr { hir::Attribute::Parsed(AttributeKind::LinkSection { name, .. }) => { - Some(format!("#[link_section = \"{name}\"]")) + Some(format!("#[unsafe(link_section = \"{name}\")]")) } hir::Attribute::Parsed(AttributeKind::NoMangle(..)) => { - Some("#[no_mangle]".to_string()) + Some("#[unsafe(no_mangle)]".to_string()) } hir::Attribute::Parsed(AttributeKind::ExportName { name, .. }) => { - Some(format!("#[export_name = \"{name}\"]")) + Some(format!("#[unsafe(export_name = \"{name}\")]")) } hir::Attribute::Parsed(AttributeKind::NonExhaustive(..)) => { Some("#[non_exhaustive]".to_string()) @@ -1677,7 +1672,7 @@ impl Type { } } - pub(crate) fn generics<'a>(&'a self) -> Option<impl Iterator<Item = &'a Type>> { + pub(crate) fn generics(&self) -> Option<impl Iterator<Item = &Type>> { match self { Type::Path { path, .. } => path.generics(), _ => None, @@ -1963,43 +1958,6 @@ impl PrimitiveType { } } -impl From<ast::IntTy> for PrimitiveType { - fn from(int_ty: ast::IntTy) -> PrimitiveType { - match int_ty { - ast::IntTy::Isize => PrimitiveType::Isize, - ast::IntTy::I8 => PrimitiveType::I8, - ast::IntTy::I16 => PrimitiveType::I16, - ast::IntTy::I32 => PrimitiveType::I32, - ast::IntTy::I64 => PrimitiveType::I64, - ast::IntTy::I128 => PrimitiveType::I128, - } - } -} - -impl From<ast::UintTy> for PrimitiveType { - fn from(uint_ty: ast::UintTy) -> PrimitiveType { - match uint_ty { - ast::UintTy::Usize => PrimitiveType::Usize, - ast::UintTy::U8 => PrimitiveType::U8, - ast::UintTy::U16 => PrimitiveType::U16, - ast::UintTy::U32 => PrimitiveType::U32, - ast::UintTy::U64 => PrimitiveType::U64, - ast::UintTy::U128 => PrimitiveType::U128, - } - } -} - -impl From<ast::FloatTy> for PrimitiveType { - fn from(float_ty: ast::FloatTy) -> PrimitiveType { - match float_ty { - ast::FloatTy::F16 => PrimitiveType::F16, - ast::FloatTy::F32 => PrimitiveType::F32, - ast::FloatTy::F64 => PrimitiveType::F64, - ast::FloatTy::F128 => PrimitiveType::F128, - } - } -} - impl From<ty::IntTy> for PrimitiveType { fn from(int_ty: ty::IntTy) -> PrimitiveType { match int_ty { @@ -2227,7 +2185,7 @@ impl Path { self.segments.last().map(|seg| &seg.args) } - pub(crate) fn generics<'a>(&'a self) -> Option<impl Iterator<Item = &'a Type>> { + pub(crate) fn generics(&self) -> Option<impl Iterator<Item = &Type>> { self.segments.last().and_then(|seg| { if let GenericArgs::AngleBracketed { ref args, .. 
} = seg.args { Some(args.iter().filter_map(|arg| match arg { diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index fd1b17b6476..813fdee57e1 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -343,13 +343,11 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol { pub(crate) fn print_const(cx: &DocContext<'_>, n: ty::Const<'_>) -> String { match n.kind() { ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, args: _ }) => { - let s = if let Some(def) = def.as_local() { + if let Some(def) = def.as_local() { rendered_const(cx.tcx, cx.tcx.hir_body_owned_by(def), def) } else { inline::print_inlined_const(cx.tcx, def) - }; - - s + } } // array lengths are obviously usize ty::ConstKind::Value(cv) if *cv.ty.kind() == ty::Uint(ty::UintTy::Usize) => { diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 986390dbaa0..fed4296fa22 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -173,6 +173,9 @@ pub(crate) struct Options { /// Arguments to be used when compiling doctests. pub(crate) doctest_build_args: Vec<String>, + + /// Target modifiers. + pub(crate) target_modifiers: BTreeMap<OptionsTargetModifiers, String>, } impl fmt::Debug for Options { @@ -377,7 +380,7 @@ impl Options { early_dcx: &mut EarlyDiagCtxt, matches: &getopts::Matches, args: Vec<String>, - ) -> Option<(InputMode, Options, RenderOptions)> { + ) -> Option<(InputMode, Options, RenderOptions, Vec<PathBuf>)> { // Check for unstable options. nightly_options::check_nightly_options(early_dcx, matches, &opts()); @@ -640,10 +643,13 @@ impl Options { let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s)); - if let Some(ref p) = extension_css - && !p.is_file() - { - dcx.fatal("option --extend-css argument must be a file"); + let mut loaded_paths = Vec::new(); + + if let Some(ref p) = extension_css { + loaded_paths.push(p.clone()); + if !p.is_file() { + dcx.fatal("option --extend-css argument must be a file"); + } } let mut themes = Vec::new(); @@ -687,6 +693,7 @@ impl Options { )) .emit(); } + loaded_paths.push(theme_file.clone()); themes.push(StylePath { path: theme_file }); } } @@ -705,6 +712,7 @@ impl Options { &mut id_map, edition, &None, + &mut loaded_paths, ) else { dcx.fatal("`ExternalHtml::load` failed"); }; @@ -796,7 +804,8 @@ impl Options { let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx); let with_examples = matches.opt_strs("with-examples"); - let call_locations = crate::scrape_examples::load_call_locations(with_examples, dcx); + let call_locations = + crate::scrape_examples::load_call_locations(with_examples, dcx, &mut loaded_paths); let doctest_build_args = matches.opt_strs("doctest-build-arg"); let unstable_features = @@ -846,6 +855,7 @@ impl Options { unstable_features, expanded_args: args, doctest_build_args, + target_modifiers, }; let render_options = RenderOptions { output, @@ -881,7 +891,7 @@ impl Options { parts_out_dir, disable_minification, }; - Some((input, options, render_options)) + Some((input, options, render_options, loaded_paths)) } } diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index bd57bb21e63..e89733b2f6d 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -214,6 +214,7 @@ pub(crate) fn create_config( scrape_examples_options, expanded_args, remap_path_prefix, + target_modifiers, .. 
}: RustdocOptions, render_options: &RenderOptions, @@ -277,6 +278,7 @@ pub(crate) fn create_config( } else { OutputTypes::new(&[]) }, + target_modifiers, ..Options::default() }; diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index 9b4d2533954..35ace656638 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -11,7 +11,8 @@ use std::path::{Path, PathBuf}; use std::process::{self, Command, Stdio}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; -use std::{panic, str}; +use std::time::{Duration, Instant}; +use std::{fmt, panic, str}; pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder}; pub(crate) use markdown::test as test_markdown; @@ -36,6 +37,50 @@ use crate::config::{Options as RustdocOptions, OutputFormat}; use crate::html::markdown::{ErrorCodes, Ignore, LangString, MdRelLine}; use crate::lint::init_lints; +/// Type used to display times (compilation and total) information for merged doctests. +struct MergedDoctestTimes { + total_time: Instant, + /// Total time spent compiling all merged doctests. + compilation_time: Duration, + /// This field is used to keep track of how many merged doctests we (tried to) compile. + added_compilation_times: usize, +} + +impl MergedDoctestTimes { + fn new() -> Self { + Self { + total_time: Instant::now(), + compilation_time: Duration::default(), + added_compilation_times: 0, + } + } + + fn add_compilation_time(&mut self, duration: Duration) { + self.compilation_time += duration; + self.added_compilation_times += 1; + } + + fn display_times(&self) { + // If no merged doctest was compiled, then there is nothing to display since the numbers + // displayed by `libtest` for standalone tests are already accurate (they include both + // compilation and runtime). + if self.added_compilation_times > 0 { + println!("{self}"); + } + } +} + +impl fmt::Display for MergedDoctestTimes { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "all doctests ran in {:.2}s; merged doctests compilation took {:.2}s", + self.total_time.elapsed().as_secs_f64(), + self.compilation_time.as_secs_f64(), + ) + } +} + /// Options that apply to all doctests in a crate or Markdown file (for `rustdoc foo.md`). #[derive(Clone)] pub(crate) struct GlobalTestOptions { @@ -295,6 +340,7 @@ pub(crate) fn run_tests( let mut nb_errors = 0; let mut ran_edition_tests = 0; + let mut times = MergedDoctestTimes::new(); let target_str = rustdoc_options.target.to_string(); for (MergeableTestKey { edition, global_crate_attrs_hash }, mut doctests) in mergeable_tests { @@ -314,13 +360,15 @@ pub(crate) fn run_tests( for (doctest, scraped_test) in &doctests { tests_runner.add_test(doctest, scraped_test, &target_str); } - if let Ok(success) = tests_runner.run_merged_tests( + let (duration, ret) = tests_runner.run_merged_tests( rustdoc_test_options, edition, &opts, &test_args, rustdoc_options, - ) { + ); + times.add_compilation_time(duration); + if let Ok(success) = ret { ran_edition_tests += 1; if !success { nb_errors += 1; @@ -354,12 +402,15 @@ pub(crate) fn run_tests( test::test_main_with_exit_callback(&test_args, standalone_tests, None, || { // We ensure temp dir destructor is called. std::mem::drop(temp_dir.take()); + times.display_times(); }); } if nb_errors != 0 { // We ensure temp dir destructor is called. std::mem::drop(temp_dir); - // libtest::ERROR_EXIT_CODE is not public but it's the same value. + times.display_times(); + // FIXME(GuillaumeGomez): Uncomment the next line once #144297 has been merged. 
+ // std::process::exit(test::ERROR_EXIT_CODE); std::process::exit(101); } } @@ -444,7 +495,7 @@ fn add_exe_suffix(input: String, target: &TargetTuple) -> String { let exe_suffix = match target { TargetTuple::TargetTuple(_) => Target::expect_builtin(target).options.exe_suffix, TargetTuple::TargetJson { contents, .. } => { - Target::from_json(contents.parse().unwrap()).unwrap().0.options.exe_suffix + Target::from_json(contents).unwrap().0.options.exe_suffix } }; input + &exe_suffix @@ -496,16 +547,19 @@ impl RunnableDocTest { /// /// This is the function that calculates the compiler command line, invokes the compiler, then /// invokes the test or tests in a separate executable (if applicable). +/// +/// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test. fn run_test( doctest: RunnableDocTest, rustdoc_options: &RustdocOptions, supports_color: bool, report_unused_externs: impl Fn(UnusedExterns), -) -> Result<(), TestFailure> { +) -> (Duration, Result<(), TestFailure>) { let langstr = &doctest.langstr; // Make sure we emit well-formed executable names for our target. let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target); let output_file = doctest.test_opts.outdir.path().join(rust_out); + let instant = Instant::now(); // Common arguments used for compiling the doctest runner. // On merged doctests, the compiler is invoked twice: once for the test code itself, @@ -589,7 +643,7 @@ fn run_test( if std::fs::write(&input_file, &doctest.full_test_code).is_err() { // If we cannot write this file for any reason, we leave. All combined tests will be // tested as standalone tests. - return Err(TestFailure::CompileError); + return (Duration::default(), Err(TestFailure::CompileError)); } if !rustdoc_options.nocapture { // If `nocapture` is disabled, then we don't display rustc's output when compiling @@ -632,7 +686,7 @@ fn run_test( // the user to exploit nightly-only features on stable runner_compiler.env("RUSTC_BOOTSTRAP", "1"); runner_compiler.args(compiler_args); - runner_compiler.args(&["--crate-type=bin", "-o"]).arg(&output_file); + runner_compiler.args(["--crate-type=bin", "-o"]).arg(&output_file); let mut extern_path = std::ffi::OsString::from(format!( "--extern=doctest_bundle_{edition}=", edition = doctest.edition @@ -657,10 +711,10 @@ fn run_test( extern_path.push(&output_bundle_file); runner_compiler.arg(extern_path); runner_compiler.arg(&runner_input_file); - if std::fs::write(&runner_input_file, &merged_test_code).is_err() { + if std::fs::write(&runner_input_file, merged_test_code).is_err() { // If we cannot write this file for any reason, we leave. All combined tests will be // tested as standalone tests. 
- return Err(TestFailure::CompileError); + return (instant.elapsed(), Err(TestFailure::CompileError)); } if !rustdoc_options.nocapture { // If `nocapture` is disabled, then we don't display rustc's output when compiling @@ -713,7 +767,7 @@ fn run_test( let _bomb = Bomb(&out); match (output.status.success(), langstr.compile_fail) { (true, true) => { - return Err(TestFailure::UnexpectedCompilePass); + return (instant.elapsed(), Err(TestFailure::UnexpectedCompilePass)); } (true, false) => {} (false, true) => { @@ -729,17 +783,18 @@ fn run_test( .collect(); if !missing_codes.is_empty() { - return Err(TestFailure::MissingErrorCodes(missing_codes)); + return (instant.elapsed(), Err(TestFailure::MissingErrorCodes(missing_codes))); } } } (false, false) => { - return Err(TestFailure::CompileError); + return (instant.elapsed(), Err(TestFailure::CompileError)); } } + let duration = instant.elapsed(); if doctest.no_run { - return Ok(()); + return (duration, Ok(())); } // Run the code! @@ -771,17 +826,17 @@ fn run_test( cmd.output() }; match result { - Err(e) => return Err(TestFailure::ExecutionError(e)), + Err(e) => return (duration, Err(TestFailure::ExecutionError(e))), Ok(out) => { if langstr.should_panic && out.status.success() { - return Err(TestFailure::UnexpectedRunPass); + return (duration, Err(TestFailure::UnexpectedRunPass)); } else if !langstr.should_panic && !out.status.success() { - return Err(TestFailure::ExecutionFailure(out)); + return (duration, Err(TestFailure::ExecutionFailure(out))); } } } - Ok(()) + (duration, Ok(())) } /// Converts a path intended to use as a command to absolute if it is @@ -1071,7 +1126,7 @@ fn doctest_run_fn( no_run: scraped_test.no_run(&rustdoc_options), merged_test_code: None, }; - let res = + let (_, res) = run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs); if let Err(err) = res { diff --git a/src/librustdoc/doctest/runner.rs b/src/librustdoc/doctest/runner.rs index f0914474c79..fcfa424968e 100644 --- a/src/librustdoc/doctest/runner.rs +++ b/src/librustdoc/doctest/runner.rs @@ -1,4 +1,5 @@ use std::fmt::Write; +use std::time::Duration; use rustc_data_structures::fx::FxIndexSet; use rustc_span::edition::Edition; @@ -67,6 +68,10 @@ impl DocTestRunner { self.nb_tests += 1; } + /// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test. + /// + /// If compilation failed, it will return `Err`, otherwise it will return `Ok` containing if + /// the test ran successfully. 
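The doctest.rs and runner.rs hunks above thread the compilation `Duration` back to the caller so `MergedDoctestTimes` can sum it over every merged doctest. A minimal, self-contained sketch of that return-a-duration-with-the-result pattern, using only `std` (the `compile`/`CompileError` names below are placeholders, not rustdoc APIs):

use std::time::{Duration, Instant};

#[derive(Debug)]
enum CompileError { Failed }

// Stand-in for invoking rustc on a doctest.
fn compile(source: &str) -> Result<(), CompileError> {
    if source.is_empty() { Err(CompileError::Failed) } else { Ok(()) }
}

// Return the elapsed compilation time alongside the result, so the caller can
// accumulate the durations and report them once all merged doctests have run.
fn timed_compile(source: &str) -> (Duration, Result<(), CompileError>) {
    let start = Instant::now();
    let ret = compile(source);
    (start.elapsed(), ret)
}

fn main() {
    let mut total = Duration::default();
    for src in ["fn main() {}", ""] {
        let (elapsed, ret) = timed_compile(src);
        total += elapsed;
        println!("compiled in {:.2}s: {:?}", elapsed.as_secs_f64(), ret);
    }
    println!("merged doctests compilation took {:.2}s", total.as_secs_f64());
}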
pub(crate) fn run_merged_tests( &mut self, test_options: IndividualTestOptions, @@ -74,7 +79,7 @@ impl DocTestRunner { opts: &GlobalTestOptions, test_args: &[String], rustdoc_options: &RustdocOptions, - ) -> Result<bool, ()> { + ) -> (Duration, Result<bool, ()>) { let mut code = "\ #![allow(unused_extern_crates)] #![allow(internal_features)] @@ -204,9 +209,9 @@ std::process::Termination::report(test::test_main(test_args, tests, None)) no_run: false, merged_test_code: Some(code), }; - let ret = + let (duration, ret) = run_test(runnable_test, rustdoc_options, self.supports_color, |_: UnusedExterns| {}); - if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) } + (duration, if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) }) } } diff --git a/src/librustdoc/doctest/rust.rs b/src/librustdoc/doctest/rust.rs index 96975105ac5..f5ec828187a 100644 --- a/src/librustdoc/doctest/rust.rs +++ b/src/librustdoc/doctest/rust.rs @@ -140,7 +140,7 @@ impl HirCollector<'_> { .iter() .filter(|a| a.has_name(sym::attr)) .flat_map(|a| a.meta_item_list().unwrap_or_default()) - .map(|i| pprust::meta_list_item_to_string(i)) + .map(pprust::meta_list_item_to_string) { // Add the additional attributes to the global_crate_attrs vector self.collector.global_crate_attrs.push(attr); diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index ea2aa963edd..42ade5b9004 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -1,4 +1,4 @@ -use std::path::Path; +use std::path::{Path, PathBuf}; use std::{fs, str}; use rustc_errors::DiagCtxtHandle; @@ -32,12 +32,13 @@ impl ExternalHtml { id_map: &mut IdMap, edition: Edition, playground: &Option<Playground>, + loaded_paths: &mut Vec<PathBuf>, ) -> Option<ExternalHtml> { let codes = ErrorCodes::from(nightly_build); - let ih = load_external_files(in_header, dcx)?; + let ih = load_external_files(in_header, dcx, loaded_paths)?; let bc = { - let mut bc = load_external_files(before_content, dcx)?; - let m_bc = load_external_files(md_before_content, dcx)?; + let mut bc = load_external_files(before_content, dcx, loaded_paths)?; + let m_bc = load_external_files(md_before_content, dcx, loaded_paths)?; Markdown { content: &m_bc, links: &[], @@ -52,8 +53,8 @@ impl ExternalHtml { bc }; let ac = { - let mut ac = load_external_files(after_content, dcx)?; - let m_ac = load_external_files(md_after_content, dcx)?; + let mut ac = load_external_files(after_content, dcx, loaded_paths)?; + let m_ac = load_external_files(md_after_content, dcx, loaded_paths)?; Markdown { content: &m_ac, links: &[], @@ -79,8 +80,10 @@ pub(crate) enum LoadStringError { pub(crate) fn load_string<P: AsRef<Path>>( file_path: P, dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, ) -> Result<String, LoadStringError> { let file_path = file_path.as_ref(); + loaded_paths.push(file_path.to_owned()); let contents = match fs::read(file_path) { Ok(bytes) => bytes, Err(e) => { @@ -101,10 +104,14 @@ pub(crate) fn load_string<P: AsRef<Path>>( } } -fn load_external_files(names: &[String], dcx: DiagCtxtHandle<'_>) -> Option<String> { +fn load_external_files( + names: &[String], + dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, +) -> Option<String> { let mut out = String::new(); for name in names { - let Ok(s) = load_string(name, dcx) else { return None }; + let Ok(s) = load_string(name, dcx, loaded_paths) else { return None }; out.push_str(&s); out.push('\n'); } diff --git a/src/librustdoc/formats/cache.rs 
b/src/librustdoc/formats/cache.rs index 5191120ebdb..80399cf3842 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -1,9 +1,10 @@ use std::mem; use rustc_ast::join_path_syms; -use rustc_attr_data_structures::StabilityLevel; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_hir::StabilityLevel; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet}; +use rustc_metadata::creader::CStore; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::Symbol; use tracing::debug; @@ -158,18 +159,33 @@ impl Cache { assert!(cx.external_traits.is_empty()); cx.cache.traits = mem::take(&mut krate.external_traits); + let render_options = &cx.render_options; + let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence; + let dst = &render_options.output; + + // Make `--extern-html-root-url` support the same names as `--extern` whenever possible + let cstore = CStore::from_tcx(tcx); + for (name, extern_url) in &render_options.extern_html_root_urls { + if let Some(crate_num) = cstore.resolved_extern_crate(Symbol::intern(name)) { + let e = ExternalCrate { crate_num }; + let location = e.location(Some(extern_url), extern_url_takes_precedence, dst, tcx); + cx.cache.extern_locations.insert(e.crate_num, location); + } + } + // Cache where all our extern crates are located - // FIXME: this part is specific to HTML so it'd be nice to remove it from the common code + // This is also used in the JSON output. for &crate_num in tcx.crates(()) { let e = ExternalCrate { crate_num }; let name = e.name(tcx); - let render_options = &cx.render_options; - let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u); - let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence; - let dst = &render_options.output; - let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx); - cx.cache.extern_locations.insert(e.crate_num, location); + cx.cache.extern_locations.entry(e.crate_num).or_insert_with(|| { + // falls back to matching by crates' own names, because + // transitive dependencies and injected crates may be loaded without `--extern` + let extern_url = + render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u); + e.location(extern_url, extern_url_takes_precedence, dst, tcx) + }); cx.cache.external_paths.insert(e.def_id(), (vec![name], ItemType::Module)); } diff --git a/src/librustdoc/formats/renderer.rs b/src/librustdoc/formats/renderer.rs index 79ff1fa38c3..aa4be4db997 100644 --- a/src/librustdoc/formats/renderer.rs +++ b/src/librustdoc/formats/renderer.rs @@ -81,7 +81,7 @@ fn run_format_inner<'tcx, T: FormatRenderer<'tcx>>( let _timer = prof.generic_activity_with_arg("render_mod_item", item.name.unwrap().to_string()); - cx.mod_item_in(&item)?; + cx.mod_item_in(item)?; let (clean::StrippedItem(box clean::ModuleItem(ref module)) | clean::ModuleItem(ref module)) = item.inner.kind else { @@ -99,7 +99,7 @@ fn run_format_inner<'tcx, T: FormatRenderer<'tcx>>( } else if let Some(item_name) = item.name && !item.is_extern_crate() { - prof.generic_activity_with_arg("render_item", item_name.as_str()).run(|| cx.item(&item))?; + prof.generic_activity_with_arg("render_item", item_name.as_str()).run(|| cx.item(item))?; } Ok(()) } diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index b16485107a0..493fdc6fb1b 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -15,11 +15,11 @@ use std::slice; use 
itertools::{Either, Itertools}; use rustc_abi::ExternAbi; use rustc_ast::join_path_syms; -use rustc_attr_data_structures::{ConstStability, StabilityLevel, StableSince}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::def::DefKind; use rustc_hir::def_id::{DefId, LOCAL_CRATE}; +use rustc_hir::{ConstStability, StabilityLevel, StableSince}; use rustc_metadata::creader::{CStore, LoadedMacro}; use rustc_middle::ty::{self, TyCtxt, TypingMode}; use rustc_span::symbol::kw; @@ -114,9 +114,9 @@ impl clean::Generics { let real_params = fmt::from_fn(|f| real_params.clone().map(|g| g.print(cx)).joined(", ", f)); if f.alternate() { - write!(f, "<{:#}>", real_params) + write!(f, "<{real_params:#}>") } else { - write!(f, "<{}>", real_params) + write!(f, "<{real_params}>") } }) } @@ -368,6 +368,8 @@ pub(crate) enum HrefError { Private, // Not in external cache, href link should be in same page NotInExternalCache, + /// Refers to an unnamable item, such as one defined within a function or const block. + UnnamableItem, } /// This function is to get the external macro path because they are not in the cache used in @@ -479,6 +481,26 @@ fn generate_item_def_id_path( Ok((url_parts, shortty, fqp)) } +/// Checks if the given defid refers to an item that is unnamable, such as one defined in a const block. +fn is_unnamable(tcx: TyCtxt<'_>, did: DefId) -> bool { + let mut cur_did = did; + while let Some(parent) = tcx.opt_parent(cur_did) { + match tcx.def_kind(parent) { + // items defined in these can be linked to, as long as they are visible + DefKind::Mod | DefKind::ForeignMod => cur_did = parent, + // items in impls can be linked to, + // as long as we can link to the item the impl is on. + // since associated traits are not a thing, + // it should not be possible to refer to an impl item if + // the base type is not namable. + DefKind::Impl { .. 
} => return false, + // everything else does not have docs generated for it + _ => return true, + } + } + return false; +} + fn to_module_fqp(shortty: ItemType, fqp: &[Symbol]) -> &[Symbol] { if shortty == ItemType::Module { fqp } else { &fqp[..fqp.len() - 1] } } @@ -552,6 +574,9 @@ pub(crate) fn href_with_root_path( } _ => original_did, }; + if is_unnamable(cx.tcx(), did) { + return Err(HrefError::UnnamableItem); + } let cache = cx.cache(); let relative_to = &cx.current; @@ -594,7 +619,7 @@ pub(crate) fn href_with_root_path( } } }; - let url_parts = make_href(root_path, shortty, url_parts, &fqp, is_remote); + let url_parts = make_href(root_path, shortty, url_parts, fqp, is_remote); Ok((url_parts, shortty, fqp.clone())) } @@ -1115,7 +1140,7 @@ impl clean::Impl { { let last = ty.last(); if f.alternate() { - write!(f, "{}<", last)?; + write!(f, "{last}<")?; self.print_type(inner_type, f, use_absolute, cx)?; write!(f, ">")?; } else { @@ -1219,7 +1244,7 @@ pub(crate) fn print_params(params: &[clean::Parameter], cx: &Context<'_>) -> imp .map(|param| { fmt::from_fn(|f| { if let Some(name) = param.name { - write!(f, "{}: ", name)?; + write!(f, "{name}: ")?; } param.type_.print(cx).fmt(f) }) @@ -1341,7 +1366,7 @@ impl clean::FnDecl { write!(f, "const ")?; } if let Some(name) = param.name { - write!(f, "{}: ", name)?; + write!(f, "{name}: ")?; } param.type_.print(cx).fmt(f)?; } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index b2feee36c93..272180fb990 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -547,7 +547,7 @@ impl<'a> Iterator for TokenIter<'a> { fn get_real_ident_class(text: &str, allow_path_keywords: bool) -> Option<Class> { let ignore: &[&str] = if allow_path_keywords { &["self", "Self", "super", "crate"] } else { &["self", "Self"] }; - if ignore.iter().any(|k| *k == text) { + if ignore.contains(&text) { return None; } Some(match text { @@ -1159,7 +1159,7 @@ fn string_without_closing_tag<T: Display>( return Some("</a>"); } if !open_tag { - write!(out, "{}", text_s).unwrap(); + out.write_str(&text_s).unwrap(); return None; } let klass_s = klass.as_html(); diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index 50320cb231d..1f92c521d46 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -132,6 +132,5 @@ pub(crate) fn redirect(url: &str) -> String { <script>location.replace("{url}" + location.search + location.hash);</script> </body> </html>"##, - url = url, ) } diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index e41435de29c..4addf2c3c96 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -251,7 +251,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> { if !parse_result.rust { let added_classes = parse_result.added_classes; let lang_string = if let Some(lang) = parse_result.unknown.first() { - format!("language-{}", lang) + format!("language-{lang}") } else { String::new() }; @@ -999,7 +999,7 @@ impl<'a, 'tcx> TagIterator<'a, 'tcx> { if let Some((_, c)) = self.inner.next() { if c != '=' { - self.emit_error(format!("expected `=`, found `{}`", c)); + self.emit_error(format!("expected `=`, found `{c}`")); return None; } } else { diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 7b814701a73..5ceb1fc988d 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -193,14 +193,12 @@ 
impl<'tcx> Context<'tcx> { if it.is_stripped() && let Some(def_id) = it.def_id() && def_id.is_local() + && (self.info.is_inside_inlined_module + || self.shared.cache.inlined_items.contains(&def_id)) { - if self.info.is_inside_inlined_module - || self.shared.cache.inlined_items.contains(&def_id) - { - // For now we're forced to generate a redirect page for stripped items until - // `record_extern_fqn` correctly points to external items. - render_redirect_pages = true; - } + // For now we're forced to generate a redirect page for stripped items until + // `record_extern_fqn` correctly points to external items. + render_redirect_pages = true; } let mut title = String::new(); if !is_module { @@ -254,40 +252,36 @@ impl<'tcx> Context<'tcx> { &self.shared.style_files, ) } else { - if let Some(&(ref names, ty)) = self.cache().paths.get(&it.item_id.expect_def_id()) { - if self.current.len() + 1 != names.len() - || self.current.iter().zip(names.iter()).any(|(a, b)| a != b) - { - // We checked that the redirection isn't pointing to the current file, - // preventing an infinite redirection loop in the generated - // documentation. - - let path = fmt::from_fn(|f| { - for name in &names[..names.len() - 1] { - write!(f, "{name}/")?; - } - write!(f, "{}", print_item_path(ty, names.last().unwrap().as_str())) - }); - match self.shared.redirections { - Some(ref redirections) => { - let mut current_path = String::new(); - for name in &self.current { - current_path.push_str(name.as_str()); - current_path.push('/'); - } - let _ = write!( - current_path, - "{}", - print_item_path(ty, names.last().unwrap().as_str()) - ); - redirections.borrow_mut().insert(current_path, path.to_string()); - } - None => { - return layout::redirect(&format!( - "{root}{path}", - root = self.root_path() - )); + if let Some(&(ref names, ty)) = self.cache().paths.get(&it.item_id.expect_def_id()) + && (self.current.len() + 1 != names.len() + || self.current.iter().zip(names.iter()).any(|(a, b)| a != b)) + { + // We checked that the redirection isn't pointing to the current file, + // preventing an infinite redirection loop in the generated + // documentation. + + let path = fmt::from_fn(|f| { + for name in &names[..names.len() - 1] { + write!(f, "{name}/")?; + } + write!(f, "{}", print_item_path(ty, names.last().unwrap().as_str())) + }); + match self.shared.redirections { + Some(ref redirections) => { + let mut current_path = String::new(); + for name in &self.current { + current_path.push_str(name.as_str()); + current_path.push('/'); } + let _ = write!( + current_path, + "{}", + print_item_path(ty, names.last().unwrap().as_str()) + ); + redirections.borrow_mut().insert(current_path, path.to_string()); + } + None => { + return layout::redirect(&format!("{root}{path}", root = self.root_path())); } } } @@ -762,11 +756,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { // Flush pending errors. 
self.shared.fs.close(); let nb_errors = self.shared.errors.iter().map(|err| self.tcx().dcx().err(err)).count(); - if nb_errors > 0 { - Err(Error::new(io::Error::new(io::ErrorKind::Other, "I/O error"), "")) - } else { - Ok(()) - } + if nb_errors > 0 { Err(Error::new(io::Error::other("I/O error"), "")) } else { Ok(()) } } fn mod_item_in(&mut self, item: &clean::Item) -> Result<(), Error> { @@ -842,7 +832,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { self.info.render_redirect_pages = item.is_stripped(); } - let buf = self.render_item(&item, false); + let buf = self.render_item(item, false); // buf will be empty if the item is stripped and there is no redirect for it if !buf.is_empty() { let name = item.name.as_ref().unwrap(); @@ -853,7 +843,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { self.shared.fs.write(joint_dst, buf)?; if !self.info.render_redirect_pages { - self.shared.all.borrow_mut().append(full_path(self, &item), &item_type); + self.shared.all.borrow_mut().append(full_path(self, item), &item_type); } // If the item is a macro, redirect from the old macro URL (with !) // to the new one (without). diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 2cfc9af39e4..a46253237db 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -50,12 +50,10 @@ use std::{fs, str}; use askama::Template; use itertools::Either; use rustc_ast::join_path_syms; -use rustc_attr_data_structures::{ - ConstStability, DeprecatedSince, Deprecation, RustcVersion, StabilityLevel, StableSince, -}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; -use rustc_hir::Mutability; +use rustc_hir::attrs::{DeprecatedSince, Deprecation}; use rustc_hir::def_id::{DefId, DefIdSet}; +use rustc_hir::{ConstStability, Mutability, RustcVersion, StabilityLevel, StableSince}; use rustc_middle::ty::print::PrintTraitRefExt; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::symbol::{Symbol, sym}; @@ -1483,10 +1481,10 @@ fn render_deref_methods( } } render_assoc_items_inner(&mut w, cx, container_item, did, what, derefs); - } else if let Some(prim) = target.primitive_type() { - if let Some(&did) = cache.primitive_locations.get(&prim) { - render_assoc_items_inner(&mut w, cx, container_item, did, what, derefs); - } + } else if let Some(prim) = target.primitive_type() + && let Some(&did) = cache.primitive_locations.get(&prim) + { + render_assoc_items_inner(&mut w, cx, container_item, did, what, derefs); } } @@ -2058,21 +2056,20 @@ fn render_impl( // default items which weren't overridden in the implementation block. // We don't emit documentation for default items if they appear in the // Implementations on Foreign Types or Implementors sections. 
- if rendering_params.show_default_items { - if let Some(t) = trait_ - && !impl_.is_negative_trait_impl() - { - render_default_items( - &mut default_impl_items, - &mut impl_items, - cx, - t, - impl_, - &i.impl_item, - render_mode, - rendering_params, - )?; - } + if rendering_params.show_default_items + && let Some(t) = trait_ + && !impl_.is_negative_trait_impl() + { + render_default_items( + &mut default_impl_items, + &mut impl_items, + cx, + t, + impl_, + &i.impl_item, + render_mode, + rendering_params, + )?; } if render_mode == RenderMode::Normal { let toggled = !(impl_items.is_empty() && default_impl_items.is_empty()); @@ -2570,7 +2567,7 @@ fn collect_paths_for_type(first_ty: &clean::Type, cache: &Cache) -> Vec<String> match ty { clean::Type::Path { path } => process_path(path.def_id()), clean::Type::Tuple(tys) => { - work.extend(tys.into_iter()); + work.extend(tys.iter()); } clean::Type::Slice(ty) => { work.push_back(ty); diff --git a/src/librustdoc/html/render/ordered_json.rs b/src/librustdoc/html/render/ordered_json.rs index d1dddfebc83..be51dad1c2b 100644 --- a/src/librustdoc/html/render/ordered_json.rs +++ b/src/librustdoc/html/render/ordered_json.rs @@ -25,7 +25,7 @@ impl OrderedJson { .into_iter() .sorted_unstable_by(|a, b| a.borrow().cmp(b.borrow())) .format_with(",", |item, f| f(item.borrow())); - Self(format!("[{}]", items)) + Self(format!("[{items}]")) } pub(crate) fn array_unsorted<T: Borrow<Self>, I: IntoIterator<Item = T>>(items: I) -> Self { diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index 5fbda4797cc..02ee34aaac6 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -1451,7 +1451,7 @@ item_template!( impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> { fn render_union(&self) -> impl Display { - render_union(self.it, Some(&self.generics), &self.fields, self.cx) + render_union(self.it, Some(self.generics), self.fields, self.cx) } fn document_field(&self, field: &'a clean::Item) -> impl Display { @@ -1982,16 +1982,14 @@ fn item_constant( w.write_str(";")?; } - if !is_literal { - if let Some(value) = &value { - let value_lowercase = value.to_lowercase(); - let expr_lowercase = expr.to_lowercase(); + if !is_literal && let Some(value) = &value { + let value_lowercase = value.to_lowercase(); + let expr_lowercase = expr.to_lowercase(); - if value_lowercase != expr_lowercase - && value_lowercase.trim_end_matches("i32") != expr_lowercase - { - write!(w, " // {value}", value = Escape(value))?; - } + if value_lowercase != expr_lowercase + && value_lowercase.trim_end_matches("i32") != expr_lowercase + { + write!(w, " // {value}", value = Escape(value))?; } } Ok::<(), fmt::Error>(()) @@ -2071,41 +2069,39 @@ fn item_fields( _ => None, }) .peekable(); - if let None | Some(CtorKind::Fn) = ctor_kind { - if fields.peek().is_some() { - let title = format!( - "{}{}", - if ctor_kind.is_none() { "Fields" } else { "Tuple Fields" }, - document_non_exhaustive_header(it), - ); + if let None | Some(CtorKind::Fn) = ctor_kind + && fields.peek().is_some() + { + let title = format!( + "{}{}", + if ctor_kind.is_none() { "Fields" } else { "Tuple Fields" }, + document_non_exhaustive_header(it), + ); + write!( + w, + "{}", + write_section_heading( + &title, + "fields", + Some("fields"), + document_non_exhaustive(it) + ) + )?; + for (index, (field, ty)) in fields.enumerate() { + let field_name = + field.name.map_or_else(|| index.to_string(), |sym| sym.as_str().to_string()); + let id = 
cx.derive_id(format!("{typ}.{field_name}", typ = ItemType::StructField)); write!( w, - "{}", - write_section_heading( - &title, - "fields", - Some("fields"), - document_non_exhaustive(it) - ) + "<span id=\"{id}\" class=\"{item_type} section-header\">\ + <a href=\"#{id}\" class=\"anchor field\">§</a>\ + <code>{field_name}: {ty}</code>\ + </span>\ + {doc}", + item_type = ItemType::StructField, + ty = ty.print(cx), + doc = document(cx, field, Some(it), HeadingOffset::H3), )?; - for (index, (field, ty)) in fields.enumerate() { - let field_name = field - .name - .map_or_else(|| index.to_string(), |sym| sym.as_str().to_string()); - let id = - cx.derive_id(format!("{typ}.{field_name}", typ = ItemType::StructField)); - write!( - w, - "<span id=\"{id}\" class=\"{item_type} section-header\">\ - <a href=\"#{id}\" class=\"anchor field\">§</a>\ - <code>{field_name}: {ty}</code>\ - </span>\ - {doc}", - item_type = ItemType::StructField, - ty = ty.print(cx), - doc = document(cx, field, Some(it), HeadingOffset::H3), - )?; - } } } Ok(()) diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs index 80a59fa218c..e2f86b8a854 100644 --- a/src/librustdoc/html/render/search_index.rs +++ b/src/librustdoc/html/render/search_index.rs @@ -100,9 +100,22 @@ pub(crate) fn build_index( let crate_doc = short_markdown_summary(&krate.module.doc_value(), &krate.module.link_names(cache)); + #[derive(Eq, Ord, PartialEq, PartialOrd)] + struct SerSymbolAsStr(Symbol); + + impl Serialize for SerSymbolAsStr { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + self.0.as_str().serialize(serializer) + } + } + + type AliasMap = BTreeMap<SerSymbolAsStr, Vec<usize>>; // Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias, // we need the alias element to have an array of items. - let mut aliases: BTreeMap<String, Vec<usize>> = BTreeMap::new(); + let mut aliases: AliasMap = BTreeMap::new(); // Sort search index items. This improves the compressibility of the search index. cache.search_index.sort_unstable_by(|k1, k2| { @@ -116,7 +129,7 @@ pub(crate) fn build_index( // Set up alias indexes. for (i, item) in cache.search_index.iter().enumerate() { for alias in &item.aliases[..] { - aliases.entry(alias.as_str().to_lowercase()).or_default().push(i); + aliases.entry(SerSymbolAsStr(*alias)).or_default().push(i); } } @@ -474,7 +487,7 @@ pub(crate) fn build_index( // The String is alias name and the vec is the list of the elements with this alias. // // To be noted: the `usize` elements are indexes to `items`. - aliases: &'a BTreeMap<String, Vec<usize>>, + aliases: &'a AliasMap, // Used when a type has more than one impl with an associated item with the same name. associated_item_disambiguators: &'a Vec<(usize, String)>, // A list of shard lengths encoded as vlqhex. 
See the comment in write_vlqhex_to_string diff --git a/src/librustdoc/html/render/sidebar.rs b/src/librustdoc/html/render/sidebar.rs index 91540e06e33..b9f5ada417c 100644 --- a/src/librustdoc/html/render/sidebar.rs +++ b/src/librustdoc/html/render/sidebar.rs @@ -541,7 +541,7 @@ fn sidebar_deref_methods<'a>( .iter() .filter(|i| { i.inner_impl().trait_.is_none() - && real_target.is_doc_subtype_of(&i.inner_impl().for_, &c) + && real_target.is_doc_subtype_of(&i.inner_impl().for_, c) }) .flat_map(|i| get_methods(i.inner_impl(), true, used_links, deref_mut, cx.tcx())) .collect::<Vec<_>>(); diff --git a/src/librustdoc/html/render/sorted_template.rs b/src/librustdoc/html/render/sorted_template.rs index a7b954ab70b..659c5e6093b 100644 --- a/src/librustdoc/html/render/sorted_template.rs +++ b/src/librustdoc/html/render/sorted_template.rs @@ -63,7 +63,8 @@ impl<F: FileFormat> fmt::Display for SortedTemplate<F> { for (p, fragment) in self.fragments.iter().with_position() { let mut f = DeltaWriter { inner: &mut f, delta: 0 }; let sep = if matches!(p, Position::First | Position::Only) { "" } else { F::SEPARATOR }; - write!(f, "{}{}", sep, fragment)?; + f.write_str(sep)?; + f.write_str(fragment)?; fragment_lengths.push(f.delta); } let offset = Offset { start: self.before.len(), fragment_lengths }; diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 3c4af0dc612..8e3d07b3a1c 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -568,7 +568,11 @@ function preLoadCss(cssUrl) { break; case "-": ev.preventDefault(); - collapseAllDocs(); + collapseAllDocs(false); + break; + case "_": + ev.preventDefault(); + collapseAllDocs(true); break; case "?": @@ -1038,11 +1042,14 @@ function preLoadCss(cssUrl) { innerToggle.children[0].innerText = "Summary"; } - function collapseAllDocs() { + /** + * @param {boolean} collapseImpls - also collapse impl blocks if set to true + */ + function collapseAllDocs(collapseImpls) { const innerToggle = document.getElementById(toggleAllDocsId); addClass(innerToggle, "will-expand"); onEachLazy(document.getElementsByClassName("toggle"), e => { - if (e.parentNode.id !== "implementations-list" || + if ((collapseImpls || e.parentNode.id !== "implementations-list") || (!hasClass(e, "implementors-toggle") && !hasClass(e, "type-contents-toggle")) ) { @@ -1053,7 +1060,10 @@ function preLoadCss(cssUrl) { innerToggle.children[0].innerText = "Show all"; } - function toggleAllDocs() { + /** + * @param {MouseEvent=} ev + */ + function toggleAllDocs(ev) { const innerToggle = document.getElementById(toggleAllDocsId); if (!innerToggle) { return; @@ -1061,7 +1071,7 @@ function preLoadCss(cssUrl) { if (hasClass(innerToggle, "will-expand")) { expandAllDocs(); } else { - collapseAllDocs(); + collapseAllDocs(ev !== undefined && ev.shiftKey); } } @@ -1519,6 +1529,10 @@ function preLoadCss(cssUrl) { ["⏎", "Go to active search result"], ["+", "Expand all sections"], ["-", "Collapse all sections"], + // for the sake of brevity, we don't say "inherint impl blocks", + // although that would be more correct, + // since trait impl blocks are collapsed by - + ["_", "Collapse all sections, including impl blocks"], ].map(x => "<dt>" + x[0].split(" ") .map((y, index) => ((index & 1) === 0 ? 
"<kbd>" + y + "</kbd>" : " " + y + " ")) diff --git a/src/librustdoc/html/static/js/rustdoc.d.ts b/src/librustdoc/html/static/js/rustdoc.d.ts index ca2512e5ab6..a9589764547 100644 --- a/src/librustdoc/html/static/js/rustdoc.d.ts +++ b/src/librustdoc/html/static/js/rustdoc.d.ts @@ -219,6 +219,8 @@ declare namespace rustdoc { crate: string, descShard: SearchDescShard, id: number, + // This is the name of the item. For doc aliases, if you want the name of the aliased + // item, take a look at `Row.original.name`. name: string, normalizedName: string, word: string, @@ -227,6 +229,11 @@ declare namespace rustdoc { path: string, ty: number, type: FunctionSearchType | null, + descIndex: number, + bitIndex: number, + implDisambiguator: String | null, + is_alias?: boolean, + original?: Row, } /** diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js index 15cad31f555..2caf214ff73 100644 --- a/src/librustdoc/html/static/js/search.js +++ b/src/librustdoc/html/static/js/search.js @@ -830,7 +830,7 @@ function createQueryElement(query, parserState, name, generics, isInGenerics) { */ function makePrimitiveElement(name, extra) { return Object.assign({ - name: name, + name, id: null, fullPath: [name], pathWithoutLast: [], @@ -1483,6 +1483,7 @@ class DocSearch { */ this.assocTypeIdNameMap = new Map(); this.ALIASES = new Map(); + this.FOUND_ALIASES = new Set(); this.rootPath = rootPath; this.searchState = searchState; @@ -2030,6 +2031,8 @@ class DocSearch { // normalized names, type signature objects and fingerprints, and aliases. id = 0; + /** @type {Array<[string, { [key: string]: Array<number> }, number]>} */ + const allAliases = []; for (const [crate, crateCorpus] of rawSearchIndex) { // a string representing the lengths of each description shard // a string representing the list of function types @@ -2178,10 +2181,10 @@ class DocSearch { paths[i] = { ty, name, path, exactPath, unboxFlag }; } - // convert `item*` into an object form, and construct word indices. + // Convert `item*` into an object form, and construct word indices. // - // before any analysis is performed lets gather the search terms to - // search against apart from the rest of the data. This is a quick + // Before any analysis is performed, let's gather the search terms to + // search against apart from the rest of the data. This is a quick // operation that is cached for the life of the page state so that // all other search operations have access to this cached data for // faster analysis operations @@ -2269,29 +2272,58 @@ class DocSearch { } if (aliases) { - const currentCrateAliases = new Map(); - this.ALIASES.set(crate, currentCrateAliases); - for (const alias_name in aliases) { - if (!Object.prototype.hasOwnProperty.call(aliases, alias_name)) { - continue; - } - - /** @type{number[]} */ - let currentNameAliases; - if (currentCrateAliases.has(alias_name)) { - currentNameAliases = currentCrateAliases.get(alias_name); - } else { - currentNameAliases = []; - currentCrateAliases.set(alias_name, currentNameAliases); - } - for (const local_alias of aliases[alias_name]) { - currentNameAliases.push(local_alias + currentIndex); - } - } + // We need to add the aliases in `searchIndex` after we finished filling it + // to not mess up indexes. 
+ allAliases.push([crate, aliases, currentIndex]); } currentIndex += itemTypes.length; this.searchState.descShards.set(crate, descShardList); } + + for (const [crate, aliases, index] of allAliases) { + for (const [alias_name, alias_refs] of Object.entries(aliases)) { + if (!this.ALIASES.has(crate)) { + this.ALIASES.set(crate, new Map()); + } + const word = alias_name.toLowerCase(); + const crate_alias_map = this.ALIASES.get(crate); + if (!crate_alias_map.has(word)) { + crate_alias_map.set(word, []); + } + const aliases_map = crate_alias_map.get(word); + + const normalizedName = word.indexOf("_") === -1 ? word : word.replace(/_/g, ""); + for (const alias of alias_refs) { + const originalIndex = alias + index; + const original = searchIndex[originalIndex]; + /** @type {rustdoc.Row} */ + const row = { + crate, + name: alias_name, + normalizedName, + is_alias: true, + ty: original.ty, + type: original.type, + paramNames: [], + word, + id, + parent: undefined, + original, + path: "", + implDisambiguator: original.implDisambiguator, + // Needed to load the description of the original item. + // @ts-ignore + descShard: original.descShard, + descIndex: original.descIndex, + bitIndex: original.bitIndex, + }; + aliases_map.push(row); + this.nameTrie.insert(normalizedName, id, this.tailTable); + id += 1; + searchIndex.push(row); + } + } + } // Drop the (rather large) hash table used for reusing function items this.TYPES_POOL = new Map(); return searchIndex; @@ -2536,6 +2568,8 @@ class DocSearch { parsedQuery.elems.reduce((acc, next) => acc + next.pathLast.length, 0) + parsedQuery.returned.reduce((acc, next) => acc + next.pathLast.length, 0); const maxEditDistance = Math.floor(queryLen / 3); + // We reinitialize the `FOUND_ALIASES` map. + this.FOUND_ALIASES.clear(); /** * @type {Map<string, number>} @@ -2695,6 +2729,10 @@ class DocSearch { const buildHrefAndPath = item => { let displayPath; let href; + if (item.is_alias) { + this.FOUND_ALIASES.add(item.word); + item = item.original; + } const type = itemTypes[item.ty]; const name = item.name; let path = item.path; @@ -3198,8 +3236,7 @@ class DocSearch { result.item = this.searchIndex[result.id]; result.word = this.searchIndex[result.id].word; if (isReturnTypeQuery) { - // we are doing a return-type based search, - // deprioritize "clone-like" results, + // We are doing a return-type based search, deprioritize "clone-like" results, // ie. functions that also take the queried type as an argument. const resultItemType = result.item && result.item.type; if (!resultItemType) { @@ -4259,28 +4296,13 @@ class DocSearch { return false; } - // this does not yet have a type in `rustdoc.d.ts`. - // @ts-expect-error - function createAliasFromItem(item) { - return { - crate: item.crate, - name: item.name, - path: item.path, - descShard: item.descShard, - descIndex: item.descIndex, - exactPath: item.exactPath, - ty: item.ty, - parent: item.parent, - type: item.type, - is_alias: true, - bitIndex: item.bitIndex, - implDisambiguator: item.implDisambiguator, - }; - } - // @ts-expect-error const handleAliases = async(ret, query, filterCrates, currentCrate) => { const lowerQuery = query.toLowerCase(); + if (this.FOUND_ALIASES.has(lowerQuery)) { + return; + } + this.FOUND_ALIASES.add(lowerQuery); // We separate aliases and crate aliases because we want to have current crate // aliases to be before the others in the displayed results. 
// @ts-expect-error @@ -4292,7 +4314,7 @@ class DocSearch { && this.ALIASES.get(filterCrates).has(lowerQuery)) { const query_aliases = this.ALIASES.get(filterCrates).get(lowerQuery); for (const alias of query_aliases) { - aliases.push(createAliasFromItem(this.searchIndex[alias])); + aliases.push(alias); } } } else { @@ -4302,7 +4324,7 @@ class DocSearch { const pushTo = crate === currentCrate ? crateAliases : aliases; const query_aliases = crateAliasesIndex.get(lowerQuery); for (const alias of query_aliases) { - pushTo.push(createAliasFromItem(this.searchIndex[alias])); + pushTo.push(alias); } } } @@ -4310,9 +4332,9 @@ class DocSearch { // @ts-expect-error const sortFunc = (aaa, bbb) => { - if (aaa.path < bbb.path) { + if (aaa.original.path < bbb.original.path) { return 1; - } else if (aaa.path === bbb.path) { + } else if (aaa.original.path === bbb.original.path) { return 0; } return -1; @@ -4322,20 +4344,9 @@ class DocSearch { aliases.sort(sortFunc); // @ts-expect-error - const fetchDesc = alias => { - // @ts-expect-error - return this.searchIndexEmptyDesc.get(alias.crate).contains(alias.bitIndex) ? - "" : this.searchState.loadDesc(alias); - }; - const [crateDescs, descs] = await Promise.all([ - // @ts-expect-error - Promise.all(crateAliases.map(fetchDesc)), - Promise.all(aliases.map(fetchDesc)), - ]); - - // @ts-expect-error const pushFunc = alias => { - alias.alias = query; + // Cloning `alias` to prevent its fields to be updated. + alias = {...alias}; const res = buildHrefAndPath(alias); alias.displayPath = pathSplitter(res[0]); alias.fullPath = alias.displayPath + alias.name; @@ -4347,16 +4358,8 @@ class DocSearch { } }; - aliases.forEach((alias, i) => { - // @ts-expect-error - alias.desc = descs[i]; - }); aliases.forEach(pushFunc); // @ts-expect-error - crateAliases.forEach((alias, i) => { - alias.desc = crateDescs[i]; - }); - // @ts-expect-error crateAliases.forEach(pushFunc); }; @@ -4802,7 +4805,7 @@ async function addTab(array, query, display) { output.className = "search-results " + extraClass; const lis = Promise.all(array.map(async item => { - const name = item.name; + const name = item.is_alias ? item.original.name : item.name; const type = itemTypes[item.ty]; const longType = longItemTypes[item.ty]; const typeName = longType.length !== 0 ? `${longType}` : "?"; @@ -4822,7 +4825,7 @@ async function addTab(array, query, display) { let alias = " "; if (item.is_alias) { alias = ` <div class="alias">\ -<b>${item.alias}</b><i class="grey"> - see </i>\ +<b>${item.name}</b><i class="grey"> - see </i>\ </div>`; } resultName.insertAdjacentHTML( @@ -5201,6 +5204,7 @@ function registerSearchEvents() { if (searchState.input.value.length === 0) { searchState.hideResults(); } else { + // @ts-ignore searchState.timeout = setTimeout(search, 500); } }; @@ -5842,8 +5846,8 @@ Lev1TParametricDescription.prototype.offsetIncrs3 = /*2 bits per value */ new In // be called ONLY when the whole file has been parsed and loaded. 
// @ts-expect-error -function initSearch(searchIndx) { - rawSearchIndex = searchIndx; +function initSearch(searchIndex) { + rawSearchIndex = searchIndex; if (typeof window !== "undefined") { // @ts-expect-error docSearch = new DocSearch(rawSearchIndex, ROOT_PATH, searchState); diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js index 76113726894..ca13b891638 100644 --- a/src/librustdoc/html/static/js/storage.js +++ b/src/librustdoc/html/static/js/storage.js @@ -418,7 +418,9 @@ class RustdocToolbarElement extends HTMLElement { <div id="help-button" tabindex="-1"> <a href="${rootPath}help.html"><span class="label">Help</span></a> </div> - <button id="toggle-all-docs"><span class="label">Summary</span></button>`; + <button id="toggle-all-docs" +title="Collapse sections (shift-click to also collapse impl blocks)"><span +class="label">Summary</span></button>`; } } window.customElements.define("rustdoc-toolbar", RustdocToolbarElement); diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index 0a84d8caa30..f966d926562 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -4,8 +4,8 @@ use rustc_abi::ExternAbi; use rustc_ast::ast; -use rustc_attr_data_structures::{self as attrs, DeprecatedSince}; use rustc_hir as hir; +use rustc_hir::attrs::{self, DeprecatedSince}; use rustc_hir::def::CtorKind; use rustc_hir::def_id::DefId; use rustc_hir::{HeaderSafety, Safety}; @@ -50,7 +50,7 @@ impl JsonRenderer<'_> { let span = item.span(self.tcx); let visibility = item.visibility(self.tcx); let clean::ItemInner { name, item_id, .. } = *item.inner; - let id = self.id_from_item(&item); + let id = self.id_from_item(item); let inner = match item.kind { clean::KeywordItem => return None, clean::StrippedItem(ref inner) => { @@ -86,14 +86,14 @@ impl JsonRenderer<'_> { items .iter() .filter(|i| !i.is_stripped() && !i.is_keyword()) - .map(|i| self.id_from_item(&i)) + .map(|i| self.id_from_item(i)) .collect() } fn ids_keeping_stripped(&self, items: &[clean::Item]) -> Vec<Option<Id>> { items .iter() - .map(|i| (!i.is_stripped() && !i.is_keyword()).then(|| self.id_from_item(&i))) + .map(|i| (!i.is_stripped() && !i.is_keyword()).then(|| self.id_from_item(i))) .collect() } } @@ -358,12 +358,12 @@ impl FromClean<clean::Struct> for Struct { let clean::Struct { ctor_kind, generics, fields } = struct_; let kind = match ctor_kind { - Some(CtorKind::Fn) => StructKind::Tuple(renderer.ids_keeping_stripped(&fields)), + Some(CtorKind::Fn) => StructKind::Tuple(renderer.ids_keeping_stripped(fields)), Some(CtorKind::Const) => { assert!(fields.is_empty()); StructKind::Unit } - None => StructKind::Plain { fields: renderer.ids(&fields), has_stripped_fields }, + None => StructKind::Plain { fields: renderer.ids(fields), has_stripped_fields }, }; Struct { @@ -381,7 +381,7 @@ impl FromClean<clean::Union> for Union { Union { generics: generics.into_json(renderer), has_stripped_fields, - fields: renderer.ids(&fields), + fields: renderer.ids(fields), impls: Vec::new(), // Added in JsonRenderer::item } } @@ -659,7 +659,7 @@ impl FromClean<clean::FnDecl> for FunctionSignature { let clean::FnDecl { inputs, output, c_variadic } = decl; FunctionSignature { inputs: inputs - .into_iter() + .iter() .map(|param| { // `_` is the most sensible name for missing param names. 
let name = param.name.unwrap_or(kw::Underscore).to_string(); @@ -684,7 +684,7 @@ impl FromClean<clean::Trait> for Trait { is_auto, is_unsafe, is_dyn_compatible, - items: renderer.ids(&items), + items: renderer.ids(items), generics: generics.into_json(renderer), bounds: bounds.into_json(renderer), implementations: Vec::new(), // Added in JsonRenderer::item @@ -727,7 +727,7 @@ impl FromClean<clean::Impl> for Impl { .collect(), trait_: trait_.into_json(renderer), for_: for_.into_json(renderer), - items: renderer.ids(&items), + items: renderer.ids(items), is_negative, is_synthetic, blanket_impl: blanket_impl.map(|x| x.into_json(renderer)), @@ -770,7 +770,7 @@ impl FromClean<clean::Variant> for Variant { let kind = match &variant.kind { CLike => VariantKind::Plain, - Tuple(fields) => VariantKind::Tuple(renderer.ids_keeping_stripped(&fields)), + Tuple(fields) => VariantKind::Tuple(renderer.ids_keeping_stripped(fields)), Struct(s) => VariantKind::Struct { has_stripped_fields: s.has_stripped_entries(), fields: renderer.ids(&s.fields), @@ -908,8 +908,12 @@ fn maybe_from_hir_attr( hir::Attribute::Parsed(kind) => kind, hir::Attribute::Unparsed(_) => { - // FIXME: We should handle `#[doc(hidden)]`. - return Some(other_attr(tcx, attr)); + return Some(if attr.has_name(sym::macro_export) { + Attribute::MacroExport + // FIXME: We should handle `#[doc(hidden)]`. + } else { + other_attr(tcx, attr) + }); } }; diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs index 600a4b429f3..760e48baffa 100644 --- a/src/librustdoc/json/mod.rs +++ b/src/librustdoc/json/mod.rs @@ -133,7 +133,7 @@ fn target(sess: &rustc_session::Session) -> types::Target { let feature_stability: FxHashMap<&str, Stability> = sess .target .rust_target_features() - .into_iter() + .iter() .copied() .map(|(name, stability, _)| (name, stability)) .collect(); @@ -143,7 +143,7 @@ fn target(sess: &rustc_session::Session) -> types::Target { target_features: sess .target .rust_target_features() - .into_iter() + .iter() .copied() .filter(|(_, stability, _)| { // Describe only target features which the user can toggle @@ -157,7 +157,7 @@ fn target(sess: &rustc_session::Session) -> types::Target { _ => None, }, implies_features: implied_features - .into_iter() + .iter() .copied() .filter(|name| { // Imply only target features which the user can toggle diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index a3cdc4f687f..28dbd8ba7d3 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -32,7 +32,6 @@ extern crate pulldown_cmark; extern crate rustc_abi; extern crate rustc_ast; extern crate rustc_ast_pretty; -extern crate rustc_attr_data_structures; extern crate rustc_attr_parsing; extern crate rustc_data_structures; extern crate rustc_driver; @@ -799,7 +798,7 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) { // Note that we discard any distinction between different non-zero exit // codes from `from_matches` here. - let (input, options, render_options) = + let (input, options, render_options, loaded_paths) = match config::Options::from_matches(early_dcx, &matches, args) { Some(opts) => opts, None => return, @@ -870,6 +869,12 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) { interface::run_compiler(config, |compiler| { let sess = &compiler.sess; + // Register the loaded external files in the source map so they show up in depinfo. + // We can't load them via the source map because it gets created after we process the options. 
+ for external_path in &loaded_paths { + let _ = sess.source_map().load_file(external_path); + } + if sess.opts.describe_lints { rustc_driver::describe_lints(sess, registered_lints); return; diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index ca6f67eb6df..c9fa3a4837f 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -274,7 +274,7 @@ impl From<DiagnosticInfo<'_>> for OwnedDiagnosticInfo { } impl OwnedDiagnosticInfo { - pub(crate) fn into_info(&self) -> DiagnosticInfo<'_> { + pub(crate) fn as_info(&self) -> DiagnosticInfo<'_> { DiagnosticInfo { item: &self.item, ori_link: &self.ori_link, @@ -941,13 +941,21 @@ fn preprocess_link( ori_link: &MarkdownLink, dox: &str, ) -> Option<Result<PreprocessingInfo, PreprocessingError>> { + // certain link kinds cannot have their path be urls, + // so they should not be ignored, no matter how much they look like urls. + // e.g. [https://example.com/] is not a link to example.com. + let can_be_url = !matches!( + ori_link.kind, + LinkType::ShortcutUnknown | LinkType::CollapsedUnknown | LinkType::ReferenceUnknown + ); + // [] is mostly likely not supposed to be a link if ori_link.link.is_empty() { return None; } // Bail early for real links. - if ori_link.link.contains('/') { + if can_be_url && ori_link.link.contains('/') { return None; } @@ -972,7 +980,7 @@ fn preprocess_link( Ok(None) => (None, link, link), Err((err_msg, relative_range)) => { // Only report error if we would not have ignored this link. See issue #83859. - if !should_ignore_link_with_disambiguators(link) { + if !(can_be_url && should_ignore_link_with_disambiguators(link)) { let disambiguator_range = match range_between_backticks(&ori_link.range, dox) { MarkdownLinkRange::Destination(no_backticks_range) => { MarkdownLinkRange::Destination( @@ -989,7 +997,25 @@ fn preprocess_link( } }; - if should_ignore_link(path_str) { + // If there's no backticks, be lenient and revert to the old behavior. + // This is to prevent churn by linting on stuff that isn't meant to be a link. + // only shortcut links have simple enough syntax that they + // are likely to be written accidentally, collapsed and reference links + // need 4 metachars, and reference links will not usually use + // backticks in the reference name. + // therefore, only shortcut syntax gets the lenient behavior. + // + // here's a truth table for how link kinds that cannot be urls are handled: + // + // |-------------------------------------------------------| + // | | is shortcut link | not shortcut link | + // |--------------|--------------------|-------------------| + // | has backtick | never ignore | never ignore | + // | no backtick | ignore if url-like | never ignore | + // |-------------------------------------------------------| + let ignore_urllike = + can_be_url || (ori_link.kind == LinkType::ShortcutUnknown && !ori_link.link.contains('`')); + if ignore_urllike && should_ignore_link(path_str) { return None; } @@ -1177,7 +1203,7 @@ impl LinkCollector<'_, '_> { // Primitive types are always valid. 
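The `preprocess_link` hunk above gates the URL-skipping heuristics on the Markdown link kind, per the truth table in its comment. A standalone sketch of that decision, with simplified stand-ins for `LinkType` and `should_ignore_link` (not the actual rustdoc/pulldown-cmark types):

#[derive(PartialEq)]
enum LinkKind { ShortcutUnknown, CollapsedUnknown, ReferenceUnknown, Inline }

// Stand-in for `should_ignore_link`: does the path look like a real URL?
fn looks_like_url(link: &str) -> bool {
    link.contains("://") || link.starts_with("www.")
}

/// Should this candidate intra-doc link be skipped because it is probably a URL?
fn skip_as_urllike(kind: &LinkKind, link: &str) -> bool {
    // Reference-style links ([foo], [foo][], [a][b]) cannot have a URL as their
    // path, so they are normally resolved as intra-doc links...
    let can_be_url = !matches!(
        kind,
        LinkKind::ShortcutUnknown | LinkKind::CollapsedUnknown | LinkKind::ReferenceUnknown
    );
    // ...except that bare shortcut links without backticks keep the old lenient
    // behaviour, to avoid linting on text that was never meant to be a link.
    (can_be_url || (*kind == LinkKind::ShortcutUnknown && !link.contains('`')))
        && looks_like_url(link)
}

fn main() {
    assert!(skip_as_urllike(&LinkKind::ShortcutUnknown, "https://example.com"));
    assert!(!skip_as_urllike(&LinkKind::ReferenceUnknown, "https://example.com"));
    assert!(skip_as_urllike(&LinkKind::Inline, "https://example.com"));
}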
Res::Primitive(_) => true, }); - let diag_info = info.diag_info.into_info(); + let diag_info = info.diag_info.as_info(); match info.resolved.len() { 1 => { let (res, fragment) = info.resolved.pop().unwrap(); @@ -1243,17 +1269,16 @@ impl LinkCollector<'_, '_> { disambiguator, None | Some(Disambiguator::Namespace(Namespace::TypeNS) | Disambiguator::Primitive) ) && !matches!(res, Res::Primitive(_)) + && let Some(prim) = resolve_primitive(path_str, TypeNS) { - if let Some(prim) = resolve_primitive(path_str, TypeNS) { - // `prim@char` - if matches!(disambiguator, Some(Disambiguator::Primitive)) { - res = prim; - } else { - // `[char]` when a `char` module is in scope - let candidates = &[(res, res.def_id(self.cx.tcx)), (prim, None)]; - ambiguity_error(self.cx, &diag_info, path_str, candidates, true); - return None; - } + // `prim@char` + if matches!(disambiguator, Some(Disambiguator::Primitive)) { + res = prim; + } else { + // `[char]` when a `char` module is in scope + let candidates = &[(res, res.def_id(self.cx.tcx)), (prim, None)]; + ambiguity_error(self.cx, &diag_info, path_str, candidates, true); + return None; } } @@ -2233,7 +2258,7 @@ fn ambiguity_error( // proc macro can exist in multiple namespaces at once, so we need to compare `DefIds` // to remove the candidate in the fn namespace. let mut possible_proc_macro_id = None; - let is_proc_macro_crate = cx.tcx.crate_types() == &[CrateType::ProcMacro]; + let is_proc_macro_crate = cx.tcx.crate_types() == [CrateType::ProcMacro]; let mut kinds = candidates .iter() .map(|(res, def_id)| { diff --git a/src/librustdoc/passes/lint/redundant_explicit_links.rs b/src/librustdoc/passes/lint/redundant_explicit_links.rs index 5757b6a9740..e69cf87f957 100644 --- a/src/librustdoc/passes/lint/redundant_explicit_links.rs +++ b/src/librustdoc/passes/lint/redundant_explicit_links.rs @@ -93,14 +93,14 @@ fn check_redundant_explicit_link<'md>( if let Event::Start(Tag::Link { link_type, dest_url, .. }) = event { let link_data = collect_link_data(&mut offset_iter); - if let Some(resolvable_link) = link_data.resolvable_link.as_ref() { - if &link_data.display_link.replace('`', "") != resolvable_link { - // Skips if display link does not match to actual - // resolvable link, usually happens if display link - // has several segments, e.g. - // [this is just an `Option`](Option) - continue; - } + if let Some(resolvable_link) = link_data.resolvable_link.as_ref() + && &link_data.display_link.replace('`', "") != resolvable_link + { + // Skips if display link does not match to actual + // resolvable link, usually happens if display link + // has several segments, e.g. + // [this is just an `Option`](Option) + continue; } let explicit_link = dest_url.to_string(); diff --git a/src/librustdoc/passes/propagate_stability.rs b/src/librustdoc/passes/propagate_stability.rs index 7b3da8d7c0f..14ec58702e3 100644 --- a/src/librustdoc/passes/propagate_stability.rs +++ b/src/librustdoc/passes/propagate_stability.rs @@ -6,8 +6,8 @@ //! [`core::error`] module is marked as stable since 1.81.0, so we want to show //! [`core::error::Error`] as stable since 1.81.0 as well. 
-use rustc_attr_data_structures::{Stability, StabilityLevel}; use rustc_hir::def_id::CRATE_DEF_ID; +use rustc_hir::{Stability, StabilityLevel}; use crate::clean::{Crate, Item, ItemId, ItemKind}; use crate::core::DocContext; diff --git a/src/librustdoc/passes/strip_aliased_non_local.rs b/src/librustdoc/passes/strip_aliased_non_local.rs index b53e3b4e3d7..bb13308e6c2 100644 --- a/src/librustdoc/passes/strip_aliased_non_local.rs +++ b/src/librustdoc/passes/strip_aliased_non_local.rs @@ -47,13 +47,11 @@ impl DocFolder for NonLocalStripper<'_> { // FIXME(#125009): Not-local should probably consider same Cargo workspace if let Some(def_id) = i.def_id() && !def_id.is_local() - { - if i.is_doc_hidden() + && (i.is_doc_hidden() // Default to *not* stripping items with inherited visibility. - || i.visibility(self.tcx).is_some_and(|viz| viz != Visibility::Public) - { - return Some(strip_item(i)); - } + || i.visibility(self.tcx).is_some_and(|viz| viz != Visibility::Public)) + { + return Some(strip_item(i)); } Some(self.fold_item_recur(i)) diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs index fceacb69fb5..4d29c74e1eb 100644 --- a/src/librustdoc/scrape_examples.rs +++ b/src/librustdoc/scrape_examples.rs @@ -333,9 +333,11 @@ pub(crate) fn run( pub(crate) fn load_call_locations( with_examples: Vec<String>, dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, ) -> AllCallLocations { let mut all_calls: AllCallLocations = FxIndexMap::default(); for path in with_examples { + loaded_paths.push(path.clone().into()); let bytes = match fs::read(&path) { Ok(bytes) => bytes, Err(e) => dcx.fatal(format!("failed to load examples: {e}")), diff --git a/src/llvm-project b/src/llvm-project -Subproject d3c793b025645a4565ac59aceb30d2d116ff1a4 +Subproject e8a2ffcf322f45b8dce82c65ab27a3e2430a6b5 diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs index 6235b0e8576..40f89009a43 100644 --- a/src/rustdoc-json-types/lib.rs +++ b/src/rustdoc-json-types/lib.rs @@ -37,8 +37,8 @@ pub type FxHashMap<K, V> = HashMap<K, V>; // re-export for use in src/librustdoc // will instead cause conflicts. See #94591 for more. (This paragraph and the "Latest feature" line // are deliberately not in a doc comment, because they need not be in public docs.) // -// Latest feature: Structured Attributes -pub const FORMAT_VERSION: u32 = 54; +// Latest feature: Add Attribute::MacroUse +pub const FORMAT_VERSION: u32 = 55; /// The root of the emitted JSON blob. /// @@ -216,6 +216,9 @@ pub enum Attribute { /// `#[must_use]` MustUse { reason: Option<String> }, + /// `#[macro_export]` + MacroExport, + /// `#[export_name = "name"]` ExportName(String), diff --git a/src/tools/cargo b/src/tools/cargo -Subproject 6833aa715d724437dc1247d0166afe314ab6854 +Subproject 840b83a10fb0e039a83f4d70ad032892c287570 diff --git a/src/tools/clippy/.github/workflows/feature_freeze.yml b/src/tools/clippy/.github/workflows/feature_freeze.yml index 7ad58af77d4..ec59be3e7f6 100644 --- a/src/tools/clippy/.github/workflows/feature_freeze.yml +++ b/src/tools/clippy/.github/workflows/feature_freeze.yml @@ -20,16 +20,26 @@ jobs: # of the pull request, as malicious code would be able to access the private # GitHub token. 
steps: - - name: Check PR Changes - id: pr-changes - run: echo "::set-output name=changes::${{ toJson(github.event.pull_request.changed_files) }}" - - - name: Create Comment - if: steps.pr-changes.outputs.changes != '[]' - run: | - # Use GitHub API to create a comment on the PR - PR_NUMBER=${{ github.event.pull_request.number }} - COMMENT="**Seems that you are trying to add a new lint!**\nWe are currently in a [feature freeze](https://doc.rust-lang.org/nightly/clippy/development/feature_freeze.html), so we are delaying all lint-adding PRs to September 18 and focusing on bugfixes.\nThanks a lot for your contribution, and sorry for the inconvenience.\nWith ❤ from the Clippy team\n\n@rustbot note Feature-freeze\n@rustbot blocked\n@rustbot label +A-lint\n" - GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} - COMMENT_URL="https://api.github.com/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" - curl -s -H "Authorization: token ${GITHUB_TOKEN}" -X POST $COMMENT_URL -d "{\"body\":\"$COMMENT\"}" + - name: Add freeze warning comment + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} + PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + COMMENT=$(echo "**Seems that you are trying to add a new lint!**\n\ + \n\ + We are currently in a [feature freeze](https://doc.rust-lang.org/nightly/clippy/development/feature_freeze.html), so we are delaying all lint-adding PRs to September 18 and [focusing on bugfixes](https://github.com/rust-lang/rust-clippy/issues/15086).\n\ + \n\ + Thanks a lot for your contribution, and sorry for the inconvenience.\n\ + \n\ + With ❤ from the Clippy team.\n\ + \n\ + @rustbot note Feature-freeze\n\ + @rustbot blocked\n\ + @rustbot label +A-lint" + ) + curl -s -H "Authorization: Bearer $GITHUB_TOKEN" \ + -H "Content-Type: application/vnd.github.raw+json" \ + -X POST \ + --data "{\"body\":\"${COMMENT}\"}" \ + "https://api.github.com/repos/${GITHUB_REPOSITORY}/issues/${PR_NUMBER}/comments" diff --git a/src/tools/clippy/.gitignore b/src/tools/clippy/.gitignore index a7c25b29021..36a4cdc1c35 100644 --- a/src/tools/clippy/.gitignore +++ b/src/tools/clippy/.gitignore @@ -19,8 +19,10 @@ out # Generated by Cargo *Cargo.lock +!/clippy_test_deps/Cargo.lock /target /clippy_lints/target +/clippy_lints_internal/target /clippy_utils/target /clippy_dev/target /lintcheck/target diff --git a/src/tools/clippy/clippy_dev/src/fmt.rs b/src/tools/clippy/clippy_dev/src/fmt.rs index bd9e57c9f6d..2b2138d3108 100644 --- a/src/tools/clippy/clippy_dev/src/fmt.rs +++ b/src/tools/clippy/clippy_dev/src/fmt.rs @@ -3,7 +3,7 @@ use crate::utils::{ walk_dir_no_dot_or_target, }; use itertools::Itertools; -use rustc_lexer::{TokenKind, tokenize}; +use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize}; use std::fmt::Write; use std::fs; use std::io::{self, Read}; @@ -92,7 +92,7 @@ fn fmt_conf(check: bool) -> Result<(), Error> { let mut fields = Vec::new(); let mut state = State::Start; - for (i, t) in tokenize(conf) + for (i, t) in tokenize(conf, FrontmatterAllowed::No) .map(|x| { let start = pos; pos += x.len; diff --git a/src/tools/clippy/clippy_lints/src/approx_const.rs b/src/tools/clippy/clippy_lints/src/approx_const.rs index 5ed4c82634a..ab47e309752 100644 --- a/src/tools/clippy/clippy_lints/src/approx_const.rs +++ b/src/tools/clippy/clippy_lints/src/approx_const.rs @@ -2,7 +2,7 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::msrvs::{self, Msrv}; use rustc_ast::ast::{FloatTy, LitFloatType, LitKind}; 
-use rustc_attr_data_structures::RustcVersion; +use rustc_hir::RustcVersion; use rustc_hir::{HirId, Lit}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; @@ -92,9 +92,11 @@ impl LateLintPass<'_> for ApproxConstant { impl ApproxConstant { fn check_known_consts(&self, cx: &LateContext<'_>, span: Span, s: symbol::Symbol, module: &str) { let s = s.as_str(); - if s.parse::<f64>().is_ok() { + if let Ok(maybe_constant) = s.parse::<f64>() { for &(constant, name, min_digits, msrv) in &KNOWN_CONSTS { - if is_approx_const(constant, s, min_digits) && msrv.is_none_or(|msrv| self.msrv.meets(cx, msrv)) { + if is_approx_const(constant, s, maybe_constant, min_digits) + && msrv.is_none_or(|msrv| self.msrv.meets(cx, msrv)) + { span_lint_and_help( cx, APPROX_CONSTANT, @@ -112,18 +114,35 @@ impl ApproxConstant { impl_lint_pass!(ApproxConstant => [APPROX_CONSTANT]); +fn count_digits_after_dot(input: &str) -> usize { + input + .char_indices() + .find(|(_, ch)| *ch == '.') + .map_or(0, |(i, _)| input.len() - i - 1) +} + /// Returns `false` if the number of significant figures in `value` are /// less than `min_digits`; otherwise, returns true if `value` is equal -/// to `constant`, rounded to the number of digits present in `value`. +/// to `constant`, rounded to the number of significant digits present in `value`. #[must_use] -fn is_approx_const(constant: f64, value: &str, min_digits: usize) -> bool { +fn is_approx_const(constant: f64, value: &str, f_value: f64, min_digits: usize) -> bool { if value.len() <= min_digits { + // The value is not precise enough false - } else if constant.to_string().starts_with(value) { - // The value is a truncated constant + } else if f_value.to_string().len() > min_digits && constant.to_string().starts_with(&f_value.to_string()) { + // The value represents the same value true } else { - let round_const = format!("{constant:.*}", value.len() - 2); + // The value is a truncated constant + + // Print constant with numeric formatting (`0`), with the length of `value` as minimum width + // (`value_len$`), and with the same precision as `value` (`.value_prec$`). + // See https://doc.rust-lang.org/std/fmt/index.html. 
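For illustration of the width/precision formatting described in the comment above, a rough self-contained sketch (the literal "3.142" and the variable names are chosen only for this example; it is not the lint's exact code path):

    fn main() {
        let constant = std::f64::consts::PI; // 3.141592653589793
        let value = "3.142";                 // float literal as written in the source
        // digits after the dot, as `count_digits_after_dot` computes them
        let value_prec = value
            .char_indices()
            .find(|(_, c)| *c == '.')
            .map_or(0, |(i, _)| value.len() - i - 1);
        // zero-pad to the literal's length and round to its precision
        let round_const = format!("{constant:0value_len$.value_prec$}", value_len = value.len());
        assert_eq!(round_const, "3.142"); // equals `value`, so `3.142` is reported as an approximation of PI
    }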
+ let round_const = format!( + "{constant:0value_len$.value_prec$}", + value_len = value.len(), + value_prec = count_digits_after_dot(value) + ); value == round_const } } diff --git a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs index a9d3015ce5c..d6469d32931 100644 --- a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs +++ b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs @@ -6,13 +6,13 @@ use clippy_config::types::{ }; use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::is_cfg_test; -use rustc_attr_data_structures::AttributeKind; +use rustc_hir::attrs::AttributeKind; use rustc_hir::{ - Attribute, FieldDef, HirId, IsAuto, ImplItemId, Item, ItemKind, Mod, OwnerId, QPath, TraitItemId, TyKind, - Variant, VariantData, + Attribute, FieldDef, HirId, ImplItemId, IsAuto, Item, ItemKind, Mod, OwnerId, QPath, TraitItemId, TyKind, Variant, + VariantData, }; -use rustc_middle::ty::AssocKind; use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_middle::ty::AssocKind; use rustc_session::impl_lint_pass; use rustc_span::Ident; @@ -469,13 +469,14 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { /// This is implemented here because `rustc_hir` is not a dependency of /// `clippy_config`. fn convert_assoc_item_kind(cx: &LateContext<'_>, owner_id: OwnerId) -> SourceItemOrderingTraitAssocItemKind { - let kind = cx.tcx.associated_item(owner_id.def_id).kind; - #[allow(clippy::enum_glob_use)] // Very local glob use for legibility. use SourceItemOrderingTraitAssocItemKind::*; + + let kind = cx.tcx.associated_item(owner_id.def_id).kind; + match kind { - AssocKind::Const{..} => Const, - AssocKind::Type {..}=> Type, + AssocKind::Const { .. } => Const, + AssocKind::Type { .. } => Type, AssocKind::Fn { .. } => Fn, } } diff --git a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs index 9e09fb5bb43..085029a744b 100644 --- a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs +++ b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs @@ -73,7 +73,7 @@ impl<'tcx> LateLintPass<'tcx> for ArcWithNonSendSync { diag.note(format!( "`Arc<{arg_ty}>` is not `Send` and `Sync` as `{arg_ty}` is {reason}" )); - diag.help("if the `Arc` will not used be across threads replace it with an `Rc`"); + diag.help("if the `Arc` will not be used across threads replace it with an `Rc`"); diag.help(format!( "otherwise make `{arg_ty}` `Send` and `Sync` or consider a wrapper type such as `Mutex`" )); diff --git a/src/tools/clippy/clippy_lints/src/assigning_clones.rs b/src/tools/clippy/clippy_lints/src/assigning_clones.rs index 8b8b42bbf72..52287be34c7 100644 --- a/src/tools/clippy/clippy_lints/src/assigning_clones.rs +++ b/src/tools/clippy/clippy_lints/src/assigning_clones.rs @@ -98,7 +98,7 @@ impl<'tcx> LateLintPass<'tcx> for AssigningClones { // That is overly conservative - the lint should fire even if there was no initializer, // but the variable has been initialized before `lhs` was evaluated. && path_to_local(lhs).is_none_or(|lhs| local_is_initialized(cx, lhs)) - && let Some(resolved_impl) = cx.tcx.impl_of_method(resolved_fn.def_id()) + && let Some(resolved_impl) = cx.tcx.impl_of_assoc(resolved_fn.def_id()) // Derived forms don't implement `clone_from`/`clone_into`. 
// See https://github.com/rust-lang/rust/pull/98445#issuecomment-1190681305 && !cx.tcx.is_builtin_derived(resolved_impl) diff --git a/src/tools/clippy/clippy_lints/src/attrs/inline_always.rs b/src/tools/clippy/clippy_lints/src/attrs/inline_always.rs index b8f93ee5e2c..409bb698665 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/inline_always.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/inline_always.rs @@ -1,6 +1,7 @@ use super::INLINE_ALWAYS; use clippy_utils::diagnostics::span_lint; -use rustc_attr_data_structures::{AttributeKind, InlineAttr, find_attr}; +use rustc_hir::attrs::{AttributeKind, InlineAttr}; +use rustc_hir::find_attr; use rustc_hir::Attribute; use rustc_lint::LateContext; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs index 3e8808cec61..4ece3ed44fd 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/repr_attributes.rs @@ -1,4 +1,5 @@ -use rustc_attr_data_structures::{AttributeKind, ReprAttr, find_attr}; +use rustc_hir::attrs::{AttributeKind, ReprAttr}; +use rustc_hir::find_attr; use rustc_hir::Attribute; use rustc_lint::LateContext; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs index 4059f9603c3..b9b5cedb5aa 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs @@ -36,6 +36,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { | sym::unused_braces | sym::unused_import_braces | sym::unused_imports + | sym::redundant_imports ) { return; diff --git a/src/tools/clippy/clippy_lints/src/booleans.rs b/src/tools/clippy/clippy_lints/src/booleans.rs index 61c2fc49bd7..ba1135d745a 100644 --- a/src/tools/clippy/clippy_lints/src/booleans.rs +++ b/src/tools/clippy/clippy_lints/src/booleans.rs @@ -7,7 +7,7 @@ use clippy_utils::sugg::Sugg; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; use clippy_utils::{eq_expr_value, sym}; use rustc_ast::ast::LitKind; -use rustc_attr_data_structures::RustcVersion; +use rustc_hir::RustcVersion; use rustc_errors::Applicability; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr}; use rustc_hir::{BinOpKind, Body, Expr, ExprKind, FnDecl, UnOp}; diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs index e4dafde0f9d..a1543cabd2f 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs @@ -63,7 +63,7 @@ fn is_used_as_unaligned(cx: &LateContext<'_>, e: &Expr<'_>) -> bool { ExprKind::MethodCall(name, self_arg, ..) 
if self_arg.hir_id == e.hir_id => { if matches!(name.ident.name, sym::read_unaligned | sym::write_unaligned) && let Some(def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id) - && let Some(def_id) = cx.tcx.impl_of_method(def_id) + && let Some(def_id) = cx.tcx.impl_of_assoc(def_id) && cx.tcx.type_of(def_id).instantiate_identity().is_raw_ptr() { true diff --git a/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs index 769cc120c95..73347e7141e 100644 --- a/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs @@ -37,7 +37,7 @@ fn get_const_name_and_ty_name( } else { return None; } - } else if let Some(impl_id) = cx.tcx.impl_of_method(method_def_id) + } else if let Some(impl_id) = cx.tcx.impl_of_assoc(method_def_id) && let Some(ty_name) = get_primitive_ty_name(cx.tcx.type_of(impl_id).instantiate_identity()) && matches!( method_name, @@ -59,9 +59,8 @@ fn get_const_name_and_ty_name( pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) { // We allow casts from any function type to any function type. - match cast_to.kind() { - ty::FnDef(..) | ty::FnPtr(..) => return, - _ => { /* continue to checks */ }, + if cast_to.is_fn() { + return; } if let ty::FnDef(def_id, generics) = cast_from.kind() diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs index 55e27a05f3c..c5d9643f56a 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs @@ -3,7 +3,7 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::{FN_TO_NUMERIC_CAST, utils}; @@ -13,23 +13,20 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, return; }; - match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(..) 
=> { - let mut applicability = Applicability::MaybeIncorrect; + if cast_from.is_fn() { + let mut applicability = Applicability::MaybeIncorrect; - if to_nbits >= cx.tcx.data_layout.pointer_size().bits() && !cast_to.is_usize() { - let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); - span_lint_and_sugg( - cx, - FN_TO_NUMERIC_CAST, - expr.span, - format!("casting function pointer `{from_snippet}` to `{cast_to}`"), - "try", - format!("{from_snippet} as usize"), - applicability, - ); - } - }, - _ => {}, + if to_nbits >= cx.tcx.data_layout.pointer_size().bits() && !cast_to.is_usize() { + let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); + span_lint_and_sugg( + cx, + FN_TO_NUMERIC_CAST, + expr.span, + format!("casting function pointer `{from_snippet}` to `{cast_to}`"), + "try", + format!("{from_snippet} as usize"), + applicability, + ); + } } } diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs index b22e8f4ee89..43ee91af6e5 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs @@ -3,18 +3,17 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::FN_TO_NUMERIC_CAST_ANY; pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) { // We allow casts from any function type to any function type. - match cast_to.kind() { - ty::FnDef(..) | ty::FnPtr(..) => return, - _ => { /* continue to checks */ }, + if cast_to.is_fn() { + return; } - if let ty::FnDef(..) | ty::FnPtr(..) = cast_from.kind() { + if cast_from.is_fn() { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs index 4da79205e20..9a2e44e07d4 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs @@ -3,7 +3,7 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::{FN_TO_NUMERIC_CAST_WITH_TRUNCATION, utils}; @@ -12,23 +12,20 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, let Some(to_nbits) = utils::int_ty_to_nbits(cx.tcx, cast_to) else { return; }; - match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(..) 
=> { - let mut applicability = Applicability::MaybeIncorrect; - let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); + if cast_from.is_fn() { + let mut applicability = Applicability::MaybeIncorrect; + let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); - if to_nbits < cx.tcx.data_layout.pointer_size().bits() { - span_lint_and_sugg( - cx, - FN_TO_NUMERIC_CAST_WITH_TRUNCATION, - expr.span, - format!("casting function pointer `{from_snippet}` to `{cast_to}`, which truncates the value"), - "try", - format!("{from_snippet} as usize"), - applicability, - ); - } - }, - _ => {}, + if to_nbits < cx.tcx.data_layout.pointer_size().bits() { + span_lint_and_sugg( + cx, + FN_TO_NUMERIC_CAST_WITH_TRUNCATION, + expr.span, + format!("casting function pointer `{from_snippet}` to `{cast_to}`, which truncates the value"), + "try", + format!("{from_snippet} as usize"), + applicability, + ); + } } } diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs index 6f944914b8f..ee0f3fa81c6 100644 --- a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs @@ -4,10 +4,9 @@ use clippy_utils::source::snippet_with_applicability; use clippy_utils::sugg::Sugg; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Mutability, QPath, TyKind}; -use rustc_hir_pretty::qpath_to_string; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::sym; +use rustc_span::{Span, sym}; use super::PTR_AS_PTR; @@ -74,7 +73,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Msrv) { let (help, final_suggestion) = if let Some(method) = omit_cast.corresponding_item() { // don't force absolute path - let method = qpath_to_string(&cx.tcx, method); + let method = snippet_with_applicability(cx, qpath_span_without_turbofish(method), "..", &mut app); ("try call directly", format!("{method}{turbofish}()")) } else { let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut app); @@ -96,3 +95,14 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Msrv) { ); } } + +fn qpath_span_without_turbofish(qpath: &QPath<'_>) -> Span { + if let QPath::Resolved(_, path) = qpath + && let [.., last_ident] = path.segments + && last_ident.args.is_some() + { + return qpath.span().shrink_to_lo().to(last_ident.ident.span); + } + + qpath.span() +} diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs index d5d937d9133..518535e8c8b 100644 --- a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs +++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs @@ -110,7 +110,6 @@ impl CognitiveComplexity { FnKind::ItemFn(ident, _, _) | FnKind::Method(ident, _) => ident.span, FnKind::Closure => { let header_span = body_span.with_hi(decl.output.span().lo()); - #[expect(clippy::range_plus_one)] if let Some(range) = header_span.map_range(cx, |_, src, range| { let mut idxs = src.get(range.clone())?.match_indices('|'); Some(range.start + idxs.next()?.0..range.start + idxs.next()?.0 + 1) diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs index 27918698cd6..4bd34527d21 100644 --- a/src/tools/clippy/clippy_lints/src/copies.rs +++ b/src/tools/clippy/clippy_lints/src/copies.rs @@ -1,5 +1,6 @@ use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint, span_lint_and_note, 
span_lint_and_then}; +use clippy_utils::higher::has_let_expr; use clippy_utils::source::{IntoSpan, SpanRangeExt, first_line_of_span, indent_of, reindent_multiline, snippet}; use clippy_utils::ty::{InteriorMut, needs_ordered_drop}; use clippy_utils::visitors::for_each_expr_without_closures; @@ -11,7 +12,7 @@ use clippy_utils::{ use core::iter; use core::ops::ControlFlow; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, LetStmt, Node, Stmt, StmtKind, intravisit}; +use rustc_hir::{Block, Expr, ExprKind, HirId, HirIdSet, LetStmt, Node, Stmt, StmtKind, intravisit}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::TyCtxt; use rustc_session::impl_lint_pass; @@ -189,24 +190,13 @@ impl<'tcx> LateLintPass<'tcx> for CopyAndPaste<'tcx> { } } -/// Checks if the given expression is a let chain. -fn contains_let(e: &Expr<'_>) -> bool { - match e.kind { - ExprKind::Let(..) => true, - ExprKind::Binary(op, lhs, rhs) if op.node == BinOpKind::And => { - matches!(lhs.kind, ExprKind::Let(..)) || contains_let(rhs) - }, - _ => false, - } -} - fn lint_if_same_then_else(cx: &LateContext<'_>, conds: &[&Expr<'_>], blocks: &[&Block<'_>]) -> bool { let mut eq = SpanlessEq::new(cx); blocks .array_windows::<2>() .enumerate() .fold(true, |all_eq, (i, &[lhs, rhs])| { - if eq.eq_block(lhs, rhs) && !contains_let(conds[i]) && conds.get(i + 1).is_none_or(|e| !contains_let(e)) { + if eq.eq_block(lhs, rhs) && !has_let_expr(conds[i]) && conds.get(i + 1).is_none_or(|e| !has_let_expr(e)) { span_lint_and_note( cx, IF_SAME_THEN_ELSE, diff --git a/src/tools/clippy/clippy_lints/src/default_union_representation.rs b/src/tools/clippy/clippy_lints/src/default_union_representation.rs index 9bf2144e445..f41255e54db 100644 --- a/src/tools/clippy/clippy_lints/src/default_union_representation.rs +++ b/src/tools/clippy/clippy_lints/src/default_union_representation.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; -use rustc_attr_data_structures::{AttributeKind, ReprAttr, find_attr}; +use rustc_hir::attrs::{AttributeKind, ReprAttr}; +use rustc_hir::find_attr; use rustc_hir::{HirId, Item, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::layout::LayoutOf; diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index 5099df3fa02..995a1209595 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -364,7 +364,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> { // * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take // priority. 
if let Some(fn_id) = typeck.type_dependent_def_id(hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && let arg_ty = cx.tcx.erase_regions(adjusted_ty) && let ty::Ref(_, sub_ty, _) = *arg_ty.kind() && let args = diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index 062f7cef3a7..49dd1bb09c6 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -432,6 +432,11 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> { fn visit_expr(&mut self, expr: &'tcx Expr<'_>) -> Self::Result { if let ExprKind::Block(block, _) = expr.kind && block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) + && block + .span + .source_callee() + .and_then(|expr| expr.macro_def_id) + .is_none_or(|did| !self.cx.tcx.is_diagnostic_item(sym::pin_macro, did)) { return ControlFlow::Break(()); } diff --git a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs index d55aeae98ed..23e7c7251cf 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs @@ -72,11 +72,11 @@ pub struct DisallowedMacros { // When a macro is disallowed in an early pass, it's stored // and emitted during the late pass. This happens for attributes. - earlies: AttrStorage, + early_macro_cache: AttrStorage, } impl DisallowedMacros { - pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf, earlies: AttrStorage) -> Self { + pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf, early_macro_cache: AttrStorage) -> Self { let (disallowed, _) = create_disallowed_map( tcx, &conf.disallowed_macros, @@ -89,7 +89,7 @@ impl DisallowedMacros { disallowed, seen: FxHashSet::default(), derive_src: None, - earlies, + early_macro_cache, } } @@ -130,7 +130,7 @@ impl_lint_pass!(DisallowedMacros => [DISALLOWED_MACROS]); impl LateLintPass<'_> for DisallowedMacros { fn check_crate(&mut self, cx: &LateContext<'_>) { // once we check a crate in the late pass we can emit the early pass lints - if let Some(attr_spans) = self.earlies.clone().0.get() { + if let Some(attr_spans) = self.early_macro_cache.clone().0.get() { for span in attr_spans { self.check(cx, *span, None); } diff --git a/src/tools/clippy/clippy_lints/src/doc/mod.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs index 22b781b8929..ea0da0d2467 100644 --- a/src/tools/clippy/clippy_lints/src/doc/mod.rs +++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs @@ -1232,7 +1232,6 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize headers } -#[expect(clippy::range_plus_one)] // inclusive ranges aren't the same type fn looks_like_refdef(doc: &str, range: Range<usize>) -> Option<Range<usize>> { if range.end < range.start { return None; diff --git a/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs index ebfc9972aef..1e7d1f92fa3 100644 --- a/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs +++ b/src/tools/clippy/clippy_lints/src/doc/suspicious_doc_comments.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use rustc_ast::AttrStyle; use rustc_ast::token::CommentKind; -use rustc_attr_data_structures::AttributeKind; +use rustc_hir::attrs::AttributeKind; use rustc_errors::Applicability; use rustc_hir::Attribute; use rustc_lint::LateContext; diff --git 
a/src/tools/clippy/clippy_lints/src/doc/too_long_first_doc_paragraph.rs b/src/tools/clippy/clippy_lints/src/doc/too_long_first_doc_paragraph.rs index 7f7224ecfc6..32ba696b3ec 100644 --- a/src/tools/clippy/clippy_lints/src/doc/too_long_first_doc_paragraph.rs +++ b/src/tools/clippy/clippy_lints/src/doc/too_long_first_doc_paragraph.rs @@ -1,4 +1,4 @@ -use rustc_attr_data_structures::AttributeKind; +use rustc_hir::attrs::AttributeKind; use rustc_errors::Applicability; use rustc_hir::{Attribute, Item, ItemKind}; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs index 4414aebbf9a..f2757407ba5 100644 --- a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs +++ b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs @@ -92,8 +92,10 @@ impl_lint_pass!(EmptyWithBrackets => [EMPTY_STRUCTS_WITH_BRACKETS, EMPTY_ENUM_VA impl LateLintPass<'_> for EmptyWithBrackets { fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) { + // FIXME: handle `struct $name {}` if let ItemKind::Struct(ident, _, var_data) = &item.kind && !item.span.from_expansion() + && !ident.span.from_expansion() && has_brackets(var_data) && let span_after_ident = item.span.with_lo(ident.span.hi()) && has_no_fields(cx, var_data, span_after_ident) @@ -116,10 +118,12 @@ impl LateLintPass<'_> for EmptyWithBrackets { } fn check_variant(&mut self, cx: &LateContext<'_>, variant: &Variant<'_>) { - // the span of the parentheses/braces - let span_after_ident = variant.span.with_lo(variant.ident.span.hi()); - - if has_no_fields(cx, &variant.data, span_after_ident) { + // FIXME: handle `$name {}` + if !variant.span.from_expansion() + && !variant.ident.span.from_expansion() + && let span_after_ident = variant.span.with_lo(variant.ident.span.hi()) + && has_no_fields(cx, &variant.data, span_after_ident) + { match variant.data { VariantData::Struct { .. } => { // Empty struct variants can be linted immediately diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs index db2fea1aae9..fc224fa5f92 100644 --- a/src/tools/clippy/clippy_lints/src/escape.rs +++ b/src/tools/clippy/clippy_lints/src/escape.rs @@ -1,8 +1,8 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_hir; use rustc_abi::ExternAbi; -use rustc_hir::{Body, FnDecl, HirId, HirIdSet, Node, Pat, PatKind, intravisit}; use rustc_hir::def::DefKind; +use rustc_hir::{Body, FnDecl, HirId, HirIdSet, Node, Pat, PatKind, intravisit}; use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir::FakeReadCause; @@ -87,16 +87,14 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { let mut trait_self_ty = None; match cx.tcx.def_kind(parent_id) { // If the method is an impl for a trait, don't warn. 
- DefKind::Impl { of_trait: true } => { - return - } + DefKind::Impl { of_trait: true } => return, // find `self` ty for this trait if relevant DefKind::Trait => { trait_self_ty = Some(TraitRef::identity(cx.tcx, parent_id.to_def_id()).self_ty()); - } + }, - _ => {} + _ => {}, } let mut v = EscapeDelegate { diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs index 0288747d6f3..ba539d05b6b 100644 --- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs +++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs @@ -7,7 +7,8 @@ use clippy_utils::{ get_path_from_caller_to_method_type, is_adjusted, is_no_std_crate, path_to_local, path_to_local_id, }; use rustc_abi::ExternAbi; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_errors::Applicability; use rustc_hir::{BindingMode, Expr, ExprKind, FnRetTy, GenericArgs, Param, PatKind, QPath, Safety, TyKind}; use rustc_infer::infer::TyCtxtInferExt; @@ -29,12 +30,6 @@ declare_clippy_lint! { /// Needlessly creating a closure adds code for no benefit /// and gives the optimizer more work. /// - /// ### Known problems - /// If creating the closure inside the closure has a side- - /// effect then moving the closure creation out will change when that side- - /// effect runs. - /// See [#1439](https://github.com/rust-lang/rust-clippy/issues/1439) for more details. - /// /// ### Example /// ```rust,ignore /// xs.map(|x| foo(x)) diff --git a/src/tools/clippy/clippy_lints/src/exhaustive_items.rs b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs index 8ad09279071..5f40e576443 100644 --- a/src/tools/clippy/clippy_lints/src/exhaustive_items.rs +++ b/src/tools/clippy/clippy_lints/src/exhaustive_items.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::indent_of; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_errors::Applicability; use rustc_hir::{Item, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs index 552cd721f4e..fdfcbb540bc 100644 --- a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs +++ b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs @@ -64,8 +64,8 @@ impl<'tcx> LateLintPass<'tcx> for FallibleImplFrom { } fn lint_impl_body(cx: &LateContext<'_>, item_def_id: hir::OwnerId, impl_span: Span) { - use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::Expr; + use rustc_hir::intravisit::{self, Visitor}; struct FindPanicUnwrap<'a, 'tcx> { lcx: &'a LateContext<'tcx>, @@ -96,10 +96,12 @@ fn lint_impl_body(cx: &LateContext<'_>, item_def_id: hir::OwnerId, impl_span: Sp } } - for impl_item in cx.tcx.associated_items(item_def_id) + for impl_item in cx + .tcx + .associated_items(item_def_id) .filter_by_name_unhygienic_and_kind(sym::from, ty::AssocTag::Fn) { - let impl_item_def_id= impl_item.def_id.expect_local(); + let impl_item_def_id = impl_item.def_id.expect_local(); // check the body for `begin_panic` or `unwrap` let body = cx.tcx.hir_body_owned_by(impl_item_def_id); diff --git a/src/tools/clippy/clippy_lints/src/float_literal.rs b/src/tools/clippy/clippy_lints/src/float_literal.rs index c51267567d0..ccaf38aee4d 100644 --- a/src/tools/clippy/clippy_lints/src/float_literal.rs +++ 
b/src/tools/clippy/clippy_lints/src/float_literal.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::numeric_literal; -use rustc_ast::ast::{self, LitFloatType, LitKind}; +use rustc_ast::ast::{LitFloatType, LitKind}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; @@ -75,10 +75,10 @@ impl<'tcx> LateLintPass<'tcx> for FloatLiteral { let digits = count_digits(sym_str); let max = max_digits(fty); let type_suffix = match lit_float_ty { - LitFloatType::Suffixed(ast::FloatTy::F16) => Some("f16"), - LitFloatType::Suffixed(ast::FloatTy::F32) => Some("f32"), - LitFloatType::Suffixed(ast::FloatTy::F64) => Some("f64"), - LitFloatType::Suffixed(ast::FloatTy::F128) => Some("f128"), + LitFloatType::Suffixed(FloatTy::F16) => Some("f16"), + LitFloatType::Suffixed(FloatTy::F32) => Some("f32"), + LitFloatType::Suffixed(FloatTy::F64) => Some("f64"), + LitFloatType::Suffixed(FloatTy::F128) => Some("f128"), LitFloatType::Unsuffixed => None, }; let (is_whole, is_inf, mut float_str) = match fty { diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs index 16c58ecb455..af4202422e4 100644 --- a/src/tools/clippy/clippy_lints/src/format_args.rs +++ b/src/tools/clippy/clippy_lints/src/format_args.rs @@ -17,7 +17,8 @@ use rustc_ast::{ FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions, FormatPlaceholder, FormatTrait, }; -use rustc_attr_data_structures::RustcVersion; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::{find_attr,RustcVersion}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::Applicability; use rustc_errors::SuggestionStyle::{CompletelyHidden, ShowCode}; @@ -30,7 +31,6 @@ use rustc_span::edition::Edition::Edition2021; use rustc_span::{Span, Symbol, sym}; use rustc_trait_selection::infer::TyCtxtInferExt; use rustc_trait_selection::traits::{Obligation, ObligationCause, Selection, SelectionContext}; -use rustc_attr_data_structures::{AttributeKind, find_attr}; declare_clippy_lint! { /// ### What it does @@ -129,6 +129,7 @@ declare_clippy_lint! { /// # let width = 1; /// # let prec = 2; /// format!("{}", var); + /// format!("{:?}", var); /// format!("{v:?}", v = var); /// format!("{0} {0}", var); /// format!("{0:1$}", var, width); @@ -141,6 +142,7 @@ declare_clippy_lint! { /// # let prec = 2; /// format!("{var}"); /// format!("{var:?}"); + /// format!("{var:?}"); /// format!("{var} {var}"); /// format!("{var:width$}"); /// format!("{var:.prec$}"); @@ -164,7 +166,7 @@ declare_clippy_lint! { /// nothing will be suggested, e.g. `println!("{0}={1}", var, 1+2)`. #[clippy::version = "1.66.0"] pub UNINLINED_FORMAT_ARGS, - style, + pedantic, "using non-inlined variables in `format!` calls" } @@ -657,7 +659,10 @@ impl<'tcx> FormatArgsExpr<'_, 'tcx> { }; let selection = SelectionContext::new(&infcx).select(&obligation); let derived = if let Ok(Some(Selection::UserDefined(data))) = selection { - find_attr!(tcx.get_all_attrs(data.impl_def_id), AttributeKind::AutomaticallyDerived(..)) + find_attr!( + tcx.get_all_attrs(data.impl_def_id), + AttributeKind::AutomaticallyDerived(..) 
+ ) } else { false }; diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs index 85b40ba7419..1da6952eb64 100644 --- a/src/tools/clippy/clippy_lints/src/from_over_into.rs +++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs @@ -9,7 +9,7 @@ use clippy_utils::source::SpanRangeExt; use rustc_errors::Applicability; use rustc_hir::intravisit::{Visitor, walk_path}; use rustc_hir::{ - FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItemKind, ImplItemId, Item, ItemKind, PatKind, Path, + FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItemId, ImplItemKind, Item, ItemKind, PatKind, Path, PathSegment, Ty, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs index d959981a83c..55ca0d9ecb7 100644 --- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs +++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs @@ -10,11 +10,12 @@ use rustc_span::{Span, sym}; use clippy_utils::attrs::is_proc_macro; use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then}; -use clippy_utils::source::SpanRangeExt; +use clippy_utils::source::snippet_indent; use clippy_utils::ty::is_must_use_ty; use clippy_utils::visitors::for_each_expr_without_closures; use clippy_utils::{return_ty, trait_ref_of_method}; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_span::Symbol; use rustc_trait_selection::error_reporting::InferCtxtErrorExt; @@ -28,6 +29,7 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> if let hir::ItemKind::Fn { ref sig, body: ref body_id, + ident, .. 
} = item.kind { @@ -51,8 +53,8 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> sig.decl, cx.tcx.hir_body(*body_id), item.span, + ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this function could have a `#[must_use]` attribute", ); } @@ -84,8 +86,8 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp sig.decl, cx.tcx.hir_body(*body_id), item.span, + item.ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); } @@ -120,8 +122,8 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr sig.decl, body, item.span, + item.ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); } @@ -198,8 +200,8 @@ fn check_must_use_candidate<'tcx>( decl: &'tcx hir::FnDecl<'_>, body: &'tcx hir::Body<'_>, item_span: Span, + ident_span: Span, item_id: hir::OwnerId, - fn_span: Span, msg: &'static str, ) { if has_mutable_arg(cx, body) @@ -208,18 +210,18 @@ fn check_must_use_candidate<'tcx>( || returns_unit(decl) || !cx.effective_visibilities.is_exported(item_id.def_id) || is_must_use_ty(cx, return_ty(cx, item_id)) + || item_span.from_expansion() { return; } - span_lint_and_then(cx, MUST_USE_CANDIDATE, fn_span, msg, |diag| { - if let Some(snippet) = fn_span.get_source_text(cx) { - diag.span_suggestion( - fn_span, - "add the attribute", - format!("#[must_use] {snippet}"), - Applicability::MachineApplicable, - ); - } + span_lint_and_then(cx, MUST_USE_CANDIDATE, ident_span, msg, |diag| { + let indent = snippet_indent(cx, item_span).unwrap_or_default(); + diag.span_suggestion( + item_span.shrink_to_lo(), + "add the attribute", + format!("#[must_use] \n{indent}"), + Applicability::MachineApplicable, + ); }); } diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs index 9e94280fc07..7158f9419c1 100644 --- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs +++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs @@ -5,7 +5,8 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_with_context; use clippy_utils::sugg::Sugg; use clippy_utils::{ - contains_return, higher, is_else_clause, is_in_const_context, is_res_lang_ctor, path_res, peel_blocks, + contains_return, expr_adjustment_requires_coercion, higher, is_else_clause, is_in_const_context, is_res_lang_ctor, + path_res, peel_blocks, }; use rustc_errors::Applicability; use rustc_hir::LangItem::{OptionNone, OptionSome}; @@ -92,6 +93,10 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone { expr.span, format!("this could be simplified with `bool::{method_name}`"), |diag| { + if expr_adjustment_requires_coercion(cx, then_arg) { + return; + } + let mut app = Applicability::MachineApplicable; let cond_snip = Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "[condition]", &mut app) .maybe_paren() diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs index c743501da25..c634c12e187 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs @@ -339,7 +339,7 @@ fn check_with_condition<'tcx>( ExprKind::Path(QPath::TypeRelative(_, name)) => { if name.ident.name == sym::MIN && let Some(const_id) = 
cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(const_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(const_id) && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl && cx.tcx.type_of(impl_id).instantiate_identity().is_integral() { @@ -350,7 +350,7 @@ fn check_with_condition<'tcx>( if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind && name.ident.name == sym::min_value && let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(func_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(func_id) && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl && cx.tcx.type_of(impl_id).instantiate_identity().is_integral() { diff --git a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs index 5d0bd3e8ca3..85ebc830d3b 100644 --- a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs +++ b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs @@ -1,10 +1,11 @@ use clippy_config::Conf; -use clippy_utils::diagnostics::span_lint; -use clippy_utils::is_in_test; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::msrvs::Msrv; -use rustc_attr_data_structures::{RustcVersion, StabilityLevel, StableSince}; +use clippy_utils::{is_in_const_context, is_in_test}; +use rustc_hir::{RustcVersion, StabilityLevel, StableSince}; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::{Expr, ExprKind, HirId, QPath}; +use rustc_hir::def::DefKind; +use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, HirId, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::TyCtxt; use rustc_session::impl_lint_pass; @@ -33,15 +34,54 @@ declare_clippy_lint! { /// /// To fix this problem, either increase your MSRV or use another item /// available in your current MSRV. + /// + /// You can also locally change the MSRV that should be checked by Clippy, + /// for example if a feature in your crate (e.g., `modern_compiler`) should + /// allow you to use an item: + /// + /// ```no_run + /// //! This crate has a MSRV of 1.3.0, but we also have an optional feature + /// //! `sleep_well` which requires at least Rust 1.4.0. + /// + /// // When the `sleep_well` feature is set, do not warn for functions available + /// // in Rust 1.4.0 and below. + /// #![cfg_attr(feature = "sleep_well", clippy::msrv = "1.4.0")] + /// + /// use std::time::Duration; + /// + /// #[cfg(feature = "sleep_well")] + /// fn sleep_for_some_time() { + /// std::thread::sleep(Duration::new(1, 0)); // Will not trigger the lint + /// } + /// ``` + /// + /// You can also increase the MSRV in tests, by using: + /// + /// ```no_run + /// // Use a much higher MSRV for tests while keeping the main one low + /// #![cfg_attr(test, clippy::msrv = "1.85.0")] + /// + /// #[test] + /// fn my_test() { + /// // The tests can use items introduced in Rust 1.85.0 and lower + /// // without triggering the `incompatible_msrv` lint. 
+ /// } + /// ``` #[clippy::version = "1.78.0"] pub INCOMPATIBLE_MSRV, suspicious, "ensures that all items used in the crate are available for the current MSRV" } +#[derive(Clone, Copy)] +enum Availability { + FeatureEnabled, + Since(RustcVersion), +} + pub struct IncompatibleMsrv { msrv: Msrv, - is_above_msrv: FxHashMap<DefId, RustcVersion>, + availability_cache: FxHashMap<(DefId, bool), Availability>, check_in_tests: bool, } @@ -51,38 +91,50 @@ impl IncompatibleMsrv { pub fn new(conf: &'static Conf) -> Self { Self { msrv: conf.msrv, - is_above_msrv: FxHashMap::default(), + availability_cache: FxHashMap::default(), check_in_tests: conf.check_incompatible_msrv_in_tests, } } - fn get_def_id_version(&mut self, tcx: TyCtxt<'_>, def_id: DefId) -> RustcVersion { - if let Some(version) = self.is_above_msrv.get(&def_id) { - return *version; + /// Returns the availability of `def_id`, whether it is enabled through a feature or + /// available since a given version (the default being Rust 1.0.0). `needs_const` requires + /// the `const`-stability to be looked up instead of the stability in non-`const` contexts. + fn get_def_id_availability(&mut self, tcx: TyCtxt<'_>, def_id: DefId, needs_const: bool) -> Availability { + if let Some(availability) = self.availability_cache.get(&(def_id, needs_const)) { + return *availability; } - let version = if let Some(version) = tcx - .lookup_stability(def_id) - .and_then(|stability| match stability.level { - StabilityLevel::Stable { - since: StableSince::Version(version), - .. - } => Some(version), - _ => None, - }) { - version + let (feature, stability_level) = if needs_const { + tcx.lookup_const_stability(def_id) + .map(|stability| (stability.feature, stability.level)) + .unzip() + } else { + tcx.lookup_stability(def_id) + .map(|stability| (stability.feature, stability.level)) + .unzip() + }; + let version = if feature.is_some_and(|feature| tcx.features().enabled(feature)) { + Availability::FeatureEnabled + } else if let Some(StableSince::Version(version)) = + stability_level.as_ref().and_then(StabilityLevel::stable_since) + { + Availability::Since(version) + } else if needs_const { + // Fallback to regular stability + self.get_def_id_availability(tcx, def_id, false) } else if let Some(parent_def_id) = tcx.opt_parent(def_id) { - self.get_def_id_version(tcx, parent_def_id) + self.get_def_id_availability(tcx, parent_def_id, needs_const) } else { - RustcVersion { + Availability::Since(RustcVersion { major: 1, minor: 0, patch: 0, - } + }) }; - self.is_above_msrv.insert(def_id, version); + self.availability_cache.insert((def_id, needs_const), version); version } + /// Emit lint if `def_id`, associated with `node` and `span`, is below the current MSRV. fn emit_lint_if_under_msrv(&mut self, cx: &LateContext<'_>, def_id: DefId, node: HirId, span: Span) { if def_id.is_local() { // We don't check local items since their MSRV is supposed to always be valid. 
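As a rough illustration of the `const`-stability lookup introduced above (a sketch; the version numbers come from std's const-stabilization history and are only indicative): with a low MSRV, an item that has long been stable can still be flagged when used in `const` position, because its `const`-stabilization is usually more recent.

    // Assuming the crate sets e.g. `#![clippy::msrv = "1.30.0"]`:
    const LEN: usize = {
        let xs: &[u8] = &[1, 2, 3];
        // `<[T]>::len` is stable since 1.0.0 but only callable in `const`
        // contexts since Rust 1.39.0, so this call is now linted against the MSRV.
        xs.len()
    };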
@@ -108,18 +160,28 @@ impl IncompatibleMsrv { return; } + let needs_const = cx.enclosing_body.is_some() + && is_in_const_context(cx) + && matches!(cx.tcx.def_kind(def_id), DefKind::AssocFn | DefKind::Fn); + if (self.check_in_tests || !is_in_test(cx.tcx, node)) && let Some(current) = self.msrv.current(cx) - && let version = self.get_def_id_version(cx.tcx, def_id) + && let Availability::Since(version) = self.get_def_id_availability(cx.tcx, def_id, needs_const) && version > current { - span_lint( + span_lint_and_then( cx, INCOMPATIBLE_MSRV, span, format!( - "current MSRV (Minimum Supported Rust Version) is `{current}` but this item is stable since `{version}`" + "current MSRV (Minimum Supported Rust Version) is `{current}` but this item is stable{} since `{version}`", + if needs_const { " in a `const` context" } else { "" }, ), + |diag| { + if is_under_cfg_attribute(cx, node) { + diag.note_once("you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute"); + } + }, ); } } @@ -133,17 +195,38 @@ impl<'tcx> LateLintPass<'tcx> for IncompatibleMsrv { self.emit_lint_if_under_msrv(cx, method_did, expr.hir_id, span); } }, - ExprKind::Call(call, _) => { - // Desugaring into function calls by the compiler will use `QPath::LangItem` variants. Those should - // not be linted as they will not be generated in older compilers if the function is not available, - // and the compiler is allowed to call unstable functions. - if let ExprKind::Path(qpath @ (QPath::Resolved(..) | QPath::TypeRelative(..))) = call.kind - && let Some(path_def_id) = cx.qpath_res(&qpath, call.hir_id).opt_def_id() - { - self.emit_lint_if_under_msrv(cx, path_def_id, expr.hir_id, call.span); + // Desugaring into function calls by the compiler will use `QPath::LangItem` variants. Those should + // not be linted as they will not be generated in older compilers if the function is not available, + // and the compiler is allowed to call unstable functions. + ExprKind::Path(qpath @ (QPath::Resolved(..) | QPath::TypeRelative(..))) => { + if let Some(path_def_id) = cx.qpath_res(&qpath, expr.hir_id).opt_def_id() { + self.emit_lint_if_under_msrv(cx, path_def_id, expr.hir_id, expr.span); } }, _ => {}, } } + + fn check_ty(&mut self, cx: &LateContext<'tcx>, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) { + if let hir::TyKind::Path(qpath @ (QPath::Resolved(..) | QPath::TypeRelative(..))) = hir_ty.kind + && let Some(ty_def_id) = cx.qpath_res(&qpath, hir_ty.hir_id).opt_def_id() + // `CStr` and `CString` have been moved around but have been available since Rust 1.0.0 + && !matches!(cx.tcx.get_diagnostic_name(ty_def_id), Some(sym::cstr_type | sym::cstring_type)) + { + self.emit_lint_if_under_msrv(cx, ty_def_id, hir_ty.hir_id, hir_ty.span); + } + } +} + +/// Heuristic checking if the node `hir_id` is under a `#[cfg()]` or `#[cfg_attr()]` +/// attribute. 
+fn is_under_cfg_attribute(cx: &LateContext<'_>, hir_id: HirId) -> bool { + cx.tcx.hir_parent_id_iter(hir_id).any(|id| { + cx.tcx.hir_attrs(id).iter().any(|attr| { + matches!( + attr.ident().map(|ident| ident.name), + Some(sym::cfg_trace | sym::cfg_attr_trace) + ) + }) + }) } diff --git a/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs index 7a751514b64..a159f615718 100644 --- a/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs +++ b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs @@ -1,13 +1,12 @@ -use crate::methods::method_call; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::{peel_blocks, sym}; +use clippy_utils::source::SpanRangeExt; +use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::{peel_blocks, peel_hir_expr_while, sym}; use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty; use rustc_session::declare_lint_pass; -use rustc_span::{BytePos, Span}; declare_clippy_lint! { /// ### What it does @@ -43,53 +42,58 @@ declare_clippy_lint! { declare_lint_pass!(IneffectiveOpenOptions => [INEFFECTIVE_OPEN_OPTIONS]); -fn index_if_arg_is_boolean(args: &[Expr<'_>], call_span: Span) -> Option<Span> { - if let [arg] = args - && let ExprKind::Lit(lit) = peel_blocks(arg).kind - && lit.node == LitKind::Bool(true) - { - // The `.` is not included in the span so we cheat a little bit to include it as well. - Some(call_span.with_lo(call_span.lo() - BytePos(1))) - } else { - None - } -} - impl<'tcx> LateLintPass<'tcx> for IneffectiveOpenOptions { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some((sym::open, mut receiver, [_arg], _, _)) = method_call(expr) else { - return; - }; - let receiver_ty = cx.typeck_results().expr_ty(receiver); - match receiver_ty.peel_refs().kind() { - ty::Adt(adt, _) if cx.tcx.is_diagnostic_item(sym::FsOpenOptions, adt.did()) => {}, - _ => return, - } - - let mut append = None; - let mut write = None; + if let ExprKind::MethodCall(name, recv, [_], _) = expr.kind + && name.ident.name == sym::open + && !expr.span.from_expansion() + && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv).peel_refs(), sym::FsOpenOptions) + { + let mut append = false; + let mut write = None; + peel_hir_expr_while(recv, |e| { + if let ExprKind::MethodCall(name, recv, args, call_span) = e.kind + && !e.span.from_expansion() + { + if let [arg] = args + && let ExprKind::Lit(lit) = peel_blocks(arg).kind + && matches!(lit.node, LitKind::Bool(true)) + && !arg.span.from_expansion() + && !lit.span.from_expansion() + { + match name.ident.name { + sym::append => append = true, + sym::write + if let Some(range) = call_span.map_range(cx, |_, text, range| { + if text.get(..range.start)?.ends_with('.') { + Some(range.start - 1..range.end) + } else { + None + } + }) => + { + write = Some(call_span.with_lo(range.start)); + }, + _ => {}, + } + } + Some(recv) + } else { + None + } + }); - while let Some((name, recv, args, _, span)) = method_call(receiver) { - if name == sym::append { - append = index_if_arg_is_boolean(args, span); - } else if name == sym::write { - write = index_if_arg_is_boolean(args, span); + if append && let Some(write_span) = write { + span_lint_and_sugg( + cx, + INEFFECTIVE_OPEN_OPTIONS, + write_span, + "unnecessary use of `.write(true)` because there is `.append(true)`", + "remove `.write(true)`", + String::new(), + 
Applicability::MachineApplicable, + ); } - receiver = recv; - } - - if let Some(write_span) = write - && append.is_some() - { - span_lint_and_sugg( - cx, - INEFFECTIVE_OPEN_OPTIONS, - write_span, - "unnecessary use of `.write(true)` because there is `.append(true)`", - "remove `.write(true)`", - String::new(), - Applicability::MachineApplicable, - ); } } } diff --git a/src/tools/clippy/clippy_lints/src/infallible_try_from.rs b/src/tools/clippy/clippy_lints/src/infallible_try_from.rs index e79fcec6e6a..f7cdf05359a 100644 --- a/src/tools/clippy/clippy_lints/src/infallible_try_from.rs +++ b/src/tools/clippy/clippy_lints/src/infallible_try_from.rs @@ -52,13 +52,17 @@ impl<'tcx> LateLintPass<'tcx> for InfallibleTryFrom { if !cx.tcx.is_diagnostic_item(sym::TryFrom, trait_def_id) { return; } - for ii in cx.tcx.associated_items(item.owner_id.def_id) + for ii in cx + .tcx + .associated_items(item.owner_id.def_id) .filter_by_name_unhygienic_and_kind(sym::Error, AssocTag::Type) { let ii_ty = cx.tcx.type_of(ii.def_id).instantiate_identity(); if !ii_ty.is_inhabited_from(cx.tcx, ii.def_id, cx.typing_env()) { let mut span = MultiSpan::from_span(cx.tcx.def_span(item.owner_id.to_def_id())); - let ii_ty_span = cx.tcx.hir_node_by_def_id(ii.def_id.expect_local()) + let ii_ty_span = cx + .tcx + .hir_node_by_def_id(ii.def_id.expect_local()) .expect_impl_item() .expect_type() .span; diff --git a/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs index ffe6ad14f63..ee59a4cc8cb 100644 --- a/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs +++ b/src/tools/clippy/clippy_lints/src/inline_fn_without_body.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::sugg::DiagExt; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_errors::Applicability; use rustc_hir::{TraitFn, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs index 9c91cf68085..95e16aae40f 100644 --- a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs +++ b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs @@ -8,6 +8,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir::{EnumDef, FieldDef, Item, ItemKind, OwnerId, QPath, TyKind, Variant, VariantData}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; +use rustc_span::MacroKind; use rustc_span::symbol::Symbol; declare_clippy_lint! 
{ @@ -502,7 +503,8 @@ impl LateLintPass<'_> for ItemNameRepetitions { ); } - if both_are_public && item_camel.len() > mod_camel.len() { + let is_macro_rule = matches!(item.kind, ItemKind::Macro(_, _, MacroKind::Bang)); + if both_are_public && item_camel.len() > mod_camel.len() && !is_macro_rule { let matching = count_match_start(mod_camel, &item_camel); let rmatching = count_match_end(mod_camel, &item_camel); let nchars = mod_camel.chars().count(); diff --git a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs index 03038f0ab49..b89f91f7255 100644 --- a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs +++ b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs @@ -139,11 +139,17 @@ impl LateLintPass<'_> for IterWithoutIntoIter { // We can't check inherent impls for slices, but we know that they have an `iter(_mut)` method ty.peel_refs().is_slice() || get_adt_inherent_method(cx, ty, expected_method_name).is_some() }) - && let Some(iter_assoc_span) = cx.tcx.associated_items(item.owner_id) + && let Some(iter_assoc_span) = cx + .tcx + .associated_items(item.owner_id) .filter_by_name_unhygienic_and_kind(sym::IntoIter, ty::AssocTag::Type) .next() .map(|assoc_item| { - cx.tcx.hir_node_by_def_id(assoc_item.def_id.expect_local()).expect_impl_item().expect_type().span + cx.tcx + .hir_node_by_def_id(assoc_item.def_id.expect_local()) + .expect_impl_item() + .expect_type() + .span }) && is_ty_exported(cx, ty) { diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs index e85d779b488..c2b73943106 100644 --- a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs +++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs @@ -1,5 +1,6 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_no_std_crate; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::{AdtVariantInfo, approx_ty_size, is_copy}; use rustc_errors::Applicability; @@ -83,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant { let mut difference = variants_size[0].size - variants_size[1].size; if difference > self.maximum_size_difference_allowed { - let help_text = "consider boxing the large fields to reduce the total size of the enum"; + let help_text = "consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum"; span_lint_and_then( cx, LARGE_ENUM_VARIANT, @@ -117,7 +118,7 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant { ident.span, "boxing a variant would require the type no longer be `Copy`", ); - } else { + } else if !is_no_std_crate(cx) { let sugg: Vec<(Span, String)> = variants_size[0] .fields_size .iter() diff --git a/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs b/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs index b3c63f022d3..42c636505c0 100644 --- a/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs +++ b/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs @@ -1,7 +1,8 @@ use clippy_config::Conf; -use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_from_proc_macro; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::{get_parent_expr, is_from_proc_macro}; +use clippy_utils::source::SpanRangeExt; use hir::def_id::DefId; use rustc_errors::Applicability; use rustc_hir as hir; @@ 
-102,39 +103,45 @@ impl<'tcx> LateLintPass<'tcx> for LegacyNumericConstants { } fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx rustc_hir::Expr<'tcx>) { - let ExprKind::Path(qpath) = &expr.kind else { - return; - }; - // `std::<integer>::<CONST>` check - let (span, sugg, msg) = if let QPath::Resolved(None, path) = qpath + let (sugg, msg) = if let ExprKind::Path(qpath) = &expr.kind + && let QPath::Resolved(None, path) = qpath && let Some(def_id) = path.res.opt_def_id() && is_numeric_const(cx, def_id) - && let def_path = cx.get_def_path(def_id) - && let [.., mod_name, name] = &*def_path + && let [.., mod_name, name] = &*cx.get_def_path(def_id) // Skip linting if this usage looks identical to the associated constant, // since this would only require removing a `use` import (which is already linted). && !is_numeric_const_path_canonical(path, [*mod_name, *name]) { ( - expr.span, - format!("{mod_name}::{name}"), + vec![(expr.span, format!("{mod_name}::{name}"))], "usage of a legacy numeric constant", ) // `<integer>::xxx_value` check - } else if let QPath::TypeRelative(_, last_segment) = qpath - && let Some(def_id) = cx.qpath_res(qpath, expr.hir_id).opt_def_id() - && let Some(par_expr) = get_parent_expr(cx, expr) - && let ExprKind::Call(_, []) = par_expr.kind + } else if let ExprKind::Call(func, []) = &expr.kind + && let ExprKind::Path(qpath) = &func.kind + && let QPath::TypeRelative(ty, last_segment) = qpath + && let Some(def_id) = cx.qpath_res(qpath, func.hir_id).opt_def_id() && is_integer_method(cx, def_id) { - let name = last_segment.ident.name.as_str(); - - ( - last_segment.ident.span.with_hi(par_expr.span.hi()), - name[..=2].to_ascii_uppercase(), - "usage of a legacy numeric method", - ) + let mut sugg = vec![ + // Replace the function name up to the end by the constant name + ( + last_segment.ident.span.to(expr.span.shrink_to_hi()), + last_segment.ident.name.as_str()[..=2].to_ascii_uppercase(), + ), + ]; + let before_span = expr.span.shrink_to_lo().until(ty.span); + if !before_span.is_empty() { + // Remove everything before the type name + sugg.push((before_span, String::new())); + } + // Use `::` between the type name and the constant + let between_span = ty.span.shrink_to_hi().until(last_segment.ident.span); + if !between_span.check_source_text(cx, |s| s == "::") { + sugg.push((between_span, String::from("::"))); + } + (sugg, "usage of a legacy numeric method") } else { return; }; @@ -143,9 +150,8 @@ impl<'tcx> LateLintPass<'tcx> for LegacyNumericConstants { && self.msrv.meets(cx, msrvs::NUMERIC_ASSOCIATED_CONSTANTS) && !is_from_proc_macro(cx, expr) { - span_lint_hir_and_then(cx, LEGACY_NUMERIC_CONSTANTS, expr.hir_id, span, msg, |diag| { - diag.span_suggestion_verbose( - span, + span_lint_and_then(cx, LEGACY_NUMERIC_CONSTANTS, expr.span, msg, |diag| { + diag.multipart_suggestion_verbose( "use the associated constant instead", sugg, Applicability::MaybeIncorrect, diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs index 1bf03480c82..6beddc1be14 100644 --- a/src/tools/clippy/clippy_lints/src/len_zero.rs +++ b/src/tools/clippy/clippy_lints/src/len_zero.rs @@ -10,9 +10,9 @@ use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::{ - BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, HirId, ImplItem, ImplItemKind, - ImplicitSelfKind, Item, ItemKind, Mutability, Node, OpaqueTyOrigin, PatExprKind, PatKind, PathSegment, PrimTy, - QPath, TraitItemId, TyKind, + 
BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, HirId, ImplItem, ImplItemKind, ImplicitSelfKind, + Item, ItemKind, Mutability, Node, OpaqueTyOrigin, PatExprKind, PatKind, PathSegment, PrimTy, QPath, TraitItemId, + TyKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, FnSig, Ty}; @@ -266,11 +266,14 @@ fn span_without_enclosing_paren(cx: &LateContext<'_>, span: Span) -> Span { } fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, ident: Ident, trait_items: &[TraitItemId]) { - fn is_named_self(cx: &LateContext<'_>, item: &TraitItemId, name: Symbol) -> bool { + fn is_named_self(cx: &LateContext<'_>, item: TraitItemId, name: Symbol) -> bool { cx.tcx.item_name(item.owner_id) == name && matches!( cx.tcx.fn_arg_idents(item.owner_id), - [Some(Ident { name: kw::SelfLower, .. })], + [Some(Ident { + name: kw::SelfLower, + .. + })], ) } @@ -284,7 +287,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, ident: Iden } if cx.effective_visibilities.is_exported(visited_trait.owner_id.def_id) - && trait_items.iter().any(|i| is_named_self(cx, i, sym::len)) + && trait_items.iter().any(|&i| is_named_self(cx, i, sym::len)) { let mut current_and_super_traits = DefIdSet::default(); fill_trait_set(visited_trait.owner_id.to_def_id(), &mut current_and_super_traits, cx); diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs index 96a6dee5885..914aa6b9b80 100644 --- a/src/tools/clippy/clippy_lints/src/lib.rs +++ b/src/tools/clippy/clippy_lints/src/lib.rs @@ -35,7 +35,6 @@ extern crate rustc_abi; extern crate rustc_arena; extern crate rustc_ast; extern crate rustc_ast_pretty; -extern crate rustc_attr_data_structures; extern crate rustc_data_structures; extern crate rustc_driver; extern crate rustc_errors; diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs index 7837b18bcd3..7bb684d65bb 100644 --- a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs @@ -39,7 +39,9 @@ pub(super) fn check<'tcx>( var: canonical_id, indexed_mut: FxHashSet::default(), indexed_indirectly: FxHashMap::default(), + unnamed_indexed_indirectly: false, indexed_directly: FxIndexMap::default(), + unnamed_indexed_directly: false, referenced: FxHashSet::default(), nonindex: false, prefer_mutable: false, @@ -47,7 +49,11 @@ pub(super) fn check<'tcx>( walk_expr(&mut visitor, body); // linting condition: we only indexed one variable, and indexed it directly - if visitor.indexed_indirectly.is_empty() && visitor.indexed_directly.len() == 1 { + if visitor.indexed_indirectly.is_empty() + && !visitor.unnamed_indexed_indirectly + && !visitor.unnamed_indexed_directly + && visitor.indexed_directly.len() == 1 + { let (indexed, (indexed_extent, indexed_ty)) = visitor .indexed_directly .into_iter() @@ -217,6 +223,7 @@ fn is_end_eq_array_len<'tcx>( false } +#[expect(clippy::struct_excessive_bools)] struct VarVisitor<'a, 'tcx> { /// context reference cx: &'a LateContext<'tcx>, @@ -226,9 +233,13 @@ struct VarVisitor<'a, 'tcx> { indexed_mut: FxHashSet<Symbol>, /// indirectly indexed variables (`v[(i + 4) % N]`), the extend is `None` for global indexed_indirectly: FxHashMap<Symbol, Option<region::Scope>>, + /// indirectly indexed literals, like `[1, 2, 3][(i + 4) % N]` + unnamed_indexed_indirectly: bool, /// subset of `indexed` of vars that are indexed directly: `v[i]` /// this 
will not contain cases like `v[calc_index(i)]` or `v[(i + 4) % N]` indexed_directly: FxIndexMap<Symbol, (Option<region::Scope>, Ty<'tcx>)>, + /// directly indexed literals, like `[1, 2, 3][i]` + unnamed_indexed_directly: bool, /// Any names that are used outside an index operation. /// Used to detect things like `&mut vec` used together with `vec[i]` referenced: FxHashSet<Symbol>, @@ -242,6 +253,7 @@ struct VarVisitor<'a, 'tcx> { impl<'tcx> VarVisitor<'_, 'tcx> { fn check(&mut self, idx: &'tcx Expr<'_>, seqexpr: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) -> bool { + let index_used_directly = matches!(idx.kind, ExprKind::Path(_)); if let ExprKind::Path(ref seqpath) = seqexpr.kind // the indexed container is referenced by a name && let QPath::Resolved(None, seqvar) = *seqpath @@ -251,7 +263,6 @@ impl<'tcx> VarVisitor<'_, 'tcx> { if self.prefer_mutable { self.indexed_mut.insert(seqvar.segments[0].ident.name); } - let index_used_directly = matches!(idx.kind, ExprKind::Path(_)); let res = self.cx.qpath_res(seqpath, seqexpr.hir_id); match res { Res::Local(hir_id) => { @@ -286,6 +297,13 @@ impl<'tcx> VarVisitor<'_, 'tcx> { }, _ => (), } + } else if let ExprKind::Repeat(..) | ExprKind::Array(..) = seqexpr.kind { + if index_used_directly { + self.unnamed_indexed_directly = true; + } else { + self.unnamed_indexed_indirectly = true; + } + return false; } true } @@ -299,7 +317,7 @@ impl<'tcx> Visitor<'tcx> for VarVisitor<'_, 'tcx> { .cx .typeck_results() .type_dependent_def_id(expr.hir_id) - .and_then(|def_id| self.cx.tcx.trait_of_item(def_id)) + .and_then(|def_id| self.cx.tcx.trait_of_assoc(def_id)) && ((meth.ident.name == sym::index && self.cx.tcx.lang_items().index_trait() == Some(trait_id)) || (meth.ident.name == sym::index_mut && self.cx.tcx.lang_items().index_mut_trait() == Some(trait_id))) && !self.check(args_1, args_0, expr) diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs index 69c84bc7038..8a253ae5810 100644 --- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs @@ -6,9 +6,11 @@ use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::source::snippet; use clippy_utils::visitors::{Descend, for_each_expr_without_closures}; use rustc_errors::Applicability; -use rustc_hir::{Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind, StructTailExpr}; +use rustc_hir::{ + Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Node, Pat, Stmt, StmtKind, StructTailExpr, +}; use rustc_lint::LateContext; -use rustc_span::{Span, sym}; +use rustc_span::{BytePos, Span, sym}; use std::iter::once; use std::ops::ControlFlow; @@ -20,7 +22,7 @@ pub(super) fn check<'tcx>( for_loop: Option<&ForLoop<'_>>, ) { match never_loop_block(cx, block, &mut Vec::new(), loop_id) { - NeverLoopResult::Diverging => { + NeverLoopResult::Diverging { ref break_spans } => { span_lint_and_then(cx, NEVER_LOOP, span, "this loop never actually loops", |diag| { if let Some(ForLoop { arg: iterator, @@ -38,10 +40,15 @@ pub(super) fn check<'tcx>( Applicability::Unspecified }; - diag.span_suggestion_verbose( + let mut suggestions = vec![( for_span.with_hi(iterator.span.hi()), - "if you need the first element of the iterator, try writing", for_to_if_let_sugg(cx, iterator, pat), + )]; + // Make sure to clean up the diverging sites when we remove a loop.
+ suggestions.extend(break_spans.iter().map(|span| (*span, String::new()))); + diag.multipart_suggestion_verbose( + "if you need the first element of the iterator, try writing", + suggestions, app, ); } @@ -70,22 +77,22 @@ fn contains_any_break_or_continue(block: &Block<'_>) -> bool { /// The first two bits of information are in this enum, and the last part is in the /// `local_labels` variable, which contains a list of `(block_id, reachable)` pairs ordered by /// scope. -#[derive(Copy, Clone)] +#[derive(Clone)] enum NeverLoopResult { /// A continue may occur for the main loop. MayContinueMainLoop, /// We have not encountered any main loop continue, /// but we are diverging (subsequent control flow is not reachable) - Diverging, + Diverging { break_spans: Vec<Span> }, /// We have not encountered any main loop continue, /// and subsequent control flow is (possibly) reachable Normal, } #[must_use] -fn absorb_break(arg: NeverLoopResult) -> NeverLoopResult { +fn absorb_break(arg: &NeverLoopResult) -> NeverLoopResult { match arg { - NeverLoopResult::Diverging | NeverLoopResult::Normal => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. } | NeverLoopResult::Normal => NeverLoopResult::Normal, NeverLoopResult::MayContinueMainLoop => NeverLoopResult::MayContinueMainLoop, } } @@ -94,7 +101,7 @@ fn absorb_break(arg: NeverLoopResult) -> NeverLoopResult { #[must_use] fn combine_seq(first: NeverLoopResult, second: impl FnOnce() -> NeverLoopResult) -> NeverLoopResult { match first { - NeverLoopResult::Diverging | NeverLoopResult::MayContinueMainLoop => first, + NeverLoopResult::Diverging { .. } | NeverLoopResult::MayContinueMainLoop => first, NeverLoopResult::Normal => second(), } } @@ -103,7 +110,7 @@ fn combine_seq(first: NeverLoopResult, second: impl FnOnce() -> NeverLoopResult) #[must_use] fn combine_seq_many(iter: impl IntoIterator<Item = NeverLoopResult>) -> NeverLoopResult { for e in iter { - if let NeverLoopResult::Diverging | NeverLoopResult::MayContinueMainLoop = e { + if let NeverLoopResult::Diverging { .. } | NeverLoopResult::MayContinueMainLoop = e { return e; } } @@ -118,7 +125,19 @@ fn combine_branches(b1: NeverLoopResult, b2: NeverLoopResult) -> NeverLoopResult NeverLoopResult::MayContinueMainLoop }, (NeverLoopResult::Normal, _) | (_, NeverLoopResult::Normal) => NeverLoopResult::Normal, - (NeverLoopResult::Diverging, NeverLoopResult::Diverging) => NeverLoopResult::Diverging, + ( + NeverLoopResult::Diverging { + break_spans: mut break_spans1, + }, + NeverLoopResult::Diverging { + break_spans: mut break_spans2, + }, + ) => { + break_spans1.append(&mut break_spans2); + NeverLoopResult::Diverging { + break_spans: break_spans1, + } + }, } } @@ -136,7 +155,7 @@ fn never_loop_block<'tcx>( combine_seq_many(iter.map(|(e, els)| { let e = never_loop_expr(cx, e, local_labels, main_loop_id); // els is an else block in a let...else binding - els.map_or(e, |els| { + els.map_or(e.clone(), |els| { combine_seq(e, || match never_loop_block(cx, els, local_labels, main_loop_id) { // Returning MayContinueMainLoop here means that // we will not evaluate the rest of the body @@ -144,7 +163,7 @@ fn never_loop_block<'tcx>( // An else block always diverges, so the Normal case should not happen, // but the analysis is approximate so it might return Normal anyway. // Returning Normal here says that nothing more happens on the main path - NeverLoopResult::Diverging | NeverLoopResult::Normal => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. 
} | NeverLoopResult::Normal => NeverLoopResult::Normal, }) }) })) @@ -159,6 +178,45 @@ fn stmt_to_expr<'tcx>(stmt: &Stmt<'tcx>) -> Option<(&'tcx Expr<'tcx>, Option<&'t } } +fn stmt_source_span(stmt: &Stmt<'_>) -> Span { + let call_span = stmt.span.source_callsite(); + // if it is a macro call, the span will be missing the trailing semicolon + if stmt.span == call_span { + return call_span; + } + + // An expression without a trailing semi-colon (must have unit type). + if let StmtKind::Expr(..) = stmt.kind { + return call_span; + } + + call_span.with_hi(call_span.hi() + BytePos(1)) +} + +/// Returns a Vec of all the individual spans after the highlighted expression in a block +fn all_spans_after_expr(cx: &LateContext<'_>, expr: &Expr<'_>) -> Vec<Span> { + if let Node::Stmt(stmt) = cx.tcx.parent_hir_node(expr.hir_id) { + if let Node::Block(block) = cx.tcx.parent_hir_node(stmt.hir_id) { + return block + .stmts + .iter() + .skip_while(|inner| inner.hir_id != stmt.hir_id) + .map(stmt_source_span) + .chain(if let Some(e) = block.expr { vec![e.span] } else { vec![] }) + .collect(); + } + + return vec![stmt.span]; + } + + vec![] +} + +fn is_label_for_block(cx: &LateContext<'_>, dest: &Destination) -> bool { + dest.target_id + .is_ok_and(|hir_id| matches!(cx.tcx.hir_node(hir_id), Node::Block(_))) +} + #[allow(clippy::too_many_lines)] fn never_loop_expr<'tcx>( cx: &LateContext<'tcx>, @@ -197,7 +255,7 @@ fn never_loop_expr<'tcx>( ExprKind::Loop(b, _, _, _) => { // We don't attempt to track reachability after a loop, // just assume there may have been a break somewhere - absorb_break(never_loop_block(cx, b, local_labels, main_loop_id)) + absorb_break(&never_loop_block(cx, b, local_labels, main_loop_id)) }, ExprKind::If(e, e2, e3) => { let e1 = never_loop_expr(cx, e, local_labels, main_loop_id); @@ -212,9 +270,10 @@ fn never_loop_expr<'tcx>( ExprKind::Match(e, arms, _) => { let e = never_loop_expr(cx, e, local_labels, main_loop_id); combine_seq(e, || { - arms.iter().fold(NeverLoopResult::Diverging, |a, b| { - combine_branches(a, never_loop_expr(cx, b.body, local_labels, main_loop_id)) - }) + arms.iter() + .fold(NeverLoopResult::Diverging { break_spans: vec![] }, |a, b| { + combine_branches(a, never_loop_expr(cx, b.body, local_labels, main_loop_id)) + }) }) }, ExprKind::Block(b, _) => { @@ -224,7 +283,7 @@ fn never_loop_expr<'tcx>( let ret = never_loop_block(cx, b, local_labels, main_loop_id); let jumped_to = b.targeted_by_break && local_labels.pop().unwrap().1; match ret { - NeverLoopResult::Diverging if jumped_to => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. } if jumped_to => NeverLoopResult::Normal, _ => ret, } }, @@ -235,25 +294,39 @@ fn never_loop_expr<'tcx>( if id == main_loop_id { NeverLoopResult::MayContinueMainLoop } else { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { + break_spans: all_spans_after_expr(cx, expr), + } } }, - ExprKind::Break(_, e) | ExprKind::Ret(e) => { + ExprKind::Ret(e) => { let first = e.as_ref().map_or(NeverLoopResult::Normal, |e| { never_loop_expr(cx, e, local_labels, main_loop_id) }); combine_seq(first, || { // checks if break targets a block instead of a loop - if let ExprKind::Break(Destination { target_id: Ok(t), .. 
}, _) = expr.kind - && let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) - { - *reachable = true; + mark_block_as_reachable(expr, local_labels); + NeverLoopResult::Diverging { break_spans: vec![] } + }) + }, + ExprKind::Break(dest, e) => { + let first = e.as_ref().map_or(NeverLoopResult::Normal, |e| { + never_loop_expr(cx, e, local_labels, main_loop_id) + }); + combine_seq(first, || { + // checks if break targets a block instead of a loop + mark_block_as_reachable(expr, local_labels); + NeverLoopResult::Diverging { + break_spans: if is_label_for_block(cx, &dest) { + vec![] + } else { + all_spans_after_expr(cx, expr) + }, } - NeverLoopResult::Diverging }) }, ExprKind::Become(e) => combine_seq(never_loop_expr(cx, e, local_labels, main_loop_id), || { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { break_spans: vec![] } }), ExprKind::InlineAsm(asm) => combine_seq_many(asm.operands.iter().map(|(o, _)| match o { InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => { @@ -283,12 +356,12 @@ fn never_loop_expr<'tcx>( }; let result = combine_seq(result, || { if cx.typeck_results().expr_ty(expr).is_never() { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { break_spans: vec![] } } else { NeverLoopResult::Normal } }); - if let NeverLoopResult::Diverging = result + if let NeverLoopResult::Diverging { .. } = result && let Some(macro_call) = root_macro_call_first_node(cx, expr) && let Some(sym::todo_macro) = cx.tcx.get_diagnostic_name(macro_call.def_id) { @@ -316,3 +389,11 @@ fn for_to_if_let_sugg(cx: &LateContext<'_>, iterator: &Expr<'_>, pat: &Pat<'_>) format!("if let Some({pat_snippet}) = {iter_snippet}.next()") } + +fn mark_block_as_reachable(expr: &Expr<'_>, local_labels: &mut [(HirId, bool)]) { + if let ExprKind::Break(Destination { target_id: Ok(t), .. }, _) = expr.kind + && let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) + { + *reachable = true; + } +} diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs index c1a26c5a9c7..bf89556fbb6 100644 --- a/src/tools/clippy/clippy_lints/src/macro_use.rs +++ b/src/tools/clippy/clippy_lints/src/macro_use.rs @@ -1,13 +1,15 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::source::snippet; use hir::def::{DefKind, Res}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::{self as hir, AmbigArg}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::impl_lint_pass; +use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::{Span, sym}; use std::collections::BTreeMap; declare_clippy_lint! 
{ @@ -99,15 +101,14 @@ impl LateLintPass<'_> for MacroUseImports { && let hir::ItemKind::Use(path, _kind) = &item.kind && let hir_id = item.hir_id() && let attrs = cx.tcx.hir_attrs(hir_id) - && let Some(mac_attr) = attrs.iter().find(|attr| attr.has_name(sym::macro_use)) + && let Some(mac_attr_span) = find_attr!(attrs, AttributeKind::MacroUse {span, ..} => *span) && let Some(Res::Def(DefKind::Mod, id)) = path.res.type_ns && !id.is_local() { for kid in cx.tcx.module_children(id) { if let Res::Def(DefKind::Macro(_mac_type), mac_id) = kid.res { - let span = mac_attr.span(); let def_path = cx.tcx.def_path_str(mac_id); - self.imports.push((def_path, span, hir_id)); + self.imports.push((def_path, mac_attr_span, hir_id)); } } } else if item.span.from_expansion() { diff --git a/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs index bac4b3d32f2..288f27db8ca 100644 --- a/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs +++ b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs @@ -5,7 +5,7 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::HasSession as _; use clippy_utils::sugg::Sugg; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{eq_expr_value, peel_blocks, span_contains_comment}; +use clippy_utils::{eq_expr_value, peel_blocks, peel_middle_ty_refs, span_contains_comment}; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -62,7 +62,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { && let ExprKind::Binary(op, rhs, lhs) = if_expr.cond.kind && let (BinOpKind::Gt | BinOpKind::Ge, mut a, mut b) | (BinOpKind::Lt | BinOpKind::Le, mut b, mut a) = (op.node, rhs, lhs) - && let Some(ty) = self.are_ty_eligible(cx, a, b) + && let Some((ty, b_n_refs)) = self.are_ty_eligible(cx, a, b) && is_sub_expr(cx, if_expr.then, a, b, ty) && is_sub_expr(cx, r#else, b, a, ty) { @@ -86,8 +86,9 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { } }; let sugg = format!( - "{}.abs_diff({})", + "{}.abs_diff({}{})", Sugg::hir(cx, a, "..").maybe_paren(), + "*".repeat(b_n_refs), Sugg::hir(cx, b, "..") ); diag.span_suggestion(expr.span, "replace with `abs_diff`", sugg, applicability); @@ -100,13 +101,15 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { impl ManualAbsDiff { /// Returns a type if `a` and `b` are both of it, and this lint can be applied to that /// type (currently, any primitive int, or a `Duration`) - fn are_ty_eligible<'tcx>(&self, cx: &LateContext<'tcx>, a: &Expr<'_>, b: &Expr<'_>) -> Option<Ty<'tcx>> { + fn are_ty_eligible<'tcx>(&self, cx: &LateContext<'tcx>, a: &Expr<'_>, b: &Expr<'_>) -> Option<(Ty<'tcx>, usize)> { let is_int = |ty: Ty<'_>| matches!(ty.kind(), ty::Uint(_) | ty::Int(_)) && self.msrv.meets(cx, msrvs::ABS_DIFF); let is_duration = |ty| is_type_diagnostic_item(cx, ty, sym::Duration) && self.msrv.meets(cx, msrvs::DURATION_ABS_DIFF); let a_ty = cx.typeck_results().expr_ty(a).peel_refs(); - (a_ty == cx.typeck_results().expr_ty(b).peel_refs() && (is_int(a_ty) || is_duration(a_ty))).then_some(a_ty) + let (b_ty, b_n_refs) = peel_middle_ty_refs(cx.typeck_results().expr_ty(b)); + + (a_ty == b_ty && (is_int(a_ty) || is_duration(a_ty))).then_some((a_ty, b_n_refs)) } } diff --git a/src/tools/clippy/clippy_lints/src/manual_assert.rs b/src/tools/clippy/clippy_lints/src/manual_assert.rs index 8378e15c581..ea6b01a053a 100644 --- a/src/tools/clippy/clippy_lints/src/manual_assert.rs +++ b/src/tools/clippy/clippy_lints/src/manual_assert.rs 
@@ -60,7 +60,8 @@ impl<'tcx> LateLintPass<'tcx> for ManualAssert { ExprKind::Unary(UnOp::Not, e) => (e, ""), _ => (cond, "!"), }; - let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_paren(); + let cond_sugg = + sugg::Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "..", &mut applicability).maybe_paren(); let semicolon = if is_parent_stmt(cx, expr.hir_id) { ";" } else { "" }; let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip}){semicolon}"); // we show to the user the suggestion without the comments, but when applying the fix, include the diff --git a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs index 2d52a93f34e..6b0f7446849 100644 --- a/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs +++ b/src/tools/clippy/clippy_lints/src/manual_non_exhaustive.rs @@ -4,7 +4,8 @@ use clippy_utils::is_doc_hidden; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_indent; use itertools::Itertools; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs index 88b4d9b7d54..027dd7ce053 100644 --- a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs +++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs @@ -185,7 +185,7 @@ impl<'a, 'tcx> SigDropChecker<'a, 'tcx> { if let Some(adt) = ty.ty_adt_def() && get_attr( self.cx.sess(), - self.cx.tcx.get_attrs_unchecked(adt.did()), + self.cx.tcx.get_all_attrs(adt.did()), sym::has_significant_drop, ) .count() diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs index 08c0caa4266..7e530e98ac4 100644 --- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs @@ -152,21 +152,26 @@ fn report_single_pattern( }) if lit.node.is_str() || lit.node.is_bytestr() => pat_ref_count + 1, _ => pat_ref_count, }; - // References are only implicitly added to the pattern, so no overflow here. - // e.g. will work: match &Some(_) { Some(_) => () } - // will not: match Some(_) { &Some(_) => () } - let ref_count_diff = ty_ref_count - pat_ref_count; - // Try to remove address of expressions first. - let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff); - let ref_count_diff = ref_count_diff - removed; + // References are implicitly removed when `deref_patterns` are used. + // They are implicitly added when match ergonomics are used. + let (ex, ref_or_deref_adjust) = if ty_ref_count > pat_ref_count { + let ref_count_diff = ty_ref_count - pat_ref_count; + + // Try to remove address of expressions first. + let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff); + + (ex, String::from(if ref_count_diff == removed { "" } else { "&" })) + } else { + (ex, "*".repeat(pat_ref_count - ty_ref_count)) + }; let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`"; let sugg = format!( "if {} == {}{} {}{els_str}", snippet_with_context(cx, ex.span, ctxt, "..", &mut app).0, // PartialEq for different reference counts may not exist. 
- "&".repeat(ref_count_diff), + ref_or_deref_adjust, snippet_with_applicability(cx, arm.pat.span, "..", &mut app), expr_block(cx, arm.body, ctxt, "..", Some(expr.span), &mut app), ); diff --git a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs index a9f6a41c235..b8cc5ddd845 100644 --- a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs +++ b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs @@ -14,7 +14,7 @@ pub(super) fn check<'tcx>( bytes_recv: &'tcx hir::Expr<'_>, ) { if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(bytes_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(bytes_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_str() && let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs() && (ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String)) diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs index 292fa08b598..6f9702f6c6c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs @@ -30,7 +30,7 @@ pub(super) fn check<'tcx>( } if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_str() && let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs index 2ecf3eb8979..0a456d1057a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs +++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs @@ -28,7 +28,7 @@ pub(super) fn check( if cx .typeck_results() .type_dependent_def_id(expr.hir_id) - .and_then(|id| cx.tcx.trait_of_item(id)) + .and_then(|id| cx.tcx.trait_of_assoc(id)) .zip(cx.tcx.lang_items().clone_trait()) .is_none_or(|(x, y)| x != y) { diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs index 82e5a6d5a41..6e5da5bda8c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs @@ -2,13 +2,15 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::macros::{FormatArgsStorage, format_args_inputs_span, root_macro_call_first_node}; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item}; +use clippy_utils::visitors::for_each_expr; +use clippy_utils::{contains_return, is_inside_always_const_context, peel_blocks}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_middle::ty; use rustc_span::symbol::sym; use rustc_span::{Span, Symbol}; use std::borrow::Cow; +use std::ops::ControlFlow; use super::EXPECT_FUN_CALL; @@ -23,10 +25,10 @@ pub(super) fn check<'tcx>( receiver: &'tcx hir::Expr<'tcx>, args: &'tcx [hir::Expr<'tcx>], ) { - // Strip `&`, `as_ref()` and `as_str()` off `arg` until we're left with either a `String` or + // Strip `{}`, `&`, `as_ref()` and `as_str()` off 
`arg` until we're left with either a `String` or // `&str` fn get_arg_root<'a>(cx: &LateContext<'_>, arg: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> { - let mut arg_root = arg; + let mut arg_root = peel_blocks(arg); loop { arg_root = match &arg_root.kind { hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => expr, @@ -47,124 +49,68 @@ pub(super) fn check<'tcx>( arg_root } - // Only `&'static str` or `String` can be used directly in the `panic!`. Other types should be - // converted to string. - fn requires_to_string(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { - let arg_ty = cx.typeck_results().expr_ty(arg); - if is_type_lang_item(cx, arg_ty, hir::LangItem::String) { - return false; - } - if let ty::Ref(_, ty, ..) = arg_ty.kind() - && ty.is_str() - && can_be_static_str(cx, arg) - { - return false; - } - true + fn contains_call<'a>(cx: &LateContext<'a>, arg: &'a hir::Expr<'a>) -> bool { + for_each_expr(cx, arg, |expr| { + if matches!(expr.kind, hir::ExprKind::MethodCall { .. } | hir::ExprKind::Call { .. }) + && !is_inside_always_const_context(cx.tcx, expr.hir_id) + { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }) + .is_some() } - // Check if an expression could have type `&'static str`, knowing that it - // has type `&str` for some lifetime. - fn can_be_static_str(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { - match arg.kind { - hir::ExprKind::Lit(_) => true, - hir::ExprKind::Call(fun, _) => { - if let hir::ExprKind::Path(ref p) = fun.kind { - match cx.qpath_res(p, fun.hir_id) { - hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!( - cx.tcx.fn_sig(def_id).instantiate_identity().output().skip_binder().kind(), - ty::Ref(re, ..) if re.is_static(), - ), - _ => false, - } - } else { - false - } - }, - hir::ExprKind::MethodCall(..) => { - cx.typeck_results() - .type_dependent_def_id(arg.hir_id) - .is_some_and(|method_id| { - matches!( - cx.tcx.fn_sig(method_id).instantiate_identity().output().skip_binder().kind(), - ty::Ref(re, ..) if re.is_static() - ) - }) - }, - hir::ExprKind::Path(ref p) => matches!( - cx.qpath_res(p, arg.hir_id), - hir::def::Res::Def(hir::def::DefKind::Const | hir::def::DefKind::Static { .. }, _) - ), - _ => false, - } - } + if name == sym::expect + && let [arg] = args + && let arg_root = get_arg_root(cx, arg) + && contains_call(cx, arg_root) + && !contains_return(arg_root) + { + let receiver_type = cx.typeck_results().expr_ty_adjusted(receiver); + let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym::Option) { + "||" + } else if is_type_diagnostic_item(cx, receiver_type, sym::Result) { + "|_|" + } else { + return; + }; - fn is_call(node: &hir::ExprKind<'_>) -> bool { - match node { - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => { - is_call(&expr.kind) - }, - hir::ExprKind::Call(..) - | hir::ExprKind::MethodCall(..) - // These variants are debatable or require further examination - | hir::ExprKind::If(..) - | hir::ExprKind::Match(..) - | hir::ExprKind::Block{ .. 
} => true, - _ => false, - } - } + let span_replace_word = method_span.with_hi(expr.span.hi()); - if args.len() != 1 || name != sym::expect || !is_call(&args[0].kind) { - return; - } + let mut applicability = Applicability::MachineApplicable; - let receiver_type = cx.typeck_results().expr_ty_adjusted(receiver); - let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym::Option) { - "||" - } else if is_type_diagnostic_item(cx, receiver_type, sym::Result) { - "|_|" - } else { - return; - }; - - let arg_root = get_arg_root(cx, &args[0]); - - let span_replace_word = method_span.with_hi(expr.span.hi()); - - let mut applicability = Applicability::MachineApplicable; - - // Special handling for `format!` as arg_root - if let Some(macro_call) = root_macro_call_first_node(cx, arg_root) { - if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) - && let Some(format_args) = format_args_storage.get(cx, arg_root, macro_call.expn) - { - let span = format_args_inputs_span(format_args); - let sugg = snippet_with_applicability(cx, span, "..", &mut applicability); - span_lint_and_sugg( - cx, - EXPECT_FUN_CALL, - span_replace_word, - format!("function call inside of `{name}`"), - "try", - format!("unwrap_or_else({closure_args} panic!({sugg}))"), - applicability, - ); + // Special handling for `format!` as arg_root + if let Some(macro_call) = root_macro_call_first_node(cx, arg_root) { + if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) + && let Some(format_args) = format_args_storage.get(cx, arg_root, macro_call.expn) + { + let span = format_args_inputs_span(format_args); + let sugg = snippet_with_applicability(cx, span, "..", &mut applicability); + span_lint_and_sugg( + cx, + EXPECT_FUN_CALL, + span_replace_word, + format!("function call inside of `{name}`"), + "try", + format!("unwrap_or_else({closure_args} panic!({sugg}))"), + applicability, + ); + } + return; } - return; - } - let mut arg_root_snippet: Cow<'_, _> = snippet_with_applicability(cx, arg_root.span, "..", &mut applicability); - if requires_to_string(cx, arg_root) { - arg_root_snippet.to_mut().push_str(".to_string()"); - } + let arg_root_snippet: Cow<'_, _> = snippet_with_applicability(cx, arg_root.span, "..", &mut applicability); - span_lint_and_sugg( - cx, - EXPECT_FUN_CALL, - span_replace_word, - format!("function call inside of `{name}`"), - "try", - format!("unwrap_or_else({closure_args} {{ panic!(\"{{}}\", {arg_root_snippet}) }})"), - applicability, - ); + span_lint_and_sugg( + cx, + EXPECT_FUN_CALL, + span_replace_word, + format!("function call inside of `{name}`"), + "try", + format!("unwrap_or_else({closure_args} panic!(\"{{}}\", {arg_root_snippet}))"), + applicability, + ); + } } diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs index 965993808f6..94944bd9445 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs @@ -1,6 +1,6 @@ use super::FILTER_MAP_BOOL_THEN; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::SpanRangeExt; +use clippy_utils::source::{SpanRangeExt, snippet_with_context}; use clippy_utils::ty::is_copy; use clippy_utils::{ CaptureKind, can_move_expr_to_closure, contains_return, is_from_proc_macro, is_trait_method, peel_blocks, @@ -45,9 +45,11 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: & .filter(|adj| matches!(adj.kind, 
Adjust::Deref(_))) .count() && let Some(param_snippet) = param.span.get_source_text(cx) - && let Some(filter) = recv.span.get_source_text(cx) - && let Some(map) = then_body.span.get_source_text(cx) { + let mut applicability = Applicability::MachineApplicable; + let (filter, _) = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut applicability); + let (map, _) = snippet_with_context(cx, then_body.span, expr.span.ctxt(), "..", &mut applicability); + span_lint_and_then( cx, FILTER_MAP_BOOL_THEN, @@ -62,7 +64,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: & "filter(|&{param_snippet}| {derefs}{filter}).map(|{param_snippet}| {map})", derefs = "*".repeat(needed_derefs) ), - Applicability::MachineApplicable, + applicability, ); } else { diag.help("consider using `filter` then `map` instead"); diff --git a/src/tools/clippy/clippy_lints/src/methods/get_first.rs b/src/tools/clippy/clippy_lints/src/methods/get_first.rs index f4465e654c2..2e1d71ce284 100644 --- a/src/tools/clippy/clippy_lints/src/methods/get_first.rs +++ b/src/tools/clippy/clippy_lints/src/methods/get_first.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( arg: &'tcx hir::Expr<'_>, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && let identity = cx.tcx.type_of(impl_id).instantiate_identity() && let hir::ExprKind::Lit(Spanned { node: LitKind::Int(Pu128(0), _), diff --git a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs index 9724463f0c0..efa8cee58df 100644 --- a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs +++ b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs @@ -50,7 +50,7 @@ pub fn is_clone_like(cx: &LateContext<'_>, method_name: Symbol, method_def_id: h sym::to_path_buf => is_diag_item_method(cx, method_def_id, sym::Path), sym::to_vec => cx .tcx - .impl_of_method(method_def_id) + .impl_of_assoc(method_def_id) .filter(|&impl_did| { cx.tcx.type_of(impl_did).instantiate_identity().is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none() }) diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs index f5fe4316eb0..f851ebe91f3 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs @@ -44,9 +44,9 @@ pub(super) fn check<'tcx>( let typeck = cx.typeck_results(); if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator) && let Some(method_id) = typeck.type_dependent_def_id(expr.hir_id) - && cx.tcx.trait_of_item(method_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(method_id) == Some(iter_id) && let Some(method_id) = typeck.type_dependent_def_id(cloned_call.hir_id) - && cx.tcx.trait_of_item(method_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(method_id) == Some(iter_id) && let cloned_recv_ty = typeck.expr_ty_adjusted(cloned_recv) && let Some(iter_assoc_ty) = cx.get_associated_type(cloned_recv_ty, iter_id, sym::Item) && matches!(*iter_assoc_ty.kind(), ty::Ref(_, ty, _) if !is_copy(cx, ty)) diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs b/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs index 21f2ce8b7c9..bc96815944d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs +++ 
b/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs @@ -100,7 +100,6 @@ pub(crate) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, arg: &Expr<'_>, name: match x { UseKind::Return(s) => edits.push((s.with_leading_whitespace(cx).with_ctxt(s.ctxt()), String::new())), UseKind::Borrowed(s) => { - #[expect(clippy::range_plus_one)] let range = s.map_range(cx, |_, src, range| { let src = src.get(range.clone())?; let trimmed = src.trim_start_matches([' ', '\t', '\n', '\r', '(']); diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs index c286c5faaed..077957fa44d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( map_expr: &'tcx Expr<'_>, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option) && let ExprKind::Call(err_path, [err_arg]) = or_expr.kind && is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr) diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs index 8167e4f9605..a811dd1cee1 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs @@ -59,7 +59,7 @@ pub(super) fn check( && is_type_lang_item(cx, cx.typeck_results().expr_ty(collect_expr), LangItem::String) && let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id) && let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator) - && cx.tcx.trait_of_item(take_id) == Some(iter_trait_id) + && cx.tcx.trait_of_assoc(take_id) == Some(iter_trait_id) && let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg) && let ctxt = collect_expr.span.ctxt() && ctxt == take_expr.span.ctxt() diff --git a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs index 333a33f7527..748be9bfcc6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs @@ -23,7 +23,7 @@ fn should_run_lint(cx: &LateContext<'_>, e: &hir::Expr<'_>, method_id: DefId) -> return true; } // We check if it's an `Option` or a `Result`. 
- if let Some(id) = cx.tcx.impl_of_method(method_id) { + if let Some(id) = cx.tcx.impl_of_assoc(method_id) { let identity = cx.tcx.type_of(id).instantiate_identity(); if !is_type_diagnostic_item(cx, identity, sym::Option) && !is_type_diagnostic_item(cx, identity, sym::Result) { return false; @@ -69,7 +69,7 @@ pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_ hir::ExprKind::MethodCall(method, obj, [], _) => { if ident_eq(name, obj) && method.ident.name == sym::clone && let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && cx.tcx.lang_items().clone_trait() == Some(trait_id) // no autoderefs && !cx.typeck_results().expr_adjustments(obj).iter() diff --git a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs index 5d0d4dae35f..41beda9c5cb 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs @@ -8,7 +8,7 @@ use super::MAP_ERR_IGNORE; pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, arg: &Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Result) && let ExprKind::Closure(&Closure { capture_clause: CaptureBy::Ref, diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index f2dabdd3438..bcd54557331 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -3859,6 +3859,7 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does /// Checks for usage of `option.map(f).unwrap_or_default()` and `result.map(f).unwrap_or_default()` where f is a function or closure that returns the `bool` type. + /// Also checks for equality comparisons like `option.map(f) == Some(true)` and `result.map(f) == Ok(true)`. /// /// ### Why is this bad? /// Readability. These can be written more concisely as `option.is_some_and(f)` and `result.is_ok_and(f)`. @@ -3869,6 +3870,11 @@ declare_clippy_lint! { /// # let result: Result<usize, ()> = Ok(1); /// option.map(|a| a > 10).unwrap_or_default(); /// result.map(|a| a > 10).unwrap_or_default(); + /// + /// option.map(|a| a > 10) == Some(true); + /// result.map(|a| a > 10) == Ok(true); + /// option.map(|a| a > 10) != Some(true); + /// result.map(|a| a > 10) != Ok(true); /// ``` /// Use instead: /// ```no_run @@ -3876,11 +3882,16 @@ declare_clippy_lint! { /// # let result: Result<usize, ()> = Ok(1); /// option.is_some_and(|a| a > 10); /// result.is_ok_and(|a| a > 10); + /// + /// option.is_some_and(|a| a > 10); + /// result.is_ok_and(|a| a > 10); + /// option.is_none_or(|a| a > 10); + /// !result.is_ok_and(|a| a > 10); /// ``` #[clippy::version = "1.77.0"] pub MANUAL_IS_VARIANT_AND, pedantic, - "using `.map(f).unwrap_or_default()`, which is more succinctly expressed as `is_some_and(f)` or `is_ok_and(f)`" + "using `.map(f).unwrap_or_default()` or `.map(f) == Some/Ok(true)`, which are more succinctly expressed as `is_some_and(f)` or `is_ok_and(f)`" } declare_clippy_lint! 
{ @@ -5275,10 +5286,6 @@ impl Methods { } map_identity::check(cx, expr, recv, m_arg, name, span); manual_inspect::check(cx, expr, m_arg, name, span, self.msrv); - crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); - }, - (sym::map_break | sym::map_continue, [m_arg]) => { - crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); }, (sym::map_or, [def, map]) => { option_map_or_none::check(cx, expr, recv, def, map); @@ -5546,7 +5553,7 @@ impl Methods { // Handle method calls whose receiver and arguments may come from expansion if let ExprKind::MethodCall(path, recv, args, _call_span) = expr.kind { match (path.ident.name, args) { - (sym::expect, [_]) if !matches!(method_call(recv), Some((sym::ok | sym::err, _, [], _, _))) => { + (sym::expect, [_]) => { unwrap_expect_used::check( cx, expr, diff --git a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs index 320523aceb6..4235af882b0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs @@ -13,7 +13,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>, recv: &' && let (_, ref_depth, Mutability::Mut) = peel_mid_ty_refs_is_mutable(cx.typeck_results().expr_ty(recv)) && ref_depth >= 1 && let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex) { span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_lints/src/methods/open_options.rs b/src/tools/clippy/clippy_lints/src/methods/open_options.rs index 9b5f138295c..37a8e25bef9 100644 --- a/src/tools/clippy/clippy_lints/src/methods/open_options.rs +++ b/src/tools/clippy/clippy_lints/src/methods/open_options.rs @@ -18,7 +18,7 @@ fn is_open_options(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_open_options(cx, cx.tcx.type_of(impl_id).instantiate_identity()) { let mut options = Vec::new(); @@ -111,7 +111,7 @@ fn get_open_options( // This might be a user defined extension trait with a method like `truncate_write` // which would be a false positive if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(argument.hir_id) - && cx.tcx.trait_of_item(method_def_id).is_some() + && cx.tcx.trait_of_assoc(method_def_id).is_some() { return false; } diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs index 6ce7dd3d4d0..04f0e3c0479 100644 --- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs @@ -242,15 +242,23 @@ pub(super) fn check<'tcx>( let inner_arg = peel_blocks(arg); for_each_expr(cx, inner_arg, |ex| { let is_top_most_expr = ex.hir_id == inner_arg.hir_id; - if let hir::ExprKind::Call(fun, fun_args) = ex.kind { - let fun_span = if fun_args.is_empty() && is_top_most_expr { - Some(fun.span) - } else { - None - }; - if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, fun_span) { - return 
ControlFlow::Break(()); - } + match ex.kind { + hir::ExprKind::Call(fun, fun_args) => { + let fun_span = if fun_args.is_empty() && is_top_most_expr { + Some(fun.span) + } else { + None + }; + if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, fun_span) { + return ControlFlow::Break(()); + } + }, + hir::ExprKind::MethodCall(..) => { + if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, None) { + return ControlFlow::Break(()); + } + }, + _ => {}, } ControlFlow::Continue(()) }); diff --git a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs index 38d9c5f1677..32752ef7435 100644 --- a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs +++ b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs @@ -11,7 +11,7 @@ use super::PATH_BUF_PUSH_OVERWRITE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, arg: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf) && let ExprKind::Lit(lit) = arg.kind && let LitKind::Str(ref path_lit, _) = lit.node diff --git a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs index aef14435d8a..17d1a6abde0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs +++ b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs @@ -9,7 +9,7 @@ use super::STABLE_SORT_PRIMITIVE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_slice() && let Some(slice_type) = is_slice_of_primitives(cx, recv) { diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs index 6f78d6c6128..51dd4ac313a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs @@ -286,7 +286,7 @@ fn parse_iter_usage<'tcx>( let iter_id = cx.tcx.get_diagnostic_item(sym::Iterator)?; match (name.ident.name, args) { - (sym::next, []) if cx.tcx.trait_of_item(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span), + (sym::next, []) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span), (sym::next_tuple, []) => { return if paths::ITERTOOLS_NEXT_TUPLE.matches(cx, did) && let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind() @@ -303,7 +303,7 @@ fn parse_iter_usage<'tcx>( None }; }, - (sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_item(did) == Some(iter_id) => { + (sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => { if let Some(Constant::Int(idx)) = ConstEvalCtxt::new(cx).eval(idx_expr) { let span = if name.ident.as_str() == "nth" { e.span @@ -312,7 +312,7 @@ fn parse_iter_usage<'tcx>( && next_name.ident.name == sym::next && next_expr.span.ctxt() == ctxt && let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id) - && 
cx.tcx.trait_of_item(next_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(next_id) == Some(iter_id) { next_expr.span } else { diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs index f8b6d4349fb..9876681ddbb 100644 --- a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs @@ -10,7 +10,7 @@ use super::SUSPICIOUS_SPLITN; pub(super) fn check(cx: &LateContext<'_>, method_name: Symbol, expr: &Expr<'_>, self_arg: &Expr<'_>, count: u128) { if count <= 1 && let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(call_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(call_id) && cx.tcx.impl_trait_ref(impl_id).is_none() && let self_ty = cx.tcx.type_of(impl_id).instantiate_identity() && (self_ty.is_slice() || self_ty.is_str()) diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs index ce81282ddfe..0ec2d8b4fc3 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs @@ -165,7 +165,7 @@ pub(super) fn check_method(cx: &LateContext<'_>, expr: &Expr<'_>) { pub(super) fn check_function(cx: &LateContext<'_>, expr: &Expr<'_>, callee: &Expr<'_>) { if let ExprKind::Path(ref qpath) = callee.kind && let Some(item_def_id) = cx.qpath_res(qpath, callee.hir_id).opt_def_id() - && let Some(trait_def_id) = cx.tcx.trait_of_item(item_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(item_def_id) { let qpath_spans = match qpath { QPath::Resolved(_, path) => { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs index dbff08bc51c..1de9f6ab497 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs @@ -114,7 +114,7 @@ fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) -> Option<LintTrigger> { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_slice() && let ExprKind::Closure(&Closure { body, .. 
}) = arg.kind && let closure_body = cx.tcx.hir_body(body) diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs index 769526d131b..54f45263275 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -694,7 +694,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx sym::to_string => cx.tcx.is_diagnostic_item(sym::to_string_method, method_def_id), sym::to_vec => cx .tcx - .impl_of_method(method_def_id) + .impl_of_assoc(method_def_id) .filter(|&impl_did| cx.tcx.type_of(impl_did).instantiate_identity().is_slice()) .is_some(), _ => false, @@ -734,7 +734,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx fn check_borrow_predicate<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { if let ExprKind::MethodCall(_, caller, &[arg], _) = expr.kind && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && cx.tcx.trait_of_item(method_def_id).is_none() + && cx.tcx.trait_of_assoc(method_def_id).is_none() && let Some(borrow_id) = cx.tcx.get_diagnostic_item(sym::Borrow) && cx.tcx.predicates_of(method_def_id).predicates.iter().any(|(pred, _)| { if let ClauseKind::Trait(trait_pred) = pred.kind().skip_binder() diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs index d30c12e0c48..38fad239f67 100644 --- a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs +++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs @@ -79,7 +79,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: Symbo applicability, ); } - } else if let Some(impl_id) = cx.tcx.impl_of_method(def_id) + } else if let Some(impl_id) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_id).instantiate_identity().ty_adt_def() && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::Option | sym::Result)) { @@ -131,7 +131,7 @@ fn is_calling_clone(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { hir::ExprKind::MethodCall(method, obj, [], _) => { if method.ident.name == sym::clone && let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) // We check it's the `Clone` trait. 
&& cx.tcx.lang_items().clone_trait().is_some_and(|id| id == trait_id) // no autoderefs diff --git a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs index 5ea4ada128a..bfb481f4fc0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs +++ b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( name_span: Span, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec) && let ExprKind::Lit(Spanned { node: LitKind::Int(Pu128(0), _), diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs index 760ecf07589..18e2b384a46 100644 --- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs +++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs @@ -7,9 +7,7 @@ use clippy_utils::{is_path_lang_item, sym}; use rustc_ast::LitKind; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{ - Block, Expr, ExprKind, Impl, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData, -}; +use rustc_hir::{Block, Expr, ExprKind, Impl, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{Ty, TypeckResults}; use rustc_session::declare_lint_pass; diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs index c4a3d10299b..c637fb247ff 100644 --- a/src/tools/clippy/clippy_lints/src/missing_inline.rs +++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint; -use rustc_attr_data_structures::{AttributeKind, find_attr}; -use rustc_hir as hir; -use rustc_hir::Attribute; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, Attribute}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty::AssocItemContainer; use rustc_session::declare_lint_pass; @@ -97,11 +98,23 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { } match it.kind { hir::ItemKind::Fn { .. } => { + if fn_is_externally_exported(cx, it.owner_id.to_def_id()) { + return; + } + let desc = "a function"; let attrs = cx.tcx.hir_attrs(it.hir_id()); check_missing_inline_attrs(cx, attrs, it.span, desc); }, - hir::ItemKind::Trait(ref _constness, ref _is_auto, ref _unsafe, _ident, _generics, _bounds, trait_items) => { + hir::ItemKind::Trait( + ref _constness, + ref _is_auto, + ref _unsafe, + _ident, + _generics, + _bounds, + trait_items, + ) => { // note: we need to check if the trait is exported so we can't use // `LateLintPass::check_trait_item` here. 
for &tit in trait_items { @@ -173,3 +186,10 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { check_missing_inline_attrs(cx, attrs, impl_item.span, desc); } } + +/// Checks if this function is externally exported, where #[inline] wouldn't have the desired effect +/// and a rustc warning would be triggered, see #15301 +fn fn_is_externally_exported(cx: &LateContext<'_>, def_id: DefId) -> bool { + let attrs = cx.tcx.codegen_fn_attrs(def_id); + attrs.contains_extern_indicator() +} diff --git a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs index fa61d0fa11a..399bf4e1806 100644 --- a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs +++ b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs @@ -66,7 +66,9 @@ impl<'tcx> LateLintPass<'tcx> for MissingTraitMethods { }) = item.kind && let Some(trait_id) = trait_ref.trait_def_id() { - let trait_item_ids: DefIdSet = cx.tcx.associated_items(item.owner_id) + let trait_item_ids: DefIdSet = cx + .tcx + .associated_items(item.owner_id) .in_definition_order() .filter_map(|assoc_item| assoc_item.trait_item_def_id) .collect(); diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs index d9f4fb271fb..a489c0a4a5a 100644 --- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs +++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs @@ -171,14 +171,11 @@ impl<'tcx> Visitor<'tcx> for DivergenceVisitor<'_, 'tcx> { ExprKind::Continue(_) | ExprKind::Break(_, _) | ExprKind::Ret(_) => self.report_diverging_sub_expr(e), ExprKind::Call(func, _) => { let typ = self.cx.typeck_results().expr_ty(func); - match typ.kind() { - ty::FnDef(..) | ty::FnPtr(..) => { - let sig = typ.fn_sig(self.cx.tcx); - if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { - self.report_diverging_sub_expr(e); - } - }, - _ => {}, + if typ.is_fn() { + let sig = typ.fn_sig(self.cx.tcx); + if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { + self.report_diverging_sub_expr(e); + } } }, ExprKind::MethodCall(..) => { diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs index 2f1ab3d2652..31f51b45754 100644 --- a/src/tools/clippy/clippy_lints/src/mut_reference.rs +++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs @@ -79,7 +79,7 @@ fn check_arguments<'tcx>( name: &str, fn_kind: &str, ) { - if let ty::FnDef(..) | ty::FnPtr(..) 
= type_definition.kind() { + if type_definition.is_fn() { let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs(); for (argument, parameter) in iter::zip(arguments, parameters) { if let ty::Ref(_, _, Mutability::Not) | ty::RawPtr(_, Mutability::Not) = parameter.kind() diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs index 6a7c8436bad..a67545e419c 100644 --- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs +++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs @@ -6,7 +6,7 @@ use rustc_session::declare_lint_pass; use rustc_span::Span; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::snippet_with_applicability; +use clippy_utils::source::{snippet_with_applicability, snippet_with_context}; use clippy_utils::ty::has_iter_method; use clippy_utils::{is_trait_method, sym}; @@ -101,18 +101,23 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessForEach { let body_param_sugg = snippet_with_applicability(cx, body.params[0].pat.span, "..", &mut applicability); let for_each_rev_sugg = snippet_with_applicability(cx, for_each_recv.span, "..", &mut applicability); - let body_value_sugg = snippet_with_applicability(cx, body.value.span, "..", &mut applicability); + let (body_value_sugg, is_macro_call) = + snippet_with_context(cx, body.value.span, for_each_recv.span.ctxt(), "..", &mut applicability); let sugg = format!( "for {} in {} {}", body_param_sugg, for_each_rev_sugg, - match body.value.kind { - ExprKind::Block(block, _) if is_let_desugar(block) => { - format!("{{ {body_value_sugg} }}") - }, - ExprKind::Block(_, _) => body_value_sugg.to_string(), - _ => format!("{{ {body_value_sugg}; }}"), + if is_macro_call { + format!("{{ {body_value_sugg}; }}") + } else { + match body.value.kind { + ExprKind::Block(block, _) if is_let_desugar(block) => { + format!("{{ {body_value_sugg} }}") + }, + ExprKind::Block(_, _) => body_value_sugg.to_string(), + _ => format!("{{ {body_value_sugg}; }}"), + } } ); diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs index c97ecce75b4..7b057998063 100644 --- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs @@ -246,8 +246,10 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { for (span, suggestion) in clone_spans { diag.span_suggestion( span, - span.get_source_text(cx) - .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), + span.get_source_text(cx).map_or_else( + || "change the call to".to_owned(), + |src| format!("change `{src}` to"), + ), suggestion, Applicability::Unspecified, ); @@ -275,8 +277,10 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { for (span, suggestion) in clone_spans { diag.span_suggestion( span, - span.get_source_text(cx) - .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), + span.get_source_text(cx).map_or_else( + || "change the call to".to_owned(), + |src| format!("change `{src}` to"), + ), suggestion, Applicability::Unspecified, ); @@ -308,9 +312,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { /// Functions marked with these attributes must have the exact signature. 
pub(crate) fn requires_exact_signature(attrs: &[Attribute]) -> bool { attrs.iter().any(|attr| { - [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive] - .iter() - .any(|&allow| attr.has_name(allow)) + attr.is_proc_macro_attr() }) } diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs index 3b86f1d1f59..b598a390005 100644 --- a/src/tools/clippy/clippy_lints/src/new_without_default.rs +++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs @@ -65,11 +65,16 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { .. }) = item.kind { - for assoc_item in cx.tcx.associated_items(item.owner_id.def_id) + for assoc_item in cx + .tcx + .associated_items(item.owner_id.def_id) .filter_by_name_unhygienic(sym::new) { if let AssocKind::Fn { has_self: false, .. } = assoc_item.kind { - let impl_item = cx.tcx.hir_node_by_def_id(assoc_item.def_id.expect_local()).expect_impl_item(); + let impl_item = cx + .tcx + .hir_node_by_def_id(assoc_item.def_id.expect_local()) + .expect_impl_item(); if impl_item.span.in_external_macro(cx.sess().source_map()) { return; } diff --git a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs index dee8efeb291..791bbbe30a8 100644 --- a/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs +++ b/src/tools/clippy/clippy_lints/src/no_mangle_with_rust_abi.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{snippet, snippet_with_applicability}; use rustc_abi::ExternAbi; -use rustc_attr_data_structures::AttributeKind; +use rustc_hir::attrs::AttributeKind; use rustc_errors::Applicability; use rustc_hir::{Attribute, Item, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs index 5f10e1968f1..388c029c9ef 100644 --- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs +++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs @@ -338,7 +338,7 @@ impl<'tcx> NonCopyConst<'tcx> { tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, ty: Ty<'tcx>, - val: ConstValue<'tcx>, + val: ConstValue, ) -> Result<bool, ()> { let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty); match self.is_ty_freeze(tcx, typing_env, ty) { @@ -477,7 +477,7 @@ impl<'tcx> NonCopyConst<'tcx> { typing_env: TypingEnv<'tcx>, typeck: &'tcx TypeckResults<'tcx>, mut src_expr: &'tcx Expr<'tcx>, - mut val: ConstValue<'tcx>, + mut val: ConstValue, ) -> Result<Option<BorrowSource<'tcx>>, ()> { let mut parents = tcx.hir_parent_iter(src_expr.hir_id); let mut ty = typeck.expr_ty(src_expr); diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs index a78a342d4fe..466beb04b07 100644 --- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs +++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs @@ -3,12 +3,11 @@ use clippy_config::Conf; use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary}; +use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary, sym}; use rustc_data_structures::fx::{FxHashMap, 
FxHashSet}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::symbol::sym; use rustc_span::{Span, Symbol}; use {rustc_ast as ast, rustc_hir as hir}; @@ -89,6 +88,18 @@ impl ArithmeticSideEffects { self.allowed_unary.contains(ty_string_elem) } + fn is_non_zero_u(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { + if let ty::Adt(adt, substs) = ty.kind() + && cx.tcx.is_diagnostic_item(sym::NonZero, adt.did()) + && let int_type = substs.type_at(0) + && matches!(int_type.kind(), ty::Uint(_)) + { + true + } else { + false + } + } + /// Verifies built-in types that have specific allowed operations fn has_specific_allowed_type_and_operation<'tcx>( cx: &LateContext<'tcx>, @@ -97,33 +108,12 @@ impl ArithmeticSideEffects { rhs_ty: Ty<'tcx>, ) -> bool { let is_div_or_rem = matches!(op, hir::BinOpKind::Div | hir::BinOpKind::Rem); - let is_non_zero_u = |cx: &LateContext<'tcx>, ty: Ty<'tcx>| { - let tcx = cx.tcx; - - let ty::Adt(adt, substs) = ty.kind() else { return false }; - - if !tcx.is_diagnostic_item(sym::NonZero, adt.did()) { - return false; - } - - let int_type = substs.type_at(0); - let unsigned_int_types = [ - tcx.types.u8, - tcx.types.u16, - tcx.types.u32, - tcx.types.u64, - tcx.types.u128, - tcx.types.usize, - ]; - - unsigned_int_types.contains(&int_type) - }; let is_sat_or_wrap = |ty: Ty<'_>| { is_type_diagnostic_item(cx, ty, sym::Saturating) || is_type_diagnostic_item(cx, ty, sym::Wrapping) }; // If the RHS is `NonZero<u*>`, then division or modulo by zero will never occur. - if is_non_zero_u(cx, rhs_ty) && is_div_or_rem { + if Self::is_non_zero_u(cx, rhs_ty) && is_div_or_rem { return true; } @@ -219,6 +209,18 @@ impl ArithmeticSideEffects { let (mut actual_rhs, rhs_ref_counter) = peel_hir_expr_refs(rhs); actual_lhs = expr_or_init(cx, actual_lhs); actual_rhs = expr_or_init(cx, actual_rhs); + + // `NonZeroU*.get() - 1` will never overflow + if let hir::BinOpKind::Sub = op + && let hir::ExprKind::MethodCall(method, receiver, [], _) = actual_lhs.kind + && method.ident.name == sym::get + && let receiver_ty = cx.typeck_results().expr_ty(receiver).peel_refs() + && Self::is_non_zero_u(cx, receiver_ty) + && let Some(1) = Self::literal_integer(cx, actual_rhs) + { + return; + } + let lhs_ty = cx.typeck_results().expr_ty(actual_lhs).peel_refs(); let rhs_ty = cx.typeck_results().expr_ty_adjusted(actual_rhs).peel_refs(); if self.has_allowed_binary(lhs_ty, rhs_ty) { @@ -227,6 +229,7 @@ impl ArithmeticSideEffects { if Self::has_specific_allowed_type_and_operation(cx, lhs_ty, op, rhs_ty) { return; } + let has_valid_op = if Self::is_integral(lhs_ty) && Self::is_integral(rhs_ty) { if let hir::BinOpKind::Shl | hir::BinOpKind::Shr = op { // At least for integers, shifts are already handled by the CTFE diff --git a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs index 9b2cfd91b85..22ec4fe60fb 100644 --- a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs +++ b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs @@ -41,7 +41,7 @@ fn check_op(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool) ExprKind::MethodCall(_, arg, [], _) if typeck .type_dependent_def_id(expr.hir_id) - .and_then(|id| cx.tcx.trait_of_item(id)) + .and_then(|id| cx.tcx.trait_of_assoc(id)) .is_some_and(|id| matches!(cx.tcx.get_diagnostic_name(id), Some(sym::ToString | sym::ToOwned))) => { (arg, arg.span) diff --git
a/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs b/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs index 821178a4315..55bb78cfce5 100644 --- a/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs +++ b/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs @@ -2,11 +2,12 @@ use clippy_utils::consts::is_zero_integer_const; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; +use clippy_utils::ty::expr_type_is_certain; use rustc_ast::BinOpKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use rustc_middle::ty; +use rustc_middle::ty::{self, Ty}; use super::MANUAL_IS_MULTIPLE_OF; @@ -22,9 +23,21 @@ pub(super) fn check<'tcx>( && let Some(operand) = uint_compare_to_zero(cx, op, lhs, rhs) && let ExprKind::Binary(operand_op, operand_left, operand_right) = operand.kind && operand_op.node == BinOpKind::Rem + && matches!( + cx.typeck_results().expr_ty_adjusted(operand_left).peel_refs().kind(), + ty::Uint(_) + ) + && matches!( + cx.typeck_results().expr_ty_adjusted(operand_right).peel_refs().kind(), + ty::Uint(_) + ) + && expr_type_is_certain(cx, operand_left) { let mut app = Applicability::MachineApplicable; - let divisor = Sugg::hir_with_applicability(cx, operand_right, "_", &mut app); + let divisor = deref_sugg( + Sugg::hir_with_applicability(cx, operand_right, "_", &mut app), + cx.typeck_results().expr_ty_adjusted(operand_right), + ); span_lint_and_sugg( cx, MANUAL_IS_MULTIPLE_OF, @@ -64,3 +77,11 @@ fn uint_compare_to_zero<'tcx>( matches!(cx.typeck_results().expr_ty_adjusted(operand).kind(), ty::Uint(_)).then_some(operand) } + +fn deref_sugg<'a>(sugg: Sugg<'a>, ty: Ty<'_>) -> Sugg<'a> { + if let ty::Ref(_, target_ty, _) = ty.kind() { + deref_sugg(sugg.deref(), *target_ty) + } else { + sugg + } +} diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs index 21e1ab0f4f2..0a1f2625f4c 100644 --- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs +++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs @@ -179,7 +179,7 @@ fn in_impl<'tcx>( bin_op: DefId, ) -> Option<(&'tcx rustc_hir::Ty<'tcx>, &'tcx rustc_hir::Ty<'tcx>)> { if let Some(block) = get_enclosing_block(cx, e.hir_id) - && let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id()) + && let Some(impl_def_id) = cx.tcx.impl_of_assoc(block.hir_id.owner.to_def_id()) && let item = cx.tcx.hir_expect_item(impl_def_id.expect_local()) && let ItemKind::Impl(item) = &item.kind && let Some(of_trait) = &item.of_trait diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs index b8005dfd6f8..303c5dfed89 100644 --- a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs +++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs @@ -5,7 +5,8 @@ use clippy_utils::ty::{for_each_top_level_late_bound_region, is_copy}; use clippy_utils::{is_self, is_self_ty}; use core::ops::ControlFlow; use rustc_abi::ExternAbi; -use rustc_attr_data_structures::{AttributeKind, InlineAttr, find_attr}; +use rustc_hir::attrs::{AttributeKind, InlineAttr}; +use rustc_hir::find_attr; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir as hir; diff --git a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs index 
19d9acfc930..4197680dd04 100644 --- a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs +++ b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs @@ -96,6 +96,12 @@ impl<'tcx> LateLintPass<'tcx> for PatternTypeMismatch { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if let ExprKind::Match(_, arms, _) = expr.kind { + // if the match is generated by an external macro, the writer does not control + // how the scrutinee (`match &scrutiny { ... }`) is matched + if expr.span.in_external_macro(cx.sess().source_map()) { + return; + } + for arm in arms { let pat = &arm.pat; if apply_lint(cx, pat, DerefPossible::Possible) { diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs index 94cdcf00054..b3058c51afd 100644 --- a/src/tools/clippy/clippy_lints/src/ptr.rs +++ b/src/tools/clippy/clippy_lints/src/ptr.rs @@ -584,7 +584,13 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &Body<'tcx>, args: &[ Some((Node::Stmt(_), _)) => (), Some((Node::LetStmt(l), _)) => { // Only trace simple bindings. e.g `let x = y;` - if let PatKind::Binding(BindingMode::NONE, id, _, None) = l.pat.kind { + if let PatKind::Binding(BindingMode::NONE, id, ident, None) = l.pat.kind + // Let's not lint for the current parameter. The user may still intend to mutate + // (or, if not mutate, then perhaps call a method that's not otherwise available + // for) the referenced value behind the parameter through this local let binding + // with the underscore being only temporary. + && !ident.name.as_str().starts_with('_') + { self.bindings.insert(id, args_idx); } else { set_skip_flag(); @@ -650,7 +656,14 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &Body<'tcx>, args: &[ .filter_map(|(i, arg)| { let param = &body.params[arg.idx]; match param.pat.kind { - PatKind::Binding(BindingMode::NONE, id, _, None) if !is_lint_allowed(cx, PTR_ARG, param.hir_id) => { + PatKind::Binding(BindingMode::NONE, id, ident, None) + if !is_lint_allowed(cx, PTR_ARG, param.hir_id) + // Let's not lint for the current parameter. The user may still intend to mutate + // (or, if not mutate, then perhaps call a method that's not otherwise available + // for) the referenced value behind the parameter with the underscore being only + // temporary. 
+ && !ident.name.as_str().starts_with('_') => + { Some((id, i)) }, _ => { diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs index d292ed86ea4..03d00ba849f 100644 --- a/src/tools/clippy/clippy_lints/src/ranges.rs +++ b/src/tools/clippy/clippy_lints/src/ranges.rs @@ -4,15 +4,20 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_the use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::{SpanRangeExt, snippet, snippet_with_applicability}; use clippy_utils::sugg::Sugg; -use clippy_utils::{get_parent_expr, higher, is_in_const_context, is_integer_const, path_to_local}; +use clippy_utils::ty::implements_trait; +use clippy_utils::{ + expr_use_ctxt, fn_def_id, get_parent_expr, higher, is_in_const_context, is_integer_const, is_path_lang_item, + path_to_local, +}; +use rustc_ast::Mutability; use rustc_ast::ast::RangeLimits; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Expr, ExprKind, HirId}; -use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty; +use rustc_hir::{BinOpKind, Expr, ExprKind, HirId, LangItem, Node}; +use rustc_lint::{LateContext, LateLintPass, Lint}; +use rustc_middle::ty::{self, ClauseKind, GenericArgKind, PredicatePolarity, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::Span; use rustc_span::source_map::Spanned; +use rustc_span::{Span, sym}; use std::cmp::Ordering; declare_clippy_lint! { @@ -24,6 +29,12 @@ declare_clippy_lint! { /// The code is more readable with an inclusive range /// like `x..=y`. /// + /// ### Limitations + /// The lint is conservative and will trigger only when switching + /// from an exclusive to an inclusive range is provably safe from + /// a typing point of view. This corresponds to situations where + /// the range is used as an iterator, or for indexing. + /// /// ### Known problems /// Will add unnecessary pair of parentheses when the /// expression is not wrapped in a pair but starts with an opening parenthesis @@ -34,11 +45,6 @@ declare_clippy_lint! { /// exclusive ranges, because they essentially add an extra branch that /// LLVM may fail to hoist out of the loop. /// - /// This will cause a warning that cannot be fixed if the consumer of the - /// range only accepts a specific range type, instead of the generic - /// `RangeBounds` trait - /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)). - /// /// ### Example /// ```no_run /// # let x = 0; @@ -71,11 +77,11 @@ declare_clippy_lint! { /// The code is more readable with an exclusive range /// like `x..y`. /// - /// ### Known problems - /// This will cause a warning that cannot be fixed if - /// the consumer of the range only accepts a specific range type, instead of - /// the generic `RangeBounds` trait - /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)). + /// ### Limitations + /// The lint is conservative and will trigger only when switching + /// from an inclusive to an exclusive range is provably safe from + /// a typing point of view. This corresponds to situations where + /// the range is used as an iterator, or for indexing. 
/// /// ### Example /// ```no_run @@ -344,70 +350,188 @@ fn check_range_bounds<'a, 'tcx>(cx: &'a LateContext<'tcx>, ex: &'a Expr<'_>) -> None } -// exclusive range plus one: `x..(y+1)` -fn check_exclusive_range_plus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { - if expr.span.can_be_used_for_suggestions() - && let Some(higher::Range { - start, - end: Some(end), - limits: RangeLimits::HalfOpen, - }) = higher::Range::hir(expr) - && let Some(y) = y_plus_one(cx, end) +/// Check whether `expr` could switch range types without breaking the typing requirements. This is +/// generally the case when `expr` is used as an iterator for example, or as a slice or `&str` +/// index. +/// +/// FIXME: Note that the current implementation may still return false positives. A proper fix would +/// check that the obligations are still satisfied after switching the range type. +fn can_switch_ranges<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'_>, + original: RangeLimits, + inner_ty: Ty<'tcx>, +) -> bool { + let use_ctxt = expr_use_ctxt(cx, expr); + let (Node::Expr(parent_expr), false) = (use_ctxt.node, use_ctxt.is_ty_unified) else { + return false; + }; + + // Check if `expr` is the argument of a compiler-generated `IntoIter::into_iter(expr)` + if let ExprKind::Call(func, [arg]) = parent_expr.kind + && arg.hir_id == use_ctxt.child_id + && is_path_lang_item(cx, func, LangItem::IntoIterIntoIter) { - let span = expr.span; - span_lint_and_then( - cx, - RANGE_PLUS_ONE, - span, - "an inclusive range would be more readable", - |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_paren(); - match span.with_source_text(cx, |src| src.starts_with('(') && src.ends_with(')')) { - Some(true) => { - diag.span_suggestion(span, "use", format!("({start}..={end})"), Applicability::MaybeIncorrect); - }, - Some(false) => { - diag.span_suggestion( - span, - "use", - format!("{start}..={end}"), - Applicability::MachineApplicable, // snippet - ); - }, - None => {}, - } - }, - ); + return true; + } + + // Check if `expr` is used as the receiver of a method of the `Iterator`, `IntoIterator`, + // or `RangeBounds` traits. + if let ExprKind::MethodCall(_, receiver, _, _) = parent_expr.kind + && receiver.hir_id == use_ctxt.child_id + && let Some(method_did) = cx.typeck_results().type_dependent_def_id(parent_expr.hir_id) + && let Some(trait_did) = cx.tcx.trait_of_assoc(method_did) + && matches!( + cx.tcx.get_diagnostic_name(trait_did), + Some(sym::Iterator | sym::IntoIterator | sym::RangeBounds) + ) + { + return true; + } + + // Check if `expr` is an argument of a call which requires an `Iterator`, `IntoIterator`, + // or `RangeBounds` trait. + if let ExprKind::Call(_, args) | ExprKind::MethodCall(_, _, args, _) = parent_expr.kind + && let Some(id) = fn_def_id(cx, parent_expr) + && let Some(arg_idx) = args.iter().position(|e| e.hir_id == use_ctxt.child_id) + { + let input_idx = if matches!(parent_expr.kind, ExprKind::MethodCall(..)) { + arg_idx + 1 + } else { + arg_idx + }; + let inputs = cx + .tcx + .liberate_late_bound_regions(id, cx.tcx.fn_sig(id).instantiate_identity()) + .inputs(); + let expr_ty = inputs[input_idx]; + // Check that the `expr` type is present only once, otherwise modifying just one of them might be + // risky if they are referenced using the same generic type for example. 
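For illustration of the case this single-occurrence check guards against, a minimal sketch (hypothetical code, not part of the patch; `takes_two` and its bound are invented): when two parameters share the same generic range type, switching only one call-site range would leave the arguments with different types, so the lint has to stay conservative.

use std::ops::RangeBounds;

fn takes_two<R: RangeBounds<usize>>(_a: R, _b: R) {}

fn caller(n: usize) {
    // Both arguments must resolve to the same `R`; replacing only the first
    // `0..n + 1` with `0..=n` would no longer compile, so `range_plus_one`
    // should not suggest the switch here.
    takes_two(0..n + 1, 0..n + 1);
}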
+ if inputs.iter().enumerate().all(|(n, ty)| + n == input_idx + || !ty.walk().any(|arg| matches!(arg.kind(), + GenericArgKind::Type(ty) if ty == expr_ty))) + // Look for a clause requiring `Iterator`, `IntoIterator`, or `RangeBounds`, and resolving to `expr_type`. + && cx + .tcx + .param_env(id) + .caller_bounds() + .into_iter() + .any(|p| { + if let ClauseKind::Trait(t) = p.kind().skip_binder() + && t.polarity == PredicatePolarity::Positive + && matches!( + cx.tcx.get_diagnostic_name(t.trait_ref.def_id), + Some(sym::Iterator | sym::IntoIterator | sym::RangeBounds) + ) + { + t.self_ty() == expr_ty + } else { + false + } + }) + { + return true; + } + } + + // Check if `expr` is used for indexing, and if the switched range type could be used + // as well. + if let ExprKind::Index(outer_expr, index, _) = parent_expr.kind + && index.hir_id == expr.hir_id + // Build the switched range type (for example `RangeInclusive<usize>`). + && let Some(switched_range_def_id) = match original { + RangeLimits::HalfOpen => cx.tcx.lang_items().range_inclusive_struct(), + RangeLimits::Closed => cx.tcx.lang_items().range_struct(), + } + && let switched_range_ty = cx + .tcx + .type_of(switched_range_def_id) + .instantiate(cx.tcx, &[inner_ty.into()]) + // Check that the switched range type can be used for indexing the original expression + // through the `Index` or `IndexMut` trait. + && let ty::Ref(_, outer_ty, mutability) = cx.typeck_results().expr_ty_adjusted(outer_expr).kind() + && let Some(index_def_id) = match mutability { + Mutability::Not => cx.tcx.lang_items().index_trait(), + Mutability::Mut => cx.tcx.lang_items().index_mut_trait(), + } + && implements_trait(cx, *outer_ty, index_def_id, &[switched_range_ty.into()]) + // We could also check that the associated item of the `index_def_id` trait with the switched range type + // return the same type, but it is reasonable to expect so. We can't check that the result is identical + // in both `Index<Range<…>>` and `Index<RangeInclusive<…>>` anyway. + { + return true; } + + false +} + +// exclusive range plus one: `x..(y+1)` +fn check_exclusive_range_plus_one<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + check_range_switch( + cx, + expr, + RangeLimits::HalfOpen, + y_plus_one, + RANGE_PLUS_ONE, + "an inclusive range would be more readable", + "..=", + ); } // inclusive range minus one: `x..=(y-1)` -fn check_inclusive_range_minus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { +fn check_inclusive_range_minus_one<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + check_range_switch( + cx, + expr, + RangeLimits::Closed, + y_minus_one, + RANGE_MINUS_ONE, + "an exclusive range would be more readable", + "..", + ); +} + +/// Check for a `kind` of range in `expr`, check for `predicate` on the end, +/// and emit the `lint` with `msg` and the `operator`. 
+fn check_range_switch<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'_>, + kind: RangeLimits, + predicate: impl for<'hir> FnOnce(&LateContext<'_>, &Expr<'hir>) -> Option<&'hir Expr<'hir>>, + lint: &'static Lint, + msg: &'static str, + operator: &str, +) { if expr.span.can_be_used_for_suggestions() && let Some(higher::Range { start, end: Some(end), - limits: RangeLimits::Closed, + limits, }) = higher::Range::hir(expr) - && let Some(y) = y_minus_one(cx, end) + && limits == kind + && let Some(y) = predicate(cx, end) + && can_switch_ranges(cx, expr, kind, cx.typeck_results().expr_ty(y)) { - span_lint_and_then( - cx, - RANGE_MINUS_ONE, - expr.span, - "an exclusive range would be more readable", - |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_paren(); - diag.span_suggestion( - expr.span, - "use", - format!("{start}..{end}"), - Applicability::MachineApplicable, // snippet - ); - }, - ); + let span = expr.span; + span_lint_and_then(cx, lint, span, msg, |diag| { + let mut app = Applicability::MachineApplicable; + let start = start.map_or(String::new(), |x| { + Sugg::hir_with_applicability(cx, x, "<x>", &mut app) + .maybe_paren() + .to_string() + }); + let end = Sugg::hir_with_applicability(cx, y, "<y>", &mut app).maybe_paren(); + match span.with_source_text(cx, |src| src.starts_with('(') && src.ends_with(')')) { + Some(true) => { + diag.span_suggestion(span, "use", format!("({start}{operator}{end})"), app); + }, + Some(false) => { + diag.span_suggestion(span, "use", format!("{start}{operator}{end}"), app); + }, + None => {}, + } + }); } } @@ -494,7 +618,7 @@ fn check_reversed_empty_range(cx: &LateContext<'_>, expr: &Expr<'_>) { } } -fn y_plus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> { +fn y_plus_one<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { match expr.kind { ExprKind::Binary( Spanned { @@ -515,7 +639,7 @@ fn y_plus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<' } } -fn y_minus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> { +fn y_minus_one<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { match expr.kind { ExprKind::Binary( Spanned { diff --git a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs index 25929b853af..2cdb8ef3a65 100644 --- a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs +++ b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::ty::is_must_use_ty; use clippy_utils::{nth_arg, return_ty}; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::FnKind; use rustc_hir::{Body, FnDecl, OwnerId, TraitItem, TraitItemKind}; @@ -113,7 +114,7 @@ impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse { ) { if matches!(kind, FnKind::Method(_, _)) // We are only interested in methods, not in functions or associated functions. - && let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id()) + && let Some(impl_def) = cx.tcx.impl_of_assoc(fn_def.to_def_id()) // We don't want this method to be the implementation of a trait because the // `#[must_use]` should be put on the trait definition directly.
&& cx.tcx.trait_id_of_impl(impl_def).is_none() diff --git a/src/tools/clippy/clippy_lints/src/same_name_method.rs b/src/tools/clippy/clippy_lints/src/same_name_method.rs index 85fde780e68..67eb71f7d07 100644 --- a/src/tools/clippy/clippy_lints/src/same_name_method.rs +++ b/src/tools/clippy/clippy_lints/src/same_name_method.rs @@ -3,7 +3,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{HirId, Impl, ItemKind, Node, Path, QPath, TraitRef, TyKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{AssocKind, AssocItem}; +use rustc_middle::ty::{AssocItem, AssocKind}; use rustc_session::declare_lint_pass; use rustc_span::Span; use rustc_span::symbol::Symbol; @@ -53,11 +53,7 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { for id in cx.tcx.hir_free_items() { if matches!(cx.tcx.def_kind(id.owner_id), DefKind::Impl { .. }) && let item = cx.tcx.hir_item(id) - && let ItemKind::Impl(Impl { - of_trait, - self_ty, - .. - }) = &item.kind + && let ItemKind::Impl(Impl { of_trait, self_ty, .. }) = &item.kind && let TyKind::Path(QPath::Resolved(_, Path { res, .. })) = self_ty.kind { if !map.contains_key(res) { @@ -127,7 +123,9 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { }, None => { for assoc_item in cx.tcx.associated_items(id.owner_id).in_definition_order() { - let AssocKind::Fn { name, .. } = assoc_item.kind else { continue }; + let AssocKind::Fn { name, .. } = assoc_item.kind else { + continue; + }; let impl_span = cx.tcx.def_span(assoc_item.def_id); let hir_id = cx.tcx.local_def_id_to_hir_id(assoc_item.def_id.expect_local()); if let Some(trait_spans) = existing_name.trait_methods.get(&name) { @@ -140,10 +138,7 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { |diag| { // TODO should we `span_note` on every trait? // iterate on trait_spans? 
- diag.span_note( - trait_spans[0], - format!("existing `{name}` defined here"), - ); + diag.span_note(trait_spans[0], format!("existing `{name}` defined here")); }, ); } diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs index 521754f7bab..9110f684bd1 100644 --- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs +++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs @@ -168,7 +168,7 @@ impl<'cx, 'others, 'tcx> AttrChecker<'cx, 'others, 'tcx> { if let Some(adt) = ty.ty_adt_def() { let mut iter = get_attr( self.cx.sess(), - self.cx.tcx.get_attrs_unchecked(adt.did()), + self.cx.tcx.get_all_attrs(adt.did()), sym::has_significant_drop, ); if iter.next().is_some() { diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs index 216f168471e..50c44a8e75c 100644 --- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs +++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs @@ -2,7 +2,7 @@ use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg}; use clippy_utils::is_from_proc_macro; use clippy_utils::msrvs::Msrv; -use rustc_attr_data_structures::{StabilityLevel, StableSince}; +use rustc_hir::{StabilityLevel, StableSince}; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; diff --git a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs index d321c48f6af..dcddff557d1 100644 --- a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs +++ b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs @@ -206,7 +206,7 @@ fn check_partial_eq(cx: &LateContext<'_>, method_span: Span, method_def_id: Loca let arg_ty = cx.typeck_results().expr_ty_adjusted(arg); if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && trait_id == trait_def_id && matches_ty(receiver_ty, arg_ty, self_arg, other_arg) { @@ -250,7 +250,7 @@ fn check_to_string(cx: &LateContext<'_>, method_span: Span, method_def_id: Local let is_bad = match expr.kind { ExprKind::MethodCall(segment, _receiver, &[_arg], _) if segment.ident.name == name.name => { if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && trait_id == trait_def_id { true @@ -318,7 +318,7 @@ where && let ExprKind::Path(qpath) = f.kind && is_default_method_on_current_ty(self.cx.tcx, qpath, self.implemented_ty_id) && let Some(method_def_id) = path_def_id(self.cx, f) - && let Some(trait_def_id) = self.cx.tcx.trait_of_item(method_def_id) + && let Some(trait_def_id) = self.cx.tcx.trait_of_assoc(method_def_id) && self.cx.tcx.is_diagnostic_item(sym::Default, trait_def_id) { span_error(self.cx, self.method_span, expr); @@ -426,7 +426,7 @@ fn check_from(cx: &LateContext<'_>, method_span: Span, method_def_id: LocalDefId if let Some((fn_def_id, node_args)) = fn_def_id_with_node_args(cx, expr) && let [s1, s2] = **node_args && let (Some(s1), Some(s2)) = (s1.as_type(), s2.as_type()) - && let Some(trait_def_id) = cx.tcx.trait_of_item(fn_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(fn_def_id) && cx.tcx.is_diagnostic_item(sym::Into, trait_def_id) 
&& get_impl_trait_def_id(cx, method_def_id) == cx.tcx.get_diagnostic_item(sym::From) && s1 == sig.inputs()[0] diff --git a/src/tools/clippy/clippy_lints/src/unused_async.rs b/src/tools/clippy/clippy_lints/src/unused_async.rs index e67afc7f5a8..5a3e4b7adf6 100644 --- a/src/tools/clippy/clippy_lints/src/unused_async.rs +++ b/src/tools/clippy/clippy_lints/src/unused_async.rs @@ -1,8 +1,12 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::is_def_id_trait_method; +use clippy_utils::usage::is_todo_unimplemented_stub; use rustc_hir::def::DefKind; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr, walk_fn}; -use rustc_hir::{Body, Defaultness, Expr, ExprKind, FnDecl, HirId, Node, TraitItem, YieldSource}; +use rustc_hir::{ + Body, Closure, ClosureKind, CoroutineDesugaring, CoroutineKind, Defaultness, Expr, ExprKind, FnDecl, HirId, Node, + TraitItem, YieldSource, +}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::impl_lint_pass; @@ -81,11 +85,8 @@ impl<'tcx> Visitor<'tcx> for AsyncFnVisitor<'_, 'tcx> { let is_async_block = matches!( ex.kind, - ExprKind::Closure(rustc_hir::Closure { - kind: rustc_hir::ClosureKind::Coroutine(rustc_hir::CoroutineKind::Desugared( - rustc_hir::CoroutineDesugaring::Async, - _ - )), + ExprKind::Closure(Closure { + kind: ClosureKind::Coroutine(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)), .. }) ); @@ -120,6 +121,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAsync { && fn_kind.asyncness().is_async() && !is_def_id_trait_method(cx, def_id) && !is_default_trait_impl(cx, def_id) + && !async_fn_contains_todo_unimplemented_macro(cx, body) { let mut visitor = AsyncFnVisitor { cx, @@ -203,3 +205,18 @@ fn is_default_trait_impl(cx: &LateContext<'_>, def_id: LocalDefId) -> bool { }) ) } + +fn async_fn_contains_todo_unimplemented_macro(cx: &LateContext<'_>, body: &Body<'_>) -> bool { + if let ExprKind::Closure(closure) = body.value.kind + && let ClosureKind::Coroutine(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) = closure.kind + && let body = cx.tcx.hir_body(closure.body) + && let ExprKind::Block(block, _) = body.value.kind + && block.stmts.is_empty() + && let Some(expr) = block.expr + && let ExprKind::DropTemps(inner) = expr.kind + { + return is_todo_unimplemented_stub(cx, inner); + } + + false +} diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs index 12cc1093899..f3cd3f1bb28 100644 --- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs +++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs @@ -84,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount { /// get desugared to match. 
fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'tcx>) { let fn_def_id = block.hir_id.owner.to_def_id(); - if let Some(impl_id) = cx.tcx.impl_of_method(fn_def_id) + if let Some(impl_id) = cx.tcx.impl_of_assoc(fn_def_id) && let Some(trait_id) = cx.tcx.trait_id_of_impl(impl_id) { // We don't want to lint inside io::Read or io::Write implementations, as the author has more @@ -300,7 +300,7 @@ fn check_io_mode(cx: &LateContext<'_>, call: &hir::Expr<'_>) -> Option<IoOp> { }; if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(call.hir_id) - && let Some(trait_def_id) = cx.tcx.trait_of_item(method_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(method_def_id) { if let Some(diag_name) = cx.tcx.get_diagnostic_name(trait_def_id) { match diag_name { diff --git a/src/tools/clippy/clippy_lints/src/unused_self.rs b/src/tools/clippy/clippy_lints/src/unused_self.rs index 12da891a71b..dff39974a37 100644 --- a/src/tools/clippy/clippy_lints/src/unused_self.rs +++ b/src/tools/clippy/clippy_lints/src/unused_self.rs @@ -1,12 +1,10 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_help; -use clippy_utils::macros::root_macro_call_first_node; -use clippy_utils::sym; +use clippy_utils::usage::is_todo_unimplemented_stub; use clippy_utils::visitors::is_local_used; -use rustc_hir::{Body, Impl, ImplItem, ImplItemKind, ItemKind}; +use rustc_hir::{Impl, ImplItem, ImplItemKind, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; -use std::ops::ControlFlow; declare_clippy_lint! { /// ### What it does @@ -60,18 +58,6 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { let parent = cx.tcx.hir_get_parent_item(impl_item.hir_id()).def_id; let parent_item = cx.tcx.hir_expect_item(parent); let assoc_item = cx.tcx.associated_item(impl_item.owner_id); - let contains_todo = |cx, body: &'_ Body<'_>| -> bool { - clippy_utils::visitors::for_each_expr_without_closures(body.value, |e| { - if let Some(macro_call) = root_macro_call_first_node(cx, e) - && cx.tcx.is_diagnostic_item(sym::todo_macro, macro_call.def_id) - { - ControlFlow::Break(()) - } else { - ControlFlow::Continue(()) - } - }) - .is_some() - }; if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind && assoc_item.is_method() && let ImplItemKind::Fn(.., body_id) = &impl_item.kind @@ -79,7 +65,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { && let body = cx.tcx.hir_body(*body_id) && let [self_param, ..] 
= body.params && !is_local_used(cx, body, self_param.pat.hir_id) - && !contains_todo(cx, body) + && !is_todo_unimplemented_stub(cx, body.value) { span_lint_and_help( cx, diff --git a/src/tools/clippy/clippy_lints/src/unused_trait_names.rs b/src/tools/clippy/clippy_lints/src/unused_trait_names.rs index 610cec7b8c8..12f2804dbaa 100644 --- a/src/tools/clippy/clippy_lints/src/unused_trait_names.rs +++ b/src/tools/clippy/clippy_lints/src/unused_trait_names.rs @@ -6,7 +6,7 @@ use clippy_utils::source::snippet_opt; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Item, ItemKind, UseKind}; -use rustc_lint::{LateContext, LateLintPass, LintContext as _}; +use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Visibility; use rustc_session::impl_lint_pass; use rustc_span::symbol::kw; @@ -59,13 +59,13 @@ impl_lint_pass!(UnusedTraitNames => [UNUSED_TRAIT_NAMES]); impl<'tcx> LateLintPass<'tcx> for UnusedTraitNames { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { - if !item.span.in_external_macro(cx.sess().source_map()) + if !item.span.from_expansion() && let ItemKind::Use(path, UseKind::Single(ident)) = item.kind // Ignore imports that already use Underscore && ident.name != kw::Underscore // Only check traits && let Some(Res::Def(DefKind::Trait, _)) = path.res.type_ns - && cx.tcx.maybe_unused_trait_imports(()).contains(&item.owner_id.def_id) + && cx.tcx.resolutions(()).maybe_unused_trait_imports.contains(&item.owner_id.def_id) // Only check this import if it is visible to its module only (no pub, pub(crate), ...) && let module = cx.tcx.parent_module_from_def_id(item.owner_id.def_id) && cx.tcx.visibility(item.owner_id.def_id) == Visibility::Restricted(module.to_def_id()) diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs index 380ddea4e1e..e5b20c0e0a1 100644 --- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs +++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs @@ -176,6 +176,33 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { } }, + ExprKind::MethodCall(path, recv, [arg], _) => { + if matches!( + path.ident.name, + sym::map | sym::map_err | sym::map_break | sym::map_continue + ) && has_eligible_receiver(cx, recv, e) + && (is_trait_item(cx, arg, sym::Into) || is_trait_item(cx, arg, sym::From)) + && let ty::FnDef(_, args) = cx.typeck_results().expr_ty(arg).kind() + && let &[from_ty, to_ty] = args.into_type_list(cx.tcx).as_slice() + && same_type_and_consts(from_ty, to_ty) + { + span_lint_and_then( + cx, + USELESS_CONVERSION, + e.span.with_lo(recv.span.hi()), + format!("useless conversion to the same type: `{from_ty}`"), + |diag| { + diag.suggest_remove_item( + cx, + e.span.with_lo(recv.span.hi()), + "consider removing", + Applicability::MachineApplicable, + ); + }, + ); + } + }, + ExprKind::MethodCall(name, recv, [], _) => { if is_trait_method(cx, e, sym::Into) && name.ident.name == sym::into { let a = cx.typeck_results().expr_ty(e); @@ -412,32 +439,6 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { } } -/// Check if `arg` is a `Into::into` or `From::from` applied to `receiver` to give `expr`, through a -/// higher-order mapping function. 
-pub fn check_function_application(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) { - if has_eligible_receiver(cx, recv, expr) - && (is_trait_item(cx, arg, sym::Into) || is_trait_item(cx, arg, sym::From)) - && let ty::FnDef(_, args) = cx.typeck_results().expr_ty(arg).kind() - && let &[from_ty, to_ty] = args.into_type_list(cx.tcx).as_slice() - && same_type_and_consts(from_ty, to_ty) - { - span_lint_and_then( - cx, - USELESS_CONVERSION, - expr.span.with_lo(recv.span.hi()), - format!("useless conversion to the same type: `{from_ty}`"), - |diag| { - diag.suggest_remove_item( - cx, - expr.span.with_lo(recv.span.hi()), - "consider removing", - Applicability::MachineApplicable, - ); - }, - ); - } -} - fn has_eligible_receiver(cx: &LateContext<'_>, recv: &Expr<'_>, expr: &Expr<'_>) -> bool { if is_inherent_method_call(cx, expr) { matches!( diff --git a/src/tools/clippy/clippy_lints/src/utils/author.rs b/src/tools/clippy/clippy_lints/src/utils/author.rs index ac92ab5a245..29931738412 100644 --- a/src/tools/clippy/clippy_lints/src/utils/author.rs +++ b/src/tools/clippy/clippy_lints/src/utils/author.rs @@ -9,6 +9,7 @@ use rustc_hir::{ FnRetTy, HirId, Lit, PatExprKind, PatKind, QPath, StmtKind, StructTailExpr, }; use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_middle::ty::{FloatTy, IntTy, UintTy}; use rustc_session::declare_lint_pass; use rustc_span::symbol::{Ident, Symbol}; use std::cell::Cell; @@ -337,15 +338,43 @@ impl<'a, 'tcx> PrintVisitor<'a, 'tcx> { LitKind::Byte(b) => kind!("Byte({b})"), LitKind::Int(i, suffix) => { let int_ty = match suffix { - LitIntType::Signed(int_ty) => format!("LitIntType::Signed(IntTy::{int_ty:?})"), - LitIntType::Unsigned(uint_ty) => format!("LitIntType::Unsigned(UintTy::{uint_ty:?})"), + LitIntType::Signed(int_ty) => { + let t = match int_ty { + IntTy::Isize => "Isize", + IntTy::I8 => "I8", + IntTy::I16 => "I16", + IntTy::I32 => "I32", + IntTy::I64 => "I64", + IntTy::I128 => "I128", + }; + format!("LitIntType::Signed(IntTy::{t})") + } + LitIntType::Unsigned(uint_ty) => { + let t = match uint_ty { + UintTy::Usize => "Usize", + UintTy::U8 => "U8", + UintTy::U16 => "U16", + UintTy::U32 => "U32", + UintTy::U64 => "U64", + UintTy::U128 => "U128", + }; + format!("LitIntType::Unsigned(UintTy::{t})") + } LitIntType::Unsuffixed => String::from("LitIntType::Unsuffixed"), }; kind!("Int({i}, {int_ty})"); }, LitKind::Float(_, suffix) => { let float_ty = match suffix { - LitFloatType::Suffixed(suffix_ty) => format!("LitFloatType::Suffixed(FloatTy::{suffix_ty:?})"), + LitFloatType::Suffixed(suffix_ty) => { + let t = match suffix_ty { + FloatTy::F16 => "F16", + FloatTy::F32 => "F32", + FloatTy::F64 => "F64", + FloatTy::F128 => "F128", + }; + format!("LitFloatType::Suffixed(FloatTy::{t})") + } LitFloatType::Unsuffixed => String::from("LitFloatType::Unsuffixed"), }; kind!("Float(_, {float_ty})"); diff --git a/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs b/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs index 88b099c477f..e0ae0c11cc2 100644 --- a/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs +++ b/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs @@ -2,6 +2,8 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::paths; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::{AttrStyle, DelimArgs}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use 
rustc_hir::def::Res; use rustc_hir::def_id::LocalDefId; use rustc_hir::{ @@ -11,7 +13,6 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_lint_defs::declare_tool_lint; use rustc_middle::ty::TyCtxt; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_tool_lint! { /// ### What it does @@ -88,7 +89,10 @@ impl<'tcx> LateLintPass<'tcx> for DeriveDeserializeAllowingUnknown { } // Is it derived? - if !find_attr!(cx.tcx.get_all_attrs(item.owner_id), AttributeKind::AutomaticallyDerived(..)) { + if !find_attr!( + cx.tcx.get_all_attrs(item.owner_id), + AttributeKind::AutomaticallyDerived(..) + ) { return; } diff --git a/src/tools/clippy/clippy_lints_internal/src/lib.rs b/src/tools/clippy/clippy_lints_internal/src/lib.rs index 0c94d100c41..43cde86504f 100644 --- a/src/tools/clippy/clippy_lints_internal/src/lib.rs +++ b/src/tools/clippy/clippy_lints_internal/src/lib.rs @@ -20,7 +20,6 @@ #![allow(clippy::missing_clippy_version_attribute)] extern crate rustc_ast; -extern crate rustc_attr_data_structures; extern crate rustc_attr_parsing; extern crate rustc_data_structures; extern crate rustc_errors; diff --git a/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs index 45a866030b2..fda65bc84ed 100644 --- a/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs +++ b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs @@ -1,7 +1,7 @@ use crate::internal_paths; use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; -use clippy_utils::is_lint_allowed; use clippy_utils::macros::root_macro_call_first_node; +use clippy_utils::{is_lint_allowed, sym}; use rustc_ast::ast::LitKind; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_hir as hir; @@ -12,9 +12,9 @@ use rustc_hir::{ExprKind, HirId, Item, MutTy, Mutability, Path, TyKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::Span; use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; -use rustc_span::{Span, sym}; declare_tool_lint! 
{ /// ### What it does @@ -160,9 +160,8 @@ impl<'tcx> LateLintPass<'tcx> for LintWithoutLintPass { let body = cx.tcx.hir_body_owned_by( impl_item_refs .iter() - .find(|iiref| iiref.ident.as_str() == "lint_vec") + .find(|&&iiref| cx.tcx.item_name(iiref.owner_id) == sym::lint_vec) .expect("LintPass needs to implement lint_vec") - .id .owner_id .def_id, ); diff --git a/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs index 70b3c03d2bb..66aeb910891 100644 --- a/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs +++ b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs @@ -1,6 +1,7 @@ use crate::internal_paths; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; +use clippy_utils::sym; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass, LintContext}; @@ -40,7 +41,9 @@ impl LateLintPass<'_> for MsrvAttrImpl { .filter(|t| matches!(t.kind(), GenericArgKind::Type(_))) .any(|t| internal_paths::MSRV_STACK.matches_ty(cx, t.expect_ty())) }) - && !items.iter().any(|item| item.ident.name.as_str() == "check_attributes") + && !items + .iter() + .any(|&item| cx.tcx.item_name(item.owner_id) == sym::check_attributes) { let span = cx.sess().source_map().span_through_char(item.span, '{'); span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_test_deps/Cargo.lock b/src/tools/clippy/clippy_test_deps/Cargo.lock index a591dae3a1a..5be404f24e6 100644 --- a/src/tools/clippy/clippy_test_deps/Cargo.lock +++ b/src/tools/clippy/clippy_test_deps/Cargo.lock @@ -72,6 +72,7 @@ dependencies = [ "futures", "if_chain", "itertools", + "libc", "parking_lot", "quote", "regex", diff --git a/src/tools/clippy/clippy_test_deps/Cargo.toml b/src/tools/clippy/clippy_test_deps/Cargo.toml index a23ffcaf2f9..fcedc5d4843 100644 --- a/src/tools/clippy/clippy_test_deps/Cargo.toml +++ b/src/tools/clippy/clippy_test_deps/Cargo.toml @@ -6,6 +6,7 @@ edition = "2021" # Add dependencies here to make them available in ui tests. 
[dependencies] +libc = "0.2" regex = "1.5.5" serde = { version = "1.0.145", features = ["derive"] } if_chain = "1.0" diff --git a/src/tools/clippy/clippy_utils/README.md b/src/tools/clippy/clippy_utils/README.md index 645b644d9f4..19e71f6af1d 100644 --- a/src/tools/clippy/clippy_utils/README.md +++ b/src/tools/clippy/clippy_utils/README.md @@ -8,7 +8,7 @@ This crate is only guaranteed to build with this `nightly` toolchain: <!-- begin autogenerated nightly --> ``` -nightly-2025-07-10 +nightly-2025-07-25 ``` <!-- end autogenerated nightly --> diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs index 34472eaab93..4ccd9c5300b 100644 --- a/src/tools/clippy/clippy_utils/src/attrs.rs +++ b/src/tools/clippy/clippy_utils/src/attrs.rs @@ -2,7 +2,8 @@ use crate::source::SpanRangeExt; use crate::{sym, tokenize_with_text}; use rustc_ast::attr; use rustc_ast::attr::AttributeExt; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_errors::Applicability; use rustc_lexer::TokenKind; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs index 25afa12e95d..ecd88daa6b3 100644 --- a/src/tools/clippy/clippy_utils/src/consts.rs +++ b/src/tools/clippy/clippy_utils/src/consts.rs @@ -10,7 +10,7 @@ use crate::{clip, is_direct_expn_of, sext, unsext}; use rustc_abi::Size; use rustc_apfloat::Float; use rustc_apfloat::ieee::{Half, Quad}; -use rustc_ast::ast::{self, LitFloatType, LitKind}; +use rustc_ast::ast::{LitFloatType, LitKind}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp, @@ -309,10 +309,10 @@ pub fn lit_to_mir_constant<'tcx>(lit: &LitKind, ty: Option<Ty<'tcx>>) -> Constan LitKind::Int(n, _) => Constant::Int(n.get()), LitKind::Float(ref is, LitFloatType::Suffixed(fty)) => match fty { // FIXME(f16_f128): just use `parse()` directly when available for `f16`/`f128` - ast::FloatTy::F16 => Constant::parse_f16(is.as_str()), - ast::FloatTy::F32 => Constant::F32(is.as_str().parse().unwrap()), - ast::FloatTy::F64 => Constant::F64(is.as_str().parse().unwrap()), - ast::FloatTy::F128 => Constant::parse_f128(is.as_str()), + FloatTy::F16 => Constant::parse_f16(is.as_str()), + FloatTy::F32 => Constant::F32(is.as_str().parse().unwrap()), + FloatTy::F64 => Constant::F64(is.as_str().parse().unwrap()), + FloatTy::F128 => Constant::parse_f128(is.as_str()), }, LitKind::Float(ref is, LitFloatType::Unsuffixed) => match ty.expect("type of float is known").kind() { ty::Float(FloatTy::F16) => Constant::parse_f16(is.as_str()), diff --git a/src/tools/clippy/clippy_utils/src/diagnostics.rs b/src/tools/clippy/clippy_utils/src/diagnostics.rs index 8453165818b..625e1eead21 100644 --- a/src/tools/clippy/clippy_utils/src/diagnostics.rs +++ b/src/tools/clippy/clippy_utils/src/diagnostics.rs @@ -22,10 +22,13 @@ fn docs_link(diag: &mut Diag<'_, ()>, lint: &'static Lint) { { diag.help(format!( "for further information visit https://rust-lang.github.io/rust-clippy/{}/index.html#{lint}", - &option_env!("RUST_RELEASE_NUM").map_or("master".to_string(), |n| { - // extract just major + minor version and ignore patch versions - format!("rust-{}", n.rsplit_once('.').unwrap().1) - }) + &option_env!("RUST_RELEASE_NUM").map_or_else( + || "master".to_string(), + |n| { + // extract just major + minor version and ignore patch versions + 
format!("rust-{}", n.rsplit_once('.').unwrap().1) + } + ) )); } } diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs index 9d38672efad..eb3f442ac75 100644 --- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs +++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs @@ -51,7 +51,7 @@ impl ops::BitOrAssign for EagernessSuggestion { fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: bool) -> EagernessSuggestion { use EagernessSuggestion::{Eager, Lazy, NoChange}; - let ty = match cx.tcx.impl_of_method(fn_id) { + let ty = match cx.tcx.impl_of_assoc(fn_id) { Some(id) => cx.tcx.type_of(id).instantiate_identity(), None => return Lazy, }; diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index ff1ee663f9b..5b9b0ef3001 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -28,7 +28,6 @@ extern crate indexmap; extern crate rustc_abi; extern crate rustc_ast; -extern crate rustc_attr_data_structures; extern crate rustc_attr_parsing; extern crate rustc_const_eval; extern crate rustc_data_structures; @@ -89,9 +88,10 @@ use std::sync::{Mutex, MutexGuard, OnceLock}; use itertools::Itertools; use rustc_abi::Integer; -use rustc_ast::join_path_syms; use rustc_ast::ast::{self, LitKind, RangeLimits}; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_ast::join_path_syms; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::packed::Pu128; use rustc_data_structures::unhash::UnindexMap; @@ -114,7 +114,7 @@ use rustc_middle::hir::nested_filter; use rustc_middle::hir::place::PlaceBase; use rustc_middle::lint::LevelAndSource; use rustc_middle::mir::{AggregateKind, Operand, RETURN_PLACE, Rvalue, StatementKind, TerminatorKind}; -use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow}; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, PointerCoercion}; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{ self as rustc_ty, Binder, BorrowKind, ClosureKind, EarlyBinder, GenericArgKind, GenericArgsRef, IntTy, Ty, TyCtxt, @@ -349,7 +349,7 @@ pub fn is_ty_alias(qpath: &QPath<'_>) -> bool { /// Checks if the given method call expression calls an inherent method. 
pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) { - cx.tcx.trait_of_item(method_id).is_none() + cx.tcx.trait_of_assoc(method_id).is_none() } else { false } @@ -357,7 +357,7 @@ pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Checks if a method is defined in an impl of a diagnostic item pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool { - if let Some(impl_did) = cx.tcx.impl_of_method(def_id) + if let Some(impl_did) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { return cx.tcx.is_diagnostic_item(diag_item, adt.did()); @@ -367,7 +367,7 @@ pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbo /// Checks if a method is in a diagnostic item trait pub fn is_diag_trait_item(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool { - if let Some(trait_did) = cx.tcx.trait_of_item(def_id) { + if let Some(trait_did) = cx.tcx.trait_of_assoc(def_id) { return cx.tcx.is_diagnostic_item(diag_item, trait_did); } false @@ -620,7 +620,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath< if let QPath::TypeRelative(_, method) = path && method.ident.name == sym::new - && let Some(impl_did) = cx.tcx.impl_of_method(def_id) + && let Some(impl_did) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { return std_types_symbols.iter().any(|&symbol| { @@ -1897,6 +1897,7 @@ pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// * `|x| { return x }` /// * `|x| { return x; }` /// * `|(x, y)| (x, y)` +/// * `|[x, y]| [x, y]` /// /// Consider calling [`is_expr_untyped_identity_function`] or [`is_expr_identity_function`] instead. fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { @@ -1907,9 +1908,9 @@ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { .get(pat.hir_id) .is_some_and(|mode| matches!(mode.0, ByRef::Yes(_))) { - // If a tuple `(x, y)` is of type `&(i32, i32)`, then due to match ergonomics, - // the inner patterns become references. Don't consider this the identity function - // as that changes types. + // If the parameter is `(x, y)` of type `&(T, T)`, or `[x, y]` of type `&[T; 2]`, then + // due to match ergonomics, the inner patterns become references. Don't consider this + // the identity function as that changes types. 
return false; } @@ -1922,6 +1923,13 @@ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { { pats.iter().zip(tup).all(|(pat, expr)| check_pat(cx, pat, expr)) }, + (PatKind::Slice(before, slice, after), ExprKind::Array(arr)) + if slice.is_none() && before.len() + after.len() == arr.len() => + { + (before.iter().chain(after)) + .zip(arr) + .all(|(pat, expr)| check_pat(cx, pat, expr)) + }, _ => false, } } @@ -3269,15 +3277,13 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St if go_up_by > max_super { // `super` chain would be too long, just use the absolute path instead - join_path_syms( - once(kw::Crate).chain(to.data.iter().filter_map(|el| { - if let DefPathData::TypeNs(sym) = el.data { - Some(sym) - } else { - None - } - })) - ) + join_path_syms(once(kw::Crate).chain(to.data.iter().filter_map(|el| { + if let DefPathData::TypeNs(sym) = el.data { + Some(sym) + } else { + None + } + }))) } else { join_path_syms(repeat_n(kw::Super, go_up_by).chain(path)) } @@ -3560,3 +3566,14 @@ pub fn potential_return_of_enclosing_body(cx: &LateContext<'_>, expr: &Expr<'_>) // enclosing body. false } + +/// Checks if the expression has adjustments that require coercion, for example: dereferencing with +/// overloaded deref, coercing pointers and `dyn` objects. +pub fn expr_adjustment_requires_coercion(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + cx.typeck_results().expr_adjustments(expr).iter().any(|adj| { + matches!( + adj.kind, + Adjust::Deref(Some(_)) | Adjust::Pointer(PointerCoercion::Unsize) | Adjust::NeverToAny + ) + }) +} diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs index ba126fcd05d..60473a26493 100644 --- a/src/tools/clippy/clippy_utils/src/macros.rs +++ b/src/tools/clippy/clippy_utils/src/macros.rs @@ -42,7 +42,7 @@ pub fn is_format_macro(cx: &LateContext<'_>, macro_def_id: DefId) -> bool { } else { // Allow users to tag any macro as being format!-like // TODO: consider deleting FORMAT_MACRO_DIAG_ITEMS and using just this method - get_unique_attr(cx.sess(), cx.tcx.get_attrs_unchecked(macro_def_id), sym::format_args).is_some() + get_unique_attr(cx.sess(), cx.tcx.get_all_attrs(macro_def_id), sym::format_args).is_some() } } diff --git a/src/tools/clippy/clippy_utils/src/msrvs.rs b/src/tools/clippy/clippy_utils/src/msrvs.rs index 24ed4c3a8be..480e0687756 100644 --- a/src/tools/clippy/clippy_utils/src/msrvs.rs +++ b/src/tools/clippy/clippy_utils/src/msrvs.rs @@ -1,7 +1,7 @@ use crate::sym; use rustc_ast::Attribute; use rustc_ast::attr::AttributeExt; -use rustc_attr_data_structures::RustcVersion; +use rustc_hir::RustcVersion; use rustc_attr_parsing::parse_version; use rustc_lint::LateContext; use rustc_session::Session; diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs index c681806517a..ea8cfc59356 100644 --- a/src/tools/clippy/clippy_utils/src/paths.rs +++ b/src/tools/clippy/clippy_utils/src/paths.rs @@ -308,10 +308,11 @@ fn local_item_child_by_name(tcx: TyCtxt<'_>, local_id: LocalDefId, ns: PathNS, n None } }), - ItemKind::Impl(..) | ItemKind::Trait(..) - => tcx.associated_items(local_id).filter_by_name_unhygienic(name) - .find(|assoc_item| ns.matches(Some(assoc_item.namespace()))) - .map(|assoc_item| assoc_item.def_id), + ItemKind::Impl(..) | ItemKind::Trait(..) 
=> tcx + .associated_items(local_id) + .filter_by_name_unhygienic(name) + .find(|assoc_item| ns.matches(Some(assoc_item.namespace()))) + .map(|assoc_item| assoc_item.def_id), _ => None, } } diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index b3356450d38..79116eba971 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -5,7 +5,7 @@ use crate::msrvs::{self, Msrv}; use hir::LangItem; -use rustc_attr_data_structures::{RustcVersion, StableSince}; +use rustc_hir::{RustcVersion, StableSince}; use rustc_const_eval::check_consts::ConstCx; use rustc_hir as hir; use rustc_hir::def_id::DefId; @@ -420,11 +420,11 @@ pub fn is_stable_const_fn(cx: &LateContext<'_>, def_id: DefId, msrv: Msrv) -> bo .lookup_const_stability(def_id) .or_else(|| { cx.tcx - .trait_of_item(def_id) + .trait_of_assoc(def_id) .and_then(|trait_def_id| cx.tcx.lookup_const_stability(trait_def_id)) }) .is_none_or(|const_stab| { - if let rustc_attr_data_structures::StabilityLevel::Stable { since, .. } = const_stab.level { + if let rustc_hir::StabilityLevel::Stable { since, .. } = const_stab.level { // Checking MSRV is manually necessary because `rustc` has no such concept. This entire // function could be removed if `rustc` provided a MSRV-aware version of `is_stable_const_fn`. // as a part of an unimplemented MSRV check https://github.com/rust-lang/rust/issues/65262. diff --git a/src/tools/clippy/clippy_utils/src/sym.rs b/src/tools/clippy/clippy_utils/src/sym.rs index 8a8218c6976..934be97d94e 100644 --- a/src/tools/clippy/clippy_utils/src/sym.rs +++ b/src/tools/clippy/clippy_utils/src/sym.rs @@ -98,6 +98,7 @@ generate! { ceil_char_boundary, chain, chars, + check_attributes, checked_abs, checked_add, checked_isqrt, @@ -196,6 +197,7 @@ generate! { kw, last, lazy_static, + lint_vec, ln, lock, lock_api, @@ -261,6 +263,7 @@ generate! { read_to_end, read_to_string, read_unaligned, + redundant_imports, redundant_pub_crate, regex, rem_euclid, diff --git a/src/tools/clippy/clippy_utils/src/ty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/mod.rs index fe208c032f4..02a8eda5893 100644 --- a/src/tools/clippy/clippy_utils/src/ty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/mod.rs @@ -6,7 +6,8 @@ use core::ops::ControlFlow; use itertools::Itertools; use rustc_abi::VariantIdx; use rustc_ast::ast::Mutability; -use rustc_attr_data_structures::{AttributeKind, find_attr}; +use rustc_hir::attrs::{AttributeKind}; +use rustc_hir::find_attr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; @@ -492,10 +493,7 @@ pub fn peel_mid_ty_refs_is_mutable(ty: Ty<'_>) -> (Ty<'_>, usize, Mutability) { /// Returns `true` if the given type is an `unsafe` function. pub fn type_is_unsafe_function<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.kind() { - ty::FnDef(..) | ty::FnPtr(..) => ty.fn_sig(cx.tcx).safety().is_unsafe(), - _ => false, - } + ty.is_fn() && ty.fn_sig(cx.tcx).safety().is_unsafe() } /// Returns the base type for HIR references and pointers. diff --git a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs index 84df36c75bf..d9c7e6eac9f 100644 --- a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs @@ -12,10 +12,11 @@ //! be considered a bug. 
use crate::paths::{PathNS, lookup_path}; +use rustc_ast::{LitFloatType, LitIntType, LitKind}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{InferKind, Visitor, VisitorExt, walk_qpath, walk_ty}; -use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, GenericArgs, HirId, Node, PathSegment, QPath, TyKind}; +use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, GenericArgs, HirId, Node, Param, PathSegment, QPath, TyKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, AdtDef, GenericArgKind, Ty}; use rustc_span::Span; @@ -24,22 +25,24 @@ mod certainty; use certainty::{Certainty, Meet, join, meet}; pub fn expr_type_is_certain(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - expr_type_certainty(cx, expr).is_certain() + expr_type_certainty(cx, expr, false).is_certain() } -fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { +/// Determine the type certainty of `expr`. `in_arg` indicates that the expression happens within +/// the evaluation of a function or method call argument. +fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>, in_arg: bool) -> Certainty { let certainty = match &expr.kind { ExprKind::Unary(_, expr) | ExprKind::Field(expr, _) | ExprKind::Index(expr, _, _) - | ExprKind::AddrOf(_, _, expr) => expr_type_certainty(cx, expr), + | ExprKind::AddrOf(_, _, expr) => expr_type_certainty(cx, expr, in_arg), - ExprKind::Array(exprs) => join(exprs.iter().map(|expr| expr_type_certainty(cx, expr))), + ExprKind::Array(exprs) => join(exprs.iter().map(|expr| expr_type_certainty(cx, expr, in_arg))), ExprKind::Call(callee, args) => { - let lhs = expr_type_certainty(cx, callee); + let lhs = expr_type_certainty(cx, callee, false); let rhs = if type_is_inferable_from_arguments(cx, expr) { - meet(args.iter().map(|arg| expr_type_certainty(cx, arg))) + meet(args.iter().map(|arg| expr_type_certainty(cx, arg, true))) } else { Certainty::Uncertain }; @@ -47,7 +50,7 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { }, ExprKind::MethodCall(method, receiver, args, _) => { - let mut receiver_type_certainty = expr_type_certainty(cx, receiver); + let mut receiver_type_certainty = expr_type_certainty(cx, receiver, false); // Even if `receiver_type_certainty` is `Certain(Some(..))`, the `Self` type in the method // identified by `type_dependent_def_id(..)` can differ. This can happen as a result of a `deref`, // for example. So update the `DefId` in `receiver_type_certainty` (if any). 
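For context on the type-certainty hunk above: the change stops treating unsuffixed integer and float literals as type-certain when they are not in argument position. The following is a minimal standalone sketch, not taken from the diff and using invented variable names, of why such a literal on its own does not pin down a type:

```rust
fn main() {
    // An unsuffixed literal's type is inferred from its surroundings,
    // so by itself it could be i32, u8, usize, f64, ...
    let inferred = 1; // falls back to i32 only because nothing else constrains it
    let annotated: u64 = 1; // the annotation, not the literal, decides the type
    let suffixed = 1u8; // a suffixed literal names its type directly
    println!("{inferred} {annotated} {suffixed}");
}
```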
@@ -59,7 +62,8 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { let lhs = path_segment_certainty(cx, receiver_type_certainty, method, false); let rhs = if type_is_inferable_from_arguments(cx, expr) { meet( - std::iter::once(receiver_type_certainty).chain(args.iter().map(|arg| expr_type_certainty(cx, arg))), + std::iter::once(receiver_type_certainty) + .chain(args.iter().map(|arg| expr_type_certainty(cx, arg, true))), ) } else { Certainty::Uncertain @@ -67,16 +71,39 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { lhs.join(rhs) }, - ExprKind::Tup(exprs) => meet(exprs.iter().map(|expr| expr_type_certainty(cx, expr))), + ExprKind::Tup(exprs) => meet(exprs.iter().map(|expr| expr_type_certainty(cx, expr, in_arg))), - ExprKind::Binary(_, lhs, rhs) => expr_type_certainty(cx, lhs).meet(expr_type_certainty(cx, rhs)), + ExprKind::Binary(_, lhs, rhs) => { + // If one of the side of the expression is uncertain, the certainty will come from the other side, + // with no information on the type. + match ( + expr_type_certainty(cx, lhs, in_arg), + expr_type_certainty(cx, rhs, in_arg), + ) { + (Certainty::Uncertain, Certainty::Certain(_)) | (Certainty::Certain(_), Certainty::Uncertain) => { + Certainty::Certain(None) + }, + (l, r) => l.meet(r), + } + }, - ExprKind::Lit(_) => Certainty::Certain(None), + ExprKind::Lit(lit) => { + if !in_arg + && matches!( + lit.node, + LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) + ) + { + Certainty::Uncertain + } else { + Certainty::Certain(None) + } + }, ExprKind::Cast(_, ty) => type_certainty(cx, ty), ExprKind::If(_, if_expr, Some(else_expr)) => { - expr_type_certainty(cx, if_expr).join(expr_type_certainty(cx, else_expr)) + expr_type_certainty(cx, if_expr, in_arg).join(expr_type_certainty(cx, else_expr, in_arg)) }, ExprKind::Path(qpath) => qpath_certainty(cx, qpath, false), @@ -188,6 +215,20 @@ fn qpath_certainty(cx: &LateContext<'_>, qpath: &QPath<'_>, resolves_to_type: bo certainty } +/// Tries to tell whether `param` resolves to something certain, e.g., a non-wildcard type if +/// present. The certainty `DefId` is cleared before returning. +fn param_certainty(cx: &LateContext<'_>, param: &Param<'_>) -> Certainty { + let owner_did = cx.tcx.hir_enclosing_body_owner(param.hir_id); + let Some(fn_decl) = cx.tcx.hir_fn_decl_by_hir_id(cx.tcx.local_def_id_to_hir_id(owner_did)) else { + return Certainty::Uncertain; + }; + let inputs = fn_decl.inputs; + let body_params = cx.tcx.hir_body_owned_by(owner_did).params; + std::iter::zip(body_params, inputs) + .find(|(p, _)| p.hir_id == param.hir_id) + .map_or(Certainty::Uncertain, |(_, ty)| type_certainty(cx, ty).clear_def_id()) +} + fn path_segment_certainty( cx: &LateContext<'_>, parent_certainty: Certainty, @@ -240,15 +281,16 @@ fn path_segment_certainty( // `get_parent` because `hir_id` refers to a `Pat`, and we're interested in the node containing the `Pat`. Res::Local(hir_id) => match cx.tcx.parent_hir_node(hir_id) { - // An argument's type is always certain. - Node::Param(..) => Certainty::Certain(None), + // A parameter's type is not always certain, as it may come from an untyped closure definition, + // or from a wildcard in a typed closure definition. + Node::Param(param) => param_certainty(cx, param), // A local's type is certain if its type annotation is certain or it has an initializer whose // type is certain. 
Node::LetStmt(local) => { let lhs = local.ty.map_or(Certainty::Uncertain, |ty| type_certainty(cx, ty)); let rhs = local .init - .map_or(Certainty::Uncertain, |init| expr_type_certainty(cx, init)); + .map_or(Certainty::Uncertain, |init| expr_type_certainty(cx, init, false)); let certainty = lhs.join(rhs); if resolves_to_type { certainty diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs index 1b049b6d12c..76d43feee12 100644 --- a/src/tools/clippy/clippy_utils/src/usage.rs +++ b/src/tools/clippy/clippy_utils/src/usage.rs @@ -1,3 +1,4 @@ +use crate::macros::root_macro_call_first_node; use crate::visitors::{Descend, Visitable, for_each_expr, for_each_expr_without_closures}; use crate::{self as utils, get_enclosing_loop_or_multi_call_closure}; use core::ops::ControlFlow; @@ -9,6 +10,7 @@ use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; use rustc_middle::mir::FakeReadCause; use rustc_middle::ty; +use rustc_span::sym; /// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined. pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> { @@ -140,6 +142,46 @@ impl<'tcx> Visitor<'tcx> for BindingUsageFinder<'_, 'tcx> { } } +/// Checks if the given expression is a macro call to `todo!()` or `unimplemented!()`. +pub fn is_todo_unimplemented_macro(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + root_macro_call_first_node(cx, expr).is_some_and(|macro_call| { + [sym::todo_macro, sym::unimplemented_macro] + .iter() + .any(|&sym| cx.tcx.is_diagnostic_item(sym, macro_call.def_id)) + }) +} + +/// Checks if the given expression is a stub, i.e., a `todo!()` or `unimplemented!()` expression, +/// or a block whose last expression is a `todo!()` or `unimplemented!()`. +pub fn is_todo_unimplemented_stub(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + if let ExprKind::Block(block, _) = expr.kind { + if let Some(last_expr) = block.expr { + return is_todo_unimplemented_macro(cx, last_expr); + } + + return block.stmts.last().is_some_and(|stmt| { + if let hir::StmtKind::Expr(expr) | hir::StmtKind::Semi(expr) = stmt.kind { + return is_todo_unimplemented_macro(cx, expr); + } + false + }); + } + + is_todo_unimplemented_macro(cx, expr) +} + +/// Checks if the given expression contains macro call to `todo!()` or `unimplemented!()`. 
+pub fn contains_todo_unimplement_macro(cx: &LateContext<'_>, expr: &'_ Expr<'_>) -> bool { + for_each_expr_without_closures(expr, |e| { + if is_todo_unimplemented_macro(cx, e) { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }) + .is_some() +} + pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool { for_each_expr_without_closures(expression, |e| { match e.kind { diff --git a/src/tools/clippy/rust-toolchain.toml b/src/tools/clippy/rust-toolchain.toml index f46e079db3f..0edb80edd04 100644 --- a/src/tools/clippy/rust-toolchain.toml +++ b/src/tools/clippy/rust-toolchain.toml @@ -1,6 +1,6 @@ [toolchain] # begin autogenerated nightly -channel = "nightly-2025-07-10" +channel = "nightly-2025-07-25" # end autogenerated nightly components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] profile = "minimal" diff --git a/src/tools/clippy/rustc_tools_util/src/lib.rs b/src/tools/clippy/rustc_tools_util/src/lib.rs index b45edf23455..194ed84d04c 100644 --- a/src/tools/clippy/rustc_tools_util/src/lib.rs +++ b/src/tools/clippy/rustc_tools_util/src/lib.rs @@ -157,7 +157,8 @@ pub fn get_commit_date() -> Option<String> { #[must_use] pub fn get_compiler_version() -> Option<String> { - get_output("rustc", &["-V"]) + let compiler = std::option_env!("RUSTC").unwrap_or("rustc"); + get_output(compiler, &["-V"]) } #[must_use] @@ -172,6 +173,8 @@ pub fn get_channel(compiler_version: Option<String>) -> String { return String::from("beta"); } else if rustc_output.contains("nightly") { return String::from("nightly"); + } else if rustc_output.contains("dev") { + return String::from("dev"); } } diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index 57d623b2cfc..464efc45c6b 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -151,7 +151,32 @@ impl TestContext { defaults.set_custom( "dependencies", DependencyBuilder { - program: CommandBuilder::cargo(), + program: { + let mut p = CommandBuilder::cargo(); + // If we run in bootstrap, we need to use the right compiler for building the + // tests -- not the compiler that built clippy, but the compiler that got linked + // into clippy. Just invoking TEST_RUSTC does not work because LD_LIBRARY_PATH + // is set in a way that makes it pick the wrong sysroot. Sadly due to + // <https://github.com/rust-lang/cargo/issues/4423> we cannot use RUSTFLAGS to + // set `--sysroot`, so we need to use bootstrap's rustc wrapper. That wrapper + // however has some staging logic that is hurting us here, so to work around + // that we set both the "real" and "staging" rustc to TEST_RUSTC, including the + // associated library paths. 
+ #[expect( + clippy::option_env_unwrap, + reason = "TEST_RUSTC will ensure that the requested env vars are set during compile time" + )] + if let Some(rustc) = option_env!("TEST_RUSTC") { + let libdir = option_env!("TEST_RUSTC_LIB").unwrap(); + let sysroot = option_env!("TEST_SYSROOT").unwrap(); + p.envs.push(("RUSTC_REAL".into(), Some(rustc.into()))); + p.envs.push(("RUSTC_REAL_LIBDIR".into(), Some(libdir.into()))); + p.envs.push(("RUSTC_SNAPSHOT".into(), Some(rustc.into()))); + p.envs.push(("RUSTC_SNAPSHOT_LIBDIR".into(), Some(libdir.into()))); + p.envs.push(("RUSTC_SYSROOT".into(), Some(sysroot.into()))); + } + p + }, crate_manifest_path: Path::new("clippy_test_deps").join("Cargo.toml"), build_std: None, bless_lockfile: self.args.bless, @@ -192,6 +217,9 @@ impl TestContext { let dep = format!("-Ldependency={}", Path::new(host_libs).join("deps").display()); config.program.args.push(dep.into()); } + if let Some(sysroot) = option_env!("TEST_SYSROOT") { + config.program.args.push(format!("--sysroot={sysroot}").into()); + } config.program.program = profile_path.join(if cfg!(windows) { "clippy-driver.exe" diff --git a/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr b/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr index 8a85d38fba3..608264beb10 100644 --- a/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr +++ b/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr @@ -18,6 +18,8 @@ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is | LL | sleep(Duration::new(1, 0)); | ^^^^^ + | + = note: you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr index 020b3cc7878..a5dfd7015a3 100644 --- a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr +++ b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 501]), LL + B(Box<[u8; 501]>), diff --git a/src/tools/clippy/tests/ui/approx_const.rs b/src/tools/clippy/tests/ui/approx_const.rs index 6461666be8f..fc493421a16 100644 --- a/src/tools/clippy/tests/ui/approx_const.rs +++ b/src/tools/clippy/tests/ui/approx_const.rs @@ -106,4 +106,19 @@ fn main() { //~^ approx_constant let no_tau = 6.3; + + // issue #15194 + #[allow(clippy::excessive_precision)] + let x: f64 = 3.1415926535897932384626433832; + //~^ approx_constant + + #[allow(clippy::excessive_precision)] + let _: f64 = 003.14159265358979311599796346854418516159057617187500; + //~^ approx_constant + + let almost_frac_1_sqrt_2 = 00.70711; + //~^ approx_constant + + let almost_frac_1_sqrt_2 = 00.707_11; + //~^ approx_constant } diff --git a/src/tools/clippy/tests/ui/approx_const.stderr b/src/tools/clippy/tests/ui/approx_const.stderr index f7bda0468cb..32a3517ff2e 
100644 --- a/src/tools/clippy/tests/ui/approx_const.stderr +++ b/src/tools/clippy/tests/ui/approx_const.stderr @@ -184,5 +184,37 @@ LL | let almost_tau = 6.28; | = help: consider using the constant directly -error: aborting due to 23 previous errors +error: approximate value of `f{32, 64}::consts::PI` found + --> tests/ui/approx_const.rs:112:18 + | +LL | let x: f64 = 3.1415926535897932384626433832; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using the constant directly + +error: approximate value of `f{32, 64}::consts::PI` found + --> tests/ui/approx_const.rs:116:18 + | +LL | let _: f64 = 003.14159265358979311599796346854418516159057617187500; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using the constant directly + +error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found + --> tests/ui/approx_const.rs:119:32 + | +LL | let almost_frac_1_sqrt_2 = 00.70711; + | ^^^^^^^^ + | + = help: consider using the constant directly + +error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found + --> tests/ui/approx_const.rs:122:32 + | +LL | let almost_frac_1_sqrt_2 = 00.707_11; + | ^^^^^^^^^ + | + = help: consider using the constant directly + +error: aborting due to 27 previous errors diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr index 5556b0df88c..ce726206b0c 100644 --- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr +++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr @@ -5,7 +5,7 @@ LL | let _ = Arc::new(RefCell::new(42)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<RefCell<i32>>` is not `Send` and `Sync` as `RefCell<i32>` is not `Sync` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `RefCell<i32>` `Send` and `Sync` or consider a wrapper type such as `Mutex` = note: `-D clippy::arc-with-non-send-sync` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::arc_with_non_send_sync)]` @@ -17,7 +17,7 @@ LL | let _ = Arc::new(mutex.lock().unwrap()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<MutexGuard<'_, i32>>` is not `Send` and `Sync` as `MutexGuard<'_, i32>` is not `Send` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `MutexGuard<'_, i32>` `Send` and `Sync` or consider a wrapper type such as `Mutex` error: usage of an `Arc` that is not `Send` and `Sync` @@ -27,7 +27,7 @@ LL | let _ = Arc::new(&42 as *const i32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<*const i32>` is not `Send` and `Sync` as `*const i32` is neither `Send` nor `Sync` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `*const i32` `Send` and `Sync` or consider a wrapper type such as `Mutex` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs index 21be2af201f..3245b2c983e 100644 --- a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs +++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs @@ -664,6 +664,20 @@ pub fn issue_12318() { //~^ arithmetic_side_effects } +pub fn issue_15225() { + use core::num::{NonZero, NonZeroU8}; + + 
let one = const { NonZeroU8::new(1).unwrap() }; + let _ = one.get() - 1; + + let one: NonZero<u8> = const { NonZero::new(1).unwrap() }; + let _ = one.get() - 1; + + type AliasedType = u8; + let one: NonZero<AliasedType> = const { NonZero::new(1).unwrap() }; + let _ = one.get() - 1; +} + pub fn explicit_methods() { use core::ops::Add; let one: i32 = 1; diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr index e15fb612be5..4150493ba94 100644 --- a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr +++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr @@ -758,13 +758,13 @@ LL | one.sub_assign(1); | ^^^^^^^^^^^^^^^^^ error: arithmetic operation that can potentially result in unexpected side-effects - --> tests/ui/arithmetic_side_effects.rs:670:5 + --> tests/ui/arithmetic_side_effects.rs:684:5 | LL | one.add(&one); | ^^^^^^^^^^^^^ error: arithmetic operation that can potentially result in unexpected side-effects - --> tests/ui/arithmetic_side_effects.rs:672:5 + --> tests/ui/arithmetic_side_effects.rs:686:5 | LL | Box::new(one).add(one); | ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/assign_ops.fixed b/src/tools/clippy/tests/ui/assign_ops.fixed index eee61f949e7..3754b9dfe74 100644 --- a/src/tools/clippy/tests/ui/assign_ops.fixed +++ b/src/tools/clippy/tests/ui/assign_ops.fixed @@ -84,6 +84,7 @@ mod issue14871 { const ONE: Self; } + #[rustfmt::skip] // rustfmt doesn't understand the order of pub const on traits (yet) pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/assign_ops.rs b/src/tools/clippy/tests/ui/assign_ops.rs index 13ffcee0a3c..0b878d4f490 100644 --- a/src/tools/clippy/tests/ui/assign_ops.rs +++ b/src/tools/clippy/tests/ui/assign_ops.rs @@ -84,6 +84,7 @@ mod issue14871 { const ONE: Self; } + #[rustfmt::skip] // rustfmt doesn't understand the order of pub const on traits (yet) pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/auxiliary/external_item.rs b/src/tools/clippy/tests/ui/auxiliary/external_item.rs index ca4bc369e44..621e18f5c01 100644 --- a/src/tools/clippy/tests/ui/auxiliary/external_item.rs +++ b/src/tools/clippy/tests/ui/auxiliary/external_item.rs @@ -4,4 +4,4 @@ impl _ExternalStruct { pub fn _foo(self) {} } -pub fn _exernal_foo() {} +pub fn _external_foo() {} diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs index 5992d15935d..54650922871 100644 --- a/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs +++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_derive.rs @@ -16,7 +16,7 @@ pub fn derive(_: TokenStream) -> TokenStream { let output = quote! { // Should not trigger `useless_attribute` #[allow(dead_code)] - extern crate rustc_middle; + extern crate core; }; output } diff --git a/src/tools/clippy/tests/ui/cast_alignment.rs b/src/tools/clippy/tests/ui/cast_alignment.rs index 5773ffddb91..ef667f5598a 100644 --- a/src/tools/clippy/tests/ui/cast_alignment.rs +++ b/src/tools/clippy/tests/ui/cast_alignment.rs @@ -1,6 +1,5 @@ //! 
Test casts for alignment issues -#![feature(rustc_private)] #![feature(core_intrinsics)] #![warn(clippy::cast_ptr_alignment)] #![allow( @@ -10,8 +9,6 @@ clippy::borrow_as_ptr )] -extern crate libc; - fn main() { /* These should be warned against */ diff --git a/src/tools/clippy/tests/ui/cast_alignment.stderr b/src/tools/clippy/tests/ui/cast_alignment.stderr index 6d9a81f0ecf..ee4c3e9a77e 100644 --- a/src/tools/clippy/tests/ui/cast_alignment.stderr +++ b/src/tools/clippy/tests/ui/cast_alignment.stderr @@ -1,5 +1,5 @@ error: casting from `*const u8` to a more-strictly-aligned pointer (`*const u16`) (1 < 2 bytes) - --> tests/ui/cast_alignment.rs:19:5 + --> tests/ui/cast_alignment.rs:16:5 | LL | (&1u8 as *const u8) as *const u16; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -8,19 +8,19 @@ LL | (&1u8 as *const u8) as *const u16; = help: to override `-D warnings` add `#[allow(clippy::cast_ptr_alignment)]` error: casting from `*mut u8` to a more-strictly-aligned pointer (`*mut u16`) (1 < 2 bytes) - --> tests/ui/cast_alignment.rs:22:5 + --> tests/ui/cast_alignment.rs:19:5 | LL | (&mut 1u8 as *mut u8) as *mut u16; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: casting from `*const u8` to a more-strictly-aligned pointer (`*const u16`) (1 < 2 bytes) - --> tests/ui/cast_alignment.rs:26:5 + --> tests/ui/cast_alignment.rs:23:5 | LL | (&1u8 as *const u8).cast::<u16>(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: casting from `*mut u8` to a more-strictly-aligned pointer (`*mut u16`) (1 < 2 bytes) - --> tests/ui/cast_alignment.rs:29:5 + --> tests/ui/cast_alignment.rs:26:5 | LL | (&mut 1u8 as *mut u8).cast::<u16>(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/checked_conversions.fixed b/src/tools/clippy/tests/ui/checked_conversions.fixed index 279a5b6e1ff..6175275ef04 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.fixed +++ b/src/tools/clippy/tests/ui/checked_conversions.fixed @@ -95,7 +95,7 @@ pub const fn issue_8898(i: u32) -> bool { #[clippy::msrv = "1.33"] fn msrv_1_33() { let value: i64 = 33; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; } #[clippy::msrv = "1.34"] diff --git a/src/tools/clippy/tests/ui/checked_conversions.rs b/src/tools/clippy/tests/ui/checked_conversions.rs index c339bc674bb..9ed0e8f660d 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.rs +++ b/src/tools/clippy/tests/ui/checked_conversions.rs @@ -95,13 +95,13 @@ pub const fn issue_8898(i: u32) -> bool { #[clippy::msrv = "1.33"] fn msrv_1_33() { let value: i64 = 33; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; } #[clippy::msrv = "1.34"] fn msrv_1_34() { let value: i64 = 34; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; //~^ checked_conversions } diff --git a/src/tools/clippy/tests/ui/checked_conversions.stderr b/src/tools/clippy/tests/ui/checked_conversions.stderr index 3841b9d5a4d..624876dacb2 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.stderr +++ b/src/tools/clippy/tests/ui/checked_conversions.stderr @@ -100,8 +100,8 @@ LL | let _ = value <= u16::MAX as u32 && value as i32 == 5; error: checked cast can be simplified --> tests/ui/checked_conversions.rs:104:13 | -LL | let _ = value <= (u32::MAX as i64) && value >= 0; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()` +LL | let _ = value <= (u32::max_value() as i64) && value >= 0; + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()` error: aborting due to 17 previous errors diff --git a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr index a4bf0099244..26e360112b6 100644 --- a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr +++ b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr @@ -328,7 +328,7 @@ error: creating a shared reference to mutable static LL | if X.is_some() { | ^^^^^^^^^^^ shared reference to mutable static | - = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/static-mut-references.html> + = note: for more information, see <https://doc.rust-lang.org/edition-guide/rust-2024/static-mut-references.html> = note: shared references to mutable statics are dangerous; it's undefined behavior if the static is mutated or if a mutable reference is created for it while the shared reference lives = note: `#[deny(static_mut_refs)]` on by default diff --git a/src/tools/clippy/tests/ui/expect.rs b/src/tools/clippy/tests/ui/expect.rs index 8f7379f0021..1ab01ecfcfe 100644 --- a/src/tools/clippy/tests/ui/expect.rs +++ b/src/tools/clippy/tests/ui/expect.rs @@ -16,7 +16,26 @@ fn expect_result() { //~^ expect_used } +#[allow(clippy::ok_expect)] +#[allow(clippy::err_expect)] +fn issue_15247() { + let x: Result<u8, u8> = Err(0); + x.ok().expect("Huh"); + //~^ expect_used + + { x.ok() }.expect("..."); + //~^ expect_used + + let y: Result<u8, u8> = Ok(0); + y.err().expect("Huh"); + //~^ expect_used + + { y.err() }.expect("..."); + //~^ expect_used +} + fn main() { expect_option(); expect_result(); + issue_15247(); } diff --git a/src/tools/clippy/tests/ui/expect.stderr b/src/tools/clippy/tests/ui/expect.stderr index 70cf3072003..353fb776531 100644 --- a/src/tools/clippy/tests/ui/expect.stderr +++ b/src/tools/clippy/tests/ui/expect.stderr @@ -24,5 +24,37 @@ LL | let _ = res.expect_err(""); | = note: if this value is an `Ok`, it will panic -error: aborting due to 3 previous errors +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:23:5 + | +LL | x.ok().expect("Huh"); + | ^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:26:5 + | +LL | { x.ok() }.expect("..."); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:30:5 + | +LL | y.err().expect("Huh"); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:33:5 + | +LL | { y.err() }.expect("..."); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/expect_fun_call.fixed b/src/tools/clippy/tests/ui/expect_fun_call.fixed index 73eaebf773c..b923521afde 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.fixed +++ b/src/tools/clippy/tests/ui/expect_fun_call.fixed @@ -90,17 +90,30 @@ fn main() { "foo" } - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + const fn const_evaluable() -> &'static str { + "foo" + } + + Some("foo").unwrap_or_else(|| panic!("{}", get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", 
get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_static_str()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_static_str())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_non_static_str(&0))); + //~^ expect_fun_call + + Some("foo").unwrap_or_else(|| panic!("{}", const_evaluable())); //~^ expect_fun_call + + const { + Some("foo").expect(const_evaluable()); + } + + Some("foo").expect(const { const_evaluable() }); } //Issue #3839 @@ -122,4 +135,15 @@ fn main() { let format_capture_and_value: Option<i32> = None; format_capture_and_value.unwrap_or_else(|| panic!("{error_code}, {}", 1)); //~^ expect_fun_call + + // Issue #15056 + let a = false; + Some(5).expect(if a { "a" } else { "b" }); + + let return_in_expect: Option<i32> = None; + return_in_expect.expect(if true { + "Error" + } else { + return; + }); } diff --git a/src/tools/clippy/tests/ui/expect_fun_call.rs b/src/tools/clippy/tests/ui/expect_fun_call.rs index ecebc9ebfb6..bc58d24bc81 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.rs +++ b/src/tools/clippy/tests/ui/expect_fun_call.rs @@ -90,6 +90,10 @@ fn main() { "foo" } + const fn const_evaluable() -> &'static str { + "foo" + } + Some("foo").expect(&get_string()); //~^ expect_fun_call Some("foo").expect(get_string().as_ref()); @@ -101,6 +105,15 @@ fn main() { //~^ expect_fun_call Some("foo").expect(get_non_static_str(&0)); //~^ expect_fun_call + + Some("foo").expect(const_evaluable()); + //~^ expect_fun_call + + const { + Some("foo").expect(const_evaluable()); + } + + Some("foo").expect(const { const_evaluable() }); } //Issue #3839 @@ -122,4 +135,15 @@ fn main() { let format_capture_and_value: Option<i32> = None; format_capture_and_value.expect(&format!("{error_code}, {}", 1)); //~^ expect_fun_call + + // Issue #15056 + let a = false; + Some(5).expect(if a { "a" } else { "b" }); + + let return_in_expect: Option<i32> = None; + return_in_expect.expect(if true { + "Error" + } else { + return; + }); } diff --git a/src/tools/clippy/tests/ui/expect_fun_call.stderr b/src/tools/clippy/tests/ui/expect_fun_call.stderr index 36713196cb9..0692ecb4862 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.stderr +++ b/src/tools/clippy/tests/ui/expect_fun_call.stderr @@ -38,58 +38,64 @@ LL | Some("foo").expect(format!("{} {}", 1, 2).as_ref()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{} {}", 1, 2))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:93:21 + --> tests/ui/expect_fun_call.rs:97:21 | LL | Some("foo").expect(&get_string()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:95:21 + --> tests/ui/expect_fun_call.rs:99:21 | LL | Some("foo").expect(get_string().as_ref()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:97:21 + --> tests/ui/expect_fun_call.rs:101:21 | LL | Some("foo").expect(get_string().as_str()); - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:100:21 + --> tests/ui/expect_fun_call.rs:104:21 | LL | Some("foo").expect(get_static_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_static_str()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_static_str()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:102:21 + --> tests/ui/expect_fun_call.rs:106:21 | LL | Some("foo").expect(get_non_static_str(&0)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_non_static_str(&0)))` + +error: function call inside of `expect` + --> tests/ui/expect_fun_call.rs:109:21 + | +LL | Some("foo").expect(const_evaluable()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", const_evaluable()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:107:16 + --> tests/ui/expect_fun_call.rs:120:16 | LL | Some(true).expect(&format!("key {}, {}", 1, 2)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:114:17 + --> tests/ui/expect_fun_call.rs:127:17 | LL | opt_ref.expect(&format!("{:?}", opt_ref)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{:?}", opt_ref))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:119:20 + --> tests/ui/expect_fun_call.rs:132:20 | LL | format_capture.expect(&format!("{error_code}")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}"))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:123:30 + --> tests/ui/expect_fun_call.rs:136:30 | LL | format_capture_and_value.expect(&format!("{error_code}, {}", 1)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}, {}", 1))` -error: aborting due to 15 previous errors +error: aborting due to 16 previous errors diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.fixed b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed index b3e112f19eb..d370b85a67e 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.fixed +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed @@ -89,3 +89,24 @@ fn issue11503() { let _: Vec<usize> = bools.iter().enumerate().filter(|&(i, b)| ****b).map(|(i, b)| i).collect(); //~^ filter_map_bool_then } + +fn issue15047() { + #[derive(Clone, Copy)] + enum MyEnum { + A, + B, + C, + } + + macro_rules! 
foo { + ($e:expr) => { + $e + 1 + }; + } + + let x = 1; + let _ = [(MyEnum::A, "foo", 1i32)] + .iter() + .filter(|&(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar"))).map(|(t, s, i)| foo!(x)); + //~^ filter_map_bool_then +} diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.rs b/src/tools/clippy/tests/ui/filter_map_bool_then.rs index d996b3cb3c5..12295cc2482 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.rs +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.rs @@ -89,3 +89,24 @@ fn issue11503() { let _: Vec<usize> = bools.iter().enumerate().filter_map(|(i, b)| b.then(|| i)).collect(); //~^ filter_map_bool_then } + +fn issue15047() { + #[derive(Clone, Copy)] + enum MyEnum { + A, + B, + C, + } + + macro_rules! foo { + ($e:expr) => { + $e + 1 + }; + } + + let x = 1; + let _ = [(MyEnum::A, "foo", 1i32)] + .iter() + .filter_map(|(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar")).then(|| foo!(x))); + //~^ filter_map_bool_then +} diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.stderr b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr index aeb1baeb35e..edf6c655939 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.stderr +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr @@ -61,5 +61,11 @@ error: usage of `bool::then` in `filter_map` LL | let _: Vec<usize> = bools.iter().enumerate().filter_map(|(i, b)| b.then(|| i)).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&(i, b)| ****b).map(|(i, b)| i)` -error: aborting due to 10 previous errors +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then.rs:110:10 + | +LL | .filter_map(|(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar")).then(|| foo!(x))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar"))).map(|(t, s, i)| foo!(x))` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/flat_map_identity.fixed b/src/tools/clippy/tests/ui/flat_map_identity.fixed index f6206232612..06a3eee9d84 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.fixed +++ b/src/tools/clippy/tests/ui/flat_map_identity.fixed @@ -16,3 +16,16 @@ fn main() { let _ = iterator.flatten(); //~^ flat_map_identity } + +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: this is an `Iterator<Item = &[i32, i32]>` + // match ergonomics makes the binding patterns into references + // so that its type changes to `Iterator<Item = [&i32, &i32]>` + let _ = x.iter().flat_map(|[x, y]| [x, y]); + let _ = x.iter().flat_map(|x| [x[0]]); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied().flatten(); + //~^ flat_map_identity +} diff --git a/src/tools/clippy/tests/ui/flat_map_identity.rs b/src/tools/clippy/tests/ui/flat_map_identity.rs index c59e749474e..1cab7d559d8 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.rs +++ b/src/tools/clippy/tests/ui/flat_map_identity.rs @@ -16,3 +16,16 @@ fn main() { let _ = iterator.flat_map(|x| return x); //~^ flat_map_identity } + +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: this is an `Iterator<Item = &[i32, i32]>` + // match ergonomics makes the binding patterns into references + // so that its type changes to `Iterator<Item = [&i32, &i32]>` + let _ = x.iter().flat_map(|[x, y]| [x, y]); + let _ = x.iter().flat_map(|x| [x[0]]); + + // no match ergonomics for `[i32, i32]` + let _ = 
x.iter().copied().flat_map(|[x, y]| [x, y]); + //~^ flat_map_identity +} diff --git a/src/tools/clippy/tests/ui/flat_map_identity.stderr b/src/tools/clippy/tests/ui/flat_map_identity.stderr index 75137f5d9e5..18c863bf96d 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.stderr +++ b/src/tools/clippy/tests/ui/flat_map_identity.stderr @@ -19,5 +19,11 @@ error: use of `flat_map` with an identity function LL | let _ = iterator.flat_map(|x| return x); | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()` -error: aborting due to 3 previous errors +error: use of `flat_map` with an identity function + --> tests/ui/flat_map_identity.rs:29:31 + | +LL | let _ = x.iter().copied().flat_map(|[x, y]| [x, y]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()` + +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.fixed b/src/tools/clippy/tests/ui/if_then_some_else_none.fixed index f774608712d..d14a805b666 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.fixed +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.fixed @@ -122,3 +122,46 @@ const fn issue12103(x: u32) -> Option<u32> { // Should not issue an error in `const` context if x > 42 { Some(150) } else { None } } + +mod issue15257 { + struct Range { + start: u8, + end: u8, + } + + fn can_be_safely_rewrite(rs: &[&Range]) -> Option<Vec<u8>> { + (rs.len() == 1 && rs[0].start == rs[0].end).then(|| vec![rs[0].start]) + } + + fn reborrow_as_ptr(i: *mut i32) -> Option<*const i32> { + let modulo = unsafe { *i % 2 }; + (modulo == 0).then_some(i) + } + + fn reborrow_as_fn_ptr(i: i32) { + fn do_something(fn_: Option<fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something((i % 2 == 0).then_some(item_fn)); + } + + fn reborrow_as_fn_unsafe(i: i32) { + fn do_something(fn_: Option<unsafe fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something((i % 2 == 0).then_some(item_fn)); + + let closure_fn = |i: i32| {}; + do_something((i % 2 == 0).then_some(closure_fn)); + } +} diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.rs b/src/tools/clippy/tests/ui/if_then_some_else_none.rs index 8b8ff0a6ea0..bb0072f3157 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.rs +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.rs @@ -143,3 +143,71 @@ const fn issue12103(x: u32) -> Option<u32> { // Should not issue an error in `const` context if x > 42 { Some(150) } else { None } } + +mod issue15257 { + struct Range { + start: u8, + end: u8, + } + + fn can_be_safely_rewrite(rs: &[&Range]) -> Option<Vec<u8>> { + if rs.len() == 1 && rs[0].start == rs[0].end { + //~^ if_then_some_else_none + Some(vec![rs[0].start]) + } else { + None + } + } + + fn reborrow_as_ptr(i: *mut i32) -> Option<*const i32> { + let modulo = unsafe { *i % 2 }; + if modulo == 0 { + //~^ if_then_some_else_none + Some(i) + } else { + None + } + } + + fn reborrow_as_fn_ptr(i: i32) { + fn do_something(fn_: Option<fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(item_fn) + } else { + None + }); + } + + fn reborrow_as_fn_unsafe(i: i32) { + fn do_something(fn_: Option<unsafe fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(item_fn) + } else { + None + }); + + let closure_fn = |i: i32| {}; + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(closure_fn) + } else { + None + }); + } +} diff --git 
a/src/tools/clippy/tests/ui/if_then_some_else_none.stderr b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr index 71285574ef2..c2e624a0a73 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.stderr +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr @@ -58,5 +58,63 @@ error: this could be simplified with `bool::then` LL | if s == "1" { Some(true) } else { None } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(s == "1").then(|| true)` -error: aborting due to 6 previous errors +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none.rs:154:9 + | +LL | / if rs.len() == 1 && rs[0].start == rs[0].end { +LL | | +LL | | Some(vec![rs[0].start]) +LL | | } else { +LL | | None +LL | | } + | |_________^ help: try: `(rs.len() == 1 && rs[0].start == rs[0].end).then(|| vec![rs[0].start])` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:164:9 + | +LL | / if modulo == 0 { +LL | | +LL | | Some(i) +LL | | } else { +LL | | None +LL | | } + | |_________^ help: try: `(modulo == 0).then_some(i)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:181:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(item_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(item_fn)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:198:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(item_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(item_fn)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:206:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(closure_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(closure_fn)` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs new file mode 100644 index 00000000000..be04299a6ab --- /dev/null +++ b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs @@ -0,0 +1,35 @@ +#![warn(clippy::if_then_some_else_none)] +#![allow(clippy::manual_is_multiple_of)] + +mod issue15257 { + use std::pin::Pin; + + #[derive(Default)] + pub struct Foo {} + pub trait Bar {} + impl Bar for Foo {} + + fn pointer_unsized_coercion(i: u32) -> Option<Box<dyn Bar>> { + if i % 2 == 0 { + //~^ if_then_some_else_none + Some(Box::new(Foo::default())) + } else { + None + } + } + + fn reborrow_as_pin(i: Pin<&mut i32>) { + use std::ops::Rem; + + fn do_something(i: Option<&i32>) { + todo!() + } + + do_something(if i.rem(2) == 0 { + //~^ if_then_some_else_none + Some(&i) + } else { + None + }); + } +} diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr new file mode 100644 index 00000000000..f77ce7910e7 --- /dev/null +++ b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr @@ -0,0 +1,28 @@ +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none_unfixable.rs:13:9 + | +LL | / if i % 2 == 0 { +LL | | +LL | | Some(Box::new(Foo::default())) +LL | | } else { +LL | | None +LL | | } + | |_________^ + | + = note: `-D 
clippy::if-then-some-else-none` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::if_then_some_else_none)]` + +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none_unfixable.rs:28:22 + | +LL | do_something(if i.rem(2) == 0 { + | ______________________^ +LL | | +LL | | Some(&i) +LL | | } else { +LL | | None +LL | | }); + | |_________^ + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/incompatible_msrv.rs b/src/tools/clippy/tests/ui/incompatible_msrv.rs index 99101b2bb8f..f7f21e1850d 100644 --- a/src/tools/clippy/tests/ui/incompatible_msrv.rs +++ b/src/tools/clippy/tests/ui/incompatible_msrv.rs @@ -1,8 +1,10 @@ #![warn(clippy::incompatible_msrv)] #![feature(custom_inner_attributes)] -#![feature(panic_internals)] +#![allow(stable_features)] +#![feature(strict_provenance)] // For use in test #![clippy::msrv = "1.3.0"] +use std::cell::Cell; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::future::Future; @@ -13,6 +15,8 @@ fn foo() { let mut map: HashMap<&str, u32> = HashMap::new(); assert_eq!(map.entry("poneyland").key(), &"poneyland"); //~^ incompatible_msrv + //~| NOTE: `-D clippy::incompatible-msrv` implied by `-D warnings` + //~| HELP: to override `-D warnings` add `#[allow(clippy::incompatible_msrv)]` if let Entry::Vacant(v) = map.entry("poneyland") { v.into_key(); @@ -23,6 +27,18 @@ fn foo() { //~^ incompatible_msrv } +#[clippy::msrv = "1.2.0"] +static NO_BODY_BAD_MSRV: Option<Duration> = None; +//~^ incompatible_msrv + +static NO_BODY_GOOD_MSRV: Option<Duration> = None; + +#[clippy::msrv = "1.2.0"] +fn bad_type_msrv() { + let _: Option<Duration> = None; + //~^ incompatible_msrv +} + #[test] fn test() { sleep(Duration::new(1, 0)); @@ -43,21 +59,22 @@ fn core_special_treatment(p: bool) { // But still lint code calling `core` functions directly if p { - core::panicking::panic("foo"); - //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + let _ = core::iter::once_with(|| 0); + //~^ incompatible_msrv } // Lint code calling `core` from non-`core` macros macro_rules! 
my_panic { ($msg:expr) => { - core::panicking::panic($msg) - }; //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + let _ = core::iter::once_with(|| $msg); + //~^ incompatible_msrv + }; } my_panic!("foo"); // Lint even when the macro comes from `core` and calls `core` functions - assert!(core::panicking::panic("out of luck")); - //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + assert!(core::iter::once_with(|| 0).next().is_some()); + //~^ incompatible_msrv } #[clippy::msrv = "1.26.0"] @@ -70,7 +87,85 @@ fn lang_items() { #[clippy::msrv = "1.80.0"] fn issue14212() { let _ = std::iter::repeat_n((), 5); - //~^ ERROR: is `1.80.0` but this item is stable since `1.82.0` + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.0.0"] +fn cstr_and_cstring_ok() { + let _: Option<&'static std::ffi::CStr> = None; + let _: Option<std::ffi::CString> = None; +} + +fn local_msrv_change_suggestion() { + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + + #[cfg(any(test, not(test)))] + { + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + //~| NOTE: you may want to conditionally increase the MSRV + + // Emit the additional note only once + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + } +} + +#[clippy::msrv = "1.78.0"] +fn feature_enable_14425(ptr: *const u8) -> usize { + // Do not warn, because it is enabled through a feature even though + // it is stabilized only since Rust 1.84.0. + let r = ptr.addr(); + + // Warn about this which has been introduced in the same Rust version + // but is not allowed through a feature. + r.isqrt() + //~^ incompatible_msrv +} + +fn non_fn_items() { + let _ = std::io::ErrorKind::CrossesDevices; + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.87.0"] +fn msrv_non_ok_in_const() { + { + let c = Cell::new(42); + _ = c.get(); + } + const { + let c = Cell::new(42); + _ = c.get(); + //~^ incompatible_msrv + } +} + +#[clippy::msrv = "1.88.0"] +fn msrv_ok_in_const() { + { + let c = Cell::new(42); + _ = c.get(); + } + const { + let c = Cell::new(42); + _ = c.get(); + } +} + +#[clippy::msrv = "1.86.0"] +fn enum_variant_not_ok() { + let _ = std::io::ErrorKind::InvalidFilename; + //~^ incompatible_msrv + let _ = const { std::io::ErrorKind::InvalidFilename }; + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.87.0"] +fn enum_variant_ok() { + let _ = std::io::ErrorKind::InvalidFilename; + let _ = const { std::io::ErrorKind::InvalidFilename }; } fn main() {} diff --git a/src/tools/clippy/tests/ui/incompatible_msrv.stderr b/src/tools/clippy/tests/ui/incompatible_msrv.stderr index 5ea2bb9cc58..e42360d296f 100644 --- a/src/tools/clippy/tests/ui/incompatible_msrv.stderr +++ b/src/tools/clippy/tests/ui/incompatible_msrv.stderr @@ -1,5 +1,5 @@ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.10.0` - --> tests/ui/incompatible_msrv.rs:14:39 + --> tests/ui/incompatible_msrv.rs:16:39 | LL | assert_eq!(map.entry("poneyland").key(), &"poneyland"); | ^^^^^ @@ -8,45 +8,107 @@ LL | assert_eq!(map.entry("poneyland").key(), &"poneyland"); = help: to override `-D warnings` add `#[allow(clippy::incompatible_msrv)]` error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.12.0` - --> tests/ui/incompatible_msrv.rs:18:11 + --> tests/ui/incompatible_msrv.rs:22:11 | LL | v.into_key(); | ^^^^^^^^^^ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.4.0` - --> tests/ui/incompatible_msrv.rs:22:5 + --> 
tests/ui/incompatible_msrv.rs:26:5 | LL | sleep(Duration::new(1, 0)); | ^^^^^ -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:46:9 +error: current MSRV (Minimum Supported Rust Version) is `1.2.0` but this item is stable since `1.3.0` + --> tests/ui/incompatible_msrv.rs:31:33 | -LL | core::panicking::panic("foo"); - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | static NO_BODY_BAD_MSRV: Option<Duration> = None; + | ^^^^^^^^ -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:53:13 +error: current MSRV (Minimum Supported Rust Version) is `1.2.0` but this item is stable since `1.3.0` + --> tests/ui/incompatible_msrv.rs:38:19 | -LL | core::panicking::panic($msg) - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Option<Duration> = None; + | ^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:62:17 + | +LL | let _ = core::iter::once_with(|| 0); + | ^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:69:21 + | +LL | let _ = core::iter::once_with(|| $msg); + | ^^^^^^^^^^^^^^^^^^^^^ ... LL | my_panic!("foo"); | ---------------- in this macro invocation | = note: this error originates in the macro `my_panic` (in Nightly builds, run with -Z macro-backtrace for more info) -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:59:13 +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:76:13 | -LL | assert!(core::panicking::panic("out of luck")); - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | assert!(core::iter::once_with(|| 0).next().is_some()); + | ^^^^^^^^^^^^^^^^^^^^^ error: current MSRV (Minimum Supported Rust Version) is `1.80.0` but this item is stable since `1.82.0` - --> tests/ui/incompatible_msrv.rs:72:13 + --> tests/ui/incompatible_msrv.rs:89:13 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:100:13 | LL | let _ = std::iter::repeat_n((), 5); | ^^^^^^^^^^^^^^^^^^^ -error: aborting due to 7 previous errors +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:105:17 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:110:17 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.78.0` but this item is stable since `1.84.0` + --> tests/ui/incompatible_msrv.rs:123:7 + | +LL | r.isqrt() + | ^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.85.0` + --> tests/ui/incompatible_msrv.rs:128:13 + | +LL | let _ = std::io::ErrorKind::CrossesDevices; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is 
`1.87.0` but this item is stable in a `const` context since `1.88.0` + --> tests/ui/incompatible_msrv.rs:140:15 + | +LL | _ = c.get(); + | ^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.86.0` but this item is stable since `1.87.0` + --> tests/ui/incompatible_msrv.rs:159:13 + | +LL | let _ = std::io::ErrorKind::InvalidFilename; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.86.0` but this item is stable since `1.87.0` + --> tests/ui/incompatible_msrv.rs:161:21 + | +LL | let _ = const { std::io::ErrorKind::InvalidFilename }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 17 previous errors diff --git a/src/tools/clippy/tests/ui/iter_over_hash_type.rs b/src/tools/clippy/tests/ui/iter_over_hash_type.rs index 914cc9df0de..9a3e7033cd8 100644 --- a/src/tools/clippy/tests/ui/iter_over_hash_type.rs +++ b/src/tools/clippy/tests/ui/iter_over_hash_type.rs @@ -3,15 +3,18 @@ #![warn(clippy::iter_over_hash_type)] use std::collections::{HashMap, HashSet}; -extern crate rustc_data_structures; - extern crate proc_macros; +// Ensure it also works via type aliases (this isn't really the Fx hasher but that does not matter). +type FxBuildHasher = std::collections::hash_map::RandomState; +type FxHashMap<K, V> = HashMap<K, V, FxBuildHasher>; +type FxHashSet<K> = HashSet<K, FxBuildHasher>; + fn main() { let mut hash_set = HashSet::<i32>::new(); let mut hash_map = HashMap::<i32, i32>::new(); - let mut fx_hash_map = rustc_data_structures::fx::FxHashMap::<i32, i32>::default(); - let mut fx_hash_set = rustc_data_structures::fx::FxHashMap::<i32, i32>::default(); + let mut fx_hash_map = FxHashMap::<i32, i32>::default(); + let mut fx_hash_set = FxHashSet::<i32>::default(); let vec = Vec::<i32>::new(); // test hashset diff --git a/src/tools/clippy/tests/ui/iter_over_hash_type.stderr b/src/tools/clippy/tests/ui/iter_over_hash_type.stderr index 1bc6f4588d4..3356186547d 100644 --- a/src/tools/clippy/tests/ui/iter_over_hash_type.stderr +++ b/src/tools/clippy/tests/ui/iter_over_hash_type.stderr @@ -1,5 +1,5 @@ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:18:5 + --> tests/ui/iter_over_hash_type.rs:21:5 | LL | / for x in &hash_set { LL | | @@ -11,7 +11,7 @@ LL | | } = help: to override `-D warnings` add `#[allow(clippy::iter_over_hash_type)]` error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:22:5 + --> tests/ui/iter_over_hash_type.rs:25:5 | LL | / for x in hash_set.iter() { LL | | @@ -20,7 +20,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:26:5 + --> tests/ui/iter_over_hash_type.rs:29:5 | LL | / for x in hash_set.clone() { LL | | @@ -29,7 +29,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:30:5 + --> tests/ui/iter_over_hash_type.rs:33:5 | LL | / for x in hash_set.drain() { LL | | @@ -38,7 +38,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:36:5 + --> tests/ui/iter_over_hash_type.rs:39:5 | LL | / for (x, y) in &hash_map { LL | | @@ -47,7 +47,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:40:5 + --> tests/ui/iter_over_hash_type.rs:43:5 | LL | / for x in hash_map.keys() { LL | | @@ -56,7 +56,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:44:5 + --> 
tests/ui/iter_over_hash_type.rs:47:5 | LL | / for x in hash_map.values() { LL | | @@ -65,7 +65,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:48:5 + --> tests/ui/iter_over_hash_type.rs:51:5 | LL | / for x in hash_map.values_mut() { LL | | @@ -74,7 +74,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:52:5 + --> tests/ui/iter_over_hash_type.rs:55:5 | LL | / for x in hash_map.iter() { LL | | @@ -83,7 +83,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:56:5 + --> tests/ui/iter_over_hash_type.rs:59:5 | LL | / for x in hash_map.clone() { LL | | @@ -92,7 +92,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:60:5 + --> tests/ui/iter_over_hash_type.rs:63:5 | LL | / for x in hash_map.drain() { LL | | @@ -101,7 +101,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:66:5 + --> tests/ui/iter_over_hash_type.rs:69:5 | LL | / for x in fx_hash_set { LL | | @@ -110,7 +110,7 @@ LL | | } | |_____^ error: iteration over unordered hash-based type - --> tests/ui/iter_over_hash_type.rs:70:5 + --> tests/ui/iter_over_hash_type.rs:73:5 | LL | / for x in fx_hash_map { LL | | diff --git a/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr b/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr index 80ca5daa1d5..ac1ed27a6b3 100644 --- a/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr +++ b/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([i32; 8000]), LL + B(Box<[i32; 8000]>), @@ -30,7 +30,7 @@ LL | | ContainingLargeEnum(LargeEnum), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingLargeEnum(LargeEnum), LL + ContainingLargeEnum(Box<LargeEnum>), @@ -49,7 +49,7 @@ LL | | StructLikeLittle { x: i32, y: i32 }, LL | | } | |_^ the entire enum is at least 70008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]), LL + ContainingMoreThanOneField(i32, Box<[i32; 8000]>, Box<[i32; 9500]>), @@ -67,7 +67,7 @@ LL | | StructLikeLarge { x: [i32; 8000], y: i32 }, LL | | } | |_^ the entire enum is at least 32008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge { x: [i32; 8000], y: i32 }, LL + StructLikeLarge { x: Box<[i32; 8000]>, y: i32 }, @@ -85,7 +85,7 @@ LL | | StructLikeLarge2 { x: [i32; 8000] }, LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of 
the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge2 { x: [i32; 8000] }, LL + StructLikeLarge2 { x: Box<[i32; 8000]> }, @@ -104,7 +104,7 @@ LL | | C([u8; 200]), LL | | } | |_^ the entire enum is at least 1256 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 1255]), LL + B(Box<[u8; 1255]>), @@ -122,7 +122,7 @@ LL | | ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; LL | | } | |_^ the entire enum is at least 70132 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]), LL + ContainingMoreThanOneField(Box<[i32; 8000]>, [i32; 2], Box<[i32; 9500]>, [i32; 30]), @@ -140,7 +140,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -158,7 +158,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -176,7 +176,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -199,7 +199,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum CopyableLargeEnum { | ^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:118:5 | LL | B([u64; 8000]), @@ -222,7 +222,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum ManuallyCopyLargeEnum { | ^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:124:5 | LL | B([u64; 8000]), @@ -245,7 +245,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum SomeGenericPossiblyCopyEnum<T> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:138:5 | LL | B([u64; 4000]), @@ -263,7 +263,7 @@ LL | | Large((T, [u8; 512])), LL | | } | |_^ the entire enum is at least 512 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields 
or introducing indirection in some other way to reduce the total size of the enum | LL - Large((T, [u8; 512])), LL + Large(Box<(T, [u8; 512])>), @@ -281,7 +281,7 @@ LL | | Small(u8), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([Foo<u64>; 64]), LL + Large(Box<[Foo<u64>; 64]>), @@ -299,7 +299,7 @@ LL | | Error(PossiblyLargeEnumWithConst<256>), LL | | } | |_^ the entire enum is at least 514 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(PossiblyLargeEnumWithConst<256>), LL + Error(Box<PossiblyLargeEnumWithConst<256>>), @@ -317,7 +317,7 @@ LL | | Recursive(Box<WithRecursion>), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([u64; 64]), LL + Large(Box<[u64; 64]>), @@ -335,7 +335,7 @@ LL | | Error(WithRecursionAndGenerics<u64>), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(WithRecursionAndGenerics<u64>), LL + Error(Box<WithRecursionAndGenerics<u64>>), diff --git a/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr b/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr index 559bdf2a2f5..d8199f9090f 100644 --- a/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr +++ b/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([i32; 8000]), LL + B(Box<[i32; 8000]>), @@ -30,7 +30,7 @@ LL | | ContainingLargeEnum(LargeEnum), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingLargeEnum(LargeEnum), LL + ContainingLargeEnum(Box<LargeEnum>), @@ -49,7 +49,7 @@ LL | | StructLikeLittle { x: i32, y: i32 }, LL | | } | |_^ the entire enum is at least 70008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]), LL + ContainingMoreThanOneField(i32, Box<[i32; 8000]>, Box<[i32; 9500]>), @@ -67,7 +67,7 @@ LL | | StructLikeLarge { x: [i32; 8000], y: i32 }, LL | | } | |_^ the entire enum is at least 32008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to 
reduce the total size of the enum | LL - StructLikeLarge { x: [i32; 8000], y: i32 }, LL + StructLikeLarge { x: Box<[i32; 8000]>, y: i32 }, @@ -85,7 +85,7 @@ LL | | StructLikeLarge2 { x: [i32; 8000] }, LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge2 { x: [i32; 8000] }, LL + StructLikeLarge2 { x: Box<[i32; 8000]> }, @@ -104,7 +104,7 @@ LL | | C([u8; 200]), LL | | } | |_^ the entire enum is at least 1256 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 1255]), LL + B(Box<[u8; 1255]>), @@ -122,7 +122,7 @@ LL | | ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; LL | | } | |_^ the entire enum is at least 70132 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]), LL + ContainingMoreThanOneField(Box<[i32; 8000]>, [i32; 2], Box<[i32; 9500]>, [i32; 30]), @@ -140,7 +140,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -158,7 +158,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -176,7 +176,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -199,7 +199,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum CopyableLargeEnum { | ^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:118:5 | LL | B([u64; 8000]), @@ -222,7 +222,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum ManuallyCopyLargeEnum { | ^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:124:5 | LL | B([u64; 8000]), @@ -245,7 +245,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum SomeGenericPossiblyCopyEnum<T> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other 
way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:138:5 | LL | B([u64; 4000]), @@ -263,7 +263,7 @@ LL | | Large((T, [u8; 512])), LL | | } | |_^ the entire enum is at least 512 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large((T, [u8; 512])), LL + Large(Box<(T, [u8; 512])>), @@ -281,7 +281,7 @@ LL | | Small(u8), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([Foo<u64>; 64]), LL + Large(Box<[Foo<u64>; 64]>), @@ -299,7 +299,7 @@ LL | | Error(PossiblyLargeEnumWithConst<256>), LL | | } | |_^ the entire enum is at least 514 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(PossiblyLargeEnumWithConst<256>), LL + Error(Box<PossiblyLargeEnumWithConst<256>>), @@ -317,7 +317,7 @@ LL | | Recursive(Box<WithRecursion>), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([u64; 64]), LL + Large(Box<[u64; 64]>), @@ -335,7 +335,7 @@ LL | | Error(WithRecursionAndGenerics<u64>), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(WithRecursionAndGenerics<u64>), LL + Error(Box<WithRecursionAndGenerics<u64>>), @@ -353,7 +353,7 @@ LL | | _SmallBoi(u8), LL | | } | |_____^ the entire enum is at least 296 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - BigBoi(PublishWithBytes), LL + BigBoi(Box<PublishWithBytes>), @@ -371,7 +371,7 @@ LL | | _SmallBoi(u8), LL | | } | |_____^ the entire enum is at least 224 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - BigBoi(PublishWithVec), LL + BigBoi(Box<PublishWithVec>), diff --git a/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs b/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs new file mode 100644 index 00000000000..ff0213155b6 --- /dev/null +++ b/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs @@ -0,0 +1,8 @@ +#![no_std] +#![warn(clippy::large_enum_variant)] + +enum Myenum { + //~^ ERROR: large size difference between variants + Small(u8), + Large([u8; 1024]), +} diff --git a/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr b/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr new file mode 100644 index 00000000000..4f32e3e4835 --- /dev/null +++ b/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr @@ -0,0 +1,22 @@ +error: large size difference between variants + --> 
tests/ui/large_enum_variant_no_std.rs:4:1 + | +LL | / enum Myenum { +LL | | +LL | | Small(u8), + | | --------- the second-largest variant contains at least 1 bytes +LL | | Large([u8; 1024]), + | | ----------------- the largest variant contains at least 1024 bytes +LL | | } + | |_^ the entire enum is at least 1025 bytes + | +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum + --> tests/ui/large_enum_variant_no_std.rs:7:5 + | +LL | Large([u8; 1024]), + | ^^^^^^^^^^^^^^^^^ + = note: `-D clippy::large-enum-variant` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` + +error: aborting due to 1 previous error + diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed b/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed index 30bb549a9d6..d90e7bec027 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed @@ -79,9 +79,31 @@ fn main() { f64::consts::E; b!(); + std::primitive::i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead [(0, "", i128::MAX)]; //~^ ERROR: usage of a legacy numeric constant //~| HELP: use the associated constant instead + i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + assert_eq!(0, -i32::MAX); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + i128::MAX; + //~^ ERROR: usage of a legacy numeric constant + //~| HELP: use the associated constant instead + u32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + type Ω = i32; + Ω::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead } #[warn(clippy::legacy_numeric_constants)] diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.rs b/src/tools/clippy/tests/ui/legacy_numeric_constants.rs index d3878199055..4a2ef3f70c2 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.rs +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.rs @@ -79,9 +79,31 @@ fn main() { f64::consts::E; b!(); + <std::primitive::i32>::max_value(); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead [(0, "", std::i128::MAX)]; //~^ ERROR: usage of a legacy numeric constant //~| HELP: use the associated constant instead + (i32::max_value()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + assert_eq!(0, -(i32::max_value())); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + (std::i128::MAX); + //~^ ERROR: usage of a legacy numeric constant + //~| HELP: use the associated constant instead + (<u32>::max_value()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + ((i32::max_value)()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + type Ω = i32; + Ω::max_value(); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead } #[warn(clippy::legacy_numeric_constants)] diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr b/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr 
index 4d69b8165a3..0b4f32e0abc 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr @@ -72,10 +72,10 @@ LL | u32::MAX; | +++++ error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:50:10 + --> tests/ui/legacy_numeric_constants.rs:50:5 | LL | i32::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -84,10 +84,10 @@ LL + i32::MAX; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:53:9 + --> tests/ui/legacy_numeric_constants.rs:53:5 | LL | u8::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -96,10 +96,10 @@ LL + u8::MAX; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:56:9 + --> tests/ui/legacy_numeric_constants.rs:56:5 | LL | u8::min_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -120,10 +120,10 @@ LL + u8::MIN; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:62:27 + --> tests/ui/legacy_numeric_constants.rs:62:5 | LL | ::std::primitive::u8::min_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -132,10 +132,10 @@ LL + ::std::primitive::u8::MIN; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:65:26 + --> tests/ui/legacy_numeric_constants.rs:65:5 | LL | std::primitive::i32::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -171,8 +171,20 @@ LL - let x = std::u64::MAX; LL + let x = u64::MAX; | +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:82:5 + | +LL | <std::primitive::i32>::max_value(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - <std::primitive::i32>::max_value(); +LL + std::primitive::i32::MAX; + | + error: usage of a legacy numeric constant - --> tests/ui/legacy_numeric_constants.rs:82:14 + --> tests/ui/legacy_numeric_constants.rs:85:14 | LL | [(0, "", std::i128::MAX)]; | ^^^^^^^^^^^^^^ @@ -183,8 +195,80 @@ LL - [(0, "", std::i128::MAX)]; LL + [(0, "", i128::MAX)]; | +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:88:5 + | +LL | (i32::max_value()); + | ^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (i32::max_value()); +LL + i32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:91:20 + | +LL | assert_eq!(0, -(i32::max_value())); + | ^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - assert_eq!(0, -(i32::max_value())); +LL + assert_eq!(0, -i32::MAX); + | + +error: usage of a legacy numeric constant + --> tests/ui/legacy_numeric_constants.rs:94:5 + | +LL | (std::i128::MAX); + | ^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (std::i128::MAX); +LL + i128::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:97:5 + | +LL | (<u32>::max_value()); + | ^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (<u32>::max_value()); +LL + u32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:100:5 + | +LL | ((i32::max_value)()); + | ^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - ((i32::max_value)()); 
+LL + i32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:104:5 + | +LL | Ω::max_value(); + | ^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - Ω::max_value(); +LL + Ω::MAX; + | + error: usage of a legacy numeric constant - --> tests/ui/legacy_numeric_constants.rs:116:5 + --> tests/ui/legacy_numeric_constants.rs:138:5 | LL | std::u32::MAX; | ^^^^^^^^^^^^^ @@ -195,5 +279,5 @@ LL - std::u32::MAX; LL + u32::MAX; | -error: aborting due to 16 previous errors +error: aborting due to 23 previous errors diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.fixed b/src/tools/clippy/tests/ui/manual_abs_diff.fixed index f1b1278ea6d..2766942140c 100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.fixed +++ b/src/tools/clippy/tests/ui/manual_abs_diff.fixed @@ -104,3 +104,7 @@ fn non_primitive_ty() { let (a, b) = (S(10), S(20)); let _ = if a < b { b - a } else { a - b }; } + +fn issue15254(a: &usize, b: &usize) -> usize { + b.abs_diff(*a) +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.rs b/src/tools/clippy/tests/ui/manual_abs_diff.rs index 60ef819c12d..2c408f2be37 100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.rs +++ b/src/tools/clippy/tests/ui/manual_abs_diff.rs @@ -114,3 +114,12 @@ fn non_primitive_ty() { let (a, b) = (S(10), S(20)); let _ = if a < b { b - a } else { a - b }; } + +fn issue15254(a: &usize, b: &usize) -> usize { + if a < b { + //~^ manual_abs_diff + b - a + } else { + a - b + } +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.stderr b/src/tools/clippy/tests/ui/manual_abs_diff.stderr index c14c1dc830f..bb6d312b435 100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.stderr +++ b/src/tools/clippy/tests/ui/manual_abs_diff.stderr @@ -79,5 +79,16 @@ error: manual absolute difference pattern without using `abs_diff` LL | let _ = if a > b { (a - b) as u32 } else { (b - a) as u32 }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` -error: aborting due to 11 previous errors +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:119:5 + | +LL | / if a < b { +LL | | +LL | | b - a +LL | | } else { +LL | | a - b +LL | | } + | |_____^ help: replace with `abs_diff`: `b.abs_diff(*a)` + +error: aborting due to 12 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr index 8cedf2c6863..221cddf069d 100644 --- a/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr +++ b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr @@ -189,5 +189,23 @@ LL - }; LL + const BAR: () = assert!(!(N == 0), ); | -error: aborting due to 10 previous errors +error: only a `panic!` in `if`-then statement + --> tests/ui/manual_assert.rs:116:5 + | +LL | / if !is_x86_feature_detected!("ssse3") { +LL | | +LL | | panic!("SSSE3 is not supported"); +LL | | } + | |_____^ + | +help: try instead + | +LL - if !is_x86_feature_detected!("ssse3") { +LL - +LL - panic!("SSSE3 is not supported"); +LL - } +LL + assert!(is_x86_feature_detected!("ssse3"), "SSSE3 is not supported"); + | + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr index 8cedf2c6863..221cddf069d 100644 --- a/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr +++ b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr @@ -189,5 +189,23 
@@ LL - }; LL + const BAR: () = assert!(!(N == 0), ); | -error: aborting due to 10 previous errors +error: only a `panic!` in `if`-then statement + --> tests/ui/manual_assert.rs:116:5 + | +LL | / if !is_x86_feature_detected!("ssse3") { +LL | | +LL | | panic!("SSSE3 is not supported"); +LL | | } + | |_____^ + | +help: try instead + | +LL - if !is_x86_feature_detected!("ssse3") { +LL - +LL - panic!("SSSE3 is not supported"); +LL - } +LL + assert!(is_x86_feature_detected!("ssse3"), "SSSE3 is not supported"); + | + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.rs b/src/tools/clippy/tests/ui/manual_assert.rs index 46a42c3d00a..ab02bd5f5e5 100644 --- a/src/tools/clippy/tests/ui/manual_assert.rs +++ b/src/tools/clippy/tests/ui/manual_assert.rs @@ -105,3 +105,17 @@ fn issue12505() { }; } } + +fn issue15227(left: u64, right: u64) -> u64 { + macro_rules! is_x86_feature_detected { + ($feature:literal) => { + $feature.len() > 0 && $feature.starts_with("ss") + }; + } + + if !is_x86_feature_detected!("ssse3") { + //~^ manual_assert + panic!("SSSE3 is not supported"); + } + unsafe { todo!() } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed b/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed index 6735b99f298..03f75e725ed 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed @@ -23,3 +23,81 @@ fn f(a: u64, b: u64) { fn g(a: u64, b: u64) { let _ = a % b == 0; } + +fn needs_deref(a: &u64, b: &u64) { + let _ = a.is_multiple_of(*b); //~ manual_is_multiple_of +} + +fn closures(a: u64, b: u64) { + // Do not lint, types are ambiguous at this point + let cl = |a, b| a % b == 0; + let _ = cl(a, b); + + // Do not lint, types are ambiguous at this point + let cl = |a: _, b: _| a % b == 0; + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: u64, b| a.is_multiple_of(b); //~ manual_is_multiple_of + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: &u64, b| a.is_multiple_of(b); //~ manual_is_multiple_of + let _ = cl(&a, b); + + // Type of `b` is not enough + let cl = |a, b: u64| a % b == 0; + let _ = cl(&a, b); +} + +fn any_rem<T: std::ops::Rem<Output = u32>>(a: T, b: T) { + // An arbitrary `Rem` implementation should not lint + let _ = a % b == 0; +} + +mod issue15103 { + fn foo() -> Option<u64> { + let mut n: u64 = 150_000_000; + + (2..).find(|p| { + while n.is_multiple_of(*p) { + //~^ manual_is_multiple_of + n /= p; + } + n <= 1 + }) + } + + const fn generate_primes<const N: usize>() -> [u64; N] { + let mut result = [0; N]; + if N == 0 { + return result; + } + result[0] = 2; + if N == 1 { + return result; + } + let mut idx = 1; + let mut p = 3; + while idx < N { + let mut j = 0; + while j < idx && p % result[j] != 0 { + j += 1; + } + if j == idx { + result[idx] = p; + idx += 1; + } + p += 1; + } + result + } + + fn bar() -> u32 { + let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n.is_multiple_of(*i)).sum() }; + //~^ manual_is_multiple_of + + let d = |n| (1..=n / 2).filter(|i| n % i == 0).sum(); + (1..1_000).filter(|&i| i == d(d(i)) && i != d(i)).sum() + } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.rs b/src/tools/clippy/tests/ui/manual_is_multiple_of.rs index 00b638e4fd9..7b6fa64c843 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.rs +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.rs @@ -23,3 +23,81 @@ fn f(a: u64, b: u64) { fn g(a: u64, b: u64) { let _ = a % b == 0; } + +fn needs_deref(a: &u64, b: &u64) { + 
let _ = a % b == 0; //~ manual_is_multiple_of +} + +fn closures(a: u64, b: u64) { + // Do not lint, types are ambiguous at this point + let cl = |a, b| a % b == 0; + let _ = cl(a, b); + + // Do not lint, types are ambiguous at this point + let cl = |a: _, b: _| a % b == 0; + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: u64, b| a % b == 0; //~ manual_is_multiple_of + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: &u64, b| a % b == 0; //~ manual_is_multiple_of + let _ = cl(&a, b); + + // Type of `b` is not enough + let cl = |a, b: u64| a % b == 0; + let _ = cl(&a, b); +} + +fn any_rem<T: std::ops::Rem<Output = u32>>(a: T, b: T) { + // An arbitrary `Rem` implementation should not lint + let _ = a % b == 0; +} + +mod issue15103 { + fn foo() -> Option<u64> { + let mut n: u64 = 150_000_000; + + (2..).find(|p| { + while n % p == 0 { + //~^ manual_is_multiple_of + n /= p; + } + n <= 1 + }) + } + + const fn generate_primes<const N: usize>() -> [u64; N] { + let mut result = [0; N]; + if N == 0 { + return result; + } + result[0] = 2; + if N == 1 { + return result; + } + let mut idx = 1; + let mut p = 3; + while idx < N { + let mut j = 0; + while j < idx && p % result[j] != 0 { + j += 1; + } + if j == idx { + result[idx] = p; + idx += 1; + } + p += 1; + } + result + } + + fn bar() -> u32 { + let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n % i == 0).sum() }; + //~^ manual_is_multiple_of + + let d = |n| (1..=n / 2).filter(|i| n % i == 0).sum(); + (1..1_000).filter(|&i| i == d(d(i)) && i != d(i)).sum() + } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr b/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr index 0b1ae70c2a7..8523599ec40 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr @@ -37,5 +37,35 @@ error: manual implementation of `.is_multiple_of()` LL | let _ = 0 < a % b; | ^^^^^^^^^ help: replace with: `!a.is_multiple_of(b)` -error: aborting due to 6 previous errors +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:28:13 + | +LL | let _ = a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(*b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:41:26 + | +LL | let cl = |a: u64, b| a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:45:27 + | +LL | let cl = |a: &u64, b| a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:63:19 + | +LL | while n % p == 0 { + | ^^^^^^^^^^ help: replace with: `n.is_multiple_of(*p)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:97:58 + | +LL | let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n % i == 0).sum() }; + | ^^^^^^^^^^ help: replace with: `n.is_multiple_of(*i)` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/map_identity.fixed b/src/tools/clippy/tests/ui/map_identity.fixed index 83b2dac5fc5..b82d3e6d956 100644 --- a/src/tools/clippy/tests/ui/map_identity.fixed +++ b/src/tools/clippy/tests/ui/map_identity.fixed @@ -87,3 +87,15 @@ fn issue13904() { let _ = { it }.next(); //~^ map_identity } + +// same as `issue11764`, but for arrays +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: `&[i32; 2]` becomes `[&i32; 
2]` + let _ = x.iter().map(|[x, y]| [x, y]); + let _ = x.iter().map(|x| [x[0]]).map(|[x]| x); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied(); + //~^ map_identity +} diff --git a/src/tools/clippy/tests/ui/map_identity.rs b/src/tools/clippy/tests/ui/map_identity.rs index e839c551364..c295bf87270 100644 --- a/src/tools/clippy/tests/ui/map_identity.rs +++ b/src/tools/clippy/tests/ui/map_identity.rs @@ -93,3 +93,15 @@ fn issue13904() { let _ = { it }.map(|x| x).next(); //~^ map_identity } + +// same as `issue11764`, but for arrays +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: `&[i32; 2]` becomes `[&i32; 2]` + let _ = x.iter().map(|[x, y]| [x, y]); + let _ = x.iter().map(|x| [x[0]]).map(|[x]| x); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied().map(|[x, y]| [x, y]); + //~^ map_identity +} diff --git a/src/tools/clippy/tests/ui/map_identity.stderr b/src/tools/clippy/tests/ui/map_identity.stderr index 9836f3b4cc5..9b624a0dc75 100644 --- a/src/tools/clippy/tests/ui/map_identity.stderr +++ b/src/tools/clippy/tests/ui/map_identity.stderr @@ -87,5 +87,11 @@ error: unnecessary map of the identity function LL | let _ = { it }.map(|x| x).next(); | ^^^^^^^^^^^ help: remove the call to `map` -error: aborting due to 13 previous errors +error: unnecessary map of the identity function + --> tests/ui/map_identity.rs:105:30 + | +LL | let _ = x.iter().copied().map(|[x, y]| [x, y]); + | ^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map` + +error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr b/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr index 8068edfff94..c2b58b952aa 100644 --- a/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr +++ b/src/tools/clippy/tests/ui/match_str_case_mismatch.stderr @@ -18,7 +18,7 @@ error: this `match` arm has a differing case than its expression LL | "~!@#$%^&*()-_=+Foo" => {}, | ^^^^^^^^^^^^^^^^^^^^ | -help: consider changing the case of this arm to respect `to_ascii_lowercase` (notice the capitalization difference) +help: consider changing the case of this arm to respect `to_ascii_lowercase` (notice the capitalization) | LL - "~!@#$%^&*()-_=+Foo" => {}, LL + "~!@#$%^&*()-_=+foo" => {}, diff --git a/src/tools/clippy/tests/ui/missing_inline.rs b/src/tools/clippy/tests/ui/missing_inline.rs index c1801005b77..223c7447975 100644 --- a/src/tools/clippy/tests/ui/missing_inline.rs +++ b/src/tools/clippy/tests/ui/missing_inline.rs @@ -80,3 +80,20 @@ impl PubFoo { // do not lint this since users cannot control the external code #[derive(Debug)] pub struct S; + +pub mod issue15301 { + #[unsafe(no_mangle)] + pub extern "C" fn call_from_c() { + println!("Just called a Rust function from C!"); + } + + #[unsafe(no_mangle)] + pub extern "Rust" fn call_from_rust() { + println!("Just called a Rust function from Rust!"); + } + + #[unsafe(no_mangle)] + pub fn call_from_rust_no_extern() { + println!("Just called a Rust function from Rust!"); + } +} diff --git a/src/tools/clippy/tests/ui/module_name_repetitions.rs b/src/tools/clippy/tests/ui/module_name_repetitions.rs index 2fde98d7927..5d16858bf85 100644 --- a/src/tools/clippy/tests/ui/module_name_repetitions.rs +++ b/src/tools/clippy/tests/ui/module_name_repetitions.rs @@ -55,3 +55,21 @@ pub mod foo { } fn main() {} + +pub mod issue14095 { + pub mod widget { + #[macro_export] + macro_rules! define_widget { + ($id:ident) => { + /* ... */ + }; + } + + #[macro_export] + macro_rules! 
widget_impl { + ($id:ident) => { + /* ... */ + }; + } + } +} diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed index 4c1d6b1ccb5..1e8589cf39d 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.fixed +++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed @@ -13,13 +13,15 @@ use std::sync::atomic::{AtomicBool, Ordering}; pub struct MyAtomic(AtomicBool); pub struct MyPure; -#[must_use] pub fn pure(i: u8) -> u8 { +#[must_use] +pub fn pure(i: u8) -> u8 { //~^ must_use_candidate i } impl MyPure { - #[must_use] pub fn inherent_pure(&self) -> u8 { + #[must_use] + pub fn inherent_pure(&self) -> u8 { //~^ must_use_candidate 0 } @@ -51,7 +53,8 @@ pub fn with_callback<F: Fn(u32) -> bool>(f: &F) -> bool { f(0) } -#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { +#[must_use] +pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { //~^ must_use_candidate true } @@ -64,7 +67,8 @@ pub fn atomics(b: &AtomicBool) -> bool { b.load(Ordering::SeqCst) } -#[must_use] pub fn rcd(_x: Rc<u32>) -> bool { +#[must_use] +pub fn rcd(_x: Rc<u32>) -> bool { //~^ must_use_candidate true } @@ -73,7 +77,8 @@ pub fn rcmut(_x: Rc<&mut u32>) -> bool { true } -#[must_use] pub fn arcd(_x: Arc<u32>) -> bool { +#[must_use] +pub fn arcd(_x: Arc<u32>) -> bool { //~^ must_use_candidate false } diff --git a/src/tools/clippy/tests/ui/must_use_candidates.stderr b/src/tools/clippy/tests/ui/must_use_candidates.stderr index 590253d95f9..5ddbd026062 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.stderr +++ b/src/tools/clippy/tests/ui/must_use_candidates.stderr @@ -1,35 +1,64 @@ error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:16:1 + --> tests/ui/must_use_candidates.rs:16:8 | LL | pub fn pure(i: u8) -> u8 { - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn pure(i: u8) -> u8` + | ^^^^ | = note: `-D clippy::must-use-candidate` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::must_use_candidate)]` +help: add the attribute + | +LL + #[must_use] +LL | pub fn pure(i: u8) -> u8 { + | error: this method could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:22:5 + --> tests/ui/must_use_candidates.rs:22:12 | LL | pub fn inherent_pure(&self) -> u8 { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn inherent_pure(&self) -> u8` + | ^^^^^^^^^^^^^ + | +help: add the attribute + | +LL ~ #[must_use] +LL ~ pub fn inherent_pure(&self) -> u8 { + | error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:54:1 + --> tests/ui/must_use_candidates.rs:54:8 + | +LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { + | ^^^^^^^^^^^ + | +help: add the attribute | +LL + #[must_use] LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool` + | error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:67:1 + --> tests/ui/must_use_candidates.rs:67:8 | LL | pub fn rcd(_x: Rc<u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn rcd(_x: Rc<u32>) -> bool` + | ^^^ + | +help: add the attribute + | +LL + #[must_use] +LL | pub fn rcd(_x: Rc<u32>) -> bool { + | error: 
this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:76:1 + --> tests/ui/must_use_candidates.rs:76:8 | LL | pub fn arcd(_x: Arc<u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn arcd(_x: Arc<u32>) -> bool` + | ^^^^ + | +help: add the attribute + | +LL + #[must_use] +LL | pub fn arcd(_x: Arc<u32>) -> bool { + | error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed index a73aff55639..a6d64d9afc1 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed @@ -143,3 +143,9 @@ mod issue14734 { //~^ needless_for_each } } + +fn issue15256() { + let vec: Vec<i32> = Vec::new(); + for v in vec.iter() { println!("{v}"); } + //~^ needless_for_each +} diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.rs b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs index d92f055d3f4..7e74d2b428f 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.rs +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs @@ -143,3 +143,9 @@ mod issue14734 { //~^ needless_for_each } } + +fn issue15256() { + let vec: Vec<i32> = Vec::new(); + vec.iter().for_each(|v| println!("{v}")); + //~^ needless_for_each +} diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr index f8014456097..204cfa36b02 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr @@ -148,5 +148,11 @@ error: needless use of `for_each` LL | rows.iter().for_each(|x| do_something(x, 1u8)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in rows.iter() { do_something(x, 1u8); }` -error: aborting due to 10 previous errors +error: needless use of `for_each` + --> tests/ui/needless_for_each_fixable.rs:149:5 + | +LL | vec.iter().for_each(|v| println!("{v}")); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for v in vec.iter() { println!("{v}"); }` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/needless_range_loop.rs b/src/tools/clippy/tests/ui/needless_range_loop.rs index 8a1c1be289c..70cf9fa7369 100644 --- a/src/tools/clippy/tests/ui/needless_range_loop.rs +++ b/src/tools/clippy/tests/ui/needless_range_loop.rs @@ -185,3 +185,28 @@ mod issue_2496 { unimplemented!() } } + +fn needless_loop() { + use std::hint::black_box; + let x = [0; 64]; + for i in 0..64 { + let y = [0; 64]; + + black_box(x[i]); + black_box(y[i]); + } + + for i in 0..64 { + black_box(x[i]); + black_box([0; 64][i]); + } + + for i in 0..64 { + black_box(x[i]); + black_box([1, 2, 3, 4, 5, 6, 7, 8][i]); + } + + for i in 0..64 { + black_box([1, 2, 3, 4, 5, 6, 7, 8][i]); + } +} diff --git a/src/tools/clippy/tests/ui/never_loop.rs b/src/tools/clippy/tests/ui/never_loop.rs index e0f54ef899b..48d4b8ad151 100644 --- a/src/tools/clippy/tests/ui/never_loop.rs +++ b/src/tools/clippy/tests/ui/never_loop.rs @@ -466,3 +466,35 @@ fn main() { test13(); test14(); } + +fn issue15059() { + 'a: for _ in 0..1 { + //~^ never_loop + break 'a; + } + + let mut b = 1; + 'a: for i in 0..1 { + //~^ never_loop + match i { + 0 => { + b *= 2; + break 'a; + }, + x => { + b += x; + break 'a; + }, + } + } + + #[allow(clippy::unused_unit)] + for v in 0..10 { + //~^ never_loop + break; + println!("{v}"); + // 
This is comment and should be kept + println!("This is a comment"); + () + } +} diff --git a/src/tools/clippy/tests/ui/never_loop.stderr b/src/tools/clippy/tests/ui/never_loop.stderr index bc9a7ec48b4..54b463266a3 100644 --- a/src/tools/clippy/tests/ui/never_loop.stderr +++ b/src/tools/clippy/tests/ui/never_loop.stderr @@ -176,8 +176,10 @@ LL | | } | help: if you need the first element of the iterator, try writing | -LL - for v in 0..10 { -LL + if let Some(v) = (0..10).next() { +LL ~ if let Some(v) = (0..10).next() { +LL | +LL ~ +LL ~ | error: this loop never actually loops @@ -232,5 +234,68 @@ LL | | break 'inner; LL | | } | |_________^ -error: aborting due to 21 previous errors +error: this loop never actually loops + --> tests/ui/never_loop.rs:471:5 + | +LL | / 'a: for _ in 0..1 { +LL | | +LL | | break 'a; +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(_) = (0..1).next() { +LL | +LL ~ + | + +error: this loop never actually loops + --> tests/ui/never_loop.rs:477:5 + | +LL | / 'a: for i in 0..1 { +LL | | +LL | | match i { +LL | | 0 => { +... | +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(i) = (0..1).next() { +LL | +... +LL | b *= 2; +LL ~ +LL | }, +LL | x => { +LL | b += x; +LL ~ + | + +error: this loop never actually loops + --> tests/ui/never_loop.rs:492:5 + | +LL | / for v in 0..10 { +LL | | +LL | | break; +LL | | println!("{v}"); +... | +LL | | () +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(v) = (0..10).next() { +LL | +LL ~ +LL ~ +LL | // This is comment and should be kept +LL ~ +LL ~ + | + +error: aborting due to 24 previous errors diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed index bcd2602edb6..0a8525a12f5 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.fixed +++ b/src/tools/clippy/tests/ui/or_fun_call.fixed @@ -283,6 +283,8 @@ mod issue8993 { let _ = Some(4).map_or_else(g, f); //~^ or_fun_call let _ = Some(4).map_or(0, f); + let _ = Some(4).map_or_else(|| "asd".to_string().len() as i32, f); + //~^ or_fun_call } } @@ -426,6 +428,8 @@ mod result_map_or { let _ = x.map_or_else(|_| g(), f); //~^ or_fun_call let _ = x.map_or(0, f); + let _ = x.map_or_else(|_| "asd".to_string().len() as i32, f); + //~^ or_fun_call } } diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs index 8d1202ebf91..b4f9b950a7f 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.rs +++ b/src/tools/clippy/tests/ui/or_fun_call.rs @@ -283,6 +283,8 @@ mod issue8993 { let _ = Some(4).map_or(g(), f); //~^ or_fun_call let _ = Some(4).map_or(0, f); + let _ = Some(4).map_or("asd".to_string().len() as i32, f); + //~^ or_fun_call } } @@ -426,6 +428,8 @@ mod result_map_or { let _ = x.map_or(g(), f); //~^ or_fun_call let _ = x.map_or(0, f); + let _ = x.map_or("asd".to_string().len() as i32, f); + //~^ or_fun_call } } diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr index 585ee2d0e19..3e4df772668 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.stderr +++ b/src/tools/clippy/tests/ui/or_fun_call.stderr @@ -154,62 +154,68 @@ error: function call inside of `map_or` LL | let _ = Some(4).map_or(g(), f); | ^^^^^^^^^^^^^^ help: try: `map_or_else(g, f)` +error: function call inside of `map_or` + --> tests/ui/or_fun_call.rs:286:25 + | +LL | let _ = 
Some(4).map_or("asd".to_string().len() as i32, f); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|| "asd".to_string().len() as i32, f)` + error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:315:18 + --> tests/ui/or_fun_call.rs:317:18 | LL | with_new.unwrap_or_else(Vec::new); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:319:28 + --> tests/ui/or_fun_call.rs:321:28 | LL | with_default_trait.unwrap_or_else(Default::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:323:27 + --> tests/ui/or_fun_call.rs:325:27 | LL | with_default_type.unwrap_or_else(u64::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:327:22 + --> tests/ui/or_fun_call.rs:329:22 | LL | real_default.unwrap_or_else(<FakeDefault as Default>::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:331:23 + --> tests/ui/or_fun_call.rs:333:23 | LL | map.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:335:25 + --> tests/ui/or_fun_call.rs:337:25 | LL | btree.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:339:25 + --> tests/ui/or_fun_call.rs:341:25 | LL | let _ = stringy.unwrap_or_else(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:381:17 + --> tests/ui/or_fun_call.rs:383:17 | LL | let _ = opt.unwrap_or({ f() }); // suggest `.unwrap_or_else(f)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(f)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:386:17 + --> tests/ui/or_fun_call.rs:388:17 | LL | let _ = opt.unwrap_or(f() + 1); // suggest `.unwrap_or_else(|| f() + 1)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| f() + 1)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:391:17 + --> tests/ui/or_fun_call.rs:393:17 | LL | let _ = opt.unwrap_or({ | _________________^ @@ -229,52 +235,58 @@ LL ~ }); | error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:397:17 + --> tests/ui/or_fun_call.rs:399:17 | LL | let _ = opt.map_or(f() + 1, |v| v); // suggest `.map_or_else(|| f() + 1, |v| v)` | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|| f() + 1, |v| v)` error: use of `unwrap_or` to construct default value - --> tests/ui/or_fun_call.rs:402:17 + --> tests/ui/or_fun_call.rs:404:17 | LL | let _ = opt.unwrap_or({ i32::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:409:21 + --> tests/ui/or_fun_call.rs:411:21 | LL | let _ = opt_foo.unwrap_or(Foo { val: String::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| Foo { val: String::default() })` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:424:19 + --> tests/ui/or_fun_call.rs:426:19 | LL | let _ = 
x.map_or(g(), |v| v); | ^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|_| g(), |v| v)` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:426:19 + --> tests/ui/or_fun_call.rs:428:19 | LL | let _ = x.map_or(g(), f); | ^^^^^^^^^^^^^^ help: try: `map_or_else(|_| g(), f)` +error: function call inside of `map_or` + --> tests/ui/or_fun_call.rs:431:19 + | +LL | let _ = x.map_or("asd".to_string().len() as i32, f); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|_| "asd".to_string().len() as i32, f)` + error: function call inside of `get_or_insert` - --> tests/ui/or_fun_call.rs:438:15 + --> tests/ui/or_fun_call.rs:442:15 | LL | let _ = x.get_or_insert(g()); | ^^^^^^^^^^^^^^^^^^ help: try: `get_or_insert_with(g)` error: function call inside of `and` - --> tests/ui/or_fun_call.rs:448:15 + --> tests/ui/or_fun_call.rs:452:15 | LL | let _ = x.and(g()); | ^^^^^^^^ help: try: `and_then(|_| g())` error: function call inside of `and` - --> tests/ui/or_fun_call.rs:458:15 + --> tests/ui/or_fun_call.rs:462:15 | LL | let _ = x.and(g()); | ^^^^^^^^ help: try: `and_then(|_| g())` -error: aborting due to 43 previous errors +error: aborting due to 45 previous errors diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs new file mode 100644 index 00000000000..cd27c5c74aa --- /dev/null +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs @@ -0,0 +1,13 @@ +//! **FAKE** external macro crate. + +#[macro_export] +macro_rules! macro_with_match { + ( $p:pat ) => { + let something = (); + + match &something { + $p => true, + _ => false, + } + }; +} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs index 49ea1d3f7a6..aa988a577df 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs @@ -6,6 +6,9 @@ clippy::single_match )] +//@aux-build:external.rs +use external::macro_with_match; + fn main() {} fn syntax_match() { @@ -159,3 +162,9 @@ fn macro_expansion() { let value = &Some(23); matching_macro!(value); } + +fn external_macro_expansion() { + macro_with_match! 
{ + () + }; +} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr index cd604d604c1..636841e0a21 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:16:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:19:9 | LL | Some(_) => (), | ^^^^^^^ @@ -9,7 +9,7 @@ LL | Some(_) => (), = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:36:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:39:12 | LL | if let Some(_) = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let Some(_) = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:48:15 + --> tests/ui/pattern_type_mismatch/syntax.rs:51:15 | LL | while let Some(_) = ref_value { | ^^^^^^^ @@ -25,7 +25,7 @@ LL | while let Some(_) = ref_value { = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:68:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:71:9 | LL | for (_a, _b) in slice.iter() {} | ^^^^^^^^ @@ -33,7 +33,7 @@ LL | for (_a, _b) in slice.iter() {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:79:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:82:9 | LL | let (_n, _m) = ref_value; | ^^^^^^^^ @@ -41,7 +41,7 @@ LL | let (_n, _m) = ref_value; = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:89:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:92:12 | LL | fn foo((_a, _b): &(i32, i32)) {} | ^^^^^^^^ @@ -49,7 +49,7 @@ LL | fn foo((_a, _b): &(i32, i32)) {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:104:10 + --> tests/ui/pattern_type_mismatch/syntax.rs:107:10 | LL | foo(|(_a, _b)| ()); | ^^^^^^^^ @@ -57,7 +57,7 @@ LL | foo(|(_a, _b)| ()); = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:121:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:124:9 | LL | Some(_) => (), | ^^^^^^^ @@ -65,7 +65,7 @@ LL | Some(_) => (), = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:142:17 + --> tests/ui/pattern_type_mismatch/syntax.rs:145:17 | LL | Some(_) => (), | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs 
b/src/tools/clippy/tests/ui/ptr_arg.rs index 578641e910d..be14e0762ff 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.rs +++ b/src/tools/clippy/tests/ui/ptr_arg.rs @@ -123,7 +123,7 @@ fn test_cow_with_ref(c: &Cow<[i32]>) {} //~^ ptr_arg fn test_cow(c: Cow<[i32]>) { - let _c = c; + let d = c; } trait Foo2 { @@ -141,36 +141,36 @@ mod issue_5644 { use std::path::PathBuf; fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } - fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {} + fn some_allowed(#[allow(clippy::ptr_arg)] v: &Vec<u32>, s: &String) {} //~^ ptr_arg struct S; impl S { fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } } trait T { fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } } @@ -182,22 +182,22 @@ mod issue6509 { fn foo_vec(vec: &Vec<u8>) { //~^ ptr_arg - let _ = vec.clone().pop(); - let _ = vec.clone().clone(); + let a = vec.clone().pop(); + let b = vec.clone().clone(); } fn foo_path(path: &PathBuf) { //~^ ptr_arg - let _ = path.clone().pop(); - let _ = path.clone().clone(); + let c = path.clone().pop(); + let d = path.clone().clone(); } - fn foo_str(str: &PathBuf) { + fn foo_str(str: &String) { //~^ ptr_arg - let _ = str.clone().pop(); - let _ = str.clone().clone(); + let e = str.clone().pop(); + let f = str.clone().clone(); } } @@ -340,8 +340,8 @@ mod issue_13308 { ToOwned::clone_into(source, destination); } - fn h1(_: &<String as Deref>::Target) {} - fn h2<T: Deref>(_: T, _: &T::Target) {} + fn h1(x: &<String as Deref>::Target) {} + fn h2<T: Deref>(x: T, y: &T::Target) {} // Other cases that are still ok to lint and ideally shouldn't regress fn good(v1: &String, v2: &String) { @@ -352,3 +352,91 @@ mod issue_13308 { h2(String::new(), v2); } } + +mod issue_13489_and_13728 { + // This is a no-lint from now on. + fn foo(_x: &Vec<i32>) { + todo!(); + } + + // But this still gives us a lint. + fn foo_used(x: &Vec<i32>) { + //~^ ptr_arg + + todo!(); + } + + // This is also a no-lint from now on. + fn foo_local(x: &Vec<i32>) { + let _y = x; + + todo!(); + } + + // But this still gives us a lint. + fn foo_local_used(x: &Vec<i32>) { + //~^ ptr_arg + + let y = x; + + todo!(); + } + + // This only lints once from now on. + fn foofoo(_x: &Vec<i32>, y: &String) { + //~^ ptr_arg + + todo!(); + } + + // And this is also a no-lint from now on. 
+ fn foofoo_local(_x: &Vec<i32>, y: &String) { + let _z = y; + + todo!(); + } +} + +mod issue_13489_and_13728_mut { + // This is a no-lint from now on. + fn bar(_x: &mut Vec<u32>) { + todo!() + } + + // But this still gives us a lint. + fn bar_used(x: &mut Vec<u32>) { + //~^ ptr_arg + + todo!() + } + + // This is also a no-lint from now on. + fn bar_local(x: &mut Vec<u32>) { + let _y = x; + + todo!() + } + + // But this still gives us a lint. + fn bar_local_used(x: &mut Vec<u32>) { + //~^ ptr_arg + + let y = x; + + todo!() + } + + // This only lints once from now on. + fn barbar(_x: &mut Vec<u32>, y: &mut String) { + //~^ ptr_arg + + todo!() + } + + // And this is also a no-lint from now on. + fn barbar_local(_x: &mut Vec<u32>, y: &mut String) { + let _z = y; + + todo!() + } +} diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr index fd9ceddfe11..87235057349 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.stderr +++ b/src/tools/clippy/tests/ui/ptr_arg.stderr @@ -127,10 +127,10 @@ LL | fn test_cow_with_ref(c: &Cow<[i32]>) {} | ^^^^^^^^^^^ help: change this to: `&[i32]` error: writing `&String` instead of `&str` involves a new object where a slice will do - --> tests/ui/ptr_arg.rs:152:66 + --> tests/ui/ptr_arg.rs:152:64 | -LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {} - | ^^^^^^^ help: change this to: `&str` +LL | fn some_allowed(#[allow(clippy::ptr_arg)] v: &Vec<u32>, s: &String) {} + | ^^^^^^^ help: change this to: `&str` error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do --> tests/ui/ptr_arg.rs:182:21 @@ -143,8 +143,8 @@ help: change this to LL ~ fn foo_vec(vec: &[u8]) { LL | LL | -LL ~ let _ = vec.to_owned().pop(); -LL ~ let _ = vec.to_owned().clone(); +LL ~ let a = vec.to_owned().pop(); +LL ~ let b = vec.to_owned().clone(); | error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do @@ -158,23 +158,23 @@ help: change this to LL ~ fn foo_path(path: &Path) { LL | LL | -LL ~ let _ = path.to_path_buf().pop(); -LL ~ let _ = path.to_path_buf().clone(); +LL ~ let c = path.to_path_buf().pop(); +LL ~ let d = path.to_path_buf().clone(); | -error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do +error: writing `&String` instead of `&str` involves a new object where a slice will do --> tests/ui/ptr_arg.rs:196:21 | -LL | fn foo_str(str: &PathBuf) { - | ^^^^^^^^ +LL | fn foo_str(str: &String) { + | ^^^^^^^ | help: change this to | -LL ~ fn foo_str(str: &Path) { +LL ~ fn foo_str(str: &str) { LL | LL | -LL ~ let _ = str.to_path_buf().pop(); -LL ~ let _ = str.to_path_buf().clone(); +LL ~ let e = str.to_owned().pop(); +LL ~ let f = str.to_owned().clone(); | error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do @@ -231,6 +231,42 @@ error: writing `&String` instead of `&str` involves a new object where a slice w LL | fn good(v1: &String, v2: &String) { | ^^^^^^^ help: change this to: `&str` +error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:363:20 + | +LL | fn foo_used(x: &Vec<i32>) { + | ^^^^^^^^^ help: change this to: `&[i32]` + +error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:377:26 + | +LL | fn foo_local_used(x: &Vec<i32>) { + | ^^^^^^^^^ help: change this to: `&[i32]` + +error: writing `&String` instead of `&str` involves a new object where a slice will do + --> 
tests/ui/ptr_arg.rs:386:33 + | +LL | fn foofoo(_x: &Vec<i32>, y: &String) { + | ^^^^^^^ help: change this to: `&str` + +error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:407:20 + | +LL | fn bar_used(x: &mut Vec<u32>) { + | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` + +error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:421:26 + | +LL | fn bar_local_used(x: &mut Vec<u32>) { + | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` + +error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:430:37 + | +LL | fn barbar(_x: &mut Vec<u32>, y: &mut String) { + | ^^^^^^^^^^^ help: change this to: `&mut str` + error: eliding a lifetime that's named elsewhere is confusing --> tests/ui/ptr_arg.rs:314:36 | @@ -248,5 +284,5 @@ help: consistently use `'a` LL | fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &'a str { | ++ -error: aborting due to 27 previous errors +error: aborting due to 33 previous errors diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed index 2033f31c1ee..71fea6144e7 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed @@ -219,3 +219,11 @@ mod null_entire_infer { //~^ ptr_as_ptr } } + +#[allow(clippy::transmute_null_to_fn)] +fn issue15283() { + unsafe { + let _: fn() = std::mem::transmute(std::ptr::null::<u8>()); + //~^ ptr_as_ptr + } +} diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs index 224d09b0eb6..4d507592a1e 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.rs +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs @@ -219,3 +219,11 @@ mod null_entire_infer { //~^ ptr_as_ptr } } + +#[allow(clippy::transmute_null_to_fn)] +fn issue15283() { + unsafe { + let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const u8); + //~^ ptr_as_ptr + } +} diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr index 66dae8e0135..adad159bb0f 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr @@ -201,5 +201,11 @@ error: `as` casting between raw pointers without changing their constness LL | core::ptr::null() as _ | ^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null()` -error: aborting due to 33 previous errors +error: `as` casting between raw pointers without changing their constness + --> tests/ui/ptr_as_ptr.rs:226:43 + | +LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null::<u8>()` + +error: aborting due to 34 previous errors diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.fixed b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed index ee716ef3a6a..5c6da6d5aed 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.fixed +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed @@ -1,5 +1,9 @@ +#![warn(clippy::range_minus_one, clippy::range_plus_one)] #![allow(unused_parens)] #![allow(clippy::iter_with_drain)] + +use std::ops::{Index, IndexMut, Range, RangeBounds, RangeInclusive}; + fn f() -> usize { 42 } @@ -20,8 +24,6 @@ macro_rules! 
macro_minus_one { }; } -#[warn(clippy::range_plus_one)] -#[warn(clippy::range_minus_one)] fn main() { for _ in 0..2 {} for _ in 0..=2 {} @@ -45,15 +47,13 @@ fn main() { //~^ range_plus_one for _ in 0..=(1 + f()) {} + // Those are not linted, as in the general case we cannot be sure that the exact type won't be + // important. let _ = ..11 - 1; - let _ = ..11; - //~^ range_minus_one - let _ = ..11; - //~^ range_minus_one - let _ = (1..=11); - //~^ range_plus_one - let _ = ((f() + 1)..=f()); - //~^ range_plus_one + let _ = ..=11 - 1; + let _ = ..=(11 - 1); + let _ = (1..11 + 1); + let _ = (f() + 1)..(f() + 1); const ONE: usize = 1; // integer consts are linted, too @@ -65,4 +65,118 @@ fn main() { macro_plus_one!(5); macro_minus_one!(5); + + // As an instance of `Iterator` + (1..=10).for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `IntoIterator` + #[allow(clippy::useless_conversion)] + (1..=10).into_iter().for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `RangeBounds` + { + let _ = (1..=10).start_bound(); + //~^ range_plus_one + } + + // As a `SliceIndex` + let a = [10, 20, 30]; + let _ = &a[1..=1]; + //~^ range_plus_one + + // As method call argument + vec.drain(2..=3); + //~^ range_plus_one + + // As function call argument + take_arg(10..=20); + //~^ range_plus_one + + // As function call argument inside a block + take_arg({ 10..=20 }); + //~^ range_plus_one + + // Do not lint in case types are unified + take_arg(if true { 10..20 } else { 10..20 + 1 }); + + // Do not lint, as the same type is used for both parameters + take_args(10..20 + 1, 10..21); + + // Do not lint, as the range type is also used indirectly in second parameter + take_arg_and_struct(10..20 + 1, S { t: 1..2 }); + + // As target of `IndexMut` + let mut a = [10, 20, 30]; + a[0..=2][0] = 1; + //~^ range_plus_one +} + +fn take_arg<T: Iterator<Item = u32>>(_: T) {} +fn take_args<T: Iterator<Item = u32>>(_: T, _: T) {} + +struct S<T> { + t: T, +} +fn take_arg_and_struct<T: Iterator<Item = u32>>(_: T, _: S<T>) {} + +fn no_index_by_range_inclusive(a: usize) { + struct S; + + impl Index<Range<usize>> for S { + type Output = [u32]; + fn index(&self, _: Range<usize>) -> &Self::Output { + &[] + } + } + + _ = &S[0..a + 1]; +} + +fn no_index_mut_with_switched_range(a: usize) { + struct S(u32); + + impl Index<Range<usize>> for S { + type Output = u32; + fn index(&self, _: Range<usize>) -> &Self::Output { + &self.0 + } + } + + impl IndexMut<Range<usize>> for S { + fn index_mut(&mut self, _: Range<usize>) -> &mut Self::Output { + &mut self.0 + } + } + + impl Index<RangeInclusive<usize>> for S { + type Output = u32; + fn index(&self, _: RangeInclusive<usize>) -> &Self::Output { + &self.0 + } + } + + S(2)[0..a + 1] = 3; +} + +fn issue9908() { + // Simplified test case + let _ = || 0..=1; + + // Original test case + let full_length = 1024; + let range = { + // do some stuff, omit here + None + }; + + let range = range.map(|(s, t)| s..=t).unwrap_or(0..=(full_length - 1)); + + assert_eq!(range, 0..=1023); +} + +fn issue9908_2(n: usize) -> usize { + (1..n).sum() + //~^ range_minus_one } diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.rs b/src/tools/clippy/tests/ui/range_plus_minus_one.rs index f2d5ae2c150..7172da6034b 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.rs +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.rs @@ -1,5 +1,9 @@ +#![warn(clippy::range_minus_one, clippy::range_plus_one)] #![allow(unused_parens)] #![allow(clippy::iter_with_drain)] + +use std::ops::{Index, IndexMut, 
Range, RangeBounds, RangeInclusive}; + fn f() -> usize { 42 } @@ -20,8 +24,6 @@ macro_rules! macro_minus_one { }; } -#[warn(clippy::range_plus_one)] -#[warn(clippy::range_minus_one)] fn main() { for _ in 0..2 {} for _ in 0..=2 {} @@ -45,15 +47,13 @@ fn main() { //~^ range_plus_one for _ in 0..=(1 + f()) {} + // Those are not linted, as in the general case we cannot be sure that the exact type won't be + // important. let _ = ..11 - 1; let _ = ..=11 - 1; - //~^ range_minus_one let _ = ..=(11 - 1); - //~^ range_minus_one let _ = (1..11 + 1); - //~^ range_plus_one let _ = (f() + 1)..(f() + 1); - //~^ range_plus_one const ONE: usize = 1; // integer consts are linted, too @@ -65,4 +65,118 @@ fn main() { macro_plus_one!(5); macro_minus_one!(5); + + // As an instance of `Iterator` + (1..10 + 1).for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `IntoIterator` + #[allow(clippy::useless_conversion)] + (1..10 + 1).into_iter().for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `RangeBounds` + { + let _ = (1..10 + 1).start_bound(); + //~^ range_plus_one + } + + // As a `SliceIndex` + let a = [10, 20, 30]; + let _ = &a[1..1 + 1]; + //~^ range_plus_one + + // As method call argument + vec.drain(2..3 + 1); + //~^ range_plus_one + + // As function call argument + take_arg(10..20 + 1); + //~^ range_plus_one + + // As function call argument inside a block + take_arg({ 10..20 + 1 }); + //~^ range_plus_one + + // Do not lint in case types are unified + take_arg(if true { 10..20 } else { 10..20 + 1 }); + + // Do not lint, as the same type is used for both parameters + take_args(10..20 + 1, 10..21); + + // Do not lint, as the range type is also used indirectly in second parameter + take_arg_and_struct(10..20 + 1, S { t: 1..2 }); + + // As target of `IndexMut` + let mut a = [10, 20, 30]; + a[0..2 + 1][0] = 1; + //~^ range_plus_one +} + +fn take_arg<T: Iterator<Item = u32>>(_: T) {} +fn take_args<T: Iterator<Item = u32>>(_: T, _: T) {} + +struct S<T> { + t: T, +} +fn take_arg_and_struct<T: Iterator<Item = u32>>(_: T, _: S<T>) {} + +fn no_index_by_range_inclusive(a: usize) { + struct S; + + impl Index<Range<usize>> for S { + type Output = [u32]; + fn index(&self, _: Range<usize>) -> &Self::Output { + &[] + } + } + + _ = &S[0..a + 1]; +} + +fn no_index_mut_with_switched_range(a: usize) { + struct S(u32); + + impl Index<Range<usize>> for S { + type Output = u32; + fn index(&self, _: Range<usize>) -> &Self::Output { + &self.0 + } + } + + impl IndexMut<Range<usize>> for S { + fn index_mut(&mut self, _: Range<usize>) -> &mut Self::Output { + &mut self.0 + } + } + + impl Index<RangeInclusive<usize>> for S { + type Output = u32; + fn index(&self, _: RangeInclusive<usize>) -> &Self::Output { + &self.0 + } + } + + S(2)[0..a + 1] = 3; +} + +fn issue9908() { + // Simplified test case + let _ = || 0..=1; + + // Original test case + let full_length = 1024; + let range = { + // do some stuff, omit here + None + }; + + let range = range.map(|(s, t)| s..=t).unwrap_or(0..=(full_length - 1)); + + assert_eq!(range, 0..=1023); +} + +fn issue9908_2(n: usize) -> usize { + (1..=n - 1).sum() + //~^ range_minus_one } diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.stderr b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr index 9b23a8b8c0b..a419d935bd6 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.stderr +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr @@ -1,5 +1,5 @@ error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:29:14 + --> 
tests/ui/range_plus_minus_one.rs:31:14 | LL | for _ in 0..3 + 1 {} | ^^^^^^^^ help: use: `0..=3` @@ -8,55 +8,85 @@ LL | for _ in 0..3 + 1 {} = help: to override `-D warnings` add `#[allow(clippy::range_plus_one)]` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:33:14 + --> tests/ui/range_plus_minus_one.rs:35:14 | LL | for _ in 0..1 + 5 {} | ^^^^^^^^ help: use: `0..=5` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:37:14 + --> tests/ui/range_plus_minus_one.rs:39:14 | LL | for _ in 1..1 + 1 {} | ^^^^^^^^ help: use: `1..=1` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:44:14 + --> tests/ui/range_plus_minus_one.rs:46:14 | LL | for _ in 0..(1 + f()) {} | ^^^^^^^^^^^^ help: use: `0..=f()` -error: an exclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:49:13 +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:60:14 | -LL | let _ = ..=11 - 1; - | ^^^^^^^^^ help: use: `..11` +LL | for _ in 1..ONE + ONE {} + | ^^^^^^^^^^^^ help: use: `1..=ONE` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:70:5 | - = note: `-D clippy::range-minus-one` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::range_minus_one)]` +LL | (1..10 + 1).for_each(|_| {}); + | ^^^^^^^^^^^ help: use: `(1..=10)` -error: an exclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:51:13 +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:75:5 | -LL | let _ = ..=(11 - 1); - | ^^^^^^^^^^^ help: use: `..11` +LL | (1..10 + 1).into_iter().for_each(|_| {}); + | ^^^^^^^^^^^ help: use: `(1..=10)` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:53:13 + --> tests/ui/range_plus_minus_one.rs:80:17 | -LL | let _ = (1..11 + 1); - | ^^^^^^^^^^^ help: use: `(1..=11)` +LL | let _ = (1..10 + 1).start_bound(); + | ^^^^^^^^^^^ help: use: `(1..=10)` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:55:13 + --> tests/ui/range_plus_minus_one.rs:86:16 | -LL | let _ = (f() + 1)..(f() + 1); - | ^^^^^^^^^^^^^^^^^^^^ help: use: `((f() + 1)..=f())` +LL | let _ = &a[1..1 + 1]; + | ^^^^^^^^ help: use: `1..=1` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:60:14 + --> tests/ui/range_plus_minus_one.rs:90:15 | -LL | for _ in 1..ONE + ONE {} - | ^^^^^^^^^^^^ help: use: `1..=ONE` +LL | vec.drain(2..3 + 1); + | ^^^^^^^^ help: use: `2..=3` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:94:14 + | +LL | take_arg(10..20 + 1); + | ^^^^^^^^^^ help: use: `10..=20` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:98:16 + | +LL | take_arg({ 10..20 + 1 }); + | ^^^^^^^^^^ help: use: `10..=20` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:112:7 + | +LL | a[0..2 + 1][0] = 1; + | ^^^^^^^^ help: use: `0..=2` + +error: an exclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:180:5 + | +LL | (1..=n - 1).sum() + | ^^^^^^^^^^^ help: use: `(1..n)` + | + = note: `-D clippy::range-minus-one` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::range_minus_one)]` -error: aborting due to 9 previous errors +error: aborting due to 14 previous errors diff --git 
a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed new file mode 100644 index 00000000000..7a9f8063096 --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed @@ -0,0 +1,53 @@ +#![feature(deref_patterns)] +#![allow( + incomplete_features, + clippy::eq_op, + clippy::op_ref, + clippy::deref_addrof, + clippy::borrow_deref_ref, + clippy::needless_if +)] +#![deny(clippy::single_match_else)] + +fn string() { + if *"" == *"" {} + + if *&*&*&*"" == *"" {} + + if ***&&"" == *"" {} + + if *&*&*"" == *"" {} + + if **&&*"" == *"" {} +} + +fn int() { + if &&&1 == &&&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &&1 == &&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &&1 == &&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &1 == &2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &1 == &2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if 1 == 2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if 1 == 2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else +} diff --git a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs new file mode 100644 index 00000000000..ef19c7cbde2 --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs @@ -0,0 +1,94 @@ +#![feature(deref_patterns)] +#![allow( + incomplete_features, + clippy::eq_op, + clippy::op_ref, + clippy::deref_addrof, + clippy::borrow_deref_ref, + clippy::needless_if +)] +#![deny(clippy::single_match_else)] + +fn string() { + match *"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match *&*&*&*"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match ***&&"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match *&*&*"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match **&&*"" { + //~^ single_match + "" => {}, + _ => {}, + } +} + +fn int() { + match &&&1 { + &&&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + &&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + &&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + &2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + &2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + 2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + 2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else +} diff --git a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr new file mode 100644 index 00000000000..a47df55459b --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr @@ -0,0 +1,188 @@ +error: you seem to be trying to use `match` for an equality check. 
Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:13:5 + | +LL | / match *"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + = note: `-D clippy::single-match` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::single_match)]` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:19:5 + | +LL | / match *&*&*&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *&*&*&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:25:5 + | +LL | / match ***&&"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if ***&&"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:31:5 + | +LL | / match *&*&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *&*&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:37:5 + | +LL | / match **&&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if **&&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:45:5 + | +LL | / match &&&1 { +LL | | &&&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +note: the lint level is defined here + --> tests/ui/single_match_else_deref_patterns.rs:10:9 + | +LL | #![deny(clippy::single_match_else)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^ +help: try + | +LL ~ if &&&1 == &&&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:52:5 + | +LL | / match &&&1 { +LL | | &&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &&1 == &&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:59:5 + | +LL | / match &&1 { +LL | | &&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &&1 == &&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:66:5 + | +LL | / match &&&1 { +LL | | &2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &1 == &2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:73:5 + | +LL | / match &&1 { +LL | | &2 => unreachable!(), +LL | | _ => { +... 
| +LL | | } + | |_____^ + | +help: try + | +LL ~ if &1 == &2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:80:5 + | +LL | / match &&&1 { +LL | | 2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if 1 == 2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:87:5 + | +LL | / match &&1 { +LL | | 2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if 1 == 2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: aborting due to 12 previous errors + diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed b/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed index 31ed1cf03a2..17c1b541f77 100644 --- a/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed +++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.fixed @@ -1,7 +1,5 @@ #![warn(clippy::strlen_on_c_strings)] #![allow(dead_code, clippy::manual_c_str_literals)] -#![feature(rustc_private)] -extern crate libc; #[allow(unused)] use libc::strlen; diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.rs b/src/tools/clippy/tests/ui/strlen_on_c_strings.rs index 0f3798c9fd8..c641422f5df 100644 --- a/src/tools/clippy/tests/ui/strlen_on_c_strings.rs +++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.rs @@ -1,7 +1,5 @@ #![warn(clippy::strlen_on_c_strings)] #![allow(dead_code, clippy::manual_c_str_literals)] -#![feature(rustc_private)] -extern crate libc; #[allow(unused)] use libc::strlen; diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr index b8619fa2df3..84a93b99ee3 100644 --- a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr +++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr @@ -1,5 +1,5 @@ error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:13:13 + --> tests/ui/strlen_on_c_strings.rs:11:13 | LL | let _ = unsafe { libc::strlen(cstring.as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstring.as_bytes().len()` @@ -8,37 +8,37 @@ LL | let _ = unsafe { libc::strlen(cstring.as_ptr()) }; = help: to override `-D warnings` add `#[allow(clippy::strlen_on_c_strings)]` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:18:13 + --> tests/ui/strlen_on_c_strings.rs:16:13 | LL | let _ = unsafe { libc::strlen(cstr.as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:21:13 + --> tests/ui/strlen_on_c_strings.rs:19:13 | LL | let _ = unsafe { strlen(cstr.as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:25:22 + --> tests/ui/strlen_on_c_strings.rs:23:22 | LL | let _ = unsafe { strlen((*pcstr).as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*pcstr).to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:31:22 + --> tests/ui/strlen_on_c_strings.rs:29:22 | LL | let _ = unsafe { strlen(unsafe_identity(cstr).as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: 
`unsafe_identity(cstr).to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:33:13 + --> tests/ui/strlen_on_c_strings.rs:31:13 | LL | let _ = unsafe { strlen(unsafe { unsafe_identity(cstr) }.as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe { unsafe_identity(cstr) }.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value - --> tests/ui/strlen_on_c_strings.rs:37:22 + --> tests/ui/strlen_on_c_strings.rs:35:22 | LL | let _ = unsafe { strlen(f(cstr).as_ptr()) }; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `f(cstr).to_bytes().len()` diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs index 14371bc203b..d0022f3b6d9 100644 --- a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs +++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs @@ -82,3 +82,32 @@ impl H { } fn main() {} + +mod issue15120 { + macro_rules! uns { + ($e:expr) => { + unsafe { $e } + }; + } + + #[derive(serde::Deserialize)] + struct Foo; + + impl Foo { + fn foo(&self) { + // Do not lint if `unsafe` comes from the `core::pin::pin!()` macro + std::pin::pin!(()); + } + } + + //~v unsafe_derive_deserialize + #[derive(serde::Deserialize)] + struct Bar; + + impl Bar { + fn bar(&self) { + // Lint if `unsafe` comes from the another macro + _ = uns!(42); + } + } +} diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr index f2d4429f707..4b5dd6e61fc 100644 --- a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr +++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr @@ -36,5 +36,14 @@ LL | #[derive(Deserialize)] = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info) -error: aborting due to 4 previous errors +error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe` + --> tests/ui/unsafe_derive_deserialize.rs:104:14 + | +LL | #[derive(serde::Deserialize)] + | ^^^^^^^^^^^^^^^^^^ + | + = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html + = note: this error originates in the derive macro `serde::Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/unused_async.rs b/src/tools/clippy/tests/ui/unused_async.rs index 433459253dd..7a0be825a2d 100644 --- a/src/tools/clippy/tests/ui/unused_async.rs +++ b/src/tools/clippy/tests/ui/unused_async.rs @@ -127,3 +127,13 @@ mod issue14704 { async fn cancel(self: Arc<Self>) {} } } + +mod issue15305 { + async fn todo_task() -> Result<(), String> { + todo!("Implement task"); + } + + async fn unimplemented_task() -> Result<(), String> { + unimplemented!("Implement task"); + } +} diff --git a/src/tools/clippy/tests/ui/unused_trait_names.fixed b/src/tools/clippy/tests/ui/unused_trait_names.fixed index 17e32ddfd9d..6abbed01bb0 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.fixed +++ b/src/tools/clippy/tests/ui/unused_trait_names.fixed @@ -200,11 +200,11 @@ fn msrv_1_33() { MyStruct.do_things(); } +// Linting inside macro expansion is no longer supported mod lint_inside_macro_expansion_bad { macro_rules! 
foo { () => { - use std::any::Any as _; - //~^ unused_trait_names + use std::any::Any; fn bar() { "bar".type_id(); } diff --git a/src/tools/clippy/tests/ui/unused_trait_names.rs b/src/tools/clippy/tests/ui/unused_trait_names.rs index 3cf8597e535..4a06f062dc3 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.rs +++ b/src/tools/clippy/tests/ui/unused_trait_names.rs @@ -200,11 +200,11 @@ fn msrv_1_33() { MyStruct.do_things(); } +// Linting inside macro expansion is no longer supported mod lint_inside_macro_expansion_bad { macro_rules! foo { () => { use std::any::Any; - //~^ unused_trait_names fn bar() { "bar".type_id(); } diff --git a/src/tools/clippy/tests/ui/unused_trait_names.stderr b/src/tools/clippy/tests/ui/unused_trait_names.stderr index 3183289d853..28067e17414 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.stderr +++ b/src/tools/clippy/tests/ui/unused_trait_names.stderr @@ -58,16 +58,5 @@ error: importing trait that is only used anonymously LL | use simple_trait::{MyStruct, MyTrait}; | ^^^^^^^ help: use: `MyTrait as _` -error: importing trait that is only used anonymously - --> tests/ui/unused_trait_names.rs:206:27 - | -LL | use std::any::Any; - | ^^^ help: use: `Any as _` -... -LL | foo!(); - | ------ in this macro invocation - | - = note: this error originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info) - -error: aborting due to 10 previous errors +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/used_underscore_items.rs b/src/tools/clippy/tests/ui/used_underscore_items.rs index 7e8289f1406..aecdd32693c 100644 --- a/src/tools/clippy/tests/ui/used_underscore_items.rs +++ b/src/tools/clippy/tests/ui/used_underscore_items.rs @@ -62,13 +62,13 @@ fn main() { //~^ used_underscore_items } -// should not lint exteranl crate. +// should not lint external crate. // user cannot control how others name their items fn external_item_call() { let foo_struct3 = external_item::_ExternalStruct {}; foo_struct3._foo(); - external_item::_exernal_foo(); + external_item::_external_foo(); } // should not lint foreign functions. diff --git a/src/tools/clippy/tests/ui/useless_attribute.fixed b/src/tools/clippy/tests/ui/useless_attribute.fixed index a96c8f46f55..be4fb55ddfb 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.fixed +++ b/src/tools/clippy/tests/ui/useless_attribute.fixed @@ -13,7 +13,7 @@ #[allow(unused_imports)] #[allow(unused_extern_crates)] #[macro_use] -extern crate rustc_middle; +extern crate regex as regex_crate; #[macro_use] extern crate proc_macro_derive; @@ -146,3 +146,15 @@ pub mod unknown_namespace { #[allow(rustc::non_glob_import_of_type_ir_inherent)] use some_module::SomeType; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/15316 +pub mod redundant_imports_issue { + macro_rules! 
empty { + () => {}; + } + + #[expect(redundant_imports)] + pub(crate) use empty; + + empty!(); +} diff --git a/src/tools/clippy/tests/ui/useless_attribute.rs b/src/tools/clippy/tests/ui/useless_attribute.rs index b26410134bb..5a1bcf97a5b 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.rs +++ b/src/tools/clippy/tests/ui/useless_attribute.rs @@ -13,7 +13,7 @@ #[allow(unused_imports)] #[allow(unused_extern_crates)] #[macro_use] -extern crate rustc_middle; +extern crate regex as regex_crate; #[macro_use] extern crate proc_macro_derive; @@ -146,3 +146,15 @@ pub mod unknown_namespace { #[allow(rustc::non_glob_import_of_type_ir_inherent)] use some_module::SomeType; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/15316 +pub mod redundant_imports_issue { + macro_rules! empty { + () => {}; + } + + #[expect(redundant_imports)] + pub(crate) use empty; + + empty!(); +} diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml index 805baf2af6d..a62b6269a3b 100644 --- a/src/tools/clippy/triagebot.toml +++ b/src/tools/clippy/triagebot.toml @@ -54,6 +54,7 @@ contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIB users_on_vacation = [ "matthiaskrgr", "Manishearth", + "samueltardieu", ] [assign.owners] diff --git a/src/tools/clippy/util/gh-pages/index_template.html b/src/tools/clippy/util/gh-pages/index_template.html index 6f380ec8fee..5d65ea585df 100644 --- a/src/tools/clippy/util/gh-pages/index_template.html +++ b/src/tools/clippy/util/gh-pages/index_template.html @@ -49,9 +49,7 @@ Otherwise, have a great day =^.^= <script src="theme.js"></script> {# #} <div class="container"> {# #} - <div class="page-header"> {# #} - <h1>Clippy Lints <span id="lint-count" class="badge"></span></h1> {# #} - </div> {# #} + <h1 class="page-header">Clippy Lints <span id="lint-count" class="badge"></span></h1> {# #} <noscript> {# #} <div class="alert alert-danger" role="alert"> {# #} @@ -59,143 +57,141 @@ Otherwise, have a great day =^.^= </div> {# #} </noscript> {# #} - <div> {# #} - <div class="panel panel-default" id="menu-filters"> {# #} - <div class="panel-body row"> {# #} - <div id="upper-filters" class="col-12 col-md-5"> {# #} - <div class="btn-group" id="lint-levels" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Lint levels <span class="badge">4</span> <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-levels-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('levels_filter', true)">All</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('levels_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} - <div class="btn-group" id="lint-groups" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Lint groups <span class="badge">9</span> <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-groups-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('groups_filter', true)">All</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="resetGroupsToDefault()">Default</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('groups_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# 
#} - <div class="btn-group" id="version-filter" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Version {#+ #} - <span id="version-filter-count" class="badge">0</span> {#+ #} - <span class="caret"></span> {# #} - </button> {# #} - <ul id="version-filter-selector" class="dropdown-menu"> {# #} - <li class="checkbox"> {# #} - <button onclick="clearVersionFilters()">Clear filters</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} - <div class="btn-group" id="lint-applicabilities" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Applicability {#+ #} - <span class="badge">4</span> {#+ #} - <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-applicabilities-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('applicabilities_filter', true)">All</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('applicabilities_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} + <div id="menu-filters"> {# #} + <div class="panel-body row"> {# #} + <div id="upper-filters" class="col-12 col-md-5"> {# #} + <div class="btn-group" id="lint-levels" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Lint levels <span class="badge">4</span> <span class="caret"></span> {# #} + </button> {# #} + <ul class="dropdown-menu" id="lint-levels-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('levels_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('levels_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} </div> {# #} - <div class="col-12 col-md-5 search-control"> {# #} - <div class="input-group"> {# #} - <label class="input-group-addon" id="filter-label" for="search-input">Filter:</label> {# #} - <input type="text" class="form-control filter-input" placeholder="Keywords or search string (`S` or `/` to focus)" id="search-input" /> {# #} - <span class="input-group-btn"> {# #} - <button class="filter-clear btn" type="button" onclick="searchState.clearInput(event)"> {# #} - Clear {# #} - </button> {# #} - </span> {# #} - </div> {# #} + <div class="btn-group" id="lint-groups" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Lint groups <span class="badge">9</span> <span class="caret"></span> {# #} + </button> {# #} + <ul class="dropdown-menu" id="lint-groups-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('groups_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="resetGroupsToDefault()">Default</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('groups_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} </div> {# #} - <div class="col-12 col-md-2 btn-group expansion-group"> {# #} - <button title="Collapse All" class="btn btn-default expansion-control" type="button" id="collapse-all"> {# #} - <span class="glyphicon glyphicon-collapse-up"></span> {# #} + <div class="btn-group" id="version-filter" tabindex="-1"> {# #} + <button type="button" class="btn btn-default 
dropdown-toggle"> {# #} + Version {#+ #} + <span id="version-filter-count" class="badge">0</span> {#+ #} + <span class="caret"></span> {# #} </button> {# #} - <button title="Expand All" class="btn btn-default expansion-control" type="button" id="expand-all"> {# #} - <span class="glyphicon glyphicon-collapse-down"></span> {# #} + <ul id="version-filter-selector" class="dropdown-menu"> {# #} + <li class="checkbox"> {# #} + <button onclick="clearVersionFilters()">Clear filters</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} + </div> {# #} + <div class="btn-group" id="lint-applicabilities" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Applicability {#+ #} + <span class="badge">4</span> {#+ #} + <span class="caret"></span> {# #} </button> {# #} + <ul class="dropdown-menu" id="lint-applicabilities-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('applicabilities_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('applicabilities_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} + </div> {# #} + </div> {# #} + <div class="col-12 col-md-5 search-control"> {# #} + <div class="input-group"> {# #} + <label class="input-group-addon" id="filter-label" for="search-input">Filter:</label> {# #} + <input type="text" class="form-control filter-input" placeholder="Keywords or search string (`S` or `/` to focus)" id="search-input" /> {# #} + <span class="input-group-btn"> {# #} + <button class="filter-clear btn" type="button" onclick="searchState.clearInput(event)"> {# #} + Clear {# #} + </button> {# #} + </span> {# #} </div> {# #} </div> {# #} - </div> - {% for lint in lints %} - <article class="panel panel-default" id="{{lint.id}}"> {# #} - <input id="label-{{lint.id}}" type="checkbox"> {# #} - <label for="label-{{lint.id}}"> {# #} - <h2 class="lint-title"> {# #} - <div class="panel-title-name" id="lint-{{lint.id}}"> {# #} - {{lint.id +}} - <a href="#{{lint.id}}" class="anchor label label-default">¶</a> {#+ #} - <a href="" class="copy-to-clipboard anchor label label-default"> {# #} - 📋 {# #} - </a> {# #} - </div> {# #} + <div class="col-12 col-md-2 btn-group expansion-group"> {# #} + <button title="Collapse All" class="btn btn-default expansion-control" type="button" id="collapse-all"> {# #} + <span class="glyphicon glyphicon-collapse-up"></span> {# #} + </button> {# #} + <button title="Expand All" class="btn btn-default expansion-control" type="button" id="expand-all"> {# #} + <span class="glyphicon glyphicon-collapse-down"></span> {# #} + </button> {# #} + </div> {# #} + </div> {# #} + </div> + {% for lint in lints %} + <article id="{{lint.id}}"> {# #} + <input id="label-{{lint.id}}" type="checkbox"> {# #} + <label for="label-{{lint.id}}"> {# #} + <h2 class="lint-title"> {# #} + <div class="panel-title-name" id="lint-{{lint.id}}"> {# #} + {{lint.id ~}} + <a href="#{{lint.id}}" class="anchor label label-default">¶</a> {#+ #} + <a href="" class="copy-to-clipboard anchor label label-default"> {# #} + 📋 {# #} + </a> {# #} + </div> {# #} - <span class="label label-lint-group label-default label-group-{{lint.group}}">{{lint.group}}</span> {#+ #} + <span class="label label-default lint-group group-{{lint.group}}">{{lint.group}}</span> {#+ #} - <span class="label label-lint-level label-lint-level-{{lint.level}}">{{lint.level}}</span> {#+ #} + <span class="label 
lint-level level-{{lint.level}}">{{lint.level}}</span> {#+ #} - <span class="label label-doc-folding"></span> {# #} - </h2> {# #} - </label> {# #} + <span class="label doc-folding"></span> {# #} + </h2> {# #} + </label> {# #} - <div class="list-group lint-docs"> {# #} - <div class="list-group-item lint-doc-md">{{Self::markdown(lint.docs)}}</div> {# #} - <div class="lint-additional-info-container"> - {# Applicability #} - <div> {# #} - Applicability: {#+ #} - <span class="label label-default label-applicability">{{ lint.applicability_str() }}</span> {# #} - <a href="https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint_defs/enum.Applicability.html#variants">(?)</a> {# #} - </div> - {# Clippy version #} - <div> {# #} - {% if lint.group == "deprecated" %}Deprecated{% else %} Added{% endif +%} in: {#+ #} - <span class="label label-default label-version">{{lint.version}}</span> {# #} - </div> - {# Open related issues #} + <div class="lint-docs"> {# #} + <div class="lint-doc-md">{{Self::markdown(lint.docs)}}</div> {# #} + <div class="lint-additional-info"> + {# Applicability #} + <div> {# #} + Applicability: {#+ #} + <span class="label label-default applicability">{{ lint.applicability_str() }}</span> {# #} + <a href="https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint_defs/enum.Applicability.html#variants">(?)</a> {# #} + </div> + {# Clippy version #} + <div> {# #} + {% if lint.group == "deprecated" %}Deprecated{% else %} Added{% endif +%} in: {#+ #} + <span class="label label-default label-version">{{lint.version}}</span> {# #} + </div> + {# Open related issues #} + <div> {# #} + <a href="https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+{{lint.id}}">Related Issues</a> {# #} + </div> + + {# Jump to source #} + {% if let Some(id_location) = lint.id_location %} <div> {# #} - <a href="https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+{{lint.id}}">Related Issues</a> {# #} + <a href="https://github.com/rust-lang/rust-clippy/blob/master/{{id_location}}">View Source</a> {# #} </div> - - {# Jump to source #} - {% if let Some(id_location) = lint.id_location %} - <div> {# #} - <a href="https://github.com/rust-lang/rust-clippy/blob/master/{{id_location}}">View Source</a> {# #} - </div> - {% endif %} - </div> {# #} + {% endif %} </div> {# #} - </article> - {% endfor %} - </div> {# #} + </div> {# #} + </article> + {% endfor %} </div> {# #} <a {#+ #} diff --git a/src/tools/clippy/util/gh-pages/script.js b/src/tools/clippy/util/gh-pages/script.js index ee13f1c0cd8..d3204967531 100644 --- a/src/tools/clippy/util/gh-pages/script.js +++ b/src/tools/clippy/util/gh-pages/script.js @@ -208,7 +208,6 @@ const LEVEL_FILTERS_DEFAULT = { allow: true, warn: true, deny: true, - none: true, }; const APPLICABILITIES_FILTER_DEFAULT = { Unspecified: true, @@ -250,10 +249,10 @@ window.filters = { } return { elem: elem, - group: elem.querySelector(".label-lint-group").innerText, - level: elem.querySelector(".label-lint-level").innerText, + group: elem.querySelector(".lint-group").innerText, + level: elem.querySelector(".lint-level").innerText, version: parseInt(version.split(".")[1]), - applicability: elem.querySelector(".label-applicability").innerText, + applicability: elem.querySelector(".applicability").innerText, filteredOut: false, searchFilteredOut: false, }; @@ -594,19 +593,19 @@ disableShortcutsButton.checked = disableShortcuts; addListeners(); highlightLazily(); -generateSettings(); -generateSearch(); -parseURLFilters(); -scrollToLintByURL(); -filters.filterLints(); 
-updateLintCount(); - function updateLintCount() { const allLints = filters.getAllLints().filter(lint => lint.group != "deprecated"); const totalLints = allLints.length; - + const countElement = document.getElementById("lint-count"); if (countElement) { countElement.innerText = `Total number: ${totalLints}`; } } + +generateSettings(); +generateSearch(); +parseURLFilters(); +scrollToLintByURL(); +filters.filterLints(); +updateLintCount(); diff --git a/src/tools/clippy/util/gh-pages/style.css b/src/tools/clippy/util/gh-pages/style.css index 022ea875200..66abf4598b0 100644 --- a/src/tools/clippy/util/gh-pages/style.css +++ b/src/tools/clippy/util/gh-pages/style.css @@ -30,17 +30,25 @@ blockquote { font-size: 1em; } background-color: var(--theme-hover); } -div.panel div.panel-body button { +.container > * { + margin-bottom: 20px; + border-radius: 4px; + background: var(--bg); + border: 1px solid var(--theme-popup-border); + box-shadow: 0 1px 1px rgba(0,0,0,.05); +} + +div.panel-body button { background: var(--searchbar-bg); color: var(--searchbar-fg); border-color: var(--theme-popup-border); } -div.panel div.panel-body button:hover { +div.panel-body button:hover { box-shadow: 0 0 3px var(--searchbar-shadow-color); } -div.panel div.panel-body button.open { +div.panel-body button.open { filter: brightness(90%); } @@ -48,8 +56,6 @@ div.panel div.panel-body button.open { background-color: #777; } -.panel-heading { cursor: pointer; } - .lint-title { cursor: pointer; margin-top: 0; @@ -70,8 +76,8 @@ div.panel div.panel-body button.open { .panel-title-name { flex: 1; min-width: 400px;} -.panel .panel-title-name .anchor { display: none; } -.panel:hover .panel-title-name .anchor { display: inline;} +.panel-title-name .anchor { display: none; } +article:hover .panel-title-name .anchor { display: inline;} .search-control { margin-top: 15px; @@ -111,40 +117,48 @@ div.panel div.panel-body button.open { padding-bottom: 0.3em; } -.label-lint-group { - min-width: 8em; -} -.label-lint-level { +.lint-level { min-width: 4em; } - -.label-lint-level-allow { +.level-allow { background-color: #5cb85c; } -.label-lint-level-warn { +.level-warn { background-color: #f0ad4e; } -.label-lint-level-deny { +.level-deny { background-color: #d9534f; } -.label-lint-level-none { +.level-none { background-color: #777777; opacity: 0.5; } -.label-group-deprecated { +.lint-group { + min-width: 8em; +} +.group-deprecated { opacity: 0.5; } -.label-doc-folding { +.doc-folding { color: #000; background-color: #fff; border: 1px solid var(--theme-popup-border); } -.label-doc-folding:hover { +.doc-folding:hover { background-color: #e6e6e6; } +.lint-doc-md { + position: relative; + display: block; + padding: 10px 15px; + margin-bottom: -1px; + background: 0%; + border-bottom: 1px solid var(--theme-popup-border); + border-top: 1px solid var(--theme-popup-border); +} .lint-doc-md > h3 { border-top: 1px solid var(--theme-popup-border); padding: 10px 15px; @@ -157,32 +171,32 @@ div.panel div.panel-body button.open { } @media (max-width:749px) { - .lint-additional-info-container { + .lint-additional-info { display: flex; flex-flow: column; } - .lint-additional-info-container > div + div { + .lint-additional-info > div + div { border-top: 1px solid var(--theme-popup-border); } } @media (min-width:750px) { - .lint-additional-info-container { + .lint-additional-info { display: flex; flex-flow: row; } - .lint-additional-info-container > div + div { + .lint-additional-info > div + div { border-left: 1px solid var(--theme-popup-border); } } 
-.lint-additional-info-container > div { +.lint-additional-info > div { display: inline-flex; min-width: 200px; flex-grow: 1; padding: 9px 5px 5px 15px; } -.label-applicability { +.applicability { background-color: #777777; margin: auto 5px; } @@ -332,21 +346,12 @@ L4.75,12h2.5l0.5393066-2.1572876 c0.2276001-0.1062012,0.4459839-0.2269287,0.649 border: 1px solid var(--theme-popup-border); } .page-header { - border-color: var(--theme-popup-border); -} -.panel-default .panel-heading { - background: var(--theme-hover); - color: var(--fg); - border: 1px solid var(--theme-popup-border); -} -.panel-default .panel-heading:hover { - filter: brightness(90%); -} -.list-group-item { - background: 0%; - border: 1px solid var(--theme-popup-border); + border: 0; + border-bottom: 1px solid var(--theme-popup-border); + padding-bottom: 19px; + border-radius: 0; } -.panel, pre, hr { +pre, hr { background: var(--bg); border: 1px solid var(--theme-popup-border); } @@ -442,14 +447,15 @@ article > label { article > input[type="checkbox"] { display: none; } -article > input[type="checkbox"] + label .label-doc-folding::before { +article > input[type="checkbox"] + label .doc-folding::before { content: "+"; } -article > input[type="checkbox"]:checked + label .label-doc-folding::before { +article > input[type="checkbox"]:checked + label .doc-folding::before { content: "−"; } .lint-docs { display: none; + margin-bottom: 0; } article > input[type="checkbox"]:checked ~ .lint-docs { display: block; diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 33da1a25db1..aceae3e3a3b 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -57,8 +57,8 @@ impl TestMode { string_enum! { #[derive(Clone, Copy, PartialEq, Debug)] pub enum TestSuite { - Assembly => "assembly", - Codegen => "codegen", + AssemblyLlvm => "assembly-llvm", + CodegenLlvm => "codegen-llvm", CodegenUnits => "codegen-units", Coverage => "coverage", CoverageRunRustdoc => "coverage-run-rustdoc", @@ -88,11 +88,37 @@ string_enum! { } } +string_enum! { + #[derive(Clone, Copy, PartialEq, Debug, Hash)] + pub enum RunResult { + Pass => "run-pass", + Fail => "run-fail", + Crash => "run-crash", + } +} + +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] +pub enum RunFailMode { + /// Running the program must make it exit with a regular failure exit code + /// in the range `1..=127`. If the program is terminated by e.g. a signal + /// the test will fail. + Fail, + /// Running the program must result in a crash, e.g. by `SIGABRT` or + /// `SIGSEGV` on Unix or on Windows by having an appropriate NTSTATUS high + /// bit in the exit code. + Crash, + /// Running the program must either fail or crash. Useful for e.g. sanitizer + /// tests since some sanitizer implementations exit the process with code 1 + /// to in the face of memory errors while others abort (crash) the process + /// in the face of memory errors. + FailOrCrash, +} + #[derive(Copy, Clone, Debug, PartialEq, PartialOrd)] pub enum FailMode { Check, Build, - Run, + Run(RunFailMode), } string_enum! 
{ @@ -149,6 +175,36 @@ pub enum Sanitizer { Hwaddress, } +#[derive(Clone, Copy, Debug, PartialEq)] +pub enum CodegenBackend { + Cranelift, + Gcc, + Llvm, +} + +impl<'a> TryFrom<&'a str> for CodegenBackend { + type Error = &'static str; + + fn try_from(value: &'a str) -> Result<Self, Self::Error> { + match value.to_lowercase().as_str() { + "cranelift" => Ok(Self::Cranelift), + "gcc" => Ok(Self::Gcc), + "llvm" => Ok(Self::Llvm), + _ => Err("unknown backend"), + } + } +} + +impl CodegenBackend { + pub fn as_str(self) -> &'static str { + match self { + Self::Cranelift => "cranelift", + Self::Gcc => "gcc", + Self::Llvm => "llvm", + } + } +} + /// Configuration for `compiletest` *per invocation*. /// /// In terms of `bootstrap`, this means that `./x test tests/ui tests/run-make` actually correspond @@ -625,6 +681,9 @@ pub struct Config { /// need `core` stubs in cross-compilation scenarios that do not otherwise want/need to /// `-Zbuild-std`. Used in e.g. ABI tests. pub minicore_path: Utf8PathBuf, + + /// Current codegen backend used. + pub codegen_backend: CodegenBackend, } impl Config { @@ -727,6 +786,7 @@ impl Config { profiler_runtime: Default::default(), diff_command: Default::default(), minicore_path: Default::default(), + codegen_backend: CodegenBackend::Llvm, } } diff --git a/src/tools/compiletest/src/directives.rs b/src/tools/compiletest/src/directives.rs index 93133ea0bfd..54511f4fd08 100644 --- a/src/tools/compiletest/src/directives.rs +++ b/src/tools/compiletest/src/directives.rs @@ -9,9 +9,12 @@ use camino::{Utf8Path, Utf8PathBuf}; use semver::Version; use tracing::*; -use crate::common::{Config, Debugger, FailMode, PassMode, TestMode}; +use crate::common::{CodegenBackend, Config, Debugger, FailMode, PassMode, RunFailMode, TestMode}; use crate::debuggers::{extract_cdb_version, extract_gdb_version}; use crate::directives::auxiliary::{AuxProps, parse_and_update_aux}; +use crate::directives::directive_names::{ + KNOWN_DIRECTIVE_NAMES, KNOWN_HTMLDOCCK_DIRECTIVE_NAMES, KNOWN_JSONDOCCK_DIRECTIVE_NAMES, +}; use crate::directives::needs::CachedNeedsConditions; use crate::errors::ErrorKind; use crate::executor::{CollectedTestDesc, ShouldPanic}; @@ -20,6 +23,7 @@ use crate::util::static_regex; pub(crate) mod auxiliary; mod cfg; +mod directive_names; mod needs; #[cfg(test)] mod tests; @@ -59,9 +63,9 @@ impl EarlyProps { &mut poisoned, testfile, rdr, - &mut |DirectiveLine { raw_directive: ln, .. }| { - parse_and_update_aux(config, ln, &mut props.aux); - config.parse_and_update_revisions(testfile, ln, &mut props.revisions); + &mut |DirectiveLine { line_number, raw_directive: ln, .. }| { + parse_and_update_aux(config, ln, testfile, line_number, &mut props.aux); + config.parse_and_update_revisions(testfile, line_number, ln, &mut props.revisions); }, ); @@ -351,7 +355,7 @@ impl TestProps { &mut poisoned, testfile, file, - &mut |directive @ DirectiveLine { raw_directive: ln, .. }| { + &mut |directive @ DirectiveLine { line_number, raw_directive: ln, .. 
}| { if !directive.applies_to_test_revision(test_revision) { return; } @@ -361,17 +365,28 @@ impl TestProps { config.push_name_value_directive( ln, ERROR_PATTERN, + testfile, + line_number, &mut self.error_patterns, |r| r, ); config.push_name_value_directive( ln, REGEX_ERROR_PATTERN, + testfile, + line_number, &mut self.regex_error_patterns, |r| r, ); - config.push_name_value_directive(ln, DOC_FLAGS, &mut self.doc_flags, |r| r); + config.push_name_value_directive( + ln, + DOC_FLAGS, + testfile, + line_number, + &mut self.doc_flags, + |r| r, + ); fn split_flags(flags: &str) -> Vec<String> { // Individual flags can be single-quoted to preserve spaces; see @@ -386,7 +401,9 @@ impl TestProps { .collect::<Vec<_>>() } - if let Some(flags) = config.parse_name_value_directive(ln, COMPILE_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, COMPILE_FLAGS, testfile, line_number) + { let flags = split_flags(&flags); for flag in &flags { if flag == "--edition" || flag.starts_with("--edition=") { @@ -395,25 +412,40 @@ impl TestProps { } self.compile_flags.extend(flags); } - if config.parse_name_value_directive(ln, INCORRECT_COMPILER_FLAGS).is_some() { + if config + .parse_name_value_directive( + ln, + INCORRECT_COMPILER_FLAGS, + testfile, + line_number, + ) + .is_some() + { panic!("`compiler-flags` directive should be spelled `compile-flags`"); } - if let Some(edition) = config.parse_edition(ln) { + if let Some(edition) = config.parse_edition(ln, testfile, line_number) { // The edition is added at the start, since flags from //@compile-flags must // be passed to rustc last. self.compile_flags.insert(0, format!("--edition={}", edition.trim())); has_edition = true; } - config.parse_and_update_revisions(testfile, ln, &mut self.revisions); + config.parse_and_update_revisions( + testfile, + line_number, + ln, + &mut self.revisions, + ); - if let Some(flags) = config.parse_name_value_directive(ln, RUN_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, RUN_FLAGS, testfile, line_number) + { self.run_flags.extend(split_flags(&flags)); } if self.pp_exact.is_none() { - self.pp_exact = config.parse_pp_exact(ln, testfile); + self.pp_exact = config.parse_pp_exact(ln, testfile, line_number); } config.set_name_directive(ln, SHOULD_ICE, &mut self.should_ice); @@ -435,7 +467,9 @@ impl TestProps { ); config.set_name_directive(ln, NO_PREFER_DYNAMIC, &mut self.no_prefer_dynamic); - if let Some(m) = config.parse_name_value_directive(ln, PRETTY_MODE) { + if let Some(m) = + config.parse_name_value_directive(ln, PRETTY_MODE, testfile, line_number) + { self.pretty_mode = m; } @@ -446,35 +480,45 @@ impl TestProps { ); // Call a helper method to deal with aux-related directives. 
- parse_and_update_aux(config, ln, &mut self.aux); + parse_and_update_aux(config, ln, testfile, line_number, &mut self.aux); config.push_name_value_directive( ln, EXEC_ENV, + testfile, + line_number, &mut self.exec_env, Config::parse_env, ); config.push_name_value_directive( ln, UNSET_EXEC_ENV, + testfile, + line_number, &mut self.unset_exec_env, |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, RUSTC_ENV, + testfile, + line_number, &mut self.rustc_env, Config::parse_env, ); config.push_name_value_directive( ln, UNSET_RUSTC_ENV, + testfile, + line_number, &mut self.unset_rustc_env, |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, FORBID_OUTPUT, + testfile, + line_number, &mut self.forbid_output, |r| r, ); @@ -510,7 +554,7 @@ impl TestProps { } if let Some(code) = config - .parse_name_value_directive(ln, FAILURE_STATUS) + .parse_name_value_directive(ln, FAILURE_STATUS, testfile, line_number) .and_then(|code| code.trim().parse::<i32>().ok()) { self.failure_status = Some(code); @@ -531,6 +575,8 @@ impl TestProps { config.set_name_value_directive( ln, ASSEMBLY_OUTPUT, + testfile, + line_number, &mut self.assembly_output, |r| r.trim().to_string(), ); @@ -543,7 +589,9 @@ impl TestProps { // Unlike the other `name_value_directive`s this needs to be handled manually, // because it sets a `bool` flag. - if let Some(known_bug) = config.parse_name_value_directive(ln, KNOWN_BUG) { + if let Some(known_bug) = + config.parse_name_value_directive(ln, KNOWN_BUG, testfile, line_number) + { let known_bug = known_bug.trim(); if known_bug == "unknown" || known_bug.split(',').all(|issue_ref| { @@ -571,16 +619,25 @@ impl TestProps { config.set_name_value_directive( ln, TEST_MIR_PASS, + testfile, + line_number, &mut self.mir_unit_test, |s| s.trim().to_string(), ); config.set_name_directive(ln, REMAP_SRC_BASE, &mut self.remap_src_base); - if let Some(flags) = config.parse_name_value_directive(ln, LLVM_COV_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, LLVM_COV_FLAGS, testfile, line_number) + { self.llvm_cov_flags.extend(split_flags(&flags)); } - if let Some(flags) = config.parse_name_value_directive(ln, FILECHECK_FLAGS) { + if let Some(flags) = config.parse_name_value_directive( + ln, + FILECHECK_FLAGS, + testfile, + line_number, + ) { self.filecheck_flags.extend(split_flags(&flags)); } @@ -588,9 +645,12 @@ impl TestProps { self.update_add_core_stubs(ln, config); - if let Some(err_kind) = - config.parse_name_value_directive(ln, DONT_REQUIRE_ANNOTATIONS) - { + if let Some(err_kind) = config.parse_name_value_directive( + ln, + DONT_REQUIRE_ANNOTATIONS, + testfile, + line_number, + ) { self.dont_require_annotations .insert(ErrorKind::expect_from_user_str(err_kind.trim())); } @@ -654,7 +714,13 @@ impl TestProps { Some(FailMode::Build) } else if config.parse_name_directive(ln, "run-fail") { check_ui("run"); - Some(FailMode::Run) + Some(FailMode::Run(RunFailMode::Fail)) + } else if config.parse_name_directive(ln, "run-crash") { + check_ui("run"); + Some(FailMode::Run(RunFailMode::Crash)) + } else if config.parse_name_directive(ln, "run-fail-or-crash") { + check_ui("run"); + Some(FailMode::Run(RunFailMode::FailOrCrash)) } else { None }; @@ -763,290 +829,6 @@ fn line_directive<'line>( Some(DirectiveLine { line_number, revision, raw_directive }) } -/// This was originally generated by collecting directives from ui tests and then extracting their -/// directive names. This is **not** an exhaustive list of all possible directives. 
Instead, this is -/// a best-effort approximation for diagnostics. Add new directives to this list when needed. -const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ - // tidy-alphabetical-start - "add-core-stubs", - "assembly-output", - "aux-bin", - "aux-build", - "aux-codegen-backend", - "aux-crate", - "build-aux-docs", - "build-fail", - "build-pass", - "check-fail", - "check-pass", - "check-run-results", - "check-stdout", - "check-test-line-numbers-match", - "compile-flags", - "doc-flags", - "dont-check-compiler-stderr", - "dont-check-compiler-stdout", - "dont-check-failure-status", - "dont-require-annotations", - "edition", - "error-pattern", - "exact-llvm-major-version", - "exec-env", - "failure-status", - "filecheck-flags", - "forbid-output", - "force-host", - "ignore-16bit", - "ignore-32bit", - "ignore-64bit", - "ignore-aarch64", - "ignore-aarch64-pc-windows-msvc", - "ignore-aarch64-unknown-linux-gnu", - "ignore-aix", - "ignore-android", - "ignore-apple", - "ignore-arm", - "ignore-arm-unknown-linux-gnueabi", - "ignore-arm-unknown-linux-gnueabihf", - "ignore-arm-unknown-linux-musleabi", - "ignore-arm-unknown-linux-musleabihf", - "ignore-auxiliary", - "ignore-avr", - "ignore-beta", - "ignore-cdb", - "ignore-compare-mode-next-solver", - "ignore-compare-mode-polonius", - "ignore-coverage-map", - "ignore-coverage-run", - "ignore-cross-compile", - "ignore-eabi", - "ignore-elf", - "ignore-emscripten", - "ignore-endian-big", - "ignore-enzyme", - "ignore-freebsd", - "ignore-fuchsia", - "ignore-gdb", - "ignore-gdb-version", - "ignore-gnu", - "ignore-haiku", - "ignore-horizon", - "ignore-i686-pc-windows-gnu", - "ignore-i686-pc-windows-msvc", - "ignore-illumos", - "ignore-ios", - "ignore-linux", - "ignore-lldb", - "ignore-llvm-version", - "ignore-loongarch32", - "ignore-loongarch64", - "ignore-macabi", - "ignore-macos", - "ignore-msp430", - "ignore-msvc", - "ignore-musl", - "ignore-netbsd", - "ignore-nightly", - "ignore-none", - "ignore-nto", - "ignore-nvptx64", - "ignore-nvptx64-nvidia-cuda", - "ignore-openbsd", - "ignore-pass", - "ignore-powerpc", - "ignore-remote", - "ignore-riscv64", - "ignore-rustc-debug-assertions", - "ignore-rustc_abi-x86-sse2", - "ignore-s390x", - "ignore-sgx", - "ignore-sparc64", - "ignore-spirv", - "ignore-stable", - "ignore-stage1", - "ignore-stage2", - "ignore-std-debug-assertions", - "ignore-test", - "ignore-thumb", - "ignore-thumbv8m.base-none-eabi", - "ignore-thumbv8m.main-none-eabi", - "ignore-tvos", - "ignore-unix", - "ignore-unknown", - "ignore-uwp", - "ignore-visionos", - "ignore-vxworks", - "ignore-wasi", - "ignore-wasm", - "ignore-wasm32", - "ignore-wasm32-bare", - "ignore-wasm64", - "ignore-watchos", - "ignore-windows", - "ignore-windows-gnu", - "ignore-windows-msvc", - "ignore-x32", - "ignore-x86", - "ignore-x86_64", - "ignore-x86_64-apple-darwin", - "ignore-x86_64-pc-windows-gnu", - "ignore-x86_64-unknown-linux-gnu", - "incremental", - "known-bug", - "llvm-cov-flags", - "max-llvm-major-version", - "min-cdb-version", - "min-gdb-version", - "min-lldb-version", - "min-llvm-version", - "min-system-llvm-version", - "needs-asm-support", - "needs-crate-type", - "needs-deterministic-layouts", - "needs-dlltool", - "needs-dynamic-linking", - "needs-enzyme", - "needs-force-clang-based-tests", - "needs-git-hash", - "needs-llvm-components", - "needs-llvm-zstd", - "needs-profiler-runtime", - "needs-relocation-model-pic", - "needs-run-enabled", - "needs-rust-lld", - "needs-rustc-debug-assertions", - "needs-sanitizer-address", - "needs-sanitizer-cfi", - "needs-sanitizer-dataflow", 
- "needs-sanitizer-hwaddress", - "needs-sanitizer-kcfi", - "needs-sanitizer-leak", - "needs-sanitizer-memory", - "needs-sanitizer-memtag", - "needs-sanitizer-safestack", - "needs-sanitizer-shadow-call-stack", - "needs-sanitizer-support", - "needs-sanitizer-thread", - "needs-std-debug-assertions", - "needs-subprocess", - "needs-symlink", - "needs-target-has-atomic", - "needs-target-std", - "needs-threads", - "needs-unwind", - "needs-wasmtime", - "needs-xray", - "no-auto-check-cfg", - "no-prefer-dynamic", - "normalize-stderr", - "normalize-stderr-32bit", - "normalize-stderr-64bit", - "normalize-stdout", - "only-16bit", - "only-32bit", - "only-64bit", - "only-aarch64", - "only-aarch64-apple-darwin", - "only-aarch64-unknown-linux-gnu", - "only-apple", - "only-arm", - "only-avr", - "only-beta", - "only-bpf", - "only-cdb", - "only-dist", - "only-elf", - "only-emscripten", - "only-gnu", - "only-i686-pc-windows-gnu", - "only-i686-pc-windows-msvc", - "only-i686-unknown-linux-gnu", - "only-ios", - "only-linux", - "only-loongarch32", - "only-loongarch64", - "only-loongarch64-unknown-linux-gnu", - "only-macos", - "only-mips", - "only-mips64", - "only-msp430", - "only-msvc", - "only-musl", - "only-nightly", - "only-nvptx64", - "only-powerpc", - "only-riscv64", - "only-rustc_abi-x86-sse2", - "only-s390x", - "only-sparc", - "only-sparc64", - "only-stable", - "only-thumb", - "only-tvos", - "only-unix", - "only-visionos", - "only-wasm32", - "only-wasm32-bare", - "only-wasm32-wasip1", - "only-watchos", - "only-windows", - "only-windows-gnu", - "only-windows-msvc", - "only-x86", - "only-x86_64", - "only-x86_64-apple-darwin", - "only-x86_64-fortanix-unknown-sgx", - "only-x86_64-pc-windows-gnu", - "only-x86_64-pc-windows-msvc", - "only-x86_64-unknown-linux-gnu", - "pp-exact", - "pretty-compare-only", - "pretty-mode", - "proc-macro", - "reference", - "regex-error-pattern", - "remap-src-base", - "revisions", - "run-fail", - "run-flags", - "run-pass", - "run-rustfix", - "rustc-env", - "rustfix-only-machine-applicable", - "should-fail", - "should-ice", - "stderr-per-bitwidth", - "test-mir-pass", - "unique-doc-out-dir", - "unset-exec-env", - "unset-rustc-env", - // Used by the tidy check `unknown_revision`. - "unused-revision-names", - // tidy-alphabetical-end -]; - -const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[ - "count", - "!count", - "files", - "!files", - "has", - "!has", - "has-dir", - "!has-dir", - "hasraw", - "!hasraw", - "matches", - "!matches", - "matchesraw", - "!matchesraw", - "snapshot", - "!snapshot", -]; - -const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] = - &["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"]; - /// The (partly) broken-down contents of a line containing a test directive, /// which [`iter_directives`] passes to its callback function. 
/// @@ -1194,6 +976,7 @@ impl Config { fn parse_and_update_revisions( &self, testfile: &Utf8Path, + line_number: usize, line: &str, existing: &mut Vec<String>, ) { @@ -1207,7 +990,8 @@ impl Config { const FILECHECK_FORBIDDEN_REVISION_NAMES: [&str; 9] = ["CHECK", "COM", "NEXT", "SAME", "EMPTY", "NOT", "COUNT", "DAG", "LABEL"]; - if let Some(raw) = self.parse_name_value_directive(line, "revisions") { + if let Some(raw) = self.parse_name_value_directive(line, "revisions", testfile, line_number) + { if self.mode == TestMode::RunMake { panic!("`run-make` tests do not support revisions: {}", testfile); } @@ -1252,8 +1036,13 @@ impl Config { (name.to_owned(), value.to_owned()) } - fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option<Utf8PathBuf> { - if let Some(s) = self.parse_name_value_directive(line, "pp-exact") { + fn parse_pp_exact( + &self, + line: &str, + testfile: &Utf8Path, + line_number: usize, + ) -> Option<Utf8PathBuf> { + if let Some(s) = self.parse_name_value_directive(line, "pp-exact", testfile, line_number) { Some(Utf8PathBuf::from(&s)) } else if self.parse_name_directive(line, "pp-exact") { testfile.file_name().map(Utf8PathBuf::from) @@ -1294,19 +1083,31 @@ impl Config { line.starts_with("no-") && self.parse_name_directive(&line[3..], directive) } - pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> { + pub fn parse_name_value_directive( + &self, + line: &str, + directive: &str, + testfile: &Utf8Path, + line_number: usize, + ) -> Option<String> { let colon = directive.len(); if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') { let value = line[(colon + 1)..].to_owned(); debug!("{}: {}", directive, value); - Some(expand_variables(value, self)) + let value = expand_variables(value, self); + if value.is_empty() { + error!("{testfile}:{line_number}: empty value for directive `{directive}`"); + help!("expected syntax is: `{directive}: value`"); + panic!("empty directive value detected"); + } + Some(value) } else { None } } - fn parse_edition(&self, line: &str) -> Option<String> { - self.parse_name_value_directive(line, "edition") + fn parse_edition(&self, line: &str, testfile: &Utf8Path, line_number: usize) -> Option<String> { + self.parse_name_value_directive(line, "edition", testfile, line_number) } fn set_name_directive(&self, line: &str, directive: &str, value: &mut bool) { @@ -1328,11 +1129,14 @@ impl Config { &self, line: &str, directive: &str, + testfile: &Utf8Path, + line_number: usize, value: &mut Option<T>, parse: impl FnOnce(String) -> T, ) { if value.is_none() { - *value = self.parse_name_value_directive(line, directive).map(parse); + *value = + self.parse_name_value_directive(line, directive, testfile, line_number).map(parse); } } @@ -1340,10 +1144,14 @@ impl Config { &self, line: &str, directive: &str, + testfile: &Utf8Path, + line_number: usize, values: &mut Vec<T>, parse: impl FnOnce(String) -> T, ) { - if let Some(value) = self.parse_name_value_directive(line, directive).map(parse) { + if let Some(value) = + self.parse_name_value_directive(line, directive, testfile, line_number).map(parse) + { values.push(value); } } @@ -1660,7 +1468,9 @@ pub(crate) fn make_test_description<R: Read>( decision!(cfg::handle_ignore(config, ln)); decision!(cfg::handle_only(config, ln)); decision!(needs::handle_needs(&cache.needs, config, ln)); - decision!(ignore_llvm(config, path, ln)); + decision!(ignore_llvm(config, path, ln, line_number)); + decision!(ignore_backends(config, path, ln, line_number)); + 
decision!(needs_backends(config, path, ln, line_number)); decision!(ignore_cdb(config, ln)); decision!(ignore_gdb(config, ln)); decision!(ignore_lldb(config, ln)); @@ -1787,9 +1597,66 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision { IgnoreDecision::Continue } -fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { +fn ignore_backends( + config: &Config, + path: &Utf8Path, + line: &str, + line_number: usize, +) -> IgnoreDecision { + if let Some(backends_to_ignore) = + config.parse_name_value_directive(line, "ignore-backends", path, line_number) + { + for backend in backends_to_ignore.split_whitespace().map(|backend| { + match CodegenBackend::try_from(backend) { + Ok(backend) => backend, + Err(error) => { + panic!("Invalid ignore-backends value `{backend}` in `{path}`: {error}") + } + } + }) { + if config.codegen_backend == backend { + return IgnoreDecision::Ignore { + reason: format!("{} backend is marked as ignore", backend.as_str()), + }; + } + } + } + IgnoreDecision::Continue +} + +fn needs_backends( + config: &Config, + path: &Utf8Path, + line: &str, + line_number: usize, +) -> IgnoreDecision { + if let Some(needed_backends) = + config.parse_name_value_directive(line, "needs-backends", path, line_number) + { + if !needed_backends + .split_whitespace() + .map(|backend| match CodegenBackend::try_from(backend) { + Ok(backend) => backend, + Err(error) => { + panic!("Invalid needs-backends value `{backend}` in `{path}`: {error}") + } + }) + .any(|backend| config.codegen_backend == backend) + { + return IgnoreDecision::Ignore { + reason: format!( + "{} backend is not part of required backends", + config.codegen_backend.as_str() + ), + }; + } + } + IgnoreDecision::Continue +} + +fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str, line_number: usize) -> IgnoreDecision { if let Some(needed_components) = - config.parse_name_value_directive(line, "needs-llvm-components") + config.parse_name_value_directive(line, "needs-llvm-components", path, line_number) { let components: HashSet<_> = config.llvm_components.split_whitespace().collect(); if let Some(missing_component) = needed_components @@ -1810,7 +1677,9 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { if let Some(actual_version) = &config.llvm_version { // Note that these `min` versions will check for not just major versions. - if let Some(version_string) = config.parse_name_value_directive(line, "min-llvm-version") { + if let Some(version_string) = + config.parse_name_value_directive(line, "min-llvm-version", path, line_number) + { let min_version = extract_llvm_version(&version_string); // Ignore if actual version is smaller than the minimum required version. if *actual_version < min_version { @@ -1821,7 +1690,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_string) = - config.parse_name_value_directive(line, "max-llvm-major-version") + config.parse_name_value_directive(line, "max-llvm-major-version", path, line_number) { let max_version = extract_llvm_version(&version_string); // Ignore if actual major version is larger than the maximum required major version. 
@@ -1835,7 +1704,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_string) = - config.parse_name_value_directive(line, "min-system-llvm-version") + config.parse_name_value_directive(line, "min-system-llvm-version", path, line_number) { let min_version = extract_llvm_version(&version_string); // Ignore if using system LLVM and actual version @@ -1848,7 +1717,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_range) = - config.parse_name_value_directive(line, "ignore-llvm-version") + config.parse_name_value_directive(line, "ignore-llvm-version", path, line_number) { // Syntax is: "ignore-llvm-version: <version1> [- <version2>]" let (v_min, v_max) = @@ -1874,7 +1743,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { } } } else if let Some(version_string) = - config.parse_name_value_directive(line, "exact-llvm-major-version") + config.parse_name_value_directive(line, "exact-llvm-major-version", path, line_number) { // Syntax is "exact-llvm-major-version: <version>" let version = extract_llvm_version(&version_string); diff --git a/src/tools/compiletest/src/directives/auxiliary.rs b/src/tools/compiletest/src/directives/auxiliary.rs index cdb75f6ffa9..7c1ed2e7006 100644 --- a/src/tools/compiletest/src/directives/auxiliary.rs +++ b/src/tools/compiletest/src/directives/auxiliary.rs @@ -3,6 +3,8 @@ use std::iter; +use camino::Utf8Path; + use super::directives::{AUX_BIN, AUX_BUILD, AUX_CODEGEN_BACKEND, AUX_CRATE, PROC_MACRO}; use crate::common::Config; @@ -41,17 +43,42 @@ impl AuxProps { /// If the given test directive line contains an `aux-*` directive, parse it /// and update [`AuxProps`] accordingly. 
-pub(super) fn parse_and_update_aux(config: &Config, ln: &str, aux: &mut AuxProps) { +pub(super) fn parse_and_update_aux( + config: &Config, + ln: &str, + testfile: &Utf8Path, + line_number: usize, + aux: &mut AuxProps, +) { if !(ln.starts_with("aux-") || ln.starts_with("proc-macro")) { return; } - config.push_name_value_directive(ln, AUX_BUILD, &mut aux.builds, |r| r.trim().to_string()); - config.push_name_value_directive(ln, AUX_BIN, &mut aux.bins, |r| r.trim().to_string()); - config.push_name_value_directive(ln, AUX_CRATE, &mut aux.crates, parse_aux_crate); - config - .push_name_value_directive(ln, PROC_MACRO, &mut aux.proc_macros, |r| r.trim().to_string()); - if let Some(r) = config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND) { + config.push_name_value_directive(ln, AUX_BUILD, testfile, line_number, &mut aux.builds, |r| { + r.trim().to_string() + }); + config.push_name_value_directive(ln, AUX_BIN, testfile, line_number, &mut aux.bins, |r| { + r.trim().to_string() + }); + config.push_name_value_directive( + ln, + AUX_CRATE, + testfile, + line_number, + &mut aux.crates, + parse_aux_crate, + ); + config.push_name_value_directive( + ln, + PROC_MACRO, + testfile, + line_number, + &mut aux.proc_macros, + |r| r.trim().to_string(), + ); + if let Some(r) = + config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND, testfile, line_number) + { aux.codegen_backend = Some(r.trim().to_owned()); } } diff --git a/src/tools/compiletest/src/directives/cfg.rs b/src/tools/compiletest/src/directives/cfg.rs index 35f6a9e1644..802a1d63d1f 100644 --- a/src/tools/compiletest/src/directives/cfg.rs +++ b/src/tools/compiletest/src/directives/cfg.rs @@ -285,6 +285,11 @@ fn parse_cfg_name_directive<'a>( if name == "gdb-version" { outcome = MatchOutcome::External; } + + // Don't error out for ignore-backends,as it is handled elsewhere. + if name == "backends" { + outcome = MatchOutcome::External; + } } ParsedNameDirective { diff --git a/src/tools/compiletest/src/directives/directive_names.rs b/src/tools/compiletest/src/directives/directive_names.rs new file mode 100644 index 00000000000..7fc76a42e0c --- /dev/null +++ b/src/tools/compiletest/src/directives/directive_names.rs @@ -0,0 +1,289 @@ +/// This was originally generated by collecting directives from ui tests and then extracting their +/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is +/// a best-effort approximation for diagnostics. Add new directives to this list when needed. 
+pub(crate) const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ + // tidy-alphabetical-start + "add-core-stubs", + "assembly-output", + "aux-bin", + "aux-build", + "aux-codegen-backend", + "aux-crate", + "build-aux-docs", + "build-fail", + "build-pass", + "check-fail", + "check-pass", + "check-run-results", + "check-stdout", + "check-test-line-numbers-match", + "compile-flags", + "doc-flags", + "dont-check-compiler-stderr", + "dont-check-compiler-stdout", + "dont-check-failure-status", + "dont-require-annotations", + "edition", + "error-pattern", + "exact-llvm-major-version", + "exec-env", + "failure-status", + "filecheck-flags", + "forbid-output", + "force-host", + "ignore-16bit", + "ignore-32bit", + "ignore-64bit", + "ignore-aarch64", + "ignore-aarch64-pc-windows-msvc", + "ignore-aarch64-unknown-linux-gnu", + "ignore-aix", + "ignore-android", + "ignore-apple", + "ignore-arm", + "ignore-arm-unknown-linux-gnueabi", + "ignore-arm-unknown-linux-gnueabihf", + "ignore-arm-unknown-linux-musleabi", + "ignore-arm-unknown-linux-musleabihf", + "ignore-auxiliary", + "ignore-avr", + "ignore-backends", + "ignore-beta", + "ignore-cdb", + "ignore-compare-mode-next-solver", + "ignore-compare-mode-polonius", + "ignore-coverage-map", + "ignore-coverage-run", + "ignore-cross-compile", + "ignore-eabi", + "ignore-elf", + "ignore-emscripten", + "ignore-endian-big", + "ignore-enzyme", + "ignore-freebsd", + "ignore-fuchsia", + "ignore-gdb", + "ignore-gdb-version", + "ignore-gnu", + "ignore-haiku", + "ignore-horizon", + "ignore-i686-pc-windows-gnu", + "ignore-i686-pc-windows-msvc", + "ignore-illumos", + "ignore-ios", + "ignore-linux", + "ignore-lldb", + "ignore-llvm-version", + "ignore-loongarch32", + "ignore-loongarch64", + "ignore-macabi", + "ignore-macos", + "ignore-msp430", + "ignore-msvc", + "ignore-musl", + "ignore-netbsd", + "ignore-nightly", + "ignore-none", + "ignore-nto", + "ignore-nvptx64", + "ignore-nvptx64-nvidia-cuda", + "ignore-openbsd", + "ignore-pass", + "ignore-powerpc", + "ignore-powerpc64", + "ignore-remote", + "ignore-riscv64", + "ignore-rustc-debug-assertions", + "ignore-rustc_abi-x86-sse2", + "ignore-s390x", + "ignore-sgx", + "ignore-sparc64", + "ignore-spirv", + "ignore-stable", + "ignore-stage1", + "ignore-stage2", + "ignore-std-debug-assertions", + "ignore-test", + "ignore-thumb", + "ignore-thumbv8m.base-none-eabi", + "ignore-thumbv8m.main-none-eabi", + "ignore-tvos", + "ignore-unix", + "ignore-unknown", + "ignore-uwp", + "ignore-visionos", + "ignore-vxworks", + "ignore-wasi", + "ignore-wasm", + "ignore-wasm32", + "ignore-wasm32-bare", + "ignore-wasm64", + "ignore-watchos", + "ignore-windows", + "ignore-windows-gnu", + "ignore-windows-msvc", + "ignore-x32", + "ignore-x86", + "ignore-x86_64", + "ignore-x86_64-apple-darwin", + "ignore-x86_64-pc-windows-gnu", + "ignore-x86_64-unknown-linux-gnu", + "incremental", + "known-bug", + "llvm-cov-flags", + "max-llvm-major-version", + "min-cdb-version", + "min-gdb-version", + "min-lldb-version", + "min-llvm-version", + "min-system-llvm-version", + "needs-asm-support", + "needs-backends", + "needs-crate-type", + "needs-deterministic-layouts", + "needs-dlltool", + "needs-dynamic-linking", + "needs-enzyme", + "needs-force-clang-based-tests", + "needs-git-hash", + "needs-llvm-components", + "needs-llvm-zstd", + "needs-profiler-runtime", + "needs-relocation-model-pic", + "needs-run-enabled", + "needs-rust-lld", + "needs-rustc-debug-assertions", + "needs-sanitizer-address", + "needs-sanitizer-cfi", + "needs-sanitizer-dataflow", + "needs-sanitizer-hwaddress", + 
"needs-sanitizer-kcfi", + "needs-sanitizer-leak", + "needs-sanitizer-memory", + "needs-sanitizer-memtag", + "needs-sanitizer-safestack", + "needs-sanitizer-shadow-call-stack", + "needs-sanitizer-support", + "needs-sanitizer-thread", + "needs-std-debug-assertions", + "needs-subprocess", + "needs-symlink", + "needs-target-has-atomic", + "needs-target-std", + "needs-threads", + "needs-unwind", + "needs-wasmtime", + "needs-xray", + "no-auto-check-cfg", + "no-prefer-dynamic", + "normalize-stderr", + "normalize-stderr-32bit", + "normalize-stderr-64bit", + "normalize-stdout", + "only-16bit", + "only-32bit", + "only-64bit", + "only-aarch64", + "only-aarch64-apple-darwin", + "only-aarch64-unknown-linux-gnu", + "only-apple", + "only-arm", + "only-avr", + "only-beta", + "only-bpf", + "only-cdb", + "only-dist", + "only-elf", + "only-emscripten", + "only-gnu", + "only-i686-pc-windows-gnu", + "only-i686-pc-windows-msvc", + "only-i686-unknown-linux-gnu", + "only-ios", + "only-linux", + "only-loongarch32", + "only-loongarch64", + "only-loongarch64-unknown-linux-gnu", + "only-macos", + "only-mips", + "only-mips64", + "only-msp430", + "only-msvc", + "only-musl", + "only-nightly", + "only-nvptx64", + "only-powerpc", + "only-riscv64", + "only-rustc_abi-x86-sse2", + "only-s390x", + "only-sparc", + "only-sparc64", + "only-stable", + "only-thumb", + "only-tvos", + "only-uefi", + "only-unix", + "only-visionos", + "only-wasm32", + "only-wasm32-bare", + "only-wasm32-wasip1", + "only-watchos", + "only-windows", + "only-windows-gnu", + "only-windows-msvc", + "only-x86", + "only-x86_64", + "only-x86_64-apple-darwin", + "only-x86_64-fortanix-unknown-sgx", + "only-x86_64-pc-windows-gnu", + "only-x86_64-pc-windows-msvc", + "only-x86_64-unknown-linux-gnu", + "pp-exact", + "pretty-compare-only", + "pretty-mode", + "proc-macro", + "reference", + "regex-error-pattern", + "remap-src-base", + "revisions", + "run-crash", + "run-fail", + "run-fail-or-crash", + "run-flags", + "run-pass", + "run-rustfix", + "rustc-env", + "rustfix-only-machine-applicable", + "should-fail", + "should-ice", + "stderr-per-bitwidth", + "test-mir-pass", + "unique-doc-out-dir", + "unset-exec-env", + "unset-rustc-env", + // Used by the tidy check `unknown_revision`. 
+ "unused-revision-names", + // tidy-alphabetical-end +]; + +pub(crate) const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[ + "count", + "!count", + "files", + "!files", + "has", + "!has", + "has-dir", + "!has-dir", + "hasraw", + "!hasraw", + "matches", + "!matches", + "matchesraw", + "!matchesraw", + "snapshot", + "!snapshot", +]; + +pub(crate) const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] = + &["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"]; diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index f3b3605a120..c712185733c 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -31,7 +31,7 @@ use std::time::SystemTime; use std::{env, fs, vec}; use build_helper::git::{get_git_modified_files, get_git_untracked_files}; -use camino::{Utf8Path, Utf8PathBuf}; +use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; use getopts::Options; use rayon::iter::{ParallelBridge, ParallelIterator}; use tracing::debug; @@ -39,7 +39,7 @@ use walkdir::WalkDir; use self::directives::{EarlyProps, make_test_description}; use crate::common::{ - CompareMode, Config, Debugger, PassMode, TestMode, TestPaths, UI_EXTENSIONS, + CodegenBackend, CompareMode, Config, Debugger, PassMode, TestMode, TestPaths, UI_EXTENSIONS, expected_output_path, output_base_dir, output_relative_path, }; use crate::directives::DirectivesCache; @@ -203,6 +203,12 @@ pub fn parse_config(args: Vec<String>) -> Config { "debugger", "only test a specific debugger in debuginfo tests", "gdb | lldb | cdb", + ) + .optopt( + "", + "codegen-backend", + "the codegen backend currently used", + "CODEGEN BACKEND NAME", ); let (argv0, args_) = args.split_first().unwrap(); @@ -264,6 +270,15 @@ pub fn parse_config(args: Vec<String>) -> Config { || directives::extract_llvm_version_from_binary(&matches.opt_str("llvm-filecheck")?), ); + let codegen_backend = match matches.opt_str("codegen-backend").as_deref() { + Some(backend) => match CodegenBackend::try_from(backend) { + Ok(backend) => backend, + Err(error) => panic!("invalid value `{backend}` for `--codegen-backend`: {error}"), + }, + // By default, it's always llvm. + None => CodegenBackend::Llvm, + }; + let run_ignored = matches.opt_present("ignored"); let with_rustc_debug_assertions = matches.opt_present("with-rustc-debug-assertions"); let with_std_debug_assertions = matches.opt_present("with-std-debug-assertions"); @@ -449,6 +464,8 @@ pub fn parse_config(args: Vec<String>) -> Config { diff_command: matches.opt_str("compiletest-diff-tool"), minicore_path: opt_path(matches, "minicore-path"), + + codegen_backend, } } @@ -782,6 +799,23 @@ fn collect_tests_from_dir( return Ok(TestCollector::new()); } + let mut components = dir.components().rev(); + if let Some(Utf8Component::Normal(last)) = components.next() + && let Some(("assembly" | "codegen", backend)) = last.split_once('-') + && let Some(Utf8Component::Normal(parent)) = components.next() + && parent == "tests" + && let Ok(backend) = CodegenBackend::try_from(backend) + && backend != cx.config.codegen_backend + { + // We ignore asm tests which don't match the current codegen backend. + warning!( + "Ignoring tests in `{dir}` because they don't match the configured codegen \ + backend (`{}`)", + cx.config.codegen_backend.as_str(), + ); + return Ok(TestCollector::new()); + } + // For run-make tests, a "test file" is actually a directory that contains an `rmake.rs`. 
if cx.config.mode == TestMode::RunMake { let mut collector = TestCollector::new(); diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index cb8f593c9df..f66d4f98f1f 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -16,8 +16,8 @@ use regex::{Captures, Regex}; use tracing::*; use crate::common::{ - CompareMode, Config, Debugger, FailMode, PassMode, TestMode, TestPaths, TestSuite, - UI_EXTENSIONS, UI_FIXED, UI_RUN_STDERR, UI_RUN_STDOUT, UI_STDERR, UI_STDOUT, UI_SVG, + CompareMode, Config, Debugger, FailMode, PassMode, RunFailMode, RunResult, TestMode, TestPaths, + TestSuite, UI_EXTENSIONS, UI_FIXED, UI_RUN_STDERR, UI_RUN_STDOUT, UI_STDERR, UI_STDOUT, UI_SVG, UI_WINDOWS_SVG, expected_output_path, incremental_dir, output_base_dir, output_base_name, output_testname_unique, }; @@ -282,7 +282,8 @@ impl<'test> TestCx<'test> { fn should_run(&self, pm: Option<PassMode>) -> WillExecute { let test_should_run = match self.config.mode { TestMode::Ui - if pm == Some(PassMode::Run) || self.props.fail_mode == Some(FailMode::Run) => + if pm == Some(PassMode::Run) + || matches!(self.props.fail_mode, Some(FailMode::Run(_))) => { true } diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs index a4103c5b4a9..ba824124e87 100644 --- a/src/tools/compiletest/src/runtest/debugger.rs +++ b/src/tools/compiletest/src/runtest/debugger.rs @@ -47,10 +47,14 @@ impl DebuggerCommands { continue; }; - if let Some(command) = config.parse_name_value_directive(&line, &command_directive) { + if let Some(command) = + config.parse_name_value_directive(&line, &command_directive, file, line_no) + { commands.push(command); } - if let Some(pattern) = config.parse_name_value_directive(&line, &check_directive) { + if let Some(pattern) = + config.parse_name_value_directive(&line, &check_directive, file, line_no) + { check_lines.push((line_no, pattern)); } } diff --git a/src/tools/compiletest/src/runtest/ui.rs b/src/tools/compiletest/src/runtest/ui.rs index f6bc85cd051..0507c2600ae 100644 --- a/src/tools/compiletest/src/runtest/ui.rs +++ b/src/tools/compiletest/src/runtest/ui.rs @@ -6,8 +6,8 @@ use rustfix::{Filter, apply_suggestions, get_suggestions_from_json}; use tracing::debug; use super::{ - AllowUnused, Emit, FailMode, LinkToAux, PassMode, TargetLocation, TestCx, TestOutput, - Truncated, UI_FIXED, WillExecute, + AllowUnused, Emit, FailMode, LinkToAux, PassMode, RunFailMode, RunResult, TargetLocation, + TestCx, TestOutput, Truncated, UI_FIXED, WillExecute, }; use crate::json; @@ -140,12 +140,53 @@ impl TestCx<'_> { &proc_res, ); } + let code = proc_res.status.code(); + let run_result = if proc_res.status.success() { + RunResult::Pass + } else if code.is_some_and(|c| c >= 1 && c <= 127) { + RunResult::Fail + } else { + RunResult::Crash + }; + // Help users understand why the test failed by including the actual + // exit code and actual run result in the failure message. + let pass_hint = format!("code={code:?} so test would pass with `{run_result}`"); if self.should_run_successfully(pm) { - if !proc_res.status.success() { - self.fatal_proc_rec("test run failed!", &proc_res); + if run_result != RunResult::Pass { + self.fatal_proc_rec( + &format!("test did not exit with success! 
{pass_hint}"), + &proc_res, + ); + } + } else if self.props.fail_mode == Some(FailMode::Run(RunFailMode::Fail)) { + // If the test is marked as `run-fail` but do not support + // unwinding we allow it to crash, since a panic will trigger an + // abort (crash) instead of unwind (exit with code 101). + let crash_ok = !self.config.can_unwind(); + if run_result != RunResult::Fail && !(crash_ok && run_result == RunResult::Crash) { + let err = if crash_ok { + format!( + "test did not exit with failure or crash (`{}` can't unwind)! {pass_hint}", + self.config.target + ) + } else { + format!("test did not exit with failure! {pass_hint}") + }; + self.fatal_proc_rec(&err, &proc_res); } - } else if proc_res.status.success() { - self.fatal_proc_rec("test run succeeded!", &proc_res); + } else if self.props.fail_mode == Some(FailMode::Run(RunFailMode::Crash)) { + if run_result != RunResult::Crash { + self.fatal_proc_rec(&format!("test did not crash! {pass_hint}"), &proc_res); + } + } else if self.props.fail_mode == Some(FailMode::Run(RunFailMode::FailOrCrash)) { + if run_result != RunResult::Fail && run_result != RunResult::Crash { + self.fatal_proc_rec( + &format!("test did not exit with failure or crash! {pass_hint}"), + &proc_res, + ); + } + } else { + unreachable!("run_ui_test() must not be called if the test should not run"); } self.get_output(&proc_res) diff --git a/src/tools/enzyme b/src/tools/enzyme -Subproject b5098d515d5e1bd0f5470553bc0d18da9794ca8 +Subproject 2cccfba93c1650f26f1cf8be8aa875a7c1d23fb diff --git a/src/tools/generate-copyright/Cargo.toml b/src/tools/generate-copyright/Cargo.toml index e420a450d42..bcb3165de45 100644 --- a/src/tools/generate-copyright/Cargo.toml +++ b/src/tools/generate-copyright/Cargo.toml @@ -9,7 +9,7 @@ description = "Produces a manifest of all the copyrighted materials in the Rust [dependencies] anyhow = "1.0.65" askama = "0.14.0" -cargo_metadata = "0.18.1" +cargo_metadata = "0.21" serde = { version = "1.0.147", features = ["derive"] } serde_json = "1.0.85" thiserror = "1" diff --git a/src/tools/generate-copyright/src/cargo_metadata.rs b/src/tools/generate-copyright/src/cargo_metadata.rs index 3fae26bda47..87cd85c8def 100644 --- a/src/tools/generate-copyright/src/cargo_metadata.rs +++ b/src/tools/generate-copyright/src/cargo_metadata.rs @@ -92,7 +92,8 @@ pub fn get_metadata( continue; } // otherwise it's an out-of-tree dependency - let package_id = Package { name: package.name, version: package.version.to_string() }; + let package_id = + Package { name: package.name.to_string(), version: package.version.to_string() }; output.insert( package_id, PackageMetadata { diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml index 7123d43eb56..fb5bff3fe63 100644 --- a/src/tools/linkchecker/Cargo.toml +++ b/src/tools/linkchecker/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "linkchecker" version = "0.1.0" -edition = "2021" +edition = "2024" [[bin]] name = "linkchecker" diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 84cba3f8c44..1dc45728c90 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -17,12 +17,13 @@ //! should catch the majority of "broken link" cases. 
use std::cell::{Cell, RefCell}; +use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; -use std::io::ErrorKind; +use std::fs; +use std::iter::once; use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use std::time::Instant; -use std::{env, fs}; use html5ever::tendril::ByteTendril; use html5ever::tokenizer::{ @@ -110,10 +111,25 @@ macro_rules! t { }; } +struct Cli { + docs: PathBuf, + link_targets_dirs: Vec<PathBuf>, +} + fn main() { - let docs = env::args_os().nth(1).expect("doc path should be first argument"); - let docs = env::current_dir().unwrap().join(docs); - let mut checker = Checker { root: docs.clone(), cache: HashMap::new() }; + let cli = match parse_cli() { + Ok(cli) => cli, + Err(err) => { + eprintln!("error: {err}"); + usage_and_exit(1); + } + }; + + let mut checker = Checker { + root: cli.docs.clone(), + link_targets_dirs: cli.link_targets_dirs, + cache: HashMap::new(), + }; let mut report = Report { errors: 0, start: Instant::now(), @@ -125,7 +141,7 @@ fn main() { intra_doc_exceptions: 0, has_broken_urls: false, }; - checker.walk(&docs, &mut report); + checker.walk(&cli.docs, &mut report); report.report(); if report.errors != 0 { println!("found some broken links"); @@ -133,8 +149,50 @@ fn main() { } } +fn parse_cli() -> Result<Cli, String> { + fn to_absolute_path(arg: &str) -> Result<PathBuf, String> { + std::path::absolute(arg).map_err(|e| format!("could not convert to absolute {arg}: {e}")) + } + + let mut verbatim = false; + let mut docs = None; + let mut link_targets_dirs = Vec::new(); + + let mut args = std::env::args().skip(1); + while let Some(arg) = args.next() { + if !verbatim && arg == "--" { + verbatim = true; + } else if !verbatim && (arg == "-h" || arg == "--help") { + usage_and_exit(0) + } else if !verbatim && arg == "--link-targets-dir" { + link_targets_dirs.push(to_absolute_path( + &args.next().ok_or("missing value for --link-targets-dir")?, + )?); + } else if !verbatim && let Some(value) = arg.strip_prefix("--link-targets-dir=") { + link_targets_dirs.push(to_absolute_path(value)?); + } else if !verbatim && arg.starts_with('-') { + return Err(format!("unknown flag: {arg}")); + } else if docs.is_none() { + docs = Some(arg); + } else { + return Err("too many positional arguments".into()); + } + } + + Ok(Cli { + docs: to_absolute_path(&docs.ok_or("missing first positional argument")?)?, + link_targets_dirs, + }) +} + +fn usage_and_exit(code: i32) -> ! { + eprintln!("usage: linkchecker PATH [--link-targets-dir=PATH ...]"); + std::process::exit(code) +} + struct Checker { root: PathBuf, + link_targets_dirs: Vec<PathBuf>, cache: Cache, } @@ -420,37 +478,34 @@ impl Checker { /// Load a file from disk, or from the cache if available. 
fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry) { - // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - #[cfg(windows)] - const ERROR_INVALID_NAME: i32 = 123; - let pretty_path = file.strip_prefix(&self.root).unwrap_or(file).to_str().unwrap().to_string(); - let entry = - self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) { + for base in once(&self.root).chain(self.link_targets_dirs.iter()) { + let entry = self.cache.entry(pretty_path.clone()); + if let Entry::Occupied(e) = &entry + && !matches!(e.get(), FileEntry::Missing) + { + break; + } + + let file = base.join(&pretty_path); + entry.insert_entry(match fs::metadata(&file) { Ok(metadata) if metadata.is_dir() => FileEntry::Dir, Ok(_) => { if file.extension().and_then(|s| s.to_str()) != Some("html") { FileEntry::OtherFile } else { report.html_files += 1; - load_html_file(file, report) + load_html_file(&file, report) } } - Err(e) if e.kind() == ErrorKind::NotFound => FileEntry::Missing, - Err(e) => { - // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` rather than `NotFound`. - // Explicitly check for that so that the broken link can be allowed in `LINKCHECK_EXCEPTIONS`. - #[cfg(windows)] - if e.raw_os_error() == Some(ERROR_INVALID_NAME) - && file.as_os_str().to_str().map_or(false, |s| s.contains("::")) - { - return FileEntry::Missing; - } - panic!("unexpected read error for {}: {}", file.display(), e); - } + Err(e) if is_not_found_error(&file, &e) => FileEntry::Missing, + Err(e) => panic!("unexpected read error for {}: {}", file.display(), e), }); + } + + let entry = self.cache.get(&pretty_path).unwrap(); (pretty_path, entry) } } @@ -629,3 +684,16 @@ fn parse_ids(ids: &mut HashSet<String>, file: &str, source: &str, report: &mut R ids.insert(encoded); } } + +fn is_not_found_error(path: &Path, error: &std::io::Error) -> bool { + // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + const WINDOWS_ERROR_INVALID_NAME: i32 = 123; + + error.kind() == std::io::ErrorKind::NotFound + // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` + // rather than `NotFound`. Explicitly check for that so that the broken link can be allowed + // in `LINKCHECK_EXCEPTIONS`. 
+ || (cfg!(windows) + && error.raw_os_error() == Some(WINDOWS_ERROR_INVALID_NAME) + && path.as_os_str().to_str().map_or(false, |s| s.contains("::"))) +} diff --git a/src/tools/miri/.github/workflows/ci.yml b/src/tools/miri/.github/workflows/ci.yml index 11c0f08debe..c47f9695624 100644 --- a/src/tools/miri/.github/workflows/ci.yml +++ b/src/tools/miri/.github/workflows/ci.yml @@ -45,11 +45,17 @@ jobs: os: macos-latest - host_target: i686-pc-windows-msvc os: windows-latest + - host_target: aarch64-pc-windows-msvc + os: windows-11-arm runs-on: ${{ matrix.os }} env: HOST_TARGET: ${{ matrix.host_target }} steps: - uses: actions/checkout@v4 + - name: apt update + if: ${{ startsWith(matrix.os, 'ubuntu') }} + # The runners seem to have outdated apt repos sometimes + run: sudo apt update - name: install qemu if: ${{ matrix.qemu }} run: sudo apt install qemu-user qemu-user-binfmt @@ -63,6 +69,12 @@ jobs: sudo apt update # Install needed packages sudo apt install $(echo "libatomic1: zlib1g-dev:" | sed 's/:/:${{ matrix.multiarch }}/g') + - name: Install rustup on Windows ARM + if: ${{ matrix.os == 'windows-11-arm' }} + run: | + curl -LOs https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe + ./rustup-init.exe -y --no-modify-path + echo "$USERPROFILE/.cargo/bin" >> "$GITHUB_PATH" - uses: ./.github/workflows/setup with: toolchain_flags: "--host ${{ matrix.host_target }}" @@ -147,35 +159,48 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 256 # get a bit more of the history - - name: install josh-proxy - run: cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 + - name: install josh-sync + run: cargo +stable install --locked --git https://github.com/rust-lang/josh-sync - name: setup bot git name and email run: | git config --global user.name 'The Miri Cronjob Bot' git config --global user.email 'miri@cron.bot' - name: Install nightly toolchain run: rustup toolchain install nightly --profile minimal - - name: get changes from rustc - run: ./miri rustc-pull - name: Install rustup-toolchain-install-master run: cargo install -f rustup-toolchain-install-master - - name: format changes (if any) + - name: Push changes to a branch and create PR run: | + # Make it easier to see what happens. + set -x + # Temporarily disable early exit to examine the status code of rustc-josh-sync + set +e + rustc-josh-sync pull + exitcode=$? + set -e + + # If there were no changes to pull, rustc-josh-sync returns status code 2. + # In that case, skip the rest of the job. + if [ $exitcode -eq 2 ]; then + echo "Nothing changed in rustc, skipping PR" + exit 0 + elif [ $exitcode -ne 0 ]; then + # If return code was not 0 or 2, rustc-josh-sync actually failed + echo "error: rustc-josh-sync failed" + exit ${exitcode} + fi + + # Format changes ./miri toolchain ./miri fmt --check || (./miri fmt && git commit -am "fmt") - - name: Push changes to a branch and create PR - run: | - # `git diff --exit-code` "succeeds" if the diff is empty. - if git diff --exit-code HEAD^; then echo "Nothing changed in rustc, skipping PR"; exit 0; fi - # The diff is non-empty, create a PR. + + # Create a PR BRANCH="rustup-$(date -u +%Y-%m-%d)" git switch -c $BRANCH git push -u origin $BRANCH gh pr create -B master --title 'Automatic Rustup' --body 'Please close and re-open this PR to trigger CI, then enable auto-merge.' 
env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} - ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }} cron-fail-notify: name: cronjob failure notification @@ -198,7 +223,7 @@ jobs: It would appear that the [Miri cron job build]('"https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"') failed. This likely means that rustc changed the miri directory and - we now need to do a [`./miri rustc-pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). + we now need to do a [`rustc-josh-sync pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). Would you mind investigating this issue? diff --git a/src/tools/miri/.gitignore b/src/tools/miri/.gitignore index ed2d0ba7ba0..4a238dc0313 100644 --- a/src/tools/miri/.gitignore +++ b/src/tools/miri/.gitignore @@ -1,5 +1,4 @@ target -/doc tex/*/out *.dot *.out diff --git a/src/tools/miri/CONTRIBUTING.md b/src/tools/miri/CONTRIBUTING.md index fef7f807e93..7d78fdddbad 100644 --- a/src/tools/miri/CONTRIBUTING.md +++ b/src/tools/miri/CONTRIBUTING.md @@ -13,16 +13,20 @@ for a list of Miri maintainers. [Rust Zulip]: https://rust-lang.zulipchat.com -### Pull review process +### PR review process When you get a review, please take care of the requested changes in new commits. Do not amend existing commits. Generally avoid force-pushing. The only time you should force push is when there is a conflict with the master branch (in that case you should rebase across master, not merge), and all the way at the end of the review process when the reviewer tells you that the PR is done and you -should squash the commits. If you are unsure how to use `git rebase` to squash commits, use `./miri -squash` which automates the process but leaves little room for customization. (All this is to work -around the fact that Github is quite bad at dealing with force pushes and does not support `git -range-diff`. Maybe one day Github will be good at git and then life can become easier.) +should squash the commits. (All this is to work around the fact that Github is quite bad at +dealing with force pushes and does not support `git range-diff`.) + +The recommended way to squash commits is to use `./miri squash`, which will make everything into a +single commit. You will be asked for the commit message; please ensure it describes the entire PR. +You can also use `git rebase` manually if you need more control (e.g. if there should be more than +one commit at the end), but then please use `--keep-base` to ensure the PR remains based on the same +upstream commit. Most PRs bounce back and forth between the reviewer and the author several times, so it is good to keep track of who is expected to take the next step. We are using the `S-waiting-for-review` and @@ -293,14 +297,14 @@ You can also directly run Miri on a Rust source file: ## Advanced topic: Syncing with the rustc repo -We use the [`josh` proxy](https://github.com/josh-project/josh) to transmit changes between the +We use the [`josh-sync`](https://github.com/rust-lang/josh-sync) tool to transmit changes between the rustc and Miri repositories. You can install it as follows: ```sh -cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 +cargo install --locked --git https://github.com/rust-lang/josh-sync ``` -Josh will automatically be started and stopped by `./miri`. 
+The commands below will automatically install and manage the [Josh](https://github.com/josh-project/josh) proxy that performs the actual work. ### Importing changes from the rustc repo @@ -308,10 +312,12 @@ Josh will automatically be started and stopped by `./miri`. We assume we start on an up-to-date master branch in the Miri repo. +1) First, create a branch for the pull, e.g. `git checkout -b rustup` +2) Then run the following: ```sh # Fetch and merge rustc side of the history. Takes ca 5 min the first time. # This will also update the `rustc-version` file. -./miri rustc-pull +rustc-josh-sync pull # Update local toolchain and apply formatting. ./miri toolchain && ./miri fmt git commit -am "rustup" @@ -324,12 +330,12 @@ needed. ### Exporting changes to the rustc repo -We will use the josh proxy to push to your fork of rustc. Run the following in the Miri repo, +We will use the `josh-sync` tool to push to your fork of rustc. Run the following in the Miri repo, assuming we are on an up-to-date master branch: ```sh # Push the Miri changes to your rustc fork (substitute your github handle for YOUR_NAME). -./miri rustc-push YOUR_NAME miri +rustc-josh-sync push miri YOUR_NAME ``` This will create a new branch called `miri` in your fork, and the output should include a link that @@ -348,6 +354,7 @@ https. Add the following to your `.gitconfig`: The following environment variables are relevant to `./miri`: +* `CARGO` sets the binary used to execute Cargo; if none is specified, defaults to `cargo`. * `MIRI_AUTO_OPS` indicates whether the automatic execution of rustfmt, clippy and toolchain setup (as controlled by the `./auto-*` files) should be skipped. If it is set to `no`, they are skipped. This is used to allow automated IDE actions to avoid the auto ops. 
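As a hedged aside on the `./miri` environment variables documented in the CONTRIBUTING.md hunk just above: the two variables compose naturally on a single invocation. The sketch below is not part of this patch; the `./miri test` subcommand and the Cargo path are assumptions used purely for illustration.

```sh
# Hedged illustration (not from the diff): point ./miri at a specific Cargo
# binary via CARGO, and set MIRI_AUTO_OPS=no to skip the automatic
# rustfmt/clippy/toolchain setup for this one run.
# `./miri test` and the /opt path are assumed; substitute your own.
CARGO=/opt/rust/bin/cargo MIRI_AUTO_OPS=no ./miri test
```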
diff --git a/src/tools/miri/Cargo.lock b/src/tools/miri/Cargo.lock index aa6f059cec2..b46f0f83420 100644 --- a/src/tools/miri/Cargo.lock +++ b/src/tools/miri/Cargo.lock @@ -4,18 +4,18 @@ version = 4 [[package]] name = "addr2line" -version = "0.21.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aes" @@ -44,40 +44,40 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", - "unicode-width 0.2.0", + "unicode-width 0.2.1", ] [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anyhow" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "backtrace" -version = "0.3.71" +version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -91,15 +91,15 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "bstr" -version = "1.11.3" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "531a9155a481e2ee699d4f98f43c0ca4ff8ee1bfd55c31e9e98fb29d2b176fe0" +checksum = "234113d19d0d7d613b40e86fb654acf958910802bcceab913a4f9e7cda03b1a4" dependencies = [ "memchr", "regex-automata", @@ -108,15 +108,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "camino" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" dependencies = [ "serde", ] @@ -166,18 +166,20 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.17" +version = "1.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fcb57c740ae1daf453ae85f16e37396f672b039e00d9d866e07ddb24e328e3a" +checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" dependencies = [ + "jobserver", + "libc", "shlex", ] [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "cfg_aliases" @@ -187,35 +189,24 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.40" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a7964611d71df112cb1730f2ee67324fcf4d0fc6606acbbe9bfe06df124637c" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "num-traits", ] [[package]] name = "chrono-tz" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efdce149c370f133a071ca8ef6ea340b7b88748ab0810097a9e2976eaa34b4f3" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" dependencies = [ "chrono", - "chrono-tz-build", "phf", ] [[package]] -name = "chrono-tz-build" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f10f8c9340e31fc120ff885fcdb54a0b48e474bbd77cab557f0c30a3e569402" -dependencies = [ - "parse-zoneinfo", - "phf_codegen", -] - -[[package]] name = "cipher" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -226,28 +217,74 @@ dependencies = [ ] [[package]] +name = "clap" +version = "4.5.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" +dependencies = [ + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + +[[package]] +name = "codespan-reporting" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" +dependencies = [ + "serde", + "termcolor", + "unicode-width 0.2.1", +] + +[[package]] name = "color-eyre" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" +checksum = 
"e5920befb47832a6d61ee3a3a846565cfa39b331331e68a3b1d1116630f2f26d" dependencies = [ "backtrace", "color-spantrace", "eyre", "indenter", "once_cell", - "owo-colors", + "owo-colors 4.2.2", "tracing-error", ] [[package]] name = "color-spantrace" -version = "0.2.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "b8b88ea9df13354b55bc7234ebcce36e6ef896aca2e42a15de9e10edce01b427" dependencies = [ "once_cell", - "owo-colors", + "owo-colors 4.2.2", "tracing-core", "tracing-error", ] @@ -263,6 +300,15 @@ dependencies = [ ] [[package]] +name = "colored" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] name = "comma" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -277,7 +323,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "windows-sys 0.59.0", ] @@ -316,6 +362,68 @@ dependencies = [ ] [[package]] +name = "cxx" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3523cc02ad831111491dd64b27ad999f1ae189986728e477604e61b81f828df" +dependencies = [ + "cc", + "cxxbridge-cmd", + "cxxbridge-flags", + "cxxbridge-macro", + "foldhash", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212b754247a6f07b10fa626628c157593f0abf640a3dd04cce2760eca970f909" +dependencies = [ + "cc", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-cmd" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f426a20413ec2e742520ba6837c9324b55ffac24ead47491a6e29f933c5b135a" +dependencies = [ + "clap", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "cxxbridge-flags" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258b6069020b4e5da6415df94a50ee4f586a6c38b037a180e940a43d06a070d" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8dec184b52be5008d6eaf7e62fc1802caf1ad1227d11b3b7df2c409c7ffc3f4" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + +[[package]] name = "directories" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -333,7 +441,18 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.60.2", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", ] [[package]] @@ -343,13 +462,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] name = "errno" -version = 
"0.3.11" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -375,6 +500,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -385,21 +525,32 @@ dependencies = [ ] [[package]] +name = "genmc-sys" +version = "0.1.0" +dependencies = [ + "cc", + "cmake", + "cxx", + "cxx-build", + "git2", +] + +[[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", @@ -409,9 +560,137 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "git2" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] [[package]] name = "indenter" @@ -420,6 +699,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] name = "indicatif" version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -428,7 +717,7 @@ dependencies = [ "console", "number_prefix", "portable-atomic", - "unicode-width 0.2.0", + "unicode-width 0.2.1", "web-time", ] @@ -443,17 +732,16 @@ dependencies = [ [[package]] name = "ipc-channel" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8251fb7bcd9ccd3725ed8deae9fe7db8e586495c9eb5b0c52e6233e5e75ea" +checksum = "5b1c98b70019c830a1fc39cecfe1f60ff99c4122f0a189697c810c90ec545c14" dependencies = [ "bincode", "crossbeam-channel", "fnv", - "lazy_static", "libc", "mio", - "rand 0.8.5", + "rand", "serde", "tempfile", "uuid", @@ -467,6 +755,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] name = "js-sys" version = "0.3.77" source = 
"registry+https://github.com/rust-lang/crates.io-index" @@ -490,15 +788,15 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "libc" -version = "0.2.171" +version = "0.2.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] name = "libffi" -version = "4.0.0" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9434b6fc77375fb624698d5f8c49d7e80b10d59eb1219afda27d1f824d4074" +checksum = "e7681c6fab541f799a829e44a445a0666cf8d8a6cfebf89419e6aed52c604e87" dependencies = [ "libc", "libffi-sys", @@ -506,44 +804,84 @@ dependencies = [ [[package]] name = "libffi-sys" -version = "3.2.0" +version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ead36a2496acfc8edd6cc32352110e9478ac5b9b5f5b9856ebd3d28019addb84" +checksum = "7b0d828d367b4450ed08e7d510dc46636cd660055f50d67ac943bfe788767c29" dependencies = [ "cc", ] [[package]] +name = "libgit2-sys" +version = "0.18.2+1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] name = "libloading" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" dependencies = [ "cfg-if", - "windows-targets", + "windows-targets 0.53.2", ] [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638" dependencies = [ "bitflags", "libc", ] [[package]] +name = "libz-sys" +version = "1.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a6f6da007f968f9def0d65a05b187e2960183de70c160204ecfccf0ee330212" +dependencies = [ + "cc", +] + +[[package]] name = "linux-raw-sys" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", @@ -557,9 +895,9 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" 
[[package]] name = "measureme" -version = "12.0.1" +version = "12.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "570a507d8948a66a97f42cbbaf8a6bb9516a51017d4ee949502ad7a10a864395" +checksum = "6ebd1ebda747ae161a4a377bf93f87e18d46faad2331cc0c7d25b84b1d445f49" dependencies = [ "log", "memmap2", @@ -571,9 +909,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memmap2" @@ -586,23 +924,22 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ - "adler", + "adler2", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", - "log", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", ] [[package]] @@ -614,16 +951,17 @@ dependencies = [ "capstone", "chrono", "chrono-tz", - "colored", + "colored 3.0.0", "directories", - "getrandom 0.3.2", + "genmc-sys", + "getrandom 0.3.3", "ipc-channel", "libc", "libffi", "libloading", "measureme", "nix", - "rand 0.9.0", + "rand", "regex", "rustc_version", "serde", @@ -663,9 +1001,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.2" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] @@ -677,6 +1015,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] name = "option-ext" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -689,6 +1045,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" [[package]] +name = "owo-colors" +version = "4.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48dd4f4a2c8405440fd0462561f0e5806bd0f77e86f51c761481bdd4018b545e" + +[[package]] name = "pad" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -699,9 +1061,9 @@ dependencies = [ [[package]] name = "parking_lot" -version = 
"0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = [ "lock_api", "parking_lot_core", @@ -709,25 +1071,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] -name = "parse-zoneinfo" -version = "0.3.1" +name = "percent-encoding" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24" -dependencies = [ - "regex", -] +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "perf-event-open-sys" @@ -740,38 +1099,18 @@ dependencies = [ [[package]] name = "phf" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" -dependencies = [ - "phf_shared", -] - -[[package]] -name = "phf_codegen" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" -dependencies = [ - "phf_generator", - "phf_shared", -] - -[[package]] -name = "phf_generator" -version = "0.11.3" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" dependencies = [ "phf_shared", - "rand 0.8.5", ] [[package]] name = "phf_shared" -version = "0.11.3" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" dependencies = [ "siphasher", ] @@ -783,10 +1122,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] name = "portable-atomic" -version = "1.11.0" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "potential_utf" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "350e9b48cbc6b0e028b0473b114454c6316e57336ee184ceab6e53f72c178b3e" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] [[package]] name = "ppv-lite86" @@ -803,15 +1157,15 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abec3fb083c10660b3854367697da94c674e9e82aa7511014dc958beeb7215e9" dependencies = [ - "owo-colors", + "owo-colors 3.5.0", "pad", ] [[package]] 
name = "proc-macro2" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -827,40 +1181,18 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" -version = "0.8.5" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.3", - "zerocopy", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", + "rand_chacha", + "rand_core", ] [[package]] @@ -870,16 +1202,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" dependencies = [ "ppv-lite86", - "rand_core 0.9.3", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.15", + "rand_core", ] [[package]] @@ -888,14 +1211,14 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.3", ] [[package]] name = "redox_syscall" -version = "0.5.10" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1" +checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" dependencies = [ "bitflags", ] @@ -906,7 +1229,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", "thiserror 2.0.12", ] @@ -942,9 +1265,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" [[package]] name = "rustc-hash" @@ -975,18 +1298,24 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.5" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] +name = "rustversion" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" + +[[package]] name = "ryu" version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -999,6 +1328,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] +name = "scratch" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f6280af86e5f559536da57a45ebc84948833b3bee313a7dd25232e09c878a52" + +[[package]] name = "semver" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1029,9 +1364,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" dependencies = [ "itoa", "memchr", @@ -1062,25 +1397,38 @@ checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "smallvec" -version = "1.14.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "spanned" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86af297923fbcfd107c20a189a6e9c872160df71a7190ae4a7a6c5dce4b2feb6" +checksum = "c92d4b0c055fde758f086eb4a6e73410247df8a3837fd606d2caeeaf72aa566d" dependencies = [ + "anyhow", "bstr", "color-eyre", ] [[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] name = "syn" -version = "2.0.100" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -1088,19 +1436,39 @@ dependencies = [ ] [[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - "getrandom 0.3.2", + "getrandom 
0.3.3", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1142,12 +1510,11 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -1161,6 +1528,16 @@ dependencies = [ ] [[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1172,9 +1549,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -1209,9 +1586,9 @@ checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "ui_test" -version = "0.29.2" +version = "0.30.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1211b1111c752c73b33073d2958072be08825fd97c9ab4d83444da361a06634b" +checksum = "b56a6897cc4bb6f8daf1939b0b39cd9645856997f46f4d0b3e3cb7122dfe9251" dependencies = [ "annotate-snippets", "anyhow", @@ -1219,7 +1596,7 @@ dependencies = [ "cargo-platform", "cargo_metadata", "color-eyre", - "colored", + "colored 2.2.0", "comma", "crossbeam-channel", "indicatif", @@ -1247,17 +1624,36 @@ checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "unicode-width" -version = "0.2.0" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9" +checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" dependencies = [ - "getrandom 0.3.2", + "getrandom 0.3.3", + "js-sys", + "wasm-bindgen", ] [[package]] @@ -1267,6 +1663,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1274,9 +1676,9 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -1295,6 +1697,7 @@ checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", + "rustversion", "wasm-bindgen-macro", ] @@ -1355,13 +1758,22 @@ dependencies = [ ] [[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] name = "windows" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" dependencies = [ "windows-core", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -1374,7 +1786,7 @@ dependencies = [ "windows-interface", "windows-result", "windows-strings", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -1405,7 +1817,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -1415,25 +1827,25 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" dependencies = [ "windows-result", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets", + "windows-targets 0.53.2", ] [[package]] @@ -1442,14 +1854,30 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + 
"windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", ] [[package]] @@ -1459,48 +1887,96 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1510,19 +1986,103 @@ dependencies = [ ] [[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] name = "zerocopy" -version = "0.8.24" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2586fea28e186957ef732a5f8b3be2da217d65c5969d4b1e17f973ebbe876879" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.24" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a996a8f63c5c4448cd959ac1bab0aaa3306ccfd060472f85943ee0750f0169be" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml index 75476d7923c..91dadf78a2f 100644 --- a/src/tools/miri/Cargo.toml +++ b/src/tools/miri/Cargo.toml @@ -38,18 +38,23 @@ features = ['unprefixed_malloc_on_supported_platforms'] [target.'cfg(unix)'.dependencies] libc = "0.2" -libffi = "4.0.0" 
-libloading = "0.8" +# native-lib dependencies +libffi = { version = "4.0.0", optional = true } +libloading = { version = "0.8", optional = true } +serde = { version = "1.0.219", features = ["derive"], optional = true } [target.'cfg(target_os = "linux")'.dependencies] -nix = { version = "0.30.1", features = ["mman", "ptrace", "signal"] } -ipc-channel = "0.19.0" -serde = { version = "1.0.219", features = ["derive"] } -capstone = "0.13" +nix = { version = "0.30.1", features = ["mman", "ptrace", "signal"], optional = true } +ipc-channel = { version = "0.20.0", optional = true } +capstone = { version = "0.13", optional = true } + +# FIXME(genmc,macos): Add `target_os = "macos"` once https://github.com/dtolnay/cxx/issues/1535 is fixed. +[target.'cfg(all(target_os = "linux", target_pointer_width = "64", target_endian = "little"))'.dependencies] +genmc-sys = { path = "./genmc-sys/", version = "0.1.0", optional = true } [dev-dependencies] -ui_test = "0.29.1" -colored = "2" +ui_test = "0.30.2" +colored = "3" rustc_version = "0.4" regex = "1.5.5" tempfile = "3" @@ -64,11 +69,12 @@ name = "ui" harness = false [features] -default = ["stack-cache"] -genmc = [] +default = ["stack-cache", "native-lib"] +genmc = ["dep:genmc-sys"] # this enables a GPL dependency! stack-cache = [] stack-cache-consistency-check = ["stack-cache"] tracing = ["serde_json"] +native-lib = ["dep:libffi", "dep:libloading", "dep:capstone", "dep:ipc-channel", "dep:nix", "dep:serde"] [lints.rust.unexpected_cfgs] level = "warn" diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index 7816ce1ac56..7ccd27d7b83 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -286,11 +286,6 @@ environment variable. We first document the most relevant and most commonly used specific circumstances, but Miri's behavior will also be more stable across versions and targets. This is equivalent to `-Zmiri-fixed-schedule -Zmiri-compare-exchange-weak-failure-rate=0.0 -Zmiri-address-reuse-cross-thread-rate=0.0 -Zmiri-disable-weak-memory-emulation`. -* `-Zmiri-deterministic-floats` makes Miri's floating-point behavior fully deterministic. This means - that operations will always return the preferred NaN, imprecise operations will not have any - random error applied to them, and `min`/`max` as "maybe fused" multiply-add all behave - deterministically. Note that Miri still uses host floats for some operations, so behavior can - still differ depending on the host target and setup. * `-Zmiri-disable-isolation` disables host isolation. As a consequence, the program has access to host resources such as environment variables, file systems, and randomness. @@ -324,6 +319,8 @@ environment variable. We first document the most relevant and most commonly used Can be used without a value; in that case the range defaults to `0..64`. * `-Zmiri-many-seeds-keep-going` tells Miri to really try all the seeds in the given range, even if a failing seed has already been found. This is useful to determine which fraction of seeds fails. +* `-Zmiri-no-extra-rounding-error` stops Miri from adding extra rounding errors to float operations + that do not have a guaranteed precision. * `-Zmiri-num-cpus` states the number of available CPUs to be reported by miri. By default, the number of available CPUs is `1`. Note that this flag does not affect how miri handles threads in any way. @@ -376,6 +373,12 @@ to Miri failing to detect cases of undefined behavior in a program. will always fail and `0.0` means it will never fail. 
Note that setting it to `1.0` will likely cause hangs, since it means programs using `compare_exchange_weak` cannot make progress. +* `-Zmiri-deterministic-floats` makes Miri's floating-point behavior fully deterministic. This means + that operations will always return the preferred NaN, imprecise operations will not have any + random error applied to them, and `min`/`max` and "maybe fused" multiply-add all behave + deterministically. Note that Miri still uses host floats for some operations, so behavior can + still differ depending on the host target and setup. See `-Zmiri-no-extra-rounding-error` for + a flag that specifically only disables the random error. * `-Zmiri-disable-alignment-check` disables checking pointer alignment, so you can focus on other failures, but it means Miri can miss bugs in your program. Using this flag is **unsound**. diff --git a/src/tools/miri/cargo-miri/Cargo.lock b/src/tools/miri/cargo-miri/Cargo.lock index d37f8750bde..b3f5dafab64 100644 --- a/src/tools/miri/cargo-miri/Cargo.lock +++ b/src/tools/miri/cargo-miri/Cargo.lock @@ -3,31 +3,28 @@ version = 4 [[package]] -name = "aho-corasick" -version = "1.1.3" +name = "anyhow" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" -dependencies = [ - "memchr", -] +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] -name = "anyhow" -version = "1.0.97" +name = "autocfg" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "camino" -version = "1.1.9" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" dependencies = [ "serde", ] @@ -47,21 +44,38 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.9" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" +checksum = "84982c6c0ae343635a3a4ee6dedef965513735c8b183caa7289fa6e27399ebd4" dependencies = [ "serde", ] [[package]] +name = "cargo-util-schemas" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830" +dependencies = [ + "semver", + "serde", + "serde-untagged", + "serde-value", + "thiserror", + "toml", + "unicode-xid", + "url", +] + +[[package]] name = "cargo_metadata" -version = "0.19.2" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" +checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868" dependencies = [ "camino", "cargo-platform", + "cargo-util-schemas", "semver", "serde", "serde_json", @@ -70,9 +84,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" 
+version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "directories" @@ -92,17 +106,44 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys", + "windows-sys 0.60.2", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "erased-serde" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +dependencies = [ + "serde", + "typeid", ] [[package]] name = "errno" -version = "0.3.11" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.60.2", ] [[package]] @@ -112,21 +153,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", @@ -135,6 +185,129 @@ dependencies = [ ] [[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] name = "itoa" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -142,15 +315,15 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "libc" -version = "0.2.171" +version = "0.2.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638" dependencies = [ "bitflags", "libc", @@ -158,15 +331,30 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] [[package]] name = "once_cell" @@ -181,10 +369,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] +name = "ordered-float" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -200,9 +412,9 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "redox_users" @@ -210,50 +422,21 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", "thiserror", ] [[package]] -name = "regex" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" - -[[package]] name = "rustc-build-sysroot" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d115ad7e26e0d1337f64ae6598f758194696afc2e9f34c8a6f24582529c3dc" +checksum = "fdb13874a0e55baf4ac3d49d38206aecb31a55b75d6c4d04fd850b53942c8cc8" dependencies = [ "anyhow", - "regex", "rustc_version", 
"tempfile", + "toml", "walkdir", ] @@ -274,15 +457,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.5" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.60.2", ] [[package]] @@ -319,6 +502,27 @@ dependencies = [ ] [[package]] +name = "serde-untagged" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "299d9c19d7d466db4ab10addd5703e4c615dec2a5a16dbbafe191045e87ee66e" +dependencies = [ + "erased-serde", + "serde", + "typeid", +] + +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + +[[package]] name = "serde_derive" version = "1.0.219" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -331,9 +535,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" dependencies = [ "itoa", "memchr", @@ -342,10 +546,31 @@ dependencies = [ ] [[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] name = "syn" -version = "2.0.100" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -353,16 +578,27 @@ dependencies = [ ] [[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - "getrandom 0.3.2", + "getrandom 0.3.3", "once_cell", "rustix", - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -386,12 +622,93 @@ dependencies = [ ] [[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", 
+ "zerovec", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + +[[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] name = "walkdir" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -403,9 +720,9 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -422,7 +739,7 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -431,7 +748,16 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.2", ] [[package]] @@ -440,14 +766,30 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", ] [[package]] @@ -457,48 +799,105 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +dependencies = [ + "memchr", +] + +[[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -506,3 +905,87 @@ checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ "bitflags", ] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/src/tools/miri/cargo-miri/Cargo.toml b/src/tools/miri/cargo-miri/Cargo.toml index e08733959cc..77cb1df8e74 100644 --- a/src/tools/miri/cargo-miri/Cargo.toml +++ b/src/tools/miri/cargo-miri/Cargo.toml @@ -17,7 +17,7 @@ doctest = false # 
and no doc tests directories = "6" rustc_version = "0.4" serde_json = "1.0.40" -cargo_metadata = "0.19" +cargo_metadata = "0.21" rustc-build-sysroot = "0.5.8" # Enable some feature flags that dev-dependencies need but dependencies diff --git a/src/tools/miri/cargo-miri/src/phases.rs b/src/tools/miri/cargo-miri/src/phases.rs index b72b974bdbd..efb9053f69a 100644 --- a/src/tools/miri/cargo-miri/src/phases.rs +++ b/src/tools/miri/cargo-miri/src/phases.rs @@ -1,9 +1,9 @@ //! Implements the various phases of `cargo miri run/test`. use std::env; -use std::fs::{self, File}; +use std::fs::File; use std::io::BufReader; -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; use std::process::Command; use rustc_version::VersionMeta; @@ -222,12 +222,12 @@ pub fn phase_cargo_miri(mut args: impl Iterator<Item = String>) { // that to be the Miri driver, but acting as rustc, in host mode. // // In `main`, we need the value of `RUSTC` to distinguish RUSTC_WRAPPER invocations from rustdoc - // or TARGET_RUNNER invocations, so we canonicalize it here to make it exceedingly unlikely that + // or TARGET_RUNNER invocations, so we make it absolute to make it exceedingly unlikely that // there would be a collision with other invocations of cargo-miri (as rustdoc or as runner). We // explicitly do this even if RUSTC_STAGE is set, since for these builds we do *not* want the // bootstrap `rustc` thing in our way! Instead, we have MIRI_HOST_SYSROOT to use for host // builds. - cmd.env("RUSTC", fs::canonicalize(find_miri()).unwrap()); + cmd.env("RUSTC", path::absolute(find_miri()).unwrap()); // In case we get invoked as RUSTC without the wrapper, let's be a host rustc. This makes no // sense for cross-interpretation situations, but without the wrapper, this will use the host // sysroot, so asking it to behave like a target build makes even less sense. diff --git a/src/tools/miri/cargo-miri/src/util.rs b/src/tools/miri/cargo-miri/src/util.rs index 43b2a1b6173..82c6a929357 100644 --- a/src/tools/miri/cargo-miri/src/util.rs +++ b/src/tools/miri/cargo-miri/src/util.rs @@ -129,7 +129,8 @@ pub fn exec(mut cmd: Command) -> ! { // On non-Unix imitate POSIX exec as closely as we can #[cfg(not(unix))] { - let exit_status = cmd.status().expect("failed to run command"); + let exit_status = + cmd.status().unwrap_or_else(|err| panic!("failed to run `{cmd:?}`:\n{err}")); std::process::exit(exit_status.code().unwrap_or(-1)) } // On Unix targets, actually exec. @@ -138,8 +139,8 @@ pub fn exec(mut cmd: Command) -> ! 
{ #[cfg(unix)] { use std::os::unix::process::CommandExt; - let error = cmd.exec(); - panic!("failed to run command: {error}") + let err = cmd.exec(); + panic!("failed to run `{cmd:?}`:\n{err}") } } diff --git a/src/tools/miri/ci/ci.sh b/src/tools/miri/ci/ci.sh index 5767d178279..b66530e77b8 100755 --- a/src/tools/miri/ci/ci.sh +++ b/src/tools/miri/ci/ci.sh @@ -142,7 +142,6 @@ case $HOST_TARGET in # Host GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests # Extra tier 1 - MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests MANY_SEEDS=64 TEST_TARGET=x86_64-apple-darwin run_tests MANY_SEEDS=64 TEST_TARGET=x86_64-pc-windows-gnu run_tests ;; @@ -161,8 +160,6 @@ case $HOST_TARGET in aarch64-unknown-linux-gnu) # Host GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests - # Extra tier 1 candidate - MANY_SEEDS=64 TEST_TARGET=aarch64-pc-windows-msvc run_tests # Extra tier 2 MANY_SEEDS=16 TEST_TARGET=arm-unknown-linux-gnueabi run_tests # 32bit ARM MANY_SEEDS=16 TEST_TARGET=aarch64-pc-windows-gnullvm run_tests # gnullvm ABI @@ -189,13 +186,20 @@ case $HOST_TARGET in ;; i686-pc-windows-msvc) # Host - # Without GC_STRESS as this is the slowest runner. + # Without GC_STRESS as this is a very slow runner. MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 run_tests # Extra tier 1 # We really want to ensure a Linux target works on a Windows host, # and a 64bit target works on a 32bit host. TEST_TARGET=x86_64-unknown-linux-gnu run_tests ;; + aarch64-pc-windows-msvc) + # Host + # Without GC_STRESS as this is a very slow runner. + MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests + # Extra tier 1 + MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests + ;; *) echo "FATAL: unknown host target: $HOST_TARGET" exit 1 diff --git a/src/tools/miri/doc/genmc.md b/src/tools/miri/doc/genmc.md new file mode 100644 index 00000000000..5aabe90b5da --- /dev/null +++ b/src/tools/miri/doc/genmc.md @@ -0,0 +1,62 @@ +# **(WIP)** Documentation for Miri-GenMC + +[GenMC](https://github.com/MPI-SWS/genmc) is a stateless model checker for exploring concurrent executions of a program. +Miri-GenMC integrates that model checker into Miri. + +**NOTE: Currently, no actual GenMC functionality is part of Miri, this is still WIP.** + +<!-- FIXME(genmc): add explanation. --> + +## Usage + +**IMPORTANT: The license of GenMC and thus the `genmc-sys` crate in the Miri repo is currently "GPL-3.0-or-later", so a binary produced with the `genmc` feature is subject to the requirements of the GPL. As long as that remains the case, the `genmc` feature of Miri is OFF-BY-DEFAULT and must be OFF for all Miri releases.** + +For testing/developing Miri-GenMC (while keeping in mind the licensing issues): +- clone the Miri repo. +- build Miri-GenMC with `./miri build --features=genmc`. +- OR: install Miri-GenMC in the current system with `./miri install --features=genmc` + +Basic usage: +```shell +MIRIFLAGS="-Zmiri-genmc" cargo miri run +``` + +<!-- FIXME(genmc): explain options. --> + +<!-- FIXME(genmc): explain Miri-GenMC specific functions. --> + +## Tips + +<!-- FIXME(genmc): add tips for using Miri-GenMC more efficiently. --> + +## Limitations + +Some or all of these limitations might get removed in the future: + +- Borrow tracking is currently incompatible (stacked/tree borrows). +- Only Linux is supported for now. +- No support for 32-bit or big-endian targets. +- No cross-target interpretation. 
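For orientation, the kind of program such a checker is aimed at is a small racy concurrent test like the store-buffering sketch below (illustrative only, not taken from the GenMC sources or the Miri test suite); it would be run under `MIRIFLAGS="-Zmiri-genmc" cargo miri run` as described in the Usage section above.

```rust
// Illustrative sketch: two threads racing on relaxed atomics (the classic
// "store buffering" pattern). A stateless model checker enumerates the
// consistent executions of such a program instead of sampling one schedule.
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;

static X: AtomicUsize = AtomicUsize::new(0);
static Y: AtomicUsize = AtomicUsize::new(0);

fn main() {
    let t1 = thread::spawn(|| {
        X.store(1, Ordering::Relaxed);
        Y.load(Ordering::Relaxed)
    });
    let t2 = thread::spawn(|| {
        Y.store(1, Ordering::Relaxed);
        X.load(Ordering::Relaxed)
    });
    let (a, b) = (t1.join().unwrap(), t2.join().unwrap());
    // Under the RC11 memory model both loads may observe 0 in the same
    // execution; exploring all such outcomes is exactly what the checker does.
    println!("a = {a}, b = {b}");
}
```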
+ +<!-- FIXME(genmc): document remaining limitations --> + +## Development + +GenMC is written in C++, which complicates development a bit. +The prerequisites for building Miri-GenMC are: +- A compiler with C++23 support. +- LLVM developments headers and clang. + <!-- FIXME(genmc,llvm): remove once LLVM dependency is no longer required. --> + +The actual code for GenMC is not contained in the Miri repo itself, but in a [separate GenMC repo](https://github.com/MPI-SWS/genmc) (with its own maintainers). +These sources need to be available to build Miri-GenMC. +The process for obtaining them is as follows: +- By default, a fixed commit of GenMC is downloaded to `genmc-sys/genmc-src` and built automatically. + (The commit is determined by `GENMC_COMMIT` in `genmc-sys/build.rs`.) +- If you want to overwrite that, set the `GENMC_SRC_PATH` environment variable to a path that contains the GenMC sources. + If you place this directory inside the Miri folder, it is recommended to call it `genmc-src` as that tells `./miri fmt` to avoid + formatting the Rust files inside that folder. + +<!-- FIXME(genmc): explain how submitting code to GenMC should be handled. --> + +<!-- FIXME(genmc): explain development. --> diff --git a/src/tools/miri/etc/rust_analyzer_helix.toml b/src/tools/miri/etc/rust_analyzer_helix.toml index 9bfb09120d8..c46b246049f 100644 --- a/src/tools/miri/etc/rust_analyzer_helix.toml +++ b/src/tools/miri/etc/rust_analyzer_helix.toml @@ -5,6 +5,7 @@ source = "discover" linkedProjects = [ "Cargo.toml", "cargo-miri/Cargo.toml", + "genmc-sys/Cargo.toml", "miri-script/Cargo.toml", ] @@ -26,5 +27,6 @@ invocationStrategy = "once" overrideCommand = [ "./miri", "check", + "--no-default-features", "--message-format=json", ] diff --git a/src/tools/miri/etc/rust_analyzer_vscode.json b/src/tools/miri/etc/rust_analyzer_vscode.json index c646953e92b..8e647f5331f 100644 --- a/src/tools/miri/etc/rust_analyzer_vscode.json +++ b/src/tools/miri/etc/rust_analyzer_vscode.json @@ -3,6 +3,7 @@ "rust-analyzer.linkedProjects": [ "Cargo.toml", "cargo-miri/Cargo.toml", + "genmc-sys/Cargo.toml", "miri-script/Cargo.toml", ], "rust-analyzer.check.invocationStrategy": "once", @@ -20,6 +21,7 @@ "rust-analyzer.cargo.buildScripts.overrideCommand": [ "./miri", "check", + "--no-default-features", "--message-format=json", ], } diff --git a/src/tools/miri/genmc-sys/.gitignore b/src/tools/miri/genmc-sys/.gitignore new file mode 100644 index 00000000000..276a053cd05 --- /dev/null +++ b/src/tools/miri/genmc-sys/.gitignore @@ -0,0 +1 @@ +genmc-src*/ diff --git a/src/tools/miri/genmc-sys/Cargo.toml b/src/tools/miri/genmc-sys/Cargo.toml new file mode 100644 index 00000000000..737ab9073bf --- /dev/null +++ b/src/tools/miri/genmc-sys/Cargo.toml @@ -0,0 +1,17 @@ +[package] +authors = ["Miri Team"] +# The parts in this repo are MIT OR Apache-2.0, but we are linking in +# code from https://github.com/MPI-SWS/genmc which is GPL-3.0-or-later. 
+license = "(MIT OR Apache-2.0) AND GPL-3.0-or-later" +name = "genmc-sys" +version = "0.1.0" +edition = "2024" + +[dependencies] +cxx = { version = "1.0.160", features = ["c++20"] } + +[build-dependencies] +cc = "1.2.16" +cmake = "0.1.54" +git2 = { version = "0.20.2", default-features = false, features = ["https"] } +cxx-build = { version = "1.0.160", features = ["parallel"] } diff --git a/src/tools/miri/genmc-sys/build.rs b/src/tools/miri/genmc-sys/build.rs new file mode 100644 index 00000000000..479a3bd7186 --- /dev/null +++ b/src/tools/miri/genmc-sys/build.rs @@ -0,0 +1,269 @@ +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +// Build script for running Miri with GenMC. +// Check out doc/genmc.md for more info. + +/// Path where the downloaded GenMC repository will be stored (relative to the `genmc-sys` directory). +/// Note that this directory is *not* cleaned up automatically by `cargo clean`. +const GENMC_DOWNLOAD_PATH: &str = "./genmc-src/"; + +/// Name of the library of the GenMC model checker. +const GENMC_MODEL_CHECKER: &str = "genmc_lib"; + +/// Path where the `cxx_bridge!` macro is used to define the Rust-C++ interface. +const RUST_CXX_BRIDGE_FILE_PATH: &str = "src/lib.rs"; + +/// The profile with which to build GenMC. +const GENMC_CMAKE_PROFILE: &str = "RelWithDebInfo"; + +mod downloading { + use std::path::PathBuf; + use std::str::FromStr; + + use git2::{Commit, Oid, Remote, Repository, StatusOptions}; + + use super::GENMC_DOWNLOAD_PATH; + + /// The GenMC repository the we get our commit from. + pub(crate) const GENMC_GITHUB_URL: &str = "https://github.com/MPI-SWS/genmc.git"; + /// The GenMC commit we depend on. It must be available on the specified GenMC repository. + pub(crate) const GENMC_COMMIT: &str = "3438dd2c1202cd4a47ed7881d099abf23e4167ab"; + + pub(crate) fn download_genmc() -> PathBuf { + let Ok(genmc_download_path) = PathBuf::from_str(GENMC_DOWNLOAD_PATH); + let commit_oid = Oid::from_str(GENMC_COMMIT).expect("Commit should be valid."); + + match Repository::open(&genmc_download_path) { + Ok(repo) => { + assert_repo_unmodified(&repo); + let commit = update_local_repo(&repo, commit_oid); + checkout_commit(&repo, &commit); + } + Err(_) => { + let repo = clone_remote_repo(&genmc_download_path); + let Ok(commit) = repo.find_commit(commit_oid) else { + panic!( + "Cloned GenMC repository does not contain required commit '{GENMC_COMMIT}'" + ); + }; + checkout_commit(&repo, &commit); + } + }; + + genmc_download_path + } + + fn get_remote(repo: &Repository) -> Remote<'_> { + let remote = repo.find_remote("origin").unwrap_or_else(|e| { + panic!( + "Could not load commit ({GENMC_COMMIT}) from remote repository '{GENMC_GITHUB_URL}'. Error: {e}" + ); + }); + + // Ensure that the correct remote URL is set. + let remote_url = remote.url(); + if let Some(remote_url) = remote_url + && remote_url == GENMC_GITHUB_URL + { + return remote; + } + + // Update remote URL. + println!( + "cargo::warning=GenMC repository remote URL has changed from '{remote_url:?}' to '{GENMC_GITHUB_URL}'" + ); + repo.remote_set_url("origin", GENMC_GITHUB_URL) + .expect("cannot rename url of remote 'origin'"); + + // Reacquire the `Remote`, since `remote_set_url` doesn't update Remote objects already in memory. + repo.find_remote("origin").unwrap() + } + + // Check if the required commit exists already, otherwise try fetching it. 
+    fn update_local_repo(repo: &Repository, commit_oid: Oid) -> Commit<'_> {
+        repo.find_commit(commit_oid).unwrap_or_else(|_find_error| {
+            println!("GenMC repository at path '{GENMC_DOWNLOAD_PATH}' does not contain commit '{GENMC_COMMIT}'.");
+            // The commit is not in the checkout. Try `git fetch` and hope that we find the commit then.
+            let mut remote = get_remote(repo);
+            remote.fetch(&[GENMC_COMMIT], None, None).expect("Failed to fetch from remote.");
+
+            repo.find_commit(commit_oid)
+                .expect("Remote repository should contain expected commit")
+        })
+    }
+
+    fn clone_remote_repo(genmc_download_path: &PathBuf) -> Repository {
+        Repository::clone(GENMC_GITHUB_URL, &genmc_download_path).unwrap_or_else(|e| {
+            panic!("Cannot clone GenMC repo from '{GENMC_GITHUB_URL}': {e:?}");
+        })
+    }
+
+    /// Set the state of the repo to a specific commit
+    fn checkout_commit(repo: &Repository, commit: &Commit<'_>) {
+        repo.checkout_tree(commit.as_object(), None).expect("Failed to checkout");
+        repo.set_head_detached(commit.id()).expect("Failed to set HEAD");
+        println!("Successfully set checked out commit {commit:?}");
+    }
+
+    /// Check that the downloaded repository is unmodified.
+    /// If it is modified, explain that it shouldn't be, and hint at how to do local development with GenMC.
+    /// We don't overwrite any changes made to the directory, to prevent data loss.
+    fn assert_repo_unmodified(repo: &Repository) {
+        let statuses = repo
+            .statuses(Some(
+                StatusOptions::new()
+                    .include_untracked(true)
+                    .include_ignored(false)
+                    .include_unmodified(false),
+            ))
+            .expect("should be able to get repository status");
+        if statuses.is_empty() {
+            return;
+        }
+
+        panic!(
+            "Downloaded GenMC repository at path '{GENMC_DOWNLOAD_PATH}' has been modified. Please undo any changes made, or delete the '{GENMC_DOWNLOAD_PATH}' directory to have it downloaded again.\n\
+            HINT: For local development, set the environment variable 'GENMC_SRC_PATH' to the path of a GenMC repository."
+        );
+    }
+}
+
+// FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed.
+/// The linked LLVM version is in the generated `config.h` file, which we parse and use to link to LLVM.
+/// Returns C++ compiler definitions required for building with/including LLVM, and the include path for LLVM headers.
+fn link_to_llvm(config_file: &Path) -> (String, String) {
+    /// Search a string for a line matching `//@VARIABLE_NAME: VARIABLE CONTENT`
+    fn extract_value<'a>(input: &'a str, name: &str) -> Option<&'a str> {
+        input
+            .lines()
+            .find_map(|line| line.strip_prefix("//@")?.strip_prefix(name)?.strip_prefix(": "))
+    }
+
+    let file_content = std::fs::read_to_string(&config_file).unwrap_or_else(|err| {
+        panic!("GenMC config file ({}) should exist, but got error {err:?}", config_file.display())
+    });
+
+    let llvm_definitions = extract_value(&file_content, "LLVM_DEFINITIONS")
+        .expect("Config file should contain LLVM_DEFINITIONS");
+    let llvm_include_dirs = extract_value(&file_content, "LLVM_INCLUDE_DIRS")
+        .expect("Config file should contain LLVM_INCLUDE_DIRS");
+    let llvm_library_dir = extract_value(&file_content, "LLVM_LIBRARY_DIR")
+        .expect("Config file should contain LLVM_LIBRARY_DIR");
+    let llvm_config_path = extract_value(&file_content, "LLVM_CONFIG_PATH")
+        .expect("Config file should contain LLVM_CONFIG_PATH");
+
+    // Add linker search path.
+    let lib_dir = PathBuf::from_str(llvm_library_dir).unwrap();
+    println!("cargo::rustc-link-search=native={}", lib_dir.display());
+
+    // Add libraries to link.
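+    // NOTE: `llvm-config --libs` typically prints a space-separated list of linker flags such as
+    // `-lLLVMCore -lLLVMSupport` (or a single `-lLLVM-<major>` for a shared LLVM build); the exact
+    // set depends on how LLVM was built. The loop below strips the `-l` prefix and forwards each
+    // library name to `cargo::rustc-link-lib`.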
+ let output = std::process::Command::new(llvm_config_path) + .arg("--libs") // Print the libraries to link to (space-separated list) + .output() + .expect("failed to execute llvm-config"); + let llvm_link_libs = + String::try_from(output.stdout).expect("llvm-config output should be a valid string"); + + for link_lib in llvm_link_libs.trim().split(" ") { + let link_lib = + link_lib.strip_prefix("-l").expect("Linker parameter should start with \"-l\""); + println!("cargo::rustc-link-lib=dylib={link_lib}"); + } + + (llvm_definitions.to_string(), llvm_include_dirs.to_string()) +} + +/// Build the GenMC model checker library and the Rust-C++ interop library with cxx.rs +fn compile_cpp_dependencies(genmc_path: &Path) { + // Part 1: + // Compile the GenMC library using cmake. + + let cmakelists_path = genmc_path.join("CMakeLists.txt"); + + // FIXME(genmc,cargo): Switch to using `CARGO_CFG_DEBUG_ASSERTIONS` once https://github.com/rust-lang/cargo/issues/15760 is completed. + // Enable/disable additional debug checks, prints and options for GenMC, based on the Rust profile (debug/release) + let enable_genmc_debug = matches!(std::env::var("PROFILE").as_deref().unwrap(), "debug"); + + let mut config = cmake::Config::new(cmakelists_path); + config.profile(GENMC_CMAKE_PROFILE); + config.define("GENMC_DEBUG", if enable_genmc_debug { "ON" } else { "OFF" }); + + // The actual compilation happens here: + let genmc_install_dir = config.build(); + + // Add the model checker library to be linked and tell rustc where to find it: + let cmake_lib_dir = genmc_install_dir.join("lib").join("genmc"); + println!("cargo::rustc-link-search=native={}", cmake_lib_dir.display()); + println!("cargo::rustc-link-lib=static={GENMC_MODEL_CHECKER}"); + + // FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed. + let config_file = genmc_install_dir.join("include").join("genmc").join("config.h"); + let (llvm_definitions, llvm_include_dirs) = link_to_llvm(&config_file); + + // Part 2: + // Compile the cxx_bridge (the link between the Rust and C++ code). + + let genmc_include_dir = genmc_install_dir.join("include").join("genmc"); + + // FIXME(genmc,llvm): remove once LLVM dependency is removed. + // These definitions are parsed into a cmake list and then printed to the config.h file, so they are ';' separated. + let definitions = llvm_definitions.split(";"); + + let mut bridge = cxx_build::bridge("src/lib.rs"); + // FIXME(genmc,cmake): Remove once the GenMC debug setting is available in the config.h file. + if enable_genmc_debug { + bridge.define("ENABLE_GENMC_DEBUG", None); + } + for definition in definitions { + bridge.flag(definition); + } + bridge + .opt_level(2) + .debug(true) // Same settings that GenMC uses (default for cmake `RelWithDebInfo`) + .warnings(false) // NOTE: enabling this produces a lot of warnings. + .std("c++23") + .include(genmc_include_dir) + .include(llvm_include_dirs) + .include("./src_cpp") + .file("./src_cpp/MiriInterface.hpp") + .file("./src_cpp/MiriInterface.cpp") + .compile("genmc_interop"); + + // Link the Rust-C++ interface library generated by cxx_build: + println!("cargo::rustc-link-lib=static=genmc_interop"); +} + +fn main() { + // Make sure we don't accidentally distribute a binary with GPL code. 
+ if option_env!("RUSTC_STAGE").is_some() { + panic!( + "genmc should not be enabled in the rustc workspace since it includes a GPL dependency" + ); + } + + // Select which path to use for the GenMC repo: + let genmc_path = if let Ok(genmc_src_path) = std::env::var("GENMC_SRC_PATH") { + let genmc_src_path = + PathBuf::from_str(&genmc_src_path).expect("GENMC_SRC_PATH should contain a valid path"); + assert!( + genmc_src_path.exists(), + "GENMC_SRC_PATH={} does not exist!", + genmc_src_path.display() + ); + genmc_src_path + } else { + downloading::download_genmc() + }; + + // Build all required components: + compile_cpp_dependencies(&genmc_path); + + // Only rebuild if anything changes: + // Note that we don't add the downloaded GenMC repo, since that should never be modified + // manually. Adding that path here would also trigger an unnecessary rebuild after the repo is + // cloned (since cargo detects that as a file modification). + println!("cargo::rerun-if-changed={RUST_CXX_BRIDGE_FILE_PATH}"); + println!("cargo::rerun-if-changed=./src"); + println!("cargo::rerun-if-changed=./src_cpp"); +} diff --git a/src/tools/miri/genmc-sys/src/lib.rs b/src/tools/miri/genmc-sys/src/lib.rs new file mode 100644 index 00000000000..ab46d729ea1 --- /dev/null +++ b/src/tools/miri/genmc-sys/src/lib.rs @@ -0,0 +1,30 @@ +pub use self::ffi::*; + +impl Default for GenmcParams { + fn default() -> Self { + Self { + print_random_schedule_seed: false, + do_symmetry_reduction: false, + // FIXME(GenMC): Add defaults for remaining parameters + } + } +} + +#[cxx::bridge] +mod ffi { + /// Parameters that will be given to GenMC for setting up the model checker. + /// (The fields of this struct are visible to both Rust and C++) + #[derive(Clone, Debug)] + struct GenmcParams { + pub print_random_schedule_seed: bool, + pub do_symmetry_reduction: bool, + // FIXME(GenMC): Add remaining parameters. + } + unsafe extern "C++" { + include!("MiriInterface.hpp"); + + type MiriGenMCShim; + + fn createGenmcHandle(config: &GenmcParams) -> UniquePtr<MiriGenMCShim>; + } +} diff --git a/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp new file mode 100644 index 00000000000..0827bb3d407 --- /dev/null +++ b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp @@ -0,0 +1,50 @@ +#include "MiriInterface.hpp" + +#include "genmc-sys/src/lib.rs.h" + +auto MiriGenMCShim::createHandle(const GenmcParams &config) + -> std::unique_ptr<MiriGenMCShim> +{ + auto conf = std::make_shared<Config>(); + + // Miri needs all threads to be replayed, even fully completed ones. + conf->replayCompletedThreads = true; + + // We only support the RC11 memory model for Rust. + conf->model = ModelType::RC11; + + conf->printRandomScheduleSeed = config.print_random_schedule_seed; + + // FIXME(genmc): disable any options we don't support currently: + conf->ipr = false; + conf->disableBAM = true; + conf->instructionCaching = false; + + ERROR_ON(config.do_symmetry_reduction, "Symmetry reduction is currently unsupported in GenMC mode."); + conf->symmetryReduction = config.do_symmetry_reduction; + + // FIXME(genmc): Should there be a way to change this option from Miri? + conf->schedulePolicy = SchedulePolicy::WF; + + // FIXME(genmc): implement estimation mode: + conf->estimate = false; + conf->estimationMax = 1000; + const auto mode = conf->estimate ? GenMCDriver::Mode(GenMCDriver::EstimationMode{}) + : GenMCDriver::Mode(GenMCDriver::VerificationMode{}); + + // Running Miri-GenMC without race detection is not supported. 
+ // Disabling this option also changes the behavior of the replay scheduler to only schedule at atomic operations, which is required with Miri. + // This happens because Miri can generate multiple GenMC events for a single MIR terminator. Without this option, + // the scheduler might incorrectly schedule an atomic MIR terminator because the first event it creates is a non-atomic (e.g., `StorageLive`). + conf->disableRaceDetection = false; + + // Miri can already check for unfreed memory. Also, GenMC cannot distinguish between memory + // that is allowed to leak and memory that is not. + conf->warnUnfreedMemory = false; + + // FIXME(genmc): check config: + // checkConfigOptions(*conf); + + auto driver = std::make_unique<MiriGenMCShim>(std::move(conf), mode); + return driver; +} diff --git a/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp new file mode 100644 index 00000000000..e55522ef418 --- /dev/null +++ b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp @@ -0,0 +1,44 @@ +#ifndef GENMC_MIRI_INTERFACE_HPP +#define GENMC_MIRI_INTERFACE_HPP + +#include "rust/cxx.h" + +#include "config.h" + +#include "Config/Config.hpp" +#include "Verification/GenMCDriver.hpp" + +#include <iostream> + +/**** Types available to Miri ****/ + +// Config struct defined on the Rust side and translated to C++ by cxx.rs: +struct GenmcParams; + +struct MiriGenMCShim : private GenMCDriver +{ + +public: + MiriGenMCShim(std::shared_ptr<const Config> conf, Mode mode /* = VerificationMode{} */) + : GenMCDriver(std::move(conf), nullptr, mode) + { + std::cerr << "C++: GenMC handle created!" << std::endl; + } + + virtual ~MiriGenMCShim() + { + std::cerr << "C++: GenMC handle destroyed!" << std::endl; + } + + static std::unique_ptr<MiriGenMCShim> createHandle(const GenmcParams &config); +}; + +/**** Functions available to Miri ****/ + +// NOTE: CXX doesn't support exposing static methods to Rust currently, so we expose this function instead. +static inline auto createGenmcHandle(const GenmcParams &config) -> std::unique_ptr<MiriGenMCShim> +{ + return MiriGenMCShim::createHandle(config); +} + +#endif /* GENMC_MIRI_INTERFACE_HPP */ diff --git a/src/tools/miri/josh-sync.toml b/src/tools/miri/josh-sync.toml new file mode 100644 index 00000000000..86208b3742d --- /dev/null +++ b/src/tools/miri/josh-sync.toml @@ -0,0 +1,2 @@ +repo = "miri" +filter = ":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri" diff --git a/src/tools/miri/miri b/src/tools/miri/miri index 549998ae44a..a5f2bb1550a 100755 --- a/src/tools/miri/miri +++ b/src/tools/miri/miri @@ -15,8 +15,9 @@ if [ -n "$MIRI_IN_RA" ]; then CARGO_FLAGS+=("--message-format=json" "-Zroot-dir=$ROOT_DIR") TARGET_DIR="$ROOT_DIR"/target fi + # Run cargo. -cargo $TOOLCHAIN build --manifest-path "$ROOT_DIR"/miri-script/Cargo.toml \ +${CARGO:-cargo} $TOOLCHAIN build --manifest-path "$ROOT_DIR"/miri-script/Cargo.toml \ --target-dir "$TARGET_DIR" "${CARGO_FLAGS[@]}" || \ ( echo "Failed to build miri-script. Is the 'stable' toolchain installed?"; exit 1 ) # Instead of doing just `cargo run --manifest-path .. $@`, we invoke miri-script binary directly. 
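A minimal sketch (not part of the patch) of how the `genmc-sys` bridge added above is expected to be driven from Rust, assuming only the items declared in `genmc-sys/src/lib.rs` (`GenmcParams`, `createGenmcHandle`) and the `MiriGenMCShim` type from `MiriInterface.hpp`; the standalone `main` is purely illustrative, the real caller is `GenmcCtx::new` further down in this diff.

    // Illustrative sketch: exercises the cxx bridge the same way `GenmcCtx::new` does.
    use genmc_sys::{GenmcParams, createGenmcHandle};

    fn main() {
        // Both fields currently default to `false` (see the `Default` impl in lib.rs).
        let params = GenmcParams::default();

        // `createGenmcHandle` crosses into C++ and returns a `cxx::UniquePtr<MiriGenMCShim>`;
        // a null pointer means the C++ side failed to construct the driver.
        let handle = createGenmcHandle(&params);
        assert!(!handle.is_null());

        // Dropping the `UniquePtr` runs the C++ destructor of `MiriGenMCShim`.
        drop(handle);
    }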
diff --git a/src/tools/miri/miri-script/Cargo.lock b/src/tools/miri/miri-script/Cargo.lock index 3494a241ec5..044a678869e 100644 --- a/src/tools/miri/miri-script/Cargo.lock +++ b/src/tools/miri/miri-script/Cargo.lock @@ -4,9 +4,9 @@ version = 4 [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933" dependencies = [ "anstyle", "anstyle-parse", @@ -19,62 +19,62 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9" dependencies = [ - "windows-sys", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.7" +version = "3.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882" dependencies = [ "anstyle", - "once_cell", - "windows-sys", + "once_cell_polyfill", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.97" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" +checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "clap" -version = "4.5.35" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8aa86934b44c19c50f87cc2790e19f54f7a67aedb64101c2e1a2e5ecfb73944" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" dependencies = [ "clap_builder", "clap_derive", @@ -82,9 +82,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.35" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2414dbb2dd0695280da6ea9261e327479e9d37b0630f6b53ba2a11c60c679fd9" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" 
dependencies = [ "anstream", "anstyle", @@ -94,9 +94,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491" dependencies = [ "heck", "proc-macro2", @@ -106,36 +106,15 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "directories" -version = "6.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys", -] +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "dunce" @@ -150,19 +129,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] -name = "env_home" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" - -[[package]] name = "errno" -version = "0.3.11" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.60.2", ] [[package]] @@ -173,25 +146,14 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" -version = "0.2.15" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", -] - -[[package]] -name = "getrandom" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasi", ] [[package]] @@ -223,37 +185,21 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "libc" -version = "0.2.171" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" - -[[package]] -name = "libredox" -version = "0.1.3" +version = "0.2.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags", - "libc", -] - -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] name = "linux-raw-sys" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "miri-script" @@ -261,7 +207,6 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", - "directories", "dunce", "itertools", "path_macro", @@ -272,7 +217,6 @@ dependencies = [ "shell-words", "tempfile", "walkdir", - "which", "xshell", ] @@ -283,10 +227,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] -name = "option-ext" -version = "0.2.0" +name = "once_cell_polyfill" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] name = "path_macro" @@ -296,9 +240,9 @@ checksum = "a6e819bbd49d5939f682638fa54826bf1650abddcd65d000923de8ad63cc7d15" [[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -314,20 +258,9 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" - -[[package]] -name = "redox_users" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" -dependencies = [ - "getrandom 0.2.15", - "libredox", - "thiserror", -] +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rustc_version" @@ -340,28 +273,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.44" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags", "errno", "libc", - "linux-raw-sys 0.4.15", - "windows-sys", -] - -[[package]] -name = "rustix" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys 0.9.3", - "windows-sys", + "linux-raw-sys", 
+ "windows-sys 0.60.2", ] [[package]] @@ -407,9 +327,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" dependencies = [ "itoa", "memchr", @@ -431,9 +351,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "syn" -version = "2.0.100" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -442,35 +362,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - "getrandom 0.3.2", + "getrandom", "once_cell", - "rustix 1.0.5", - "windows-sys", -] - -[[package]] -name = "thiserror" -version = "2.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" -dependencies = [ - "proc-macro2", - "quote", - "syn", + "rustix", + "windows-sys 0.59.0", ] [[package]] @@ -497,12 +397,6 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasi" version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" @@ -511,24 +405,12 @@ dependencies = [ ] [[package]] -name = "which" -version = "7.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2774c861e1f072b3aadc02f8ba886c26ad6321567ecc294c935434cad06f1283" -dependencies = [ - "either", - "env_home", - "rustix 0.38.44", - "winsafe", -] - -[[package]] name = "winapi-util" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys", + "windows-sys 0.59.0", ] [[package]] @@ -537,7 +419,16 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.2", ] [[package]] @@ -546,14 +437,30 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", ] [[package]] @@ -563,52 +470,94 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] -name = "winsafe" -version = "0.0.19" +name = "windows_x86_64_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "wit-bindgen-rt" diff --git a/src/tools/miri/miri-script/Cargo.toml b/src/tools/miri/miri-script/Cargo.toml index 9240788d6bc..39858880e8c 100644 --- a/src/tools/miri/miri-script/Cargo.toml +++ b/src/tools/miri/miri-script/Cargo.toml @@ -14,7 +14,6 @@ rust-version = "1.85" # This is needed to make this package build on stable when the parent package uses unstable cargo features. [dependencies] -which = "7" walkdir = "2.3" itertools = "0.14" path_macro = "1.0" @@ -23,7 +22,6 @@ anyhow = "1.0" xshell = "0.2.6" rustc_version = "0.4" dunce = "1.0.4" -directories = "6" serde = "1" serde_json = "1" serde_derive = "1" diff --git a/src/tools/miri/miri-script/src/commands.rs b/src/tools/miri/miri-script/src/commands.rs index e6ebdf54e38..ee09b9b4b73 100644 --- a/src/tools/miri/miri-script/src/commands.rs +++ b/src/tools/miri/miri-script/src/commands.rs @@ -2,11 +2,9 @@ use std::collections::BTreeMap; use std::ffi::{OsStr, OsString}; use std::fmt::Write as _; use std::fs::{self, File}; -use std::io::{self, BufRead, BufReader, BufWriter, Write as _}; -use std::ops::Not; +use std::io::{self, BufRead, BufReader, BufWriter}; use std::path::PathBuf; -use std::time::Duration; -use std::{env, net, process}; +use std::{env, process}; use anyhow::{Context, Result, anyhow, bail}; use path_macro::path; @@ -18,11 +16,6 @@ use xshell::{Shell, cmd}; use crate::Command; use crate::util::*; -/// Used for rustc syncs. -const JOSH_FILTER: &str = - ":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri"; -const JOSH_PORT: u16 = 42042; - impl MiriEnv { /// Prepares the environment: builds miri and cargo-miri and a sysroot. /// Returns the location of the sysroot. @@ -99,66 +92,6 @@ impl Command { Ok(()) } - fn start_josh() -> Result<impl Drop> { - // Determine cache directory. - let local_dir = { - let user_dirs = - directories::ProjectDirs::from("org", "rust-lang", "miri-josh").unwrap(); - user_dirs.cache_dir().to_owned() - }; - - // Start josh, silencing its output. - let mut cmd = process::Command::new("josh-proxy"); - cmd.arg("--local").arg(local_dir); - cmd.arg("--remote").arg("https://github.com"); - cmd.arg("--port").arg(JOSH_PORT.to_string()); - cmd.arg("--no-background"); - cmd.stdout(process::Stdio::null()); - cmd.stderr(process::Stdio::null()); - let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?; - - // Create a wrapper that stops it on drop. - struct Josh(process::Child); - impl Drop for Josh { - fn drop(&mut self) { - #[cfg(unix)] - { - // Try to gracefully shut it down. 
- process::Command::new("kill") - .args(["-s", "INT", &self.0.id().to_string()]) - .output() - .expect("failed to SIGINT josh-proxy"); - // Sadly there is no "wait with timeout"... so we just give it some time to finish. - std::thread::sleep(Duration::from_millis(100)); - // Now hopefully it is gone. - if self.0.try_wait().expect("failed to wait for josh-proxy").is_some() { - return; - } - } - // If that didn't work (or we're not on Unix), kill it hard. - eprintln!( - "I have to kill josh-proxy the hard way, let's hope this does not break anything." - ); - self.0.kill().expect("failed to SIGKILL josh-proxy"); - } - } - - // Wait until the port is open. We try every 10ms until 1s passed. - for _ in 0..100 { - // This will generally fail immediately when the port is still closed. - let josh_ready = net::TcpStream::connect_timeout( - &net::SocketAddr::from(([127, 0, 0, 1], JOSH_PORT)), - Duration::from_millis(1), - ); - if josh_ready.is_ok() { - return Ok(Josh(josh)); - } - // Not ready yet. - std::thread::sleep(Duration::from_millis(10)); - } - bail!("Even after waiting for 1s, josh-proxy is still not available.") - } - pub fn exec(self) -> Result<()> { // First, and crucially only once, run the auto-actions -- but not for all commands. match &self { @@ -170,11 +103,7 @@ impl Command { | Command::Fmt { .. } | Command::Doc { .. } | Command::Clippy { .. } => Self::auto_actions()?, - | Command::Toolchain { .. } - | Command::Bench { .. } - | Command::RustcPull { .. } - | Command::RustcPush { .. } - | Command::Squash => {} + | Command::Toolchain { .. } | Command::Bench { .. } | Command::Squash => {} } // Then run the actual command. match self { @@ -191,16 +120,11 @@ impl Command { Command::Bench { target, no_install, save_baseline, load_baseline, benches } => Self::bench(target, no_install, save_baseline, load_baseline, benches), Command::Toolchain { flags } => Self::toolchain(flags), - Command::RustcPull { commit } => Self::rustc_pull(commit.clone()), - Command::RustcPush { github_user, branch } => Self::rustc_push(github_user, branch), Command::Squash => Self::squash(), } } fn toolchain(flags: Vec<String>) -> Result<()> { - // Make sure rustup-toolchain-install-master is installed. - which::which("rustup-toolchain-install-master") - .context("Please install rustup-toolchain-install-master by running 'cargo install rustup-toolchain-install-master'")?; let sh = Shell::new()?; sh.change_dir(miri_dir()?); let new_commit = sh.read_file("rust-version")?.trim().to_owned(); @@ -227,163 +151,15 @@ impl Command { // Install and setup new toolchain. cmd!(sh, "rustup toolchain uninstall miri").run()?; - cmd!(sh, "rustup-toolchain-install-master -n miri -c cargo -c rust-src -c rustc-dev -c llvm-tools -c rustfmt -c clippy {flags...} -- {new_commit}").run()?; + cmd!(sh, "rustup-toolchain-install-master -n miri -c cargo -c rust-src -c rustc-dev -c llvm-tools -c rustfmt -c clippy {flags...} -- {new_commit}") + .run() + .context("Failed to run rustup-toolchain-install-master. If it is not installed, run 'cargo install rustup-toolchain-install-master'.")?; cmd!(sh, "rustup override set miri").run()?; // Cleanup. 
cmd!(sh, "cargo clean").run()?; Ok(()) } - fn rustc_pull(commit: Option<String>) -> Result<()> { - let sh = Shell::new()?; - sh.change_dir(miri_dir()?); - let commit = commit.map(Result::Ok).unwrap_or_else(|| { - let rust_repo_head = - cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?; - rust_repo_head - .split_whitespace() - .next() - .map(|front| front.trim().to_owned()) - .ok_or_else(|| anyhow!("Could not obtain Rust repo HEAD from remote.")) - })?; - // Make sure the repo is clean. - if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - bail!("working directory must be clean before running `./miri rustc-pull`"); - } - // Make sure josh is running. - let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git"); - - // Update rust-version file. As a separate commit, since making it part of - // the merge has confused the heck out of josh in the past. - // We pass `--no-verify` to avoid running git hooks like `./miri fmt` that could in turn - // trigger auto-actions. - // We do this before the merge so that if there are merge conflicts, we have - // the right rust-version file while resolving them. - sh.write_file("rust-version", format!("{commit}\n"))?; - const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rustc"; - cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}") - .run() - .context("FAILED to commit rust-version file, something went wrong")?; - - // Fetch given rustc commit. - cmd!(sh, "git fetch {josh_url}") - .run() - .inspect_err(|_| { - // Try to un-do the previous `git commit`, to leave the repo in the state we found it. - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - }) - .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; - - // This should not add any new root commits. So count those before and after merging. - let num_roots = || -> Result<u32> { - Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count") - .read() - .context("failed to determine the number of root commits")? - .parse::<u32>()?) - }; - let num_roots_before = num_roots()?; - - // Merge the fetched commit. - const MERGE_COMMIT_MESSAGE: &str = "Merge from rustc"; - cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}") - .run() - .context("FAILED to merge new commits, something went wrong")?; - - // Check that the number of roots did not increase. - if num_roots()? != num_roots_before { - bail!("Josh created a new root commit. This is probably not the history you want."); - } - - drop(josh); - Ok(()) - } - - fn rustc_push(github_user: String, branch: String) -> Result<()> { - let sh = Shell::new()?; - sh.change_dir(miri_dir()?); - let base = sh.read_file("rust-version")?.trim().to_owned(); - // Make sure the repo is clean. - if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - bail!("working directory must be clean before running `./miri rustc-push`"); - } - // Make sure josh is running. - let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/{github_user}/rust.git{JOSH_FILTER}.git"); - - // Find a repo we can do our preparation in. - if let Ok(rustc_git) = env::var("RUSTC_GIT") { - // If rustc_git is `Some`, we'll use an existing fork for the branch updates. 
- sh.change_dir(rustc_git); - } else { - // Otherwise, do this in the local Miri repo. - println!( - "This will pull a copy of the rust-lang/rust history into this Miri checkout, growing it by about 1GB." - ); - print!( - "To avoid that, abort now and set the `RUSTC_GIT` environment variable to an existing rustc checkout. Proceed? [y/N] " - ); - std::io::stdout().flush()?; - let mut answer = String::new(); - std::io::stdin().read_line(&mut answer)?; - if answer.trim().to_lowercase() != "y" { - std::process::exit(1); - } - }; - // Prepare the branch. Pushing works much better if we use as base exactly - // the commit that we pulled from last time, so we use the `rust-version` - // file to find out which commit that would be. - println!("Preparing {github_user}/rust (base: {base})..."); - if cmd!(sh, "git fetch https://github.com/{github_user}/rust {branch}") - .ignore_stderr() - .read() - .is_ok() - { - println!( - "The branch '{branch}' seems to already exist in 'https://github.com/{github_user}/rust'. Please delete it and try again." - ); - std::process::exit(1); - } - cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?; - cmd!(sh, "git push https://github.com/{github_user}/rust {base}:refs/heads/{branch}") - .ignore_stdout() - .ignore_stderr() // silence the "create GitHub PR" message - .run()?; - println!(); - - // Do the actual push. - sh.change_dir(miri_dir()?); - println!("Pushing miri changes..."); - cmd!(sh, "git push {josh_url} HEAD:{branch}").run()?; - println!(); - - // Do a round-trip check to make sure the push worked as expected. - cmd!(sh, "git fetch {josh_url} {branch}").ignore_stderr().read()?; - let head = cmd!(sh, "git rev-parse HEAD").read()?; - let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; - if head != fetch_head { - bail!( - "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ - Expected {head}, got {fetch_head}." - ); - } - println!( - "Confirmed that the push round-trips back to Miri properly. Please create a rustc PR:" - ); - println!( - // Open PR with `subtree update` title to silence the `no-merges` triagebot check - // See https://github.com/rust-lang/rust/pull/114157 - " https://github.com/rust-lang/rust/compare/{github_user}:{branch}?quick_pull=1&title=Miri+subtree+update&body=r?+@ghost" - ); - - drop(josh); - Ok(()) - } - fn squash() -> Result<()> { let sh = Shell::new()?; sh.change_dir(miri_dir()?); @@ -758,8 +534,8 @@ impl Command { if ty.is_file() { name.ends_with(".rs") } else { - // dir or symlink. skip `target` and `.git`. - &name != "target" && &name != ".git" + // dir or symlink. skip `target`, `.git` and `genmc-src*` + &name != "target" && &name != ".git" && !name.starts_with("genmc-src") } }) .filter_ok(|item| item.file_type().is_file()) diff --git a/src/tools/miri/miri-script/src/main.rs b/src/tools/miri/miri-script/src/main.rs index 673d658cf1d..761ec5979fa 100644 --- a/src/tools/miri/miri-script/src/main.rs +++ b/src/tools/miri/miri-script/src/main.rs @@ -75,7 +75,7 @@ pub enum Command { /// /// Also respects MIRIFLAGS environment variable. Run { - /// Build the program with the dependencies declared in `test_dependencies/Cargo.toml`. + /// Build the program with the dependencies declared in `tests/deps/Cargo.toml`. #[arg(long)] dep: bool, /// Show build progress. @@ -142,25 +142,6 @@ pub enum Command { #[arg(trailing_var_arg = true, allow_hyphen_values = true)] flags: Vec<String>, }, - /// Pull and merge Miri changes from the rustc repo. 
- /// - /// The fetched commit is stored in the `rust-version` file, so the next `./miri toolchain` will - /// install the rustc that just got pulled. - RustcPull { - /// The commit to fetch (default: latest rustc commit). - commit: Option<String>, - }, - /// Push Miri changes back to the rustc repo. - /// - /// This will pull a copy of the rustc history into the Miri repo, unless you set the RUSTC_GIT - /// env var to an existing clone of the rustc repo. - RustcPush { - /// The Github user that owns the rustc fork to which we should push. - github_user: String, - /// The branch to push to. - #[arg(default_value = "miri-sync")] - branch: String, - }, /// Squash the commits of the current feature branch into one. Squash, } @@ -184,8 +165,7 @@ impl Command { flags.extend(remainder); Ok(()) } - Self::Bench { .. } | Self::RustcPull { .. } | Self::RustcPush { .. } | Self::Squash => - bail!("unexpected \"--\" found in arguments"), + Self::Bench { .. } | Self::Squash => bail!("unexpected \"--\" found in arguments"), } } } diff --git a/src/tools/miri/miri-script/src/util.rs b/src/tools/miri/miri-script/src/util.rs index c100cf195ba..6121096f823 100644 --- a/src/tools/miri/miri-script/src/util.rs +++ b/src/tools/miri/miri-script/src/util.rs @@ -38,6 +38,8 @@ pub struct MiriEnv { pub miri_dir: PathBuf, /// active_toolchain is passed as `+toolchain` argument to cargo/rustc invocations. toolchain: String, + /// The cargo binary to use. + cargo_bin: String, /// Extra flags to pass to cargo. cargo_extra_flags: Vec<String>, /// The rustc sysroot @@ -106,6 +108,9 @@ impl MiriEnv { sh.set_var("PATH", new_path); } + // Get the cargo binary to use, if one is set. + let cargo_bin = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); + // Get extra flags for cargo. let cargo_extra_flags = std::env::var("CARGO_EXTRA_FLAGS").unwrap_or_default(); let mut cargo_extra_flags = flagsplit(&cargo_extra_flags); @@ -119,7 +124,7 @@ impl MiriEnv { // Also set `-Zroot-dir` for cargo, to print diagnostics relative to the miri dir. cargo_extra_flags.push(format!("-Zroot-dir={}", miri_dir.display())); - Ok(MiriEnv { miri_dir, toolchain, sh, sysroot, cargo_extra_flags, libdir }) + Ok(MiriEnv { miri_dir, toolchain, sh, sysroot, cargo_bin, cargo_extra_flags, libdir }) } /// Make sure the `features` you pass here exist for the specified `crate_dir`. For example, the @@ -130,12 +135,12 @@ impl MiriEnv { cmd: &str, features: &[String], ) -> Cmd<'_> { - let MiriEnv { toolchain, cargo_extra_flags, .. } = self; + let MiriEnv { toolchain, cargo_extra_flags, cargo_bin, .. } = self; let manifest_path = path!(self.miri_dir / crate_dir.as_ref() / "Cargo.toml"); let features = features_to_args(features); cmd!( self.sh, - "cargo +{toolchain} {cmd} {cargo_extra_flags...} --manifest-path {manifest_path} {features...}" + "{cargo_bin} +{toolchain} {cmd} {cargo_extra_flags...} --manifest-path {manifest_path} {features...}" ) } @@ -147,12 +152,12 @@ impl MiriEnv { features: &[String], args: impl IntoIterator<Item = impl AsRef<OsStr>>, ) -> Result<()> { - let MiriEnv { sysroot, toolchain, cargo_extra_flags, .. } = self; + let MiriEnv { sysroot, toolchain, cargo_extra_flags, cargo_bin, .. } = self; let path = path!(self.miri_dir / crate_dir.as_ref()); let features = features_to_args(features); // Install binaries to the miri toolchain's `sysroot` so they do not interact with other toolchains. // (Not using `cargo_cmd` as `install` is special and doesn't use `--manifest-path`.) 
- cmd!(self.sh, "cargo +{toolchain} install {cargo_extra_flags...} --path {path} --force --root {sysroot} {features...} {args...}").run()?; + cmd!(self.sh, "{cargo_bin} +{toolchain} install {cargo_extra_flags...} --path {path} --force --root {sysroot} {features...} {args...}").run()?; Ok(()) } diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 67f27e7aa2c..2178caf6396 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -7f2065a4bae1faed5bab928c670964eafbf43b55 +733dab558992d902d6d17576de1da768094e2cf3 diff --git a/src/tools/miri/src/alloc/alloc_bytes.rs b/src/tools/miri/src/alloc/alloc_bytes.rs index 2a253952b27..5d00d3eafcb 100644 --- a/src/tools/miri/src/alloc/alloc_bytes.rs +++ b/src/tools/miri/src/alloc/alloc_bytes.rs @@ -1,20 +1,18 @@ use std::alloc::Layout; use std::borrow::Cow; +use std::cell::RefCell; +use std::rc::Rc; use std::{alloc, slice}; -#[cfg(target_os = "linux")] -use std::{cell::RefCell, rc::Rc}; use rustc_abi::{Align, Size}; use rustc_middle::mir::interpret::AllocBytes; -#[cfg(target_os = "linux")] use crate::alloc::isolated_alloc::IsolatedAlloc; use crate::helpers::ToU64 as _; #[derive(Clone, Debug)] pub enum MiriAllocParams { Global, - #[cfg(target_os = "linux")] Isolated(Rc<RefCell<IsolatedAlloc>>), } @@ -56,7 +54,6 @@ impl Drop for MiriAllocBytes { unsafe { match self.params.clone() { MiriAllocParams::Global => alloc::dealloc(self.ptr, alloc_layout), - #[cfg(target_os = "linux")] MiriAllocParams::Isolated(alloc) => alloc.borrow_mut().dealloc(self.ptr, alloc_layout), } @@ -123,7 +120,6 @@ impl AllocBytes for MiriAllocBytes { let alloc_fn = |layout, params: &MiriAllocParams| unsafe { match params { MiriAllocParams::Global => alloc::alloc(layout), - #[cfg(target_os = "linux")] MiriAllocParams::Isolated(alloc) => alloc.borrow_mut().alloc(layout), } }; @@ -144,7 +140,6 @@ impl AllocBytes for MiriAllocBytes { let alloc_fn = |layout, params: &MiriAllocParams| unsafe { match params { MiriAllocParams::Global => alloc::alloc_zeroed(layout), - #[cfg(target_os = "linux")] MiriAllocParams::Isolated(alloc) => alloc.borrow_mut().alloc_zeroed(layout), } }; diff --git a/src/tools/miri/src/alloc/isolated_alloc.rs b/src/tools/miri/src/alloc/isolated_alloc.rs index 7b2f1a3eebf..1745727b16b 100644 --- a/src/tools/miri/src/alloc/isolated_alloc.rs +++ b/src/tools/miri/src/alloc/isolated_alloc.rs @@ -1,7 +1,6 @@ use std::alloc::Layout; use std::ptr::NonNull; -use nix::sys::mman; use rustc_index::bit_set::DenseBitSet; /// How many bytes of memory each bit in the bitset represents. @@ -44,6 +43,10 @@ impl IsolatedAlloc { } } + pub fn page_size(&self) -> usize { + self.page_size + } + /// For simplicity, we serve small allocations in multiples of COMPRESSION_FACTOR /// bytes with at least that alignment. #[inline] @@ -302,50 +305,11 @@ impl IsolatedAlloc { } } - /// Returns a list of page addresses managed by the allocator. - pub fn pages(&self) -> impl Iterator<Item = usize> { - let pages = self.page_ptrs.iter().map(|p| p.expose_provenance().get()); - pages.chain(self.huge_ptrs.iter().flat_map(|(ptr, size)| { - (0..size / self.page_size) - .map(|i| ptr.expose_provenance().get().strict_add(i * self.page_size)) - })) - } - - /// Protects all owned memory as `PROT_NONE`, preventing accesses. - /// - /// SAFETY: Accessing memory after this point will result in a segfault - /// unless it is first unprotected. 
- pub unsafe fn start_ffi(&mut self) -> Result<(), nix::errno::Errno> { - let prot = mman::ProtFlags::PROT_NONE; - unsafe { self.mprotect(prot) } - } - - /// Deprotects all owned memory by setting it to RW. Erroring here is very - /// likely unrecoverable, so it may panic if applying those permissions - /// fails. - pub fn end_ffi(&mut self) { - let prot = mman::ProtFlags::PROT_READ | mman::ProtFlags::PROT_WRITE; - unsafe { - self.mprotect(prot).unwrap(); - } - } - - /// Applies `prot` to every page managed by the allocator. - /// - /// SAFETY: Accessing memory in violation of the protection flags will - /// trigger a segfault. - unsafe fn mprotect(&mut self, prot: mman::ProtFlags) -> Result<(), nix::errno::Errno> { - for &pg in &self.page_ptrs { - unsafe { - mman::mprotect(pg.cast(), self.page_size, prot)?; - } - } - for &(hpg, size) in &self.huge_ptrs { - unsafe { - mman::mprotect(hpg.cast(), size.next_multiple_of(self.page_size), prot)?; - } - } - Ok(()) + /// Returns a list of page ranges managed by the allocator, given in terms of pointers + /// and size (in bytes). + pub fn pages(&self) -> impl Iterator<Item = (NonNull<u8>, usize)> { + let pages = self.page_ptrs.iter().map(|&p| (p, self.page_size)); + pages.chain(self.huge_ptrs.iter().copied()) } } diff --git a/src/tools/miri/src/alloc/mod.rs b/src/tools/miri/src/alloc/mod.rs index 3be885920d2..35158f50a8f 100644 --- a/src/tools/miri/src/alloc/mod.rs +++ b/src/tools/miri/src/alloc/mod.rs @@ -1,5 +1,31 @@ mod alloc_bytes; -#[cfg(target_os = "linux")] +#[cfg(all(unix, feature = "native-lib"))] pub mod isolated_alloc; +#[cfg(not(all(unix, feature = "native-lib")))] +pub mod isolated_alloc { + use std::alloc::Layout; + + /// Stub allocator to avoid `cfg`s in the rest of Miri. + #[derive(Debug)] + pub struct IsolatedAlloc(!); + + impl IsolatedAlloc { + pub fn new() -> Self { + unreachable!() + } + + pub unsafe fn alloc(&mut self, _layout: Layout) -> *mut u8 { + match self.0 {} + } + + pub unsafe fn alloc_zeroed(&mut self, _layout: Layout) -> *mut u8 { + match self.0 {} + } + + pub unsafe fn dealloc(&mut self, _ptr: *mut u8, _layout: Layout) { + match self.0 {} + } + } +} pub use self::alloc_bytes::{MiriAllocBytes, MiriAllocParams}; diff --git a/src/tools/miri/src/alloc_addresses/mod.rs b/src/tools/miri/src/alloc_addresses/mod.rs index 3cc38fa087c..334503d2994 100644 --- a/src/tools/miri/src/alloc_addresses/mod.rs +++ b/src/tools/miri/src/alloc_addresses/mod.rs @@ -116,14 +116,6 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_ref(); let info = this.get_alloc_info(alloc_id); - // Miri's address assignment leaks state across thread boundaries, which is incompatible - // with GenMC execution. So we instead let GenMC assign addresses to allocations. - if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { - let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?; - return interp_ok(addr); - } - - let mut rng = this.machine.rng.borrow_mut(); // This is either called immediately after allocation (and then cached), or when // adjusting `tcx` pointers (which never get freed). So assert that we are looking // at a live allocation. This also ensures that we never re-assign an address to an @@ -131,6 +123,19 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // information was removed. 
assert!(!matches!(info.kind, AllocKind::Dead)); + // TypeId allocations always have a "base address" of 0 (i.e., the relative offset is the + // hash fragment and therefore equal to the actual integer value). + if matches!(info.kind, AllocKind::TypeId) { + return interp_ok(0); + } + + // Miri's address assignment leaks state across thread boundaries, which is incompatible + // with GenMC execution. So we instead let GenMC assign addresses to allocations. + if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { + let addr = genmc_ctx.handle_alloc(&this.machine, info.size, info.align, memory_kind)?; + return interp_ok(addr); + } + // This allocation does not have a base address yet, pick or reuse one. if !this.machine.native_lib.is_empty() { // In native lib mode, we use the "real" address of the bytes for this allocation. @@ -169,12 +174,13 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { std::mem::forget(alloc_bytes); ptr } - AllocKind::Dead => unreachable!(), + AllocKind::TypeId | AllocKind::Dead => unreachable!(), }; // We don't have to expose this pointer yet, we do that in `prepare_for_native_call`. return interp_ok(base_ptr.addr().to_u64()); } // We are not in native lib mode, so we control the addresses ourselves. + let mut rng = this.machine.rng.borrow_mut(); if let Some((reuse_addr, clock)) = global_state.reuse.take_addr( &mut *rng, info.size, @@ -295,21 +301,25 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Store address in cache. global_state.base_addr.try_insert(alloc_id, base_addr).unwrap(); - // Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it sorted. - // We have a fast-path for the common case that this address is bigger than all previous ones. - let pos = if global_state - .int_to_ptr_map - .last() - .is_some_and(|(last_addr, _)| *last_addr < base_addr) - { - global_state.int_to_ptr_map.len() - } else { - global_state + // Also maintain the opposite mapping in `int_to_ptr_map`, ensuring we keep it + // sorted. We have a fast-path for the common case that this address is bigger than + // all previous ones. We skip this for allocations at address 0; those can't be + // real, they must be TypeId "fake allocations". 
+ if base_addr != 0 { + let pos = if global_state .int_to_ptr_map - .binary_search_by_key(&base_addr, |(addr, _)| *addr) - .unwrap_err() - }; - global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id)); + .last() + .is_some_and(|(last_addr, _)| *last_addr < base_addr) + { + global_state.int_to_ptr_map.len() + } else { + global_state + .int_to_ptr_map + .binary_search_by_key(&base_addr, |(addr, _)| *addr) + .unwrap_err() + }; + global_state.int_to_ptr_map.insert(pos, (base_addr, alloc_id)); + } interp_ok(base_addr) } diff --git a/src/tools/miri/src/bin/log/setup.rs b/src/tools/miri/src/bin/log/setup.rs index da0ba528b2c..a9392d010f8 100644 --- a/src/tools/miri/src/bin/log/setup.rs +++ b/src/tools/miri/src/bin/log/setup.rs @@ -60,7 +60,7 @@ fn init_logger_once(early_dcx: &EarlyDiagCtxt) { #[cfg(not(feature = "tracing"))] { crate::fatal_error!( - "fatal error: cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature" + "Cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature" ); } diff --git a/src/tools/miri/src/bin/log/tracing_chrome.rs b/src/tools/miri/src/bin/log/tracing_chrome.rs index 5a96633c99e..3379816550c 100644 --- a/src/tools/miri/src/bin/log/tracing_chrome.rs +++ b/src/tools/miri/src/bin/log/tracing_chrome.rs @@ -12,6 +12,7 @@ //! ```rust //! tracing::info_span!("my_span", tracing_separate_thread = tracing::field::Empty, /* ... */) //! ``` +//! - use i64 instead of u64 for the "id" in [ChromeLayer::get_root_id] to be compatible with Perfetto //! //! Depending on the tracing-chrome crate from crates.io is unfortunately not possible, since it //! depends on `tracing_core` which conflicts with rustc_private's `tracing_core` (meaning it would @@ -285,9 +286,9 @@ struct Callsite { } enum Message { - Enter(f64, Callsite, Option<u64>), + Enter(f64, Callsite, Option<i64>), Event(f64, Callsite), - Exit(f64, Callsite, Option<u64>), + Exit(f64, Callsite, Option<i64>), NewThread(usize, String), Flush, Drop, @@ -519,14 +520,17 @@ where } } - fn get_root_id(&self, span: SpanRef<S>) -> Option<u64> { + fn get_root_id(&self, span: SpanRef<S>) -> Option<i64> { + // Returns `Option<i64>` instead of `Option<u64>` because apparently Perfetto gives an + // error if an id does not fit in a 64-bit signed integer in 2's complement. We cast the + // span id from `u64` to `i64` with wraparound, since negative values are fine. match self.trace_style { TraceStyle::Threaded => { if span.fields().field("tracing_separate_thread").is_some() { // assign an independent "id" to spans with argument "tracing_separate_thread", // so they appear a separate trace line in trace visualization tools, see // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#heading=h.jh64i9l3vwa1 - Some(span.id().into_u64()) + Some(span.id().into_u64().cast_signed()) // the comment above explains the cast } else { None } @@ -539,6 +543,7 @@ where .unwrap_or(span) .id() .into_u64() + .cast_signed() // the comment above explains the cast ), } } diff --git a/src/tools/miri/src/bin/miri.rs b/src/tools/miri/src/bin/miri.rs index 61cebedf081..ae1b25f8857 100644 --- a/src/tools/miri/src/bin/miri.rs +++ b/src/tools/miri/src/bin/miri.rs @@ -67,8 +67,6 @@ use crate::log::setup::{deinit_loggers, init_early_loggers, init_late_loggers}; struct MiriCompilerCalls { miri_config: Option<MiriConfig>, many_seeds: Option<ManySeedsConfig>, - /// Settings for using GenMC with Miri. 
- genmc_config: Option<GenmcConfig>, } struct ManySeedsConfig { @@ -77,12 +75,8 @@ struct ManySeedsConfig { } impl MiriCompilerCalls { - fn new( - miri_config: MiriConfig, - many_seeds: Option<ManySeedsConfig>, - genmc_config: Option<GenmcConfig>, - ) -> Self { - Self { miri_config: Some(miri_config), many_seeds, genmc_config } + fn new(miri_config: MiriConfig, many_seeds: Option<ManySeedsConfig>) -> Self { + Self { miri_config: Some(miri_config), many_seeds } } } @@ -192,8 +186,8 @@ impl rustc_driver::Callbacks for MiriCompilerCalls { optimizations is usually marginal at best."); } - if let Some(genmc_config) = &self.genmc_config { - let _genmc_ctx = Rc::new(GenmcCtx::new(&config, genmc_config)); + if let Some(_genmc_config) = &config.genmc_config { + let _genmc_ctx = Rc::new(GenmcCtx::new(&config)); todo!("GenMC mode not yet implemented"); }; @@ -335,9 +329,10 @@ impl rustc_driver::Callbacks for MiriBeRustCompilerCalls { fn exit(exit_code: i32) -> ! { // Drop the tracing guard before exiting, so tracing calls are flushed correctly. deinit_loggers(); - // Make sure the supervisor knows about the code code. - #[cfg(target_os = "linux")] + // Make sure the supervisor knows about the exit code. + #[cfg(all(unix, feature = "native-lib"))] miri::native_lib::register_retcode_sv(exit_code); + // Actually exit. std::process::exit(exit_code); } @@ -486,7 +481,6 @@ fn main() { let mut many_seeds_keep_going = false; let mut miri_config = MiriConfig::default(); miri_config.env = env_snapshot; - let mut genmc_config = None; let mut rustc_args = vec![]; let mut after_dashdash = false; @@ -561,6 +555,8 @@ fn main() { miri_config.force_intrinsic_fallback = true; } else if arg == "-Zmiri-deterministic-floats" { miri_config.float_nondet = false; + } else if arg == "-Zmiri-no-extra-rounding-error" { + miri_config.float_rounding_error = false; } else if arg == "-Zmiri-strict-provenance" { miri_config.provenance_mode = ProvenanceMode::Strict; } else if arg == "-Zmiri-permissive-provenance" { @@ -600,9 +596,9 @@ fn main() { } else if arg == "-Zmiri-many-seeds-keep-going" { many_seeds_keep_going = true; } else if let Some(trimmed_arg) = arg.strip_prefix("-Zmiri-genmc") { - // FIXME(GenMC): Currently, GenMC mode is incompatible with aliasing model checking. - miri_config.borrow_tracker = None; - GenmcConfig::parse_arg(&mut genmc_config, trimmed_arg); + if let Err(msg) = GenmcConfig::parse_arg(&mut miri_config.genmc_config, trimmed_arg) { + fatal_error!("{msg}"); + } } else if let Some(param) = arg.strip_prefix("-Zmiri-env-forward=") { miri_config.forwarded_env_vars.push(param.to_owned()); } else if let Some(param) = arg.strip_prefix("-Zmiri-env-set=") { @@ -737,13 +733,18 @@ fn main() { many_seeds.map(|seeds| ManySeedsConfig { seeds, keep_going: many_seeds_keep_going }); // Validate settings for data race detection and GenMC mode. - assert_eq!(genmc_config.is_some(), miri_config.genmc_mode); - if genmc_config.is_some() { + if miri_config.genmc_config.is_some() { if !miri_config.data_race_detector { fatal_error!("Cannot disable data race detection in GenMC mode (currently)"); } else if !miri_config.weak_memory_emulation { fatal_error!("Cannot disable weak memory emulation in GenMC mode"); } + if miri_config.borrow_tracker.is_some() { + eprintln!( + "warning: borrow tracking has been disabled, it is not (yet) supported in GenMC mode." 
+ ); + miri_config.borrow_tracker = None; + } } else if miri_config.weak_memory_emulation && !miri_config.data_race_detector { fatal_error!( "Weak memory emulation cannot be enabled when the data race detector is disabled" @@ -754,7 +755,7 @@ fn main() { debug!("crate arguments: {:?}", miri_config.args); if !miri_config.native_lib.is_empty() && miri_config.native_lib_enable_tracing { // SAFETY: No other threads are running - #[cfg(target_os = "linux")] + #[cfg(all(unix, feature = "native-lib"))] if unsafe { miri::native_lib::init_sv() }.is_err() { eprintln!( "warning: The native-lib tracer could not be started. Is this an x86 Linux system, and does Miri have permissions to ptrace?\n\ @@ -762,8 +763,5 @@ fn main() { ); } } - run_compiler_and_exit( - &rustc_args, - &mut MiriCompilerCalls::new(miri_config, many_seeds, genmc_config), - ) + run_compiler_and_exit(&rustc_args, &mut MiriCompilerCalls::new(miri_config, many_seeds)) } diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs index 834a4b41f22..2977efaae04 100644 --- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs @@ -650,7 +650,7 @@ trait EvalContextPrivExt<'tcx, 'ecx>: crate::MiriInterpCxExt<'tcx> { dcx.log_protector(); } }, - AllocKind::Function | AllocKind::VTable | AllocKind::Dead => { + AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => { // No stacked borrows on these allocations. } } @@ -1021,7 +1021,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { trace!("Stacked Borrows tag {tag:?} exposed in {alloc_id:?}"); alloc_extra.borrow_tracker_sb().borrow_mut().exposed_tags.insert(tag); } - AllocKind::Function | AllocKind::VTable | AllocKind::Dead => { + AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => { // No stacked borrows on these allocations. } } diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs index c157c69d7c8..ad2a67160f4 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs @@ -673,7 +673,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}"); alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag); } - AllocKind::Function | AllocKind::VTable | AllocKind::Dead => { + AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => { // No tree borrows on these allocations. } } diff --git a/src/tools/miri/src/concurrency/genmc/config.rs b/src/tools/miri/src/concurrency/genmc/config.rs index f91211a670f..c56adab90fe 100644 --- a/src/tools/miri/src/concurrency/genmc/config.rs +++ b/src/tools/miri/src/concurrency/genmc/config.rs @@ -1,19 +1,35 @@ -use crate::MiriConfig; +use super::GenmcParams; +/// Configuration for GenMC mode. +/// The `params` field is shared with the C++ side. +/// The remaining options are kept on the Rust side. #[derive(Debug, Default, Clone)] pub struct GenmcConfig { - // TODO: add fields + pub(super) params: GenmcParams, + do_estimation: bool, + // FIXME(GenMC): add remaining options. } impl GenmcConfig { /// Function for parsing command line options for GenMC mode. + /// /// All GenMC arguments start with the string "-Zmiri-genmc". + /// Passing any GenMC argument will enable GenMC mode. 
/// - /// `trimmed_arg` should be the argument to be parsed, with the suffix "-Zmiri-genmc" removed - pub fn parse_arg(genmc_config: &mut Option<GenmcConfig>, trimmed_arg: &str) { + /// `trimmed_arg` should be the argument to be parsed, with the prefix "-Zmiri-genmc" removed. + pub fn parse_arg( + genmc_config: &mut Option<GenmcConfig>, + trimmed_arg: &str, + ) -> Result<(), String> { + // FIXME(genmc): Ensure host == target somewhere. + if genmc_config.is_none() { *genmc_config = Some(Default::default()); } - todo!("implement parsing of GenMC options") + if trimmed_arg.is_empty() { + return Ok(()); // this corresponds to "-Zmiri-genmc" + } + // FIXME(GenMC): implement remaining parameters. + todo!(); + } } diff --git a/src/tools/miri/src/concurrency/genmc/dummy.rs b/src/tools/miri/src/concurrency/genmc/dummy.rs index 3d0558fb685..79d27c4be15 100644 --- a/src/tools/miri/src/concurrency/genmc/dummy.rs +++ b/src/tools/miri/src/concurrency/genmc/dummy.rs @@ -16,7 +16,7 @@ pub struct GenmcCtx {} pub struct GenmcConfig {} impl GenmcCtx { - pub fn new(_miri_config: &MiriConfig, _genmc_config: &GenmcConfig) -> Self { + pub fn new(_miri_config: &MiriConfig) -> Self { unreachable!() } @@ -227,10 +227,15 @@ impl VisitProvenance for GenmcCtx { } impl GenmcConfig { - pub fn parse_arg(_genmc_config: &mut Option<GenmcConfig>, trimmed_arg: &str) { - unimplemented!( - "GenMC feature im Miri is disabled, cannot handle argument: \"-Zmiri-genmc{trimmed_arg}\"" - ); + pub fn parse_arg( + _genmc_config: &mut Option<GenmcConfig>, + trimmed_arg: &str, + ) -> Result<(), String> { + if cfg!(feature = "genmc") { + Err(format!("GenMC is not supported on this target")) + } else { + Err(format!("GenMC is disabled in this build of Miri")) + } } pub fn should_print_graph(&self, _rep: usize) -> bool { diff --git a/src/tools/miri/src/concurrency/genmc/mod.rs b/src/tools/miri/src/concurrency/genmc/mod.rs index 0dfd4b9b80f..3617775e27e 100644 --- a/src/tools/miri/src/concurrency/genmc/mod.rs +++ b/src/tools/miri/src/concurrency/genmc/mod.rs @@ -2,6 +2,7 @@ use std::cell::Cell; +use genmc_sys::{GenmcParams, createGenmcHandle}; use rustc_abi::{Align, Size}; use rustc_const_eval::interpret::{InterpCx, InterpResult, interp_ok}; use rustc_middle::mir; @@ -24,9 +25,19 @@ pub struct GenmcCtx { impl GenmcCtx { /// Create a new `GenmcCtx` from a given config. - pub fn new(miri_config: &MiriConfig, genmc_config: &GenmcConfig) -> Self { - assert!(miri_config.genmc_mode); - todo!() + pub fn new(miri_config: &MiriConfig) -> Self { + let genmc_config = miri_config.genmc_config.as_ref().unwrap(); + + let handle = createGenmcHandle(&genmc_config.params); + assert!(!handle.is_null()); + + eprintln!("Miri: GenMC handle creation successful!"); + + drop(handle); + eprintln!("Miri: Dropping GenMC handle successful!"); + + // FIXME(GenMC): implement + std::process::exit(0); } pub fn get_stuck_execution_count(&self) -> usize { diff --git a/src/tools/miri/src/concurrency/mod.rs b/src/tools/miri/src/concurrency/mod.rs index c2ea8a00dec..435615efd9f 100644 --- a/src/tools/miri/src/concurrency/mod.rs +++ b/src/tools/miri/src/concurrency/mod.rs @@ -8,7 +8,17 @@ mod vector_clock; pub mod weak_memory; // Import either the real genmc adapter or a dummy module. -#[cfg_attr(not(feature = "genmc"), path = "genmc/dummy.rs")] +// On unsupported platforms, we always include the dummy module, even if the `genmc` feature is enabled. +// FIXME(genmc,macos): Add `target_os = "macos"` once `https://github.com/dtolnay/cxx/issues/1535` is fixed.
+#[cfg_attr( + not(all( + feature = "genmc", + target_os = "linux", + target_pointer_width = "64", + target_endian = "little" + )), + path = "genmc/dummy.rs" +)] mod genmc; pub use self::data_race_handler::{AllocDataRaceHandler, GlobalDataRaceHandler}; diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index 878afdf2517..fe1ef86ccd3 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -375,7 +375,7 @@ impl Timeout { } /// The clock to use for the timeout you are asking for. -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, PartialEq)] pub enum TimeoutClock { Monotonic, RealTime, diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index be6404f64e8..4c531a8d1f5 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -125,8 +125,8 @@ pub struct MiriConfig { pub data_race_detector: bool, /// Determine if weak memory emulation should be enabled. Requires data race detection to be enabled. pub weak_memory_emulation: bool, - /// Determine if we are running in GenMC mode. In this mode, Miri will explore multiple concurrent executions of the given program. - pub genmc_mode: bool, + /// Determine if we are running in GenMC mode and with which settings. In GenMC mode, Miri will explore multiple concurrent executions of the given program. + pub genmc_config: Option<GenmcConfig>, /// Track when an outdated (weak memory) load happens. pub track_outdated_loads: bool, /// Rate of spurious failures for compare_exchange_weak atomic operations, @@ -170,6 +170,8 @@ pub struct MiriConfig { pub force_intrinsic_fallback: bool, /// Whether floating-point operations can behave non-deterministically. pub float_nondet: bool, + /// Whether floating-point operations can have a non-deterministic rounding error. + pub float_rounding_error: bool, } impl Default for MiriConfig { @@ -190,7 +192,7 @@ impl Default for MiriConfig { track_alloc_accesses: false, data_race_detector: true, weak_memory_emulation: true, - genmc_mode: false, + genmc_config: None, track_outdated_loads: false, cmpxchg_weak_failure_rate: 0.8, // 80% measureme_out: None, @@ -211,6 +213,7 @@ impl Default for MiriConfig { fixed_scheduling: false, force_intrinsic_fallback: false, float_nondet: true, + float_rounding_error: true, } } } @@ -331,8 +334,8 @@ pub fn create_ecx<'tcx>( helpers::try_resolve_path(tcx, &["core", "ascii", "escape_default"], Namespace::ValueNS); if !matches!(sentinel, Some(s) if tcx.is_mir_available(s.def.def_id())) { tcx.dcx().fatal( - "the current sysroot was built without `-Zalways-encode-mir`, or libcore seems missing. \ - Use `cargo miri setup` to prepare a sysroot that is suitable for Miri." + "the current sysroot was built without `-Zalways-encode-mir`, or libcore seems missing.\n\ + Note that directly invoking the `miri` binary is not supported; please use `cargo miri` instead." 
); } diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index ccfff7fa94b..43cb1c9ae05 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -3,7 +3,7 @@ use std::time::Duration; use std::{cmp, iter}; use rand::RngCore; -use rustc_abi::{Align, CanonAbi, ExternAbi, FieldIdx, FieldsShape, Size, Variants}; +use rustc_abi::{Align, ExternAbi, FieldIdx, FieldsShape, Size, Variants}; use rustc_apfloat::Float; use rustc_apfloat::ieee::{Double, Half, Quad, Single}; use rustc_hir::Safety; @@ -14,11 +14,10 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::exported_symbols::ExportedSymbol; use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout}; -use rustc_middle::ty::{self, Binder, FloatTy, FnSig, IntTy, Ty, TyCtxt, UintTy}; +use rustc_middle::ty::{self, FloatTy, IntTy, Ty, TyCtxt, UintTy}; use rustc_session::config::CrateType; use rustc_span::{Span, Symbol}; use rustc_symbol_mangling::mangle_internal_symbol; -use rustc_target::callconv::FnAbi; use crate::*; @@ -437,7 +436,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// For now, arguments must be scalars (so that the caller does not have to know the layout). /// /// If you do not provide a return place, a dangling zero-sized place will be created - /// for your convenience. + /// for your convenience. This is only valid if the return type is `()`. fn call_function( &mut self, f: ty::Instance<'tcx>, @@ -452,7 +451,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mir = this.load_mir(f.def, None)?; let dest = match dest { Some(dest) => dest.clone(), - None => MPlaceTy::fake_alloc_zst(this.layout_of(mir.return_ty())?), + None => MPlaceTy::fake_alloc_zst(this.machine.layouts.unit), }; // Construct a function pointer type representing the caller perspective. @@ -465,6 +464,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ); let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?; + // This will also show proper errors if there is any ABI mismatch. this.init_stack_frame( f, mir, @@ -929,21 +929,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { self.read_c_str_with_char_size(ptr, wchar_t.size, wchar_t.align.abi) } - /// Check that the calling convention is what we expect. - fn check_callconv<'a>( - &self, - fn_abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - ) -> InterpResult<'a, ()> { - if fn_abi.conv != exp_abi { - throw_ub_format!( - r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, - fn_abi.conv - ); - } - interp_ok(()) - } - fn frame_in_std(&self) -> bool { let this = self.eval_context_ref(); let frame = this.frame(); @@ -967,161 +952,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { crate_name == "std" || crate_name == "std_miri_test" } - fn check_abi_and_shim_symbol_clash( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - ) -> InterpResult<'tcx, ()> { - self.check_callconv(abi, exp_abi)?; - if let Some((body, instance)) = self.eval_context_mut().lookup_exported_symbol(link_name)? { - // If compiler-builtins is providing the symbol, then don't treat it as a clash. - // We'll use our built-in implementation in `emulate_foreign_item_inner` for increased - // performance. 
Note that this means we won't catch any undefined behavior in - // compiler-builtins when running other crates, but Miri can still be run on - // compiler-builtins itself (or any crate that uses it as a normal dependency) - if self.eval_context_ref().tcx.is_compiler_builtins(instance.def_id().krate) { - return interp_ok(()); - } - - throw_machine_stop!(TerminationInfo::SymbolShimClashing { - link_name, - span: body.span.data(), - }) - } - interp_ok(()) - } - - fn check_shim<'a, const N: usize>( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?; - - if abi.c_variadic { - throw_ub_format!( - "calling a non-variadic function with a variadic caller-side signature" - ); - } - if let Ok(ops) = args.try_into() { - return interp_ok(ops); - } - throw_ub_format!( - "incorrect number of arguments for `{link_name}`: got {}, expected {}", - args.len(), - N - ) - } - - /// Check that the given `caller_fn_abi` matches the expected ABI described by - /// `callee_abi`, `callee_input_tys`, `callee_output_ty`, and then returns the list of - /// arguments. - fn check_shim_abi<'a, const N: usize>( - &mut self, - link_name: Symbol, - caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>, - callee_abi: ExternAbi, - callee_input_tys: [Ty<'tcx>; N], - callee_output_ty: Ty<'tcx>, - caller_args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - let this = self.eval_context_mut(); - let mut inputs_and_output = callee_input_tys.to_vec(); - inputs_and_output.push(callee_output_ty); - let fn_sig_binder = Binder::dummy(FnSig { - inputs_and_output: this.machine.tcx.mk_type_list(&inputs_and_output), - c_variadic: false, - // This does not matter for the ABI. - safety: Safety::Safe, - abi: callee_abi, - }); - let callee_fn_abi = this.fn_abi_of_fn_ptr(fn_sig_binder, Default::default())?; - - this.check_abi_and_shim_symbol_clash(caller_fn_abi, callee_fn_abi.conv, link_name)?; - - if caller_fn_abi.c_variadic { - throw_ub_format!( - "ABI mismatch: calling a non-variadic function with a variadic caller-side signature" - ); - } - - if callee_fn_abi.fixed_count != caller_fn_abi.fixed_count { - throw_ub_format!( - "ABI mismatch: expected {} arguments, found {} arguments ", - callee_fn_abi.fixed_count, - caller_fn_abi.fixed_count - ); - } - - if callee_fn_abi.can_unwind && !caller_fn_abi.can_unwind { - throw_ub_format!( - "ABI mismatch: callee may unwind, but caller-side signature prohibits unwinding", - ); - } - - if !this.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? { - throw_ub!(AbiMismatchReturn { - caller_ty: caller_fn_abi.ret.layout.ty, - callee_ty: callee_fn_abi.ret.layout.ty - }); - } - - if let Some(index) = caller_fn_abi - .args - .iter() - .zip(callee_fn_abi.args.iter()) - .map(|(caller_arg, callee_arg)| this.check_argument_compat(caller_arg, callee_arg)) - .collect::<InterpResult<'tcx, Vec<bool>>>()? - .into_iter() - .position(|b| !b) - { - throw_ub!(AbiMismatchArgument { - caller_ty: caller_fn_abi.args[index].layout.ty, - callee_ty: callee_fn_abi.args[index].layout.ty - }); - } - - if let Ok(ops) = caller_args.try_into() { - return interp_ok(ops); - } - unreachable!() - } - - /// Check shim for variadic function. - /// Returns a tuple that consisting of an array of fixed args, and a slice of varargs. 
- fn check_shim_variadic<'a, const N: usize>( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, (&'a [OpTy<'tcx>; N], &'a [OpTy<'tcx>])> - where - &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, - { - self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?; - - if !abi.c_variadic { - throw_ub_format!( - "calling a variadic function with a non-variadic caller-side signature" - ); - } - if abi.fixed_count != u32::try_from(N).unwrap() { - throw_ub_format!( - "incorrect number of fixed arguments for variadic function `{}`: got {}, expected {N}", - link_name.as_str(), - abi.fixed_count - ) - } - if let Some(args) = args.split_first_chunk() { - return interp_ok(args); - } - panic!("mismatch between signature and `args` slice"); - } - /// Mark a machine allocation that was just created as immutable. fn mark_immutable(&mut self, mplace: &MPlaceTy<'tcx>) { let this = self.eval_context_mut(); @@ -1257,8 +1087,21 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "failed to evaluate static in required link_section: {def_id:?}\n{err:?}" ) }); - let val = this.read_immediate(&const_val)?; - array.push(val); + match const_val.layout.ty.kind() { + ty::FnPtr(..) => { + array.push(this.read_immediate(&const_val)?); + } + ty::Array(elem_ty, _) if matches!(elem_ty.kind(), ty::FnPtr(..)) => { + let mut elems = this.project_array_fields(&const_val)?; + while let Some((_idx, elem)) = elems.next(this)? { + array.push(this.read_immediate(&elem)?); + } + } + _ => + throw_unsup_format!( + "only function pointers and arrays of function pointers are supported in well-known linker sections" + ), + } } interp_ok(()) })?; @@ -1317,39 +1160,6 @@ impl<'tcx> MiriMachine<'tcx> { } } -/// Check that the number of args is what we expect. -pub fn check_intrinsic_arg_count<'a, 'tcx, const N: usize>( - args: &'a [OpTy<'tcx>], -) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> -where - &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, -{ - if let Ok(ops) = args.try_into() { - return interp_ok(ops); - } - throw_ub_format!( - "incorrect number of arguments for intrinsic: got {}, expected {}", - args.len(), - N - ) -} - -/// Check that the number of varargs is at least the minimum what we expect. -/// Fixed args should not be included. -pub fn check_min_vararg_count<'a, 'tcx, const N: usize>( - name: &'a str, - args: &'a [OpTy<'tcx>], -) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - if let Some((ops, _)) = args.split_first_chunk() { - return interp_ok(ops); - } - throw_ub_format!( - "not enough variadic arguments for `{name}`: got {}, expected at least {}", - args.len(), - N - ) -} - pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> { throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!( "{name} not available when isolation is enabled", @@ -1435,43 +1245,14 @@ impl ToU64 for usize { } } -/// This struct is needed to enforce `#[must_use]` on values produced by [enter_trace_span] even -/// when the "tracing" feature is not enabled. -#[must_use] -pub struct MaybeEnteredTraceSpan { - #[cfg(feature = "tracing")] - pub _entered_span: tracing::span::EnteredSpan, -} - /// Enters a [tracing::info_span] only if the "tracing" feature is enabled, otherwise does nothing. 
-/// This is like [rustc_const_eval::enter_trace_span] except that it does not depend on the -/// [Machine] trait to check if tracing is enabled, because from the Miri codebase we can directly -/// check whether the "tracing" feature is enabled, unlike from the rustc_const_eval codebase. -/// -/// In addition to the syntax accepted by [tracing::span!], this macro optionally allows passing -/// the span name (i.e. the first macro argument) in the form `NAME::SUBNAME` (without quotes) to -/// indicate that the span has name "NAME" (usually the name of the component) and has an additional -/// more specific name "SUBNAME" (usually the function name). The latter is passed to the [tracing] -/// infrastructure as a span field with the name "NAME". This allows not being distracted by -/// subnames when looking at the trace in <https://ui.perfetto.dev>, but when deeper introspection -/// is needed within a component, it's still possible to view the subnames directly in the UI by -/// selecting a span, clicking on the "NAME" argument on the right, and clicking on "Visualize -/// argument values". -/// ```rust -/// // for example, the first will expand to the second -/// enter_trace_span!(borrow_tracker::on_stack_pop, /* ... */) -/// enter_trace_span!("borrow_tracker", borrow_tracker = "on_stack_pop", /* ... */) -/// ``` +/// This calls [rustc_const_eval::enter_trace_span] with [MiriMachine] as the first argument, which +/// will in turn call [MiriMachine::enter_trace_span], which takes care of determining at compile +/// time whether to trace or not (and supposedly the call is compiled out if tracing is disabled). +/// Look at [rustc_const_eval::enter_trace_span] for complete documentation, examples and tips. #[macro_export] macro_rules! enter_trace_span { - ($name:ident :: $subname:ident $($tt:tt)*) => {{ - enter_trace_span!(stringify!($name), $name = %stringify!(subname) $($tt)*) - }}; - ($($tt:tt)*) => { - $crate::MaybeEnteredTraceSpan { - #[cfg(feature = "tracing")] - _entered_span: tracing::info_span!($($tt)*).entered() - } + rustc_const_eval::enter_trace_span!($crate::MiriMachine<'static>, $($tt)*) }; } diff --git a/src/tools/miri/src/intrinsics/atomic.rs b/src/tools/miri/src/intrinsics/atomic.rs index 0a59a707a10..bcc3e9ec885 100644 --- a/src/tools/miri/src/intrinsics/atomic.rs +++ b/src/tools/miri/src/intrinsics/atomic.rs @@ -2,7 +2,7 @@ use rustc_middle::mir::BinOp; use rustc_middle::ty::AtomicOrdering; use rustc_middle::{mir, ty}; -use self::helpers::check_intrinsic_arg_count; +use super::check_intrinsic_arg_count; use crate::*; pub enum AtomicOp { diff --git a/src/tools/miri/src/intrinsics/mod.rs b/src/tools/miri/src/intrinsics/mod.rs index 4efa7dd4dcf..b5e81460773 100644 --- a/src/tools/miri/src/intrinsics/mod.rs +++ b/src/tools/miri/src/intrinsics/mod.rs @@ -14,11 +14,28 @@ use rustc_middle::ty::{self, FloatTy, ScalarInt}; use rustc_span::{Symbol, sym}; use self::atomic::EvalContextExt as _; -use self::helpers::{ToHost, ToSoft, check_intrinsic_arg_count}; +use self::helpers::{ToHost, ToSoft}; use self::simd::EvalContextExt as _; use crate::math::{IeeeExt, apply_random_float_error_ulp}; use crate::*; +/// Check that the number of args is what we expect. 
+fn check_intrinsic_arg_count<'a, 'tcx, const N: usize>( + args: &'a [OpTy<'tcx>], +) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> +where + &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, +{ + if let Ok(ops) = args.try_into() { + return interp_ok(ops); + } + throw_ub_format!( + "incorrect number of arguments for intrinsic: got {}, expected {}", + args.len(), + N + ) +} + impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn call_intrinsic( @@ -114,7 +131,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )); } "catch_unwind" => { - this.handle_catch_unwind(args, dest, ret)?; + let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; + this.handle_catch_unwind(try_fn, data, catch_fn, dest, ret)?; // This pushed a stack frame, don't jump to `ret`. return interp_ok(EmulateItemResult::AlreadyJumped); } diff --git a/src/tools/miri/src/intrinsics/simd.rs b/src/tools/miri/src/intrinsics/simd.rs index e63992aa95f..b26516c0ff0 100644 --- a/src/tools/miri/src/intrinsics/simd.rs +++ b/src/tools/miri/src/intrinsics/simd.rs @@ -6,9 +6,8 @@ use rustc_middle::ty::FloatTy; use rustc_middle::{mir, ty}; use rustc_span::{Symbol, sym}; -use crate::helpers::{ - ToHost, ToSoft, bool_to_simd_element, check_intrinsic_arg_count, simd_element_to_bool, -}; +use super::check_intrinsic_arg_count; +use crate::helpers::{ToHost, ToSoft, bool_to_simd_element, simd_element_to_bool}; use crate::*; #[derive(Copy, Clone)] diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index a591d21071d..2b92c25a424 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -1,3 +1,4 @@ +#![feature(abort_unwind)] #![feature(cfg_select)] #![feature(rustc_private)] #![feature(float_gamma)] @@ -6,6 +7,7 @@ #![feature(never_type)] #![feature(try_blocks)] #![feature(io_error_more)] +#![feature(if_let_guard)] #![feature(variant_count)] #![feature(yeet_expr)] #![feature(nonzero_ops)] @@ -55,7 +57,6 @@ extern crate tracing; extern crate rustc_abi; extern crate rustc_apfloat; extern crate rustc_ast; -extern crate rustc_attr_data_structures; extern crate rustc_const_eval; extern crate rustc_data_structures; extern crate rustc_errors; @@ -97,7 +98,7 @@ pub use rustc_const_eval::interpret::{self, AllocMap, Provenance as _}; use rustc_middle::{bug, span_bug}; use tracing::{info, trace}; -#[cfg(target_os = "linux")] +#[cfg(all(unix, feature = "native-lib"))] pub mod native_lib { pub use crate::shims::{init_sv, register_retcode_sv}; } @@ -141,9 +142,7 @@ pub use crate::eval::{ AlignmentCheck, BacktraceStyle, IsolatedOp, MiriConfig, MiriEntryFnType, RejectOpWith, ValidationMode, create_ecx, eval_entry, }; -pub use crate::helpers::{ - AccessKind, EvalContextExt as _, MaybeEnteredTraceSpan, ToU64 as _, ToUsize as _, -}; +pub use crate::helpers::{AccessKind, EvalContextExt as _, ToU64 as _, ToUsize as _}; pub use crate::intrinsics::EvalContextExt as _; pub use crate::machine::{ AllocExtra, DynMachineCallback, FrameExtra, MachineCallback, MemoryKind, MiriInterpCx, @@ -157,6 +156,7 @@ pub use crate::shims::foreign_items::{DynSym, EvalContextExt as _}; pub use crate::shims::io_error::{EvalContextExt as _, IoError, LibcError}; pub use crate::shims::os_str::EvalContextExt as _; pub use crate::shims::panic::EvalContextExt as _; +pub use crate::shims::sig::EvalContextExt as _; pub use crate::shims::time::EvalContextExt as _; pub use crate::shims::tls::TlsData; pub use crate::shims::unwind::{CatchUnwindData, EvalContextExt as _}; diff 
--git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index f309e34c75b..00c3373bb0f 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -4,7 +4,6 @@ use std::any::Any; use std::borrow::Cow; use std::cell::{Cell, RefCell}; -use std::collections::hash_map::Entry; use std::path::Path; use std::rc::Rc; use std::{fmt, process}; @@ -13,7 +12,7 @@ use rand::rngs::StdRng; use rand::{Rng, SeedableRng}; use rustc_abi::{Align, ExternAbi, Size}; use rustc_apfloat::{Float, FloatConvert}; -use rustc_attr_data_structures::InlineAttr; +use rustc_hir::attrs::InlineAttr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; #[allow(unused)] use rustc_data_structures::static_assert_size; @@ -70,12 +69,6 @@ pub struct FrameExtra<'tcx> { /// This is used by `MiriMachine::current_span` and `MiriMachine::caller_span` pub is_user_relevant: bool, - /// We have a cache for the mapping from [`mir::Const`] to resulting [`AllocId`]. - /// However, we don't want all frames to always get the same result, so we insert - /// an additional bit of "salt" into the cache key. This salt is fixed per-frame - /// so that within a call, a const will have a stable address. - salt: usize, - /// Data race detector per-frame data. pub data_race: Option<data_race::FrameState>, } @@ -83,19 +76,12 @@ pub struct FrameExtra<'tcx> { impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Omitting `timing`, it does not support `Debug`. - let FrameExtra { - borrow_tracker, - catch_unwind, - timing: _, - is_user_relevant, - salt, - data_race, - } = self; + let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } = + self; f.debug_struct("FrameData") .field("borrow_tracker", borrow_tracker) .field("catch_unwind", catch_unwind) .field("is_user_relevant", is_user_relevant) - .field("salt", salt) .field("data_race", data_race) .finish() } @@ -108,7 +94,6 @@ impl VisitProvenance for FrameExtra<'_> { borrow_tracker, timing: _, is_user_relevant: _, - salt: _, data_race: _, } = self; @@ -530,7 +515,6 @@ pub struct MiriMachine<'tcx> { pub(crate) rng: RefCell<StdRng>, /// The allocator used for the machine's `AllocBytes` in native-libs mode. - #[cfg(target_os = "linux")] pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>, /// The allocation IDs to report when they are being allocated @@ -554,9 +538,9 @@ pub struct MiriMachine<'tcx> { pub(crate) basic_block_count: u64, /// Handle of the optional shared object file for native functions. - #[cfg(unix)] + #[cfg(all(unix, feature = "native-lib"))] pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>, - #[cfg(not(unix))] + #[cfg(not(all(unix, feature = "native-lib")))] pub native_lib: Vec<!>, /// Run a garbage collector for BorTags every N basic blocks. @@ -579,11 +563,6 @@ pub struct MiriMachine<'tcx> { /// diagnostics. pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>, - /// Maps MIR consts to their evaluated result. We combine the const with a "salt" (`usize`) - /// that is fixed per stack frame; this lets us have sometimes different results for the - /// same const while ensuring consistent results within a single call. - const_cache: RefCell<FxHashMap<(mir::Const<'tcx>, usize), OpTy<'tcx>>>, - /// For each allocation, an offset inside that allocation that was deemed aligned even for /// symbolic alignment checks. 
This cannot be stored in `AllocExtra` since it needs to be /// tracked for vtables and function allocations as well as regular allocations. @@ -603,7 +582,7 @@ pub struct MiriMachine<'tcx> { /// Remembers whether we already warned about an extern type with Stacked Borrows. pub(crate) sb_extern_type_warned: Cell<bool>, /// Remember whether we already warned about sharing memory with a native call. - #[cfg(unix)] + #[allow(unused)] pub(crate) native_call_mem_warned: Cell<bool>, /// Remembers which shims have already shown the warning about erroring in isolation. pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>, @@ -618,9 +597,14 @@ pub struct MiriMachine<'tcx> { /// Whether floating-point operations can behave non-deterministically. pub float_nondet: bool, + /// Whether floating-point operations can have a non-deterministic rounding error. + pub float_rounding_error: bool, } impl<'tcx> MiriMachine<'tcx> { + /// Create a new MiriMachine. + /// + /// Invariant: `genmc_ctx.is_some() == config.genmc_config.is_some()` pub(crate) fn new( config: &MiriConfig, layout_cx: LayoutCx<'tcx>, @@ -644,7 +628,7 @@ impl<'tcx> MiriMachine<'tcx> { }); let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0)); let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config)); - let data_race = if config.genmc_mode { + let data_race = if config.genmc_config.is_some() { // `genmc_ctx` persists across executions, so we don't create a new one here. GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap()) } else if config.data_race_detector { @@ -718,7 +702,6 @@ impl<'tcx> MiriMachine<'tcx> { local_crates, extern_statics: FxHashMap::default(), rng: RefCell::new(rng), - #[cfg(target_os = "linux")] allocator: if !config.native_lib.is_empty() { Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new()))) } else { None }, @@ -730,7 +713,7 @@ impl<'tcx> MiriMachine<'tcx> { report_progress: config.report_progress, basic_block_count: 0, monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow), - #[cfg(unix)] + #[cfg(all(unix, feature = "native-lib"))] native_lib: config.native_lib.iter().map(|lib_file_path| { let host_triple = rustc_session::config::host_tuple(); let target_triple = tcx.sess.opts.target_triple.tuple(); @@ -752,9 +735,9 @@ impl<'tcx> MiriMachine<'tcx> { lib_file_path.clone(), ) }).collect(), - #[cfg(not(unix))] + #[cfg(not(all(unix, feature = "native-lib")))] native_lib: config.native_lib.iter().map(|_| { - panic!("calling functions from native libraries via FFI is only supported on Unix") + panic!("calling functions from native libraries via FFI is not supported in this build of Miri") }).collect(), gc_interval: config.gc_interval, since_gc: 0, @@ -764,20 +747,19 @@ impl<'tcx> MiriMachine<'tcx> { stack_size, collect_leak_backtraces: config.collect_leak_backtraces, allocation_spans: RefCell::new(FxHashMap::default()), - const_cache: RefCell::new(FxHashMap::default()), symbolic_alignment: RefCell::new(FxHashMap::default()), union_data_ranges: FxHashMap::default(), pthread_mutex_sanity: Cell::new(false), pthread_rwlock_sanity: Cell::new(false), pthread_condvar_sanity: Cell::new(false), sb_extern_type_warned: Cell::new(false), - #[cfg(unix)] native_call_mem_warned: Cell::new(false), reject_in_isolation_warned: Default::default(), int2ptr_warned: Default::default(), mangle_internal_symbol_cache: Default::default(), force_intrinsic_fallback: config.force_intrinsic_fallback, float_nondet: config.float_nondet, + float_rounding_error: 
config.float_rounding_error, } } @@ -924,7 +906,6 @@ impl VisitProvenance for MiriMachine<'_> { backtrace_style: _, local_crates: _, rng: _, - #[cfg(target_os = "linux")] allocator: _, tracked_alloc_ids: _, track_alloc_accesses: _, @@ -942,20 +923,19 @@ impl VisitProvenance for MiriMachine<'_> { stack_size: _, collect_leak_backtraces: _, allocation_spans: _, - const_cache: _, symbolic_alignment: _, union_data_ranges: _, pthread_mutex_sanity: _, pthread_rwlock_sanity: _, pthread_condvar_sanity: _, sb_extern_type_warned: _, - #[cfg(unix)] native_call_mem_warned: _, reject_in_isolation_warned: _, int2ptr_warned: _, mangle_internal_symbol_cache: _, force_intrinsic_fallback: _, float_nondet: _, + float_rounding_error: _, } = self; threads.visit_provenance(visit); @@ -1579,7 +1559,6 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { catch_unwind: None, timing, is_user_relevant: ecx.machine.is_user_relevant(&frame), - salt: ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL), data_race: ecx .machine .data_race @@ -1738,33 +1717,6 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { interp_ok(()) } - fn eval_mir_constant<F>( - ecx: &InterpCx<'tcx, Self>, - val: mir::Const<'tcx>, - span: Span, - layout: Option<TyAndLayout<'tcx>>, - eval: F, - ) -> InterpResult<'tcx, OpTy<'tcx>> - where - F: Fn( - &InterpCx<'tcx, Self>, - mir::Const<'tcx>, - Span, - Option<TyAndLayout<'tcx>>, - ) -> InterpResult<'tcx, OpTy<'tcx>>, - { - let frame = ecx.active_thread_stack().last().unwrap(); - let mut cache = ecx.machine.const_cache.borrow_mut(); - match cache.entry((val, frame.extra.salt)) { - Entry::Vacant(ve) => { - let op = eval(ecx, val, span, layout)?; - ve.insert(op.clone()); - interp_ok(op) - } - Entry::Occupied(oe) => interp_ok(oe.get().clone()), - } - } - fn get_global_alloc_salt( ecx: &InterpCx<'tcx, Self>, instance: Option<ty::Instance<'tcx>>, @@ -1817,13 +1769,10 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams { use crate::alloc::MiriAllocParams; - #[cfg(target_os = "linux")] match &self.allocator { Some(alloc) => MiriAllocParams::Isolated(alloc.clone()), None => MiriAllocParams::Global, } - #[cfg(not(target_os = "linux"))] - MiriAllocParams::Global } fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan { diff --git a/src/tools/miri/src/math.rs b/src/tools/miri/src/math.rs index cf16a5676d6..e9e5a1070c9 100644 --- a/src/tools/miri/src/math.rs +++ b/src/tools/miri/src/math.rs @@ -15,7 +15,7 @@ pub(crate) fn apply_random_float_error<F: rustc_apfloat::Float>( val: F, err_scale: i32, ) -> F { - if !ecx.machine.float_nondet { + if !ecx.machine.float_nondet || !ecx.machine.float_rounding_error { return val; } diff --git a/src/tools/miri/src/shims/aarch64.rs b/src/tools/miri/src/shims/aarch64.rs index 44ad5081ad5..6e422b4ab71 100644 --- a/src/tools/miri/src/shims/aarch64.rs +++ b/src/tools/miri/src/shims/aarch64.rs @@ -20,7 +20,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let unprefixed_name = link_name.as_str().strip_prefix("llvm.aarch64.").unwrap(); match unprefixed_name { "isb" => { - let [arg] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [arg] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let arg = this.read_scalar(arg)?.to_i32()?; match arg { // SY ("full system scope") @@ -38,7 +38,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `left` input, the second half of the output from the 
`right` input. // https://developer.arm.com/architectures/instruction-sets/intrinsics/vpmaxq_u8 "neon.umaxp.v16i8" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; diff --git a/src/tools/miri/src/shims/backtrace.rs b/src/tools/miri/src/shims/backtrace.rs index 18d60915d20..bd3914b652a 100644 --- a/src/tools/miri/src/shims/backtrace.rs +++ b/src/tools/miri/src/shims/backtrace.rs @@ -15,7 +15,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let [flags] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [flags] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; if flags != 0 { @@ -37,7 +37,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let ptr_ty = this.machine.layouts.mut_raw_ptr.ty; let ptr_layout = this.layout_of(ptr_ty)?; - let [flags, buf] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [flags, buf] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; let buf_place = this.deref_pointer_as(buf, ptr_layout)?; @@ -117,7 +117,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let [ptr, flags] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, flags] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; @@ -195,7 +195,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let [ptr, flags, name_ptr, filename_ptr] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; if flags != 0 { diff --git a/src/tools/miri/src/shims/files.rs b/src/tools/miri/src/shims/files.rs index 606d1ffbea6..0d4642c6ad0 100644 --- a/src/tools/miri/src/shims/files.rs +++ b/src/tools/miri/src/shims/files.rs @@ -1,7 +1,7 @@ use std::any::Any; use std::collections::BTreeMap; use std::fs::{File, Metadata}; -use std::io::{IsTerminal, Seek, SeekFrom, Write}; +use std::io::{ErrorKind, IsTerminal, Seek, SeekFrom, Write}; use std::marker::CoercePointee; use std::ops::Deref; use std::rc::{Rc, Weak}; @@ -167,6 +167,11 @@ pub trait FileDescription: std::fmt::Debug + FileDescriptionExt { throw_unsup_format!("cannot write to {}", self.name()); } + /// Determines whether this FD non-deterministically has its reads and writes shortened. + fn nondet_short_accesses(&self) -> bool { + true + } + /// Seeks to the given offset (which can be relative to the beginning, end, or current position). /// Returns the new position from the start of the stream. fn seek<'tcx>( @@ -334,6 +339,15 @@ impl FileDescription for FileHandle { ) -> InterpResult<'tcx> { assert!(communicate_allowed, "isolation should have prevented even opening a file"); + if !self.writable { + // Linux hosts return EBADF here which we can't translate via the platform-independent + // code since it does not map to any `io::ErrorKind` -- so if we don't do anything + // special, we'd throw an "unsupported error code" here. 
Windows returns something that + // gets translated to `PermissionDenied`. That seems like a good value so let's just use + // this everywhere, even if it means behavior on Unix targets does not match the real + // thing. + return finish.call(ecx, Err(ErrorKind::PermissionDenied.into())); + } let result = ecx.write_to_host(&self.file, len, ptr)?; finish.call(ecx, result) } diff --git a/src/tools/miri/src/shims/foreign_items.rs b/src/tools/miri/src/shims/foreign_items.rs index 9ddba8c2b48..21545b68029 100644 --- a/src/tools/miri/src/shims/foreign_items.rs +++ b/src/tools/miri/src/shims/foreign_items.rs @@ -237,7 +237,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); // First deal with any external C functions in linked .so file. - #[cfg(unix)] + #[cfg(all(unix, feature = "native-lib"))] if !this.machine.native_lib.is_empty() { use crate::shims::native_lib::EvalContextExt as _; // An Ok(false) here means that the function being called was not exported @@ -288,16 +288,17 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Miri-specific extern functions "miri_start_unwind" => { - let [payload] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [payload] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } "miri_run_provenance_gc" => { - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; this.run_provenance_gc(); } "miri_get_alloc_id" => { - let [ptr] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let (alloc_id, _, _) = this.ptr_get_alloc_id(ptr, 0).map_err_kind(|_e| { err_machine_stop!(TerminationInfo::Abort(format!( @@ -307,7 +308,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u64(alloc_id.0.get()), dest)?; } "miri_print_borrow_state" => { - let [id, show_unnamed] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [id, show_unnamed] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let id = this.read_scalar(id)?.to_u64()?; let show_unnamed = this.read_scalar(show_unnamed)?.to_bool()?; if let Some(id) = std::num::NonZero::new(id).map(AllocId) @@ -322,7 +324,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // This associates a name to a tag. Very useful for debugging, and also makes // tests more strict. 
let [ptr, nth_parent, name] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let nth_parent = this.read_scalar(nth_parent)?.to_u8()?; let name = this.read_immediate(name)?; @@ -335,7 +337,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.give_pointer_debug_name(ptr, nth_parent, &name)?; } "miri_static_root" => { - let [ptr] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let (alloc_id, offset, _) = this.ptr_get_alloc_id(ptr, 0)?; if offset != Size::ZERO { @@ -346,7 +348,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.machine.static_roots.push(alloc_id); } "miri_host_to_target_path" => { - let [ptr, out, out_size] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, out, out_size] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let out = this.read_pointer(out)?; let out_size = this.read_scalar(out_size)?.to_target_usize(this)?; @@ -382,7 +385,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Writes some bytes to the interpreter's stdout/stderr. See the // README for details. "miri_write_to_stdout" | "miri_write_to_stderr" => { - let [msg] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [msg] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let msg = this.read_immediate(msg)?; let msg = this.read_byte_slice(&msg)?; // Note: we're ignoring errors writing to host stdout/stderr. @@ -396,7 +399,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { "miri_promise_symbolic_alignment" => { use rustc_abi::AlignFromBytesError; - let [ptr, align] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, align] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let align = this.read_target_usize(align)?; if !align.is_power_of_two() { @@ -437,12 +441,12 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Aborting the process. 
"exit" => { - let [code] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [code] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let code = this.read_scalar(code)?.to_i32()?; throw_machine_stop!(TerminationInfo::Exit { code, leak_check: false }); } "abort" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; throw_machine_stop!(TerminationInfo::Abort( "the program aborted execution".to_owned() )) @@ -450,7 +454,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Standard C allocation "malloc" => { - let [size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [size] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let size = this.read_target_usize(size)?; if size <= this.max_size_of_val().bytes() { let res = this.malloc(size, AllocInit::Uninit)?; @@ -464,7 +468,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "calloc" => { - let [items, elem_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [items, elem_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let items = this.read_target_usize(items)?; let elem_size = this.read_target_usize(elem_size)?; if let Some(size) = this.compute_size_in_bytes(Size::from_bytes(elem_size), items) { @@ -479,12 +484,13 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "free" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; this.free(ptr)?; } "realloc" => { - let [old_ptr, new_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [old_ptr, new_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let old_ptr = this.read_pointer(old_ptr)?; let new_size = this.read_target_usize(new_size)?; if new_size <= this.max_size_of_val().bytes() { @@ -504,7 +510,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { let default = |ecx: &mut MiriInterpCx<'tcx>| { // Only call `check_shim` when `#[global_allocator]` isn't used. When that // macro is used, we act like no shim exists, so that the exported function can run. - let [size, align] = ecx.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [size, align] = + ecx.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let size = ecx.read_target_usize(size)?; let align = ecx.read_target_usize(align)?; @@ -537,7 +544,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.emulate_allocator(|this| { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. - let [size, align] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [size, align] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let size = this.read_target_usize(size)?; let align = this.read_target_usize(align)?; @@ -559,7 +567,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. 
let [ptr, old_size, align] = - ecx.check_shim(abi, CanonAbi::Rust, link_name, args)?; + ecx.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = ecx.read_pointer(ptr)?; let old_size = ecx.read_target_usize(old_size)?; let align = ecx.read_target_usize(align)?; @@ -590,7 +598,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. let [ptr, old_size, align, new_size] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let old_size = this.read_target_usize(old_size)?; let align = this.read_target_usize(align)?; @@ -613,20 +621,21 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } name if name == this.mangle_internal_symbol("__rust_no_alloc_shim_is_unstable_v2") => { // This is a no-op shim that only exists to prevent making the allocator shims instantly stable. - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; } name if name == this.mangle_internal_symbol("__rust_alloc_error_handler_should_panic_v2") => { // Gets the value of the `oom` option. - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let val = this.tcx.sess.opts.unstable_opts.oom.should_panic(); this.write_int(val, dest)?; } // C memory handling functions "memcmp" => { - let [left, right, n] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, n] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_pointer(left)?; let right = this.read_pointer(right)?; let n = Size::from_bytes(this.read_target_usize(n)?); @@ -650,7 +659,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_i32(result), dest)?; } "memrchr" => { - let [ptr, val, num] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, val, num] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let val = this.read_scalar(val)?.to_i32()?; let num = this.read_target_usize(num)?; @@ -676,7 +686,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "memchr" => { - let [ptr, val, num] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, val, num] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let val = this.read_scalar(val)?.to_i32()?; let num = this.read_target_usize(num)?; @@ -699,7 +710,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "strlen" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; // This reads at least 1 byte, so we are already enforcing that this is a valid pointer. let n = this.read_c_str(ptr)?.len(); @@ -709,7 +720,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "wcslen" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; // This reads at least 1 byte, so we are already enforcing that this is a valid pointer. 
let n = this.read_wchar_t_str(ptr)?.len(); @@ -719,7 +730,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "memcpy" => { - let [ptr_dest, ptr_src, n] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr_dest, ptr_src, n] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr_dest = this.read_pointer(ptr_dest)?; let ptr_src = this.read_pointer(ptr_src)?; let n = this.read_target_usize(n)?; @@ -733,7 +745,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(ptr_dest, dest)?; } "strcpy" => { - let [ptr_dest, ptr_src] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr_dest, ptr_src] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr_dest = this.read_pointer(ptr_dest)?; let ptr_src = this.read_pointer(ptr_src)?; @@ -764,7 +777,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "erff" | "erfcf" => { - let [f] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f = this.read_scalar(f)?.to_f32()?; // Using host floats (but it's fine, these operations do not have guaranteed precision). let f_host = f.to_host(); @@ -802,7 +815,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "atan2f" | "fdimf" => { - let [f1, f2] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f1, f2] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f1 = this.read_scalar(f1)?.to_f32()?; let f2 = this.read_scalar(f2)?.to_f32()?; // underscore case for windows, here and below @@ -841,7 +854,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "erf" | "erfc" => { - let [f] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f = this.read_scalar(f)?.to_f64()?; // Using host floats (but it's fine, these operations do not have guaranteed precision). let f_host = f.to_host(); @@ -879,7 +892,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "atan2" | "fdim" => { - let [f1, f2] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f1, f2] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f1 = this.read_scalar(f1)?.to_f64()?; let f2 = this.read_scalar(f2)?.to_f64()?; // underscore case for windows, here and below @@ -908,7 +921,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "ldexp" | "scalbn" => { - let [x, exp] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [x, exp] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; // For radix-2 (binary) systems, `ldexp` and `scalbn` are the same. 
let x = this.read_scalar(x)?.to_f64()?; let exp = this.read_scalar(exp)?.to_i32()?; @@ -918,7 +931,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "lgammaf_r" => { - let [x, signp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [x, signp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let x = this.read_scalar(x)?.to_f32()?; let signp = this.deref_pointer_as(signp, this.machine.layouts.i32)?; @@ -934,7 +947,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "lgamma_r" => { - let [x, signp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [x, signp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let x = this.read_scalar(x)?.to_f64()?; let signp = this.deref_pointer_as(signp, this.machine.layouts.i32)?; @@ -952,7 +965,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // LLVM intrinsics "llvm.prefetch" => { - let [p, rw, loc, ty] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [p, rw, loc, ty] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let _ = this.read_pointer(p)?; let rw = this.read_scalar(rw)?.to_i32()?; @@ -979,7 +993,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the x86 `_mm{,256,512}_popcnt_epi{8,16,32,64}` and wasm // `{i,u}8x16_popcnt` functions. name if name.starts_with("llvm.ctpop.v") => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, op_len) = this.project_to_simd(op)?; let (dest, dest_len) = this.project_to_simd(dest)?; @@ -1015,7 +1029,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } // FIXME: Move this to an `arm` submodule. "llvm.arm.hint" if this.tcx.sess.target.arch == "arm" => { - let [arg] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [arg] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let arg = this.read_scalar(arg)?.to_i32()?; // Note that different arguments might have different target feature requirements. match arg { diff --git a/src/tools/miri/src/shims/mod.rs b/src/tools/miri/src/shims/mod.rs index 75540f6f150..7f594d4fdd6 100644 --- a/src/tools/miri/src/shims/mod.rs +++ b/src/tools/miri/src/shims/mod.rs @@ -4,7 +4,7 @@ mod aarch64; mod alloc; mod backtrace; mod files; -#[cfg(unix)] +#[cfg(all(unix, feature = "native-lib"))] mod native_lib; mod unix; mod wasi; @@ -18,12 +18,13 @@ pub mod global_ctor; pub mod io_error; pub mod os_str; pub mod panic; +pub mod sig; pub mod time; pub mod tls; pub mod unwind; pub use self::files::FdTable; -#[cfg(target_os = "linux")] +#[cfg(all(unix, feature = "native-lib"))] pub use self::native_lib::trace::{init_sv, register_retcode_sv}; pub use self::unix::{DirTable, EpollInterestTable}; diff --git a/src/tools/miri/src/shims/native_lib/mod.rs b/src/tools/miri/src/shims/native_lib/mod.rs index fb7b1df41a4..2827ed997a7 100644 --- a/src/tools/miri/src/shims/native_lib/mod.rs +++ b/src/tools/miri/src/shims/native_lib/mod.rs @@ -8,6 +8,7 @@ use rustc_abi::{BackendRepr, HasDataLayout, Size}; use rustc_middle::mir::interpret::Pointer; use rustc_middle::ty::{self as ty, IntTy, UintTy}; use rustc_span::Symbol; +use serde::{Deserialize, Serialize}; #[cfg_attr( not(all( @@ -23,18 +24,14 @@ use crate::*; /// The final results of an FFI trace, containing every relevant event detected /// by the tracer. 
-#[allow(dead_code)] -#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] -#[derive(Debug)] +#[derive(Serialize, Deserialize, Debug)] pub struct MemEvents { /// An list of memory accesses that occurred, in the order they occurred in. pub acc_events: Vec<AccessEvent>, } /// A single memory access. -#[allow(dead_code)] -#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] -#[derive(Clone, Debug)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub enum AccessEvent { /// A read occurred on this memory range. Read(AccessRange), @@ -56,9 +53,7 @@ impl AccessEvent { } /// The memory touched by a given access. -#[allow(dead_code)] -#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] -#[derive(Clone, Debug)] +#[derive(Serialize, Deserialize, Clone, Debug)] pub struct AccessRange { /// The base address in memory where an access occurred. pub addr: usize, diff --git a/src/tools/miri/src/shims/native_lib/trace/child.rs b/src/tools/miri/src/shims/native_lib/trace/child.rs index de26cb0fe55..b998ba822dd 100644 --- a/src/tools/miri/src/shims/native_lib/trace/child.rs +++ b/src/tools/miri/src/shims/native_lib/trace/child.rs @@ -1,8 +1,9 @@ use std::cell::RefCell; +use std::ptr::NonNull; use std::rc::Rc; use ipc_channel::ipc; -use nix::sys::{ptrace, signal}; +use nix::sys::{mman, ptrace, signal}; use nix::unistd; use rustc_const_eval::interpret::InterpResult; @@ -44,6 +45,16 @@ impl Supervisor { SUPERVISOR.lock().unwrap().is_some() } + unsafe fn protect_pages( + pages: impl Iterator<Item = (NonNull<u8>, usize)>, + prot: mman::ProtFlags, + ) -> Result<(), nix::errno::Errno> { + for (pg, sz) in pages { + unsafe { mman::mprotect(pg.cast(), sz, prot)? }; + } + Ok(()) + } + /// Performs an arbitrary FFI call, enabling tracing from the supervisor. /// As this locks the supervisor via a mutex, no other threads may enter FFI /// until this function returns. @@ -60,47 +71,67 @@ impl Supervisor { // Get pointers to all the pages the supervisor must allow accesses in // and prepare the callback stack. - let page_ptrs = alloc.borrow().pages().collect(); + let alloc = alloc.borrow(); + let page_size = alloc.page_size(); + let page_ptrs = alloc + .pages() + .flat_map(|(pg, sz)| { + // Convert (page, size) pair into list of pages. + let start = pg.expose_provenance().get(); + (0..sz.strict_div(alloc.page_size())) + .map(move |i| start.strict_add(i.strict_mul(page_size))) + }) + .collect(); let raw_stack_ptr: *mut [u8; CALLBACK_STACK_SIZE] = Box::leak(Box::new([0u8; CALLBACK_STACK_SIZE])).as_mut_ptr().cast(); let stack_ptr = raw_stack_ptr.expose_provenance(); let start_info = StartFfiInfo { page_ptrs, stack_ptr }; - // SAFETY: We do not access machine memory past this point until the - // supervisor is ready to allow it. - unsafe { - if alloc.borrow_mut().start_ffi().is_err() { - // Don't mess up unwinding by maybe leaving the memory partly protected - alloc.borrow_mut().end_ffi(); - panic!("Cannot protect memory for FFI call!"); + // Unwinding might be messed up due to partly protected memory, so let's abort if something + // breaks inside here. + let res = std::panic::abort_unwind(|| { + // SAFETY: We do not access machine memory past this point until the + // supervisor is ready to allow it. + // FIXME: this is sketchy, as technically the memory is still in the Rust Abstract Machine, + // and the compiler would be allowed to reorder accesses below this block... 
+ unsafe { + Self::protect_pages(alloc.pages(), mman::ProtFlags::PROT_NONE).unwrap(); } - } - // Send over the info. - // NB: if we do not wait to receive a blank confirmation response, it is - // possible that the supervisor is alerted of the SIGSTOP *before* it has - // actually received the start_info, thus deadlocking! This way, we can - // enforce an ordering for these events. - sv.message_tx.send(TraceRequest::StartFfi(start_info)).unwrap(); - sv.confirm_rx.recv().unwrap(); - // We need to be stopped for the supervisor to be able to make certain - // modifications to our memory - simply waiting on the recv() doesn't - // count. - signal::raise(signal::SIGSTOP).unwrap(); - - let res = f(); - - // We can't use IPC channels here to signal that FFI mode has ended, - // since they might allocate memory which could get us stuck in a SIGTRAP - // with no easy way out! While this could be worked around, it is much - // simpler and more robust to simply use the signals which are left for - // arbitrary usage. Since this will block until we are continued by the - // supervisor, we can assume past this point that everything is back to - // normal. - signal::raise(signal::SIGUSR1).unwrap(); - - // This is safe! It just sets memory to normal expected permissions. - alloc.borrow_mut().end_ffi(); + // Send over the info. + // NB: if we do not wait to receive a blank confirmation response, it is + // possible that the supervisor is alerted of the SIGSTOP *before* it has + // actually received the start_info, thus deadlocking! This way, we can + // enforce an ordering for these events. + sv.message_tx.send(TraceRequest::StartFfi(start_info)).unwrap(); + sv.confirm_rx.recv().unwrap(); + // We need to be stopped for the supervisor to be able to make certain + // modifications to our memory - simply waiting on the recv() doesn't + // count. + signal::raise(signal::SIGSTOP).unwrap(); + + let res = f(); + + // We can't use IPC channels here to signal that FFI mode has ended, + // since they might allocate memory which could get us stuck in a SIGTRAP + // with no easy way out! While this could be worked around, it is much + // simpler and more robust to simply use the signals which are left for + // arbitrary usage. Since this will block until we are continued by the + // supervisor, we can assume past this point that everything is back to + // normal. + signal::raise(signal::SIGUSR1).unwrap(); + + // SAFETY: We set memory back to normal, so this is safe. + unsafe { + Self::protect_pages( + alloc.pages(), + mman::ProtFlags::PROT_READ | mman::ProtFlags::PROT_WRITE, + ) + .unwrap(); + } + + res + }); // SAFETY: Caller upholds that this pointer was allocated as a box with // this type. diff --git a/src/tools/miri/src/shims/native_lib/trace/messages.rs b/src/tools/miri/src/shims/native_lib/trace/messages.rs index 1f9df556b57..bef6cc1b2f3 100644 --- a/src/tools/miri/src/shims/native_lib/trace/messages.rs +++ b/src/tools/miri/src/shims/native_lib/trace/messages.rs @@ -45,8 +45,7 @@ pub enum TraceRequest { /// Information needed to begin tracing. #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] pub struct StartFfiInfo { - /// A vector of page addresses. These should have been automatically obtained - /// with `IsolatedAlloc::pages` and prepared with `IsolatedAlloc::start_ffi`. + /// A vector of page addresses that store the miri heap which is accessible from C. pub page_ptrs: Vec<usize>, /// The address of an allocation that can serve as a temporary stack. 
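For orientation, a minimal standalone sketch of the protect/restore dance that the child-side code above performs around an FFI call, shown on a single anonymous page. It uses raw `libc` calls rather than the `nix::sys::mman` wrappers used in Miri, and the page here is only a stand-in for the pages returned by `IsolatedAlloc::pages`; assumes a Unix target with the `libc` crate.

    // Illustrative sketch, not Miri code: the PROT_NONE / restore dance around an FFI call,
    // shown on one anonymous page obtained from mmap.
    use std::ptr;

    fn main() {
        unsafe {
            let page_size = libc::sysconf(libc::_SC_PAGESIZE) as usize;
            // One page we own, standing in for a page of Miri's isolated allocator.
            let page = libc::mmap(
                ptr::null_mut(),
                page_size,
                libc::PROT_READ | libc::PROT_WRITE,
                libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
                -1,
                0,
            );
            assert_ne!(page, libc::MAP_FAILED);

            // Make the page inaccessible before foreign code runs, so any access faults
            // and can be observed (in Miri, by the ptrace supervisor).
            assert_eq!(libc::mprotect(page, page_size, libc::PROT_NONE), 0);

            // ... the actual FFI call would happen here ...

            // Restore normal permissions once the call has returned.
            assert_eq!(libc::mprotect(page, page_size, libc::PROT_READ | libc::PROT_WRITE), 0);
            libc::munmap(page, page_size);
        }
    }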
/// This should be a leaked `Box<[u8; CALLBACK_STACK_SIZE]>` cast to an int. diff --git a/src/tools/miri/src/shims/sig.rs b/src/tools/miri/src/shims/sig.rs new file mode 100644 index 00000000000..bc5e7f584f5 --- /dev/null +++ b/src/tools/miri/src/shims/sig.rs @@ -0,0 +1,266 @@ +//! Everything related to checking the signature of shim invocations. + +use rustc_abi::{CanonAbi, ExternAbi}; +use rustc_hir::Safety; +use rustc_middle::ty::{Binder, FnSig, Ty}; +use rustc_span::Symbol; +use rustc_target::callconv::FnAbi; + +use crate::*; + +/// Describes the expected signature of a shim. +pub struct ShimSig<'tcx, const ARGS: usize> { + pub abi: ExternAbi, + pub args: [Ty<'tcx>; ARGS], + pub ret: Ty<'tcx>, +} + +/// Construct a `ShimSig` with convenient syntax: +/// ```rust,ignore +/// shim_sig!(this, extern "C" fn (*const T, i32) -> usize) +/// ``` +#[macro_export] +macro_rules! shim_sig { + (extern $abi:literal fn($($arg:ty),*) -> $ret:ty) => { + |this| $crate::shims::sig::ShimSig { + abi: std::str::FromStr::from_str($abi).expect("incorrect abi specified"), + args: [$(shim_sig_arg!(this, $arg)),*], + ret: shim_sig_arg!(this, $ret), + } + }; +} + +/// Helper for `shim_sig!`. +#[macro_export] +macro_rules! shim_sig_arg { + // Unfortuantely we cannot take apart a `ty`-typed token at compile time, + // so we have to stringify it and match at runtime. + ($this:ident, $x:ty) => {{ + match stringify!($x) { + "i8" => $this.tcx.types.i8, + "i16" => $this.tcx.types.i16, + "i32" => $this.tcx.types.i32, + "i64" => $this.tcx.types.i64, + "i128" => $this.tcx.types.i128, + "isize" => $this.tcx.types.isize, + "u8" => $this.tcx.types.u8, + "u16" => $this.tcx.types.u16, + "u32" => $this.tcx.types.u32, + "u64" => $this.tcx.types.u64, + "u128" => $this.tcx.types.u128, + "usize" => $this.tcx.types.usize, + "()" => $this.tcx.types.unit, + "*const _" => $this.machine.layouts.const_raw_ptr.ty, + "*mut _" => $this.machine.layouts.mut_raw_ptr.ty, + ty if let Some(libc_ty) = ty.strip_prefix("libc::") => $this.libc_ty_layout(libc_ty).ty, + ty => panic!("unsupported signature type {ty:?}"), + } + }}; +} + +/// Helper function to compare two ABIs. +fn check_shim_abi<'tcx>( + this: &MiriInterpCx<'tcx>, + callee_abi: &FnAbi<'tcx, Ty<'tcx>>, + caller_abi: &FnAbi<'tcx, Ty<'tcx>>, +) -> InterpResult<'tcx> { + if callee_abi.conv != caller_abi.conv { + throw_ub_format!( + r#"calling a function with calling convention "{callee}" using caller calling convention "{caller}""#, + callee = callee_abi.conv, + caller = caller_abi.conv, + ); + } + if callee_abi.can_unwind && !caller_abi.can_unwind { + throw_ub_format!( + "ABI mismatch: callee may unwind, but caller-side signature prohibits unwinding", + ); + } + if caller_abi.c_variadic && !callee_abi.c_variadic { + throw_ub_format!( + "ABI mismatch: calling a non-variadic function with a variadic caller-side signature" + ); + } + if !caller_abi.c_variadic && callee_abi.c_variadic { + throw_ub_format!( + "ABI mismatch: calling a variadic function with a non-variadic caller-side signature" + ); + } + + if callee_abi.fixed_count != caller_abi.fixed_count { + throw_ub_format!( + "ABI mismatch: expected {} arguments, found {} arguments ", + callee_abi.fixed_count, + caller_abi.fixed_count + ); + } + + if !this.check_argument_compat(&caller_abi.ret, &callee_abi.ret)? 
{ + throw_ub!(AbiMismatchReturn { + caller_ty: caller_abi.ret.layout.ty, + callee_ty: callee_abi.ret.layout.ty + }); + } + + for (idx, (caller_arg, callee_arg)) in + caller_abi.args.iter().zip(callee_abi.args.iter()).enumerate() + { + if !this.check_argument_compat(caller_arg, callee_arg)? { + throw_ub!(AbiMismatchArgument { + arg_idx: idx, + caller_ty: caller_abi.args[idx].layout.ty, + callee_ty: callee_abi.args[idx].layout.ty + }); + } + } + + interp_ok(()) +} + +fn check_shim_symbol_clash<'tcx>( + this: &mut MiriInterpCx<'tcx>, + link_name: Symbol, +) -> InterpResult<'tcx, ()> { + if let Some((body, instance)) = this.lookup_exported_symbol(link_name)? { + // If compiler-builtins is providing the symbol, then don't treat it as a clash. + // We'll use our built-in implementation in `emulate_foreign_item_inner` for increased + // performance. Note that this means we won't catch any undefined behavior in + // compiler-builtins when running other crates, but Miri can still be run on + // compiler-builtins itself (or any crate that uses it as a normal dependency) + if this.tcx.is_compiler_builtins(instance.def_id().krate) { + return interp_ok(()); + } + + throw_machine_stop!(TerminationInfo::SymbolShimClashing { + link_name, + span: body.span.data(), + }) + } + interp_ok(()) +} + +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + fn check_shim_sig_lenient<'a, const N: usize>( + &mut self, + abi: &FnAbi<'tcx, Ty<'tcx>>, + exp_abi: CanonAbi, + link_name: Symbol, + args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + let this = self.eval_context_mut(); + check_shim_symbol_clash(this, link_name)?; + + if abi.conv != exp_abi { + throw_ub_format!( + r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, + abi.conv + ); + } + if abi.c_variadic { + throw_ub_format!( + "calling a non-variadic function with a variadic caller-side signature" + ); + } + + if let Ok(ops) = args.try_into() { + return interp_ok(ops); + } + throw_ub_format!( + "incorrect number of arguments for `{link_name}`: got {}, expected {}", + args.len(), + N + ) + } + + /// Check that the given `caller_fn_abi` matches the expected ABI described by `shim_sig`, and + /// then returns the list of arguments. + fn check_shim_sig<'a, const N: usize>( + &mut self, + shim_sig: fn(&MiriInterpCx<'tcx>) -> ShimSig<'tcx, N>, + link_name: Symbol, + caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>, + caller_args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + let this = self.eval_context_mut(); + let shim_sig = shim_sig(this); + + // Compute full callee ABI. + let mut inputs_and_output = Vec::with_capacity(N.strict_add(1)); + inputs_and_output.extend(&shim_sig.args); + inputs_and_output.push(shim_sig.ret); + let fn_sig_binder = Binder::dummy(FnSig { + inputs_and_output: this.machine.tcx.mk_type_list(&inputs_and_output), + c_variadic: false, + // This does not matter for the ABI. + safety: Safety::Safe, + abi: shim_sig.abi, + }); + let callee_fn_abi = this.fn_abi_of_fn_ptr(fn_sig_binder, Default::default())?; + + // Check everything. + check_shim_abi(this, callee_fn_abi, caller_fn_abi)?; + check_shim_symbol_clash(this, link_name)?; + + // Return arguments. + if let Ok(ops) = caller_args.try_into() { + return interp_ok(ops); + } + unreachable!() + } + + /// Check shim for variadic function. + /// Returns a tuple that consisting of an array of fixed args, and a slice of varargs. 
+ fn check_shim_sig_variadic_lenient<'a, const N: usize>( + &mut self, + abi: &FnAbi<'tcx, Ty<'tcx>>, + exp_abi: CanonAbi, + link_name: Symbol, + args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, (&'a [OpTy<'tcx>; N], &'a [OpTy<'tcx>])> + where + &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, + { + let this = self.eval_context_mut(); + check_shim_symbol_clash(this, link_name)?; + + if abi.conv != exp_abi { + throw_ub_format!( + r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, + abi.conv + ); + } + if !abi.c_variadic { + throw_ub_format!( + "calling a variadic function with a non-variadic caller-side signature" + ); + } + if abi.fixed_count != u32::try_from(N).unwrap() { + throw_ub_format!( + "incorrect number of fixed arguments for variadic function `{}`: got {}, expected {N}", + link_name.as_str(), + abi.fixed_count + ) + } + if let Some(args) = args.split_first_chunk() { + return interp_ok(args); + } + panic!("mismatch between signature and `args` slice"); + } +} + +/// Check that the number of varargs is at least the minimum what we expect. +/// Fixed args should not be included. +pub fn check_min_vararg_count<'a, 'tcx, const N: usize>( + name: &'a str, + args: &'a [OpTy<'tcx>], +) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + if let Some((ops, _)) = args.split_first_chunk() { + return interp_ok(ops); + } + throw_ub_format!( + "not enough variadic arguments for `{name}`: got {}, expected at least {}", + args.len(), + N + ) +} diff --git a/src/tools/miri/src/shims/time.rs b/src/tools/miri/src/shims/time.rs index eb21abc2a45..b5b35797fec 100644 --- a/src/tools/miri/src/shims/time.rs +++ b/src/tools/miri/src/shims/time.rs @@ -17,73 +17,71 @@ pub fn system_time_to_duration<'tcx>(time: &SystemTime) -> InterpResult<'tcx, Du impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { - fn clock_gettime( - &mut self, - clk_id_op: &OpTy<'tcx>, - tp_op: &OpTy<'tcx>, - dest: &MPlaceTy<'tcx>, - ) -> InterpResult<'tcx> { + fn parse_clockid(&self, clk_id: Scalar) -> Option<TimeoutClock> { // This clock support is deliberately minimal because a lot of clock types have fiddly // properties (is it possible for Miri to be suspended independently of the host?). If you // have a use for another clock type, please open an issue. + let this = self.eval_context_ref(); - let this = self.eval_context_mut(); - - this.assert_target_os_is_unix("clock_gettime"); - let clockid_t_size = this.libc_ty_layout("clockid_t").size; - - let clk_id = this.read_scalar(clk_id_op)?.to_int(clockid_t_size)?; - let tp = this.deref_pointer_as(tp_op, this.libc_ty_layout("timespec"))?; - - let absolute_clocks; - let mut relative_clocks; + // Portable names that exist everywhere. + if clk_id == this.eval_libc("CLOCK_REALTIME") { + return Some(TimeoutClock::RealTime); + } else if clk_id == this.eval_libc("CLOCK_MONOTONIC") { + return Some(TimeoutClock::Monotonic); + } + // Some further platform-specific names we support. match this.tcx.sess.target.os.as_ref() { "linux" | "freebsd" | "android" => { - // Linux, Android, and FreeBSD have two main kinds of clocks. REALTIME clocks return the actual time since the - // Unix epoch, including effects which may cause time to move backwards such as NTP. // Linux further distinguishes regular and "coarse" clocks, but the "coarse" version - // is just specified to be "faster and less precise", so we implement both the same way. 
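Back in `sig.rs`, `shim_sig_arg!` relies on `stringify!` because a `ty` fragment cannot be taken apart token-by-token at expansion time. A self-contained sketch of that technique outside of rustc's type machinery; `type_desc!` and the description strings are hypothetical names for illustration only.

    // Illustrative only: the stringify!-and-match trick used by `shim_sig_arg!` above.
    macro_rules! type_desc {
        ($x:ty) => {
            match stringify!($x) {
                "i32" => "32-bit signed integer",
                "usize" => "pointer-sized unsigned integer",
                "*const _" => "const raw pointer",
                "*mut _" => "mut raw pointer",
                other => other, // fall through to the raw token string
            }
        };
    }

    fn main() {
        // The type token becomes a string at expansion time and is resolved to a value
        // at run time, just as the shim code resolves it to a `Ty`.
        assert_eq!(type_desc!(i32), "32-bit signed integer");
        assert_eq!(type_desc!(*const _), "const raw pointer");
    }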
- absolute_clocks = vec![ - this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?, - this.eval_libc("CLOCK_REALTIME_COARSE").to_int(clockid_t_size)?, - ]; - // The second kind is MONOTONIC clocks for which 0 is an arbitrary time point, but they are - // never allowed to go backwards. We don't need to do any additional monotonicity - // enforcement because std::time::Instant already guarantees that it is monotonic. - relative_clocks = vec![ - this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?, - this.eval_libc("CLOCK_MONOTONIC_COARSE").to_int(clockid_t_size)?, - ]; + // is just specified to be "faster and less precise", so we treat it like normal + // clocks. + if clk_id == this.eval_libc("CLOCK_REALTIME_COARSE") { + return Some(TimeoutClock::RealTime); + } else if clk_id == this.eval_libc("CLOCK_MONOTONIC_COARSE") { + return Some(TimeoutClock::Monotonic); + } } "macos" => { - absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; - relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; // `CLOCK_UPTIME_RAW` supposed to not increment while the system is asleep... but // that's not really something a program running inside Miri can tell, anyway. // We need to support it because std uses it. - relative_clocks.push(this.eval_libc("CLOCK_UPTIME_RAW").to_int(clockid_t_size)?); - } - "solaris" | "illumos" => { - // The REALTIME clock returns the actual time since the Unix epoch. - absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; - // MONOTONIC, in the other hand, is the high resolution, non-adjustable - // clock from an arbitrary time in the past. - // Note that the man page mentions HIGHRES but it is just - // an alias of MONOTONIC and the libc crate does not expose it anyway. - // https://docs.oracle.com/cd/E23824_01/html/821-1465/clock-gettime-3c.html - relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; + if clk_id == this.eval_libc("CLOCK_UPTIME_RAW") { + return Some(TimeoutClock::Monotonic); + } } - target => throw_unsup_format!("`clock_gettime` is not supported on target OS {target}"), + _ => {} } - let duration = if absolute_clocks.contains(&clk_id) { - this.check_no_isolation("`clock_gettime` with `REALTIME` clocks")?; - system_time_to_duration(&SystemTime::now())? - } else if relative_clocks.contains(&clk_id) { - this.machine.monotonic_clock.now().duration_since(this.machine.monotonic_clock.epoch()) - } else { - return this.set_last_error_and_return(LibcError("EINVAL"), dest); + None + } + + fn clock_gettime( + &mut self, + clk_id_op: &OpTy<'tcx>, + tp_op: &OpTy<'tcx>, + dest: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + this.assert_target_os_is_unix("clock_gettime"); + + let clk_id = this.read_scalar(clk_id_op)?; + let tp = this.deref_pointer_as(tp_op, this.libc_ty_layout("timespec"))?; + + let duration = match this.parse_clockid(clk_id) { + Some(TimeoutClock::RealTime) => { + this.check_no_isolation("`clock_gettime` with `REALTIME` clocks")?; + system_time_to_duration(&SystemTime::now())? 
+ } + Some(TimeoutClock::Monotonic) => + this.machine + .monotonic_clock + .now() + .duration_since(this.machine.monotonic_clock.epoch()), + None => { + return this.set_last_error_and_return(LibcError("EINVAL"), dest); + } }; let tv_sec = duration.as_secs(); diff --git a/src/tools/miri/src/shims/unix/android/foreign_items.rs b/src/tools/miri/src/shims/unix/android/foreign_items.rs index 690b5295681..04c5d28838b 100644 --- a/src/tools/miri/src/shims/unix/android/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/android/foreign_items.rs @@ -26,29 +26,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // epoll, eventfd "epoll_create1" => { - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Miscellaneous "__errno" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } diff --git a/src/tools/miri/src/shims/unix/android/thread.rs b/src/tools/miri/src/shims/unix/android/thread.rs index 5d17d6c8517..4e7b21d7d94 100644 --- a/src/tools/miri/src/shims/unix/android/thread.rs +++ b/src/tools/miri/src/shims/unix/android/thread.rs @@ -3,7 +3,7 @@ use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::thread::{EvalContextExt as _, ThreadNameResult}; use crate::*; @@ -16,7 +16,7 @@ pub fn prctl<'tcx>( args: &[OpTy<'tcx>], dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { - let ([op], varargs) = ecx.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + let ([op], varargs) = ecx.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; // FIXME: Use constants once https://github.com/rust-lang/libc/pull/3941 backported to the 0.2 branch. 
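The `clock_gettime` rework above funnels all clock-id handling through the new `parse_clockid` helper. A rough standalone analogue of that dispatch, written against host `libc` constants instead of Miri's `eval_libc`; `Clock` is only a stand-in for Miri's `TimeoutClock`, and a Unix host with the `libc` crate is assumed.

    // Rough analogue of `parse_clockid`: map a clock id to the kind of clock it names.
    #[derive(Debug, PartialEq)]
    enum Clock {
        RealTime,
        Monotonic,
    }

    fn parse_clockid(clk_id: libc::clockid_t) -> Option<Clock> {
        match clk_id {
            // Portable names that exist everywhere.
            libc::CLOCK_REALTIME => Some(Clock::RealTime),
            libc::CLOCK_MONOTONIC => Some(Clock::Monotonic),
            // Platform-specific aliases, mirroring the per-OS arms in the shim.
            #[cfg(any(target_os = "linux", target_os = "android"))]
            libc::CLOCK_REALTIME_COARSE => Some(Clock::RealTime),
            #[cfg(any(target_os = "linux", target_os = "android"))]
            libc::CLOCK_MONOTONIC_COARSE => Some(Clock::Monotonic),
            // Anything else maps to EINVAL in the shim.
            _ => None,
        }
    }

    fn main() {
        assert_eq!(parse_clockid(libc::CLOCK_MONOTONIC), Some(Clock::Monotonic));
        assert_eq!(parse_clockid(libc::clockid_t::MAX), None);
    }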
let pr_set_name = 15; diff --git a/src/tools/miri/src/shims/unix/fd.rs b/src/tools/miri/src/shims/unix/fd.rs index 71102d9f2f3..e226a55d8b1 100644 --- a/src/tools/miri/src/shims/unix/fd.rs +++ b/src/tools/miri/src/shims/unix/fd.rs @@ -4,10 +4,11 @@ use std::io; use std::io::ErrorKind; +use rand::Rng; use rustc_abi::Size; -use crate::helpers::check_min_vararg_count; use crate::shims::files::FileDescription; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::linux_like::epoll::EpollReadyEvents; use crate::shims::unix::*; use crate::*; @@ -263,9 +264,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.set_last_error_and_return(LibcError("EBADF"), dest); }; + // Non-deterministically decide to further reduce the count, simulating a partial read (but + // never to 0, that has different behavior). + let count = + if fd.nondet_short_accesses() && count >= 2 && this.machine.rng.get_mut().random() { + count / 2 + } else { + count + }; + trace!("read: FD mapped to {fd:?}"); // We want to read at most `count` bytes. We are sure that `count` is not negative - // because it was a target's `usize`. Also we are sure that its smaller than + // because it was a target's `usize`. Also we are sure that it's smaller than // `usize::MAX` because it is bounded by the host's `isize`. let finish = { @@ -328,6 +338,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.set_last_error_and_return(LibcError("EBADF"), dest); }; + // Non-deterministically decide to further reduce the count, simulating a partial write (but + // never to 0, that has different behavior). + let count = + if fd.nondet_short_accesses() && count >= 2 && this.machine.rng.get_mut().random() { + count / 2 + } else { + count + }; + let finish = { let dest = dest.clone(); callback!( diff --git a/src/tools/miri/src/shims/unix/foreign_items.rs b/src/tools/miri/src/shims/unix/foreign_items.rs index 548eabb1b9f..55906f4eb95 100644 --- a/src/tools/miri/src/shims/unix/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/foreign_items.rs @@ -1,7 +1,7 @@ use std::ffi::OsStr; use std::str; -use rustc_abi::{CanonAbi, ExternAbi, Size}; +use rustc_abi::{CanonAbi, Size}; use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; @@ -14,7 +14,7 @@ use self::shims::unix::solarish::foreign_items as solarish; use crate::concurrency::cpu_affinity::CpuAffinityMask; use crate::shims::alloc::EvalContextExt as _; use crate::shims::unix::*; -use crate::*; +use crate::{shim_sig, *}; pub fn is_dyn_sym(name: &str, target_os: &str) -> bool { match name { @@ -111,40 +111,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Environment related shims "getenv" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.getenv(name)?; this.write_pointer(result, dest)?; } "unsetenv" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.unsetenv(name)?; this.write_scalar(result, dest)?; } "setenv" => { - let [name, value, overwrite] = this.check_shim_abi( + let [name, value, overwrite] = this.check_shim_sig( 
+ shim_sig!(extern "C" fn(*const _, *const _, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.machine.layouts.const_raw_ptr.ty, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.i32, - ], - this.tcx.types.i32, args, )?; this.read_scalar(overwrite)?.to_i32()?; @@ -152,48 +142,40 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(result, dest)?; } "getcwd" => { - let [buf, size] = this.check_shim_abi( + let [buf, size] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, usize) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.getcwd(buf, size)?; this.write_pointer(result, dest)?; } "chdir" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.chdir(path)?; this.write_scalar(result, dest)?; } "getpid" => { - let [] = this.check_shim_abi( + let [] = this.check_shim_sig( + shim_sig!(extern "C" fn() -> libc::pid_t), link_name, abi, - ExternAbi::C { unwind: false }, - [], - this.libc_ty_layout("pid_t").ty, args, )?; let result = this.getpid()?; this.write_scalar(result, dest)?; } "sysconf" => { - let [val] = this.check_shim_abi( + let [val] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.isize, args, )?; let result = this.sysconf(val)?; @@ -201,12 +183,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // File descriptors "read" => { - let [fd, buf, count] = this.check_shim_abi( + let [fd, buf, count] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -215,16 +195,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.read(fd, buf, count, None, dest)?; } "write" => { - let [fd, buf, n] = this.check_shim_abi( + let [fd, buf, n] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -234,98 +208,64 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write(fd, buf, count, None, dest)?; } "pread" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, buf, count, offset] = this.check_shim_abi( + let [fd, buf, count, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize, libc::off_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - off_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite" => { - let off_t = 
this.libc_ty_layout("off_t"); - let [fd, buf, n, offset] = this.check_shim_abi( + let [fd, buf, n, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize, libc::off_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - off_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; trace!("Called pwrite({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } "pread64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, buf, count, offset] = this.check_shim_abi( + let [fd, buf, count, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize, libc::off64_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - off64_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, buf, n, offset] = this.check_shim_abi( + let [fd, buf, n, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize, libc::off64_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - off64_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; trace!("Called pwrite64({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } "close" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.close(fd)?; @@ -333,17 +273,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "fcntl" => { let ([fd_num, cmd], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.fcntl(fd_num, cmd, varargs)?; this.write_scalar(result, dest)?; } "dup" => { - let [old_fd] = this.check_shim_abi( + let [old_fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; @@ -351,12 +289,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(new_fd, dest)?; } "dup2" => { - let [old_fd, new_fd] = this.check_shim_abi( + let [old_fd, new_fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, 
this.tcx.types.i32], - this.tcx.types.i32, args, )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; @@ -367,12 +303,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "flock" => { // Currently this function does not exist on all Unixes, e.g. on Solaris. this.check_target_os(&["linux", "freebsd", "macos", "illumos"], link_name)?; - let [fd, op] = this.check_shim_abi( + let [fd, op] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, this.tcx.types.i32], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -386,230 +320,187 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `open` is variadic, the third argument is only present when the second argument // has O_CREAT (or on linux O_TMPFILE, but miri doesn't support that) set let ([path_raw, flag], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.open(path_raw, flag, varargs)?; this.write_scalar(result, dest)?; } "unlink" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.unlink(path)?; this.write_scalar(result, dest)?; } "symlink" => { - let [target, linkpath] = this.check_shim_abi( + let [target, linkpath] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.symlink(target, linkpath)?; this.write_scalar(result, dest)?; } "rename" => { - let [oldpath, newpath] = this.check_shim_abi( + let [oldpath, newpath] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.rename(oldpath, newpath)?; this.write_scalar(result, dest)?; } "mkdir" => { - let [path, mode] = this.check_shim_abi( + let [path, mode] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, libc::mode_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.libc_ty_layout("mode_t").ty], - this.tcx.types.i32, args, )?; let result = this.mkdir(path, mode)?; this.write_scalar(result, dest)?; } "rmdir" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.rmdir(path)?; this.write_scalar(result, dest)?; } "opendir" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.opendir(name)?; this.write_scalar(result, dest)?; } "closedir" => { - let [dirp] = this.check_shim_abi( + let [dirp] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - 
[this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.closedir(dirp)?; this.write_scalar(result, dest)?; } "lseek64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, offset, whence] = this.check_shim_abi( + let [fd, offset, whence] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off64_t, i32) -> libc::off64_t), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off64_t.ty, this.tcx.types.i32], - off64_t.ty, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; let whence = this.read_scalar(whence)?.to_i32()?; this.lseek64(fd, offset, whence, dest)?; } "lseek" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, offset, whence] = this.check_shim_abi( + let [fd, offset, whence] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t, i32) -> libc::off_t), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty, this.tcx.types.i32], - off_t.ty, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; let whence = this.read_scalar(whence)?.to_i32()?; this.lseek64(fd, offset, whence, dest)?; } "ftruncate64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, length] = this.check_shim_abi( + let [fd, length] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off64_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off64_t.ty], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_int(off64_t.size)?; + let length = this.read_scalar(length)?.to_int(length.layout.size)?; let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "ftruncate" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, length] = this.check_shim_abi( + let [fd, length] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_int(off_t.size)?; + let length = this.read_scalar(length)?.to_int(length.layout.size)?; let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "fsync" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.fsync(fd)?; this.write_scalar(result, dest)?; } "fdatasync" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.fdatasync(fd)?; this.write_scalar(result, dest)?; } "readlink" => { - let [pathname, buf, bufsize] = this.check_shim_abi( + let [pathname, buf, bufsize] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.machine.layouts.const_raw_ptr.ty, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - ], - this.tcx.types.isize, args, )?; let result = this.readlink(pathname, 
buf, bufsize)?; this.write_scalar(Scalar::from_target_isize(result, this), dest)?; } "posix_fadvise" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, offset, len, advice] = this.check_shim_abi( + let [fd, offset, len, advice] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t, libc::off_t, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty, off_t.ty, this.tcx.types.i32], - this.tcx.types.i32, args, )?; this.read_scalar(fd)?.to_i32()?; - this.read_scalar(offset)?.to_int(off_t.size)?; - this.read_scalar(len)?.to_int(off_t.size)?; + this.read_scalar(offset)?.to_int(offset.layout.size)?; + this.read_scalar(len)?.to_int(len.layout.size)?; this.read_scalar(advice)?.to_i32()?; // fadvise is only informational, we can ignore it. this.write_null(dest)?; } "realpath" => { - let [path, resolved_path] = this.check_shim_abi( + let [path, resolved_path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.realpath(path, resolved_path)?; this.write_scalar(result, dest)?; } "mkstemp" => { - let [template] = this.check_shim_abi( + let [template] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.mkstemp(template)?; @@ -618,29 +509,20 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Unnamed sockets and pipes "socketpair" => { - let [domain, type_, protocol, sv] = this.check_shim_abi( + let [domain, type_, protocol, sv] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32, i32, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.tcx.types.i32, - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - ], - this.tcx.types.i32, args, )?; let result = this.socketpair(domain, type_, protocol, sv)?; this.write_scalar(result, dest)?; } "pipe" => { - let [pipefd] = this.check_shim_abi( + let [pipefd] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.pipe2(pipefd, /*flags*/ None)?; @@ -649,12 +531,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "pipe2" => { // Currently this function does not exist on all Unixes, e.g. on macOS. 
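Relatedly, the short-read/short-write nondeterminism added to `read` and `write` in `fd.rs` above exercises the retry loop that POSIX-style I/O has always required of callers. A sketch of that calling pattern, using `std::io::Read` on an in-memory buffer as a stand-in since it has the same short-read contract.

    // Illustrative only: loop until the buffer is full or EOF, tolerating short reads.
    use std::io::{self, Read};

    fn read_full<R: Read>(mut src: R, buf: &mut [u8]) -> io::Result<usize> {
        let mut filled = 0;
        while filled < buf.len() {
            match src.read(&mut buf[filled..]) {
                Ok(0) => break,       // EOF
                Ok(n) => filled += n, // short read: keep looping
                Err(e) if e.kind() == io::ErrorKind::Interrupted => continue,
                Err(e) => return Err(e),
            }
        }
        Ok(filled)
    }

    fn main() -> io::Result<()> {
        let data = b"hello miri";
        let mut buf = [0u8; 16];
        let n = read_full(&data[..], &mut buf)?;
        assert_eq!(&buf[..n], &data[..]);
        Ok(())
    }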
this.check_target_os(&["linux", "freebsd", "solaris", "illumos"], link_name)?; - let [pipefd, flags] = this.check_shim_abi( + let [pipefd, flags] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.pipe2(pipefd, Some(flags))?; @@ -663,36 +543,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time "gettimeofday" => { - let [tv, tz] = this.check_shim_abi( + let [tv, tz] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.gettimeofday(tv, tz)?; this.write_scalar(result, dest)?; } "localtime_r" => { - let [timep, result_op] = this.check_shim_abi( + let [timep, result_op] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.localtime_r(timep, result_op)?; this.write_pointer(result, dest)?; } "clock_gettime" => { - let [clk_id, tp] = this.check_shim_abi( + let [clk_id, tp] = this.check_shim_sig( + shim_sig!(extern "C" fn(libc::clockid_t, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.libc_ty_layout("clockid_t").ty, this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; this.clock_gettime(clk_id, tp, dest)?; @@ -700,20 +574,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Allocation "posix_memalign" => { - let [memptr, align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [memptr, align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.posix_memalign(memptr, align, size)?; this.write_scalar(result, dest)?; } "mmap" => { let [addr, length, prot, flags, fd, offset] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let offset = this.read_scalar(offset)?.to_int(this.libc_ty_layout("off_t").size)?; let ptr = this.mmap(addr, length, prot, flags, fd, offset)?; this.write_scalar(ptr, dest)?; } "munmap" => { - let [addr, length] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [addr, length] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.munmap(addr, length)?; this.write_scalar(result, dest)?; } @@ -721,7 +597,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "reallocarray" => { // Currently this function does not exist on all Unixes, e.g. on macOS. this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [ptr, nmemb, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, nmemb, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let nmemb = this.read_target_usize(nmemb)?; let size = this.read_target_usize(size)?; @@ -744,14 +621,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "aligned_alloc" => { // This is a C11 function, we assume all Unixes have it. // (MSVC explicitly does not support this.) 
- let [align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.aligned_alloc(align, size)?; this.write_pointer(res, dest)?; } // Dynamic symbol loading "dlsym" => { - let [handle, symbol] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [handle, symbol] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(handle)?; let symbol = this.read_pointer(symbol)?; let name = this.read_c_str(symbol)?; @@ -767,7 +646,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Thread-local storage "pthread_key_create" => { - let [key, dtor] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key, dtor] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key_place = this.deref_pointer_as(key, this.libc_ty_layout("pthread_key_t"))?; let dtor = this.read_pointer(dtor)?; @@ -795,21 +674,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_null(dest)?; } "pthread_key_delete" => { - let [key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; this.machine.tls.delete_tls_key(key)?; // Return success (0) this.write_null(dest)?; } "pthread_getspecific" => { - let [key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; let active_thread = this.active_thread(); let ptr = this.machine.tls.load_tls(key, active_thread, this)?; this.write_scalar(ptr, dest)?; } "pthread_setspecific" => { - let [key, new_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key, new_ptr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; let active_thread = this.active_thread(); let new_data = this.read_scalar(new_ptr)?; @@ -821,117 +701,124 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "pthread_mutexattr_init" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutexattr_init(attr)?; this.write_null(dest)?; } "pthread_mutexattr_settype" => { - let [attr, kind] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, kind] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutexattr_settype(attr, kind)?; this.write_scalar(result, dest)?; } "pthread_mutexattr_destroy" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutexattr_destroy(attr)?; this.write_null(dest)?; } "pthread_mutex_init" => { - let [mutex, attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex, attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_init(mutex, attr)?; this.write_null(dest)?; } "pthread_mutex_lock" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_lock(mutex, dest)?; } "pthread_mutex_trylock" => { - let [mutex] = this.check_shim(abi, 
CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutex_trylock(mutex)?; this.write_scalar(result, dest)?; } "pthread_mutex_unlock" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutex_unlock(mutex)?; this.write_scalar(result, dest)?; } "pthread_mutex_destroy" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_destroy(mutex)?; this.write_int(0, dest)?; } "pthread_rwlock_rdlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_rdlock(rwlock, dest)?; } "pthread_rwlock_tryrdlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_rwlock_tryrdlock(rwlock)?; this.write_scalar(result, dest)?; } "pthread_rwlock_wrlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_wrlock(rwlock, dest)?; } "pthread_rwlock_trywrlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_rwlock_trywrlock(rwlock)?; this.write_scalar(result, dest)?; } "pthread_rwlock_unlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_unlock(rwlock)?; this.write_null(dest)?; } "pthread_rwlock_destroy" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_destroy(rwlock)?; this.write_null(dest)?; } "pthread_condattr_init" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_init(attr)?; this.write_null(dest)?; } "pthread_condattr_setclock" => { - let [attr, clock_id] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, clock_id] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_condattr_setclock(attr, clock_id)?; this.write_scalar(result, dest)?; } "pthread_condattr_getclock" => { - let [attr, clock_id] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, clock_id] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_getclock(attr, clock_id)?; this.write_null(dest)?; } "pthread_condattr_destroy" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_destroy(attr)?; this.write_null(dest)?; } "pthread_cond_init" => { - let [cond, attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_init(cond, attr)?; this.write_null(dest)?; } "pthread_cond_signal" => { - let [cond] = 
this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_signal(cond)?; this.write_null(dest)?; } "pthread_cond_broadcast" => { - let [cond] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_broadcast(cond)?; this.write_null(dest)?; } "pthread_cond_wait" => { - let [cond, mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, mutex] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_wait(cond, mutex, dest)?; } "pthread_cond_timedwait" => { - let [cond, mutex, abstime] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, mutex, abstime] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_timedwait(cond, mutex, abstime, dest)?; } "pthread_cond_destroy" => { - let [cond] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_destroy(cond)?; this.write_null(dest)?; } @@ -939,31 +826,33 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "pthread_create" => { let [thread, attr, start, arg] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_create(thread, attr, start, arg)?; this.write_null(dest)?; } "pthread_join" => { - let [thread, retval] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, retval] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_join(thread, retval, dest)?; } "pthread_detach" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.pthread_detach(thread)?; this.write_scalar(res, dest)?; } "pthread_self" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.pthread_self()?; this.write_scalar(res, dest)?; } "sched_yield" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.sched_yield()?; this.write_null(dest)?; } "nanosleep" => { - let [duration, rem] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [duration, rem] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.nanosleep(duration, rem)?; this.write_scalar(result, dest)?; } @@ -974,14 +863,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { link_name, )?; let [clock_id, flags, req, rem] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.clock_nanosleep(clock_id, flags, req, rem)?; this.write_scalar(result, dest)?; } "sched_getaffinity" => { // Currently this function does not exist on all Unixes, e.g. on macOS. 
this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [pid, cpusetsize, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pid, cpusetsize, mask] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let pid = this.read_scalar(pid)?.to_u32()?; let cpusetsize = this.read_target_usize(cpusetsize)?; let mask = this.read_pointer(mask)?; @@ -1018,7 +908,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "sched_setaffinity" => { // Currently this function does not exist on all Unixes, e.g. on macOS. this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [pid, cpusetsize, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pid, cpusetsize, mask] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let pid = this.read_scalar(pid)?.to_u32()?; let cpusetsize = this.read_target_usize(cpusetsize)?; let mask = this.read_pointer(mask)?; @@ -1058,13 +949,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "isatty" => { - let [fd] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.isatty(fd)?; this.write_scalar(result, dest)?; } "pthread_atfork" => { let [prepare, parent, child] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_pointer(prepare)?; this.read_pointer(parent)?; this.read_pointer(child)?; @@ -1078,7 +969,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &["linux", "macos", "freebsd", "illumos", "solaris", "android"], link_name, )?; - let [buf, bufsize] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [buf, bufsize] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let buf = this.read_pointer(buf)?; let bufsize = this.read_target_usize(bufsize)?; @@ -1096,7 +988,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "strerror_r" => { - let [errnum, buf, buflen] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [errnum, buf, buflen] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.strerror_r(errnum, buf, buflen)?; this.write_scalar(result, dest)?; } @@ -1108,7 +1001,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &["linux", "freebsd", "illumos", "solaris", "android"], link_name, )?; - let [ptr, len, flags] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, len, flags] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; let _flags = this.read_scalar(flags)?.to_i32()?; @@ -1120,7 +1014,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // This function is non-standard but exists with the same signature and // same behavior (eg never fails) on FreeBSD and Solaris/Illumos. this.check_target_os(&["freebsd", "illumos", "solaris"], link_name)?; - let [ptr, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; this.gen_random(ptr, len)?; @@ -1144,12 +1038,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { link_name, )?; // This function looks and behaves excatly like miri_start_unwind. 
- let [payload] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [payload] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } "getuid" | "geteuid" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // For now, just pretend we always have this fixed UID. this.write_int(UID, dest)?; } @@ -1157,7 +1051,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "pthread_attr_getguardsize" if this.frame_in_std() => { - let [_attr, guard_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_attr, guard_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let guard_size_layout = this.machine.layouts.usize; let guard_size = this.deref_pointer_as(guard_size, guard_size_layout)?; this.write_scalar( @@ -1170,11 +1065,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "pthread_attr_init" | "pthread_attr_destroy" if this.frame_in_std() => { - let [_] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "pthread_attr_setstacksize" if this.frame_in_std() => { - let [_, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } @@ -1182,7 +1077,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // We don't support "pthread_attr_setstack", so we just pretend all stacks have the same values here. // Hence we can mostly ignore the input `attr_place`. 
let [attr_place, addr_place, size_place] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let _attr_place = this.deref_pointer_as(attr_place, this.libc_ty_layout("pthread_attr_t"))?; let addr_place = this.deref_pointer_as(addr_place, this.machine.layouts.usize)?; @@ -1202,18 +1097,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "signal" | "sigaltstack" if this.frame_in_std() => { - let [_, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "sigaction" | "mprotect" if this.frame_in_std() => { - let [_, _, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "getpwuid_r" | "__posix_getpwuid_r" if this.frame_in_std() => { // getpwuid_r is the standard name, __posix_getpwuid_r is used on solarish let [uid, pwd, buf, buflen, result] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.check_no_isolation("`getpwuid_r`")?; let uid = this.read_scalar(uid)?.to_u32()?; diff --git a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs index 33564a2f84c..9e247053fbc 100644 --- a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs @@ -24,7 +24,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let max_len = u64::MAX; // FreeBSD does not seem to have a limit. let res = match this.pthread_setname_np( this.read_scalar(thread)?, @@ -39,7 +40,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // FreeBSD's pthread_getname_np uses strlcpy, which truncates the resulting value, // but always adds a null terminator (except for zero-sized buffers). // https://github.com/freebsd/freebsd-src/blob/c2d93a803acef634bd0eede6673aeea59e90c277/lib/libthr/thread/thr_info.c#L119-L144 @@ -57,7 +59,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getthreadid_np" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.unix_gettid(link_name.as_str())?; this.write_scalar(result, dest)?; } @@ -65,7 +67,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "cpuset_getaffinity" => { // The "same" kind of api as `sched_getaffinity` but more fine grained control for FreeBSD specifically. 
let [level, which, id, set_size, mask] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let level = this.read_scalar(level)?.to_i32()?; let which = this.read_scalar(which)?.to_i32()?; @@ -129,7 +131,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "_umtx_op" => { let [obj, op, val, uaddr, uaddr2] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this._umtx_op(obj, op, val, uaddr, uaddr2, dest)?; } @@ -137,29 +139,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // For those, we both intercept `func` and `call@FBSD_1.0` symbols cases // since freebsd 12 the former form can be expected. "stat" | "stat@FBSD_1.0" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat@FBSD_1.0" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat@FBSD_1.0" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "readdir_r" | "readdir_r@FBSD_1.0" => { - let [dirp, entry, result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp, entry, result] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_readdir_r(dirp, entry, result)?; this.write_scalar(result, dest)?; } // Miscellaneous "__error" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } @@ -167,7 +170,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. 
"pthread_attr_get_np" if this.frame_in_std() => { - let [_thread, _attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_thread, _attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } diff --git a/src/tools/miri/src/shims/unix/freebsd/sync.rs b/src/tools/miri/src/shims/unix/freebsd/sync.rs index f4e7d9e58f9..13d30e05573 100644 --- a/src/tools/miri/src/shims/unix/freebsd/sync.rs +++ b/src/tools/miri/src/shims/unix/freebsd/sync.rs @@ -228,26 +228,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let abs_time_flag = flags == abs_time; let clock_id_place = this.project_field(ut, FieldIdx::from_u32(2))?; - let clock_id = this.read_scalar(&clock_id_place)?.to_i32()?; - let timeout_clock = this.translate_umtx_time_clock_id(clock_id)?; + let clock_id = this.read_scalar(&clock_id_place)?; + let Some(timeout_clock) = this.parse_clockid(clock_id) else { + throw_unsup_format!("unsupported clock") + }; + if timeout_clock == TimeoutClock::RealTime { + this.check_no_isolation("`_umtx_op` with `CLOCK_REALTIME`")?; + } interp_ok(Some(UmtxTime { timeout: duration, abs_time: abs_time_flag, timeout_clock })) } - - /// Translate raw FreeBSD clockid to a Miri TimeoutClock. - /// FIXME: share this code with the pthread and clock_gettime shims. - fn translate_umtx_time_clock_id(&mut self, raw_id: i32) -> InterpResult<'tcx, TimeoutClock> { - let this = self.eval_context_mut(); - - let timeout = if raw_id == this.eval_libc_i32("CLOCK_REALTIME") { - // RealTime clock can't be used in isolation mode. - this.check_no_isolation("`_umtx_op` with `CLOCK_REALTIME` timeout")?; - TimeoutClock::RealTime - } else if raw_id == this.eval_libc_i32("CLOCK_MONOTONIC") { - TimeoutClock::Monotonic - } else { - throw_unsup_format!("unsupported clock id {raw_id}"); - }; - interp_ok(timeout) - } } diff --git a/src/tools/miri/src/shims/unix/fs.rs b/src/tools/miri/src/shims/unix/fs.rs index 0f2878ad26c..f9bcacf64c4 100644 --- a/src/tools/miri/src/shims/unix/fs.rs +++ b/src/tools/miri/src/shims/unix/fs.rs @@ -13,9 +13,9 @@ use rustc_abi::Size; use rustc_data_structures::fx::FxHashMap; use self::shims::time::system_time_to_duration; -use crate::helpers::check_min_vararg_count; use crate::shims::files::FileHandle; use crate::shims::os_str::bytes_to_os_str; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::fd::{FlockOp, UnixFileDescription}; use crate::*; diff --git a/src/tools/miri/src/shims/unix/linux/foreign_items.rs b/src/tools/miri/src/shims/unix/linux/foreign_items.rs index b3e99e6cc68..e7e0c3b6ecd 100644 --- a/src/tools/miri/src/shims/unix/linux/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/linux/foreign_items.rs @@ -37,48 +37,50 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // File related shims "readdir64" => { - let [dirp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.linux_solarish_readdir64("dirent64", dirp)?; this.write_scalar(result, dest)?; } "sync_file_range" => { let [fd, offset, nbytes, flags] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.sync_file_range(fd, offset, nbytes, flags)?; this.write_scalar(result, dest)?; } "statx" => { let [dirfd, pathname, flags, mask, statxbuf] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, 
CanonAbi::C, link_name, args)?; let result = this.linux_statx(dirfd, pathname, flags, mask, statxbuf)?; this.write_scalar(result, dest)?; } // epoll, eventfd "epoll_create1" => { - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = match this.pthread_setname_np( this.read_scalar(thread)?, this.read_scalar(name)?, @@ -93,7 +95,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The function's behavior isn't portable between platforms. // In case of glibc, the length of the output buffer must // be not shorter than TASK_COMM_LEN. 
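// [Illustrative sketch added by the editor; not part of the diff.] The recurring change in
// these Miri shim hunks replaces `check_shim`/`check_shim_variadic` with
// `check_shim_sig_lenient`/`check_shim_sig_variadic_lenient`. Judging from the call sites,
// the lenient helpers keep the same shape: validate the ABI and argument count, then hand
// the arguments back as a fixed-size array (plus a vararg tail for the variadic variant).
// How much of the declared signature the "lenient" check verifies is not shown in this diff;
// the arm below simply mirrors the `gettid` case from the hunk that follows:
"gettid" => {
    let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?;
    let result = this.unix_gettid(link_name.as_str())?;
    this.write_scalar(result, dest)?;
}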
@@ -116,7 +119,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "gettid" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.unix_gettid(link_name.as_str())?; this.write_scalar(result, dest)?; } @@ -129,34 +132,35 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "mmap64" => { let [addr, length, prot, flags, fd, offset] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let offset = this.read_scalar(offset)?.to_i64()?; let ptr = this.mmap(addr, length, prot, flags, fd, offset.into())?; this.write_scalar(ptr, dest)?; } "mremap" => { let ([old_address, old_size, new_size, flags], _) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.mremap(old_address, old_size, new_size, flags)?; this.write_scalar(ptr, dest)?; } "__xpg_strerror_r" => { - let [errnum, buf, buflen] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [errnum, buf, buflen] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.strerror_r(errnum, buf, buflen)?; this.write_scalar(result, dest)?; } "__errno_location" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } "__libc_current_sigrtmin" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_int(SIGRTMIN, dest)?; } "__libc_current_sigrtmax" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_int(SIGRTMAX, dest)?; } @@ -164,7 +168,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "pthread_getattr_np" if this.frame_in_std() => { - let [_thread, _attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_thread, _attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } diff --git a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs index ee7deb8d383..2d35ef064db 100644 --- a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs +++ b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs @@ -37,6 +37,11 @@ impl FileDescription for EventFd { "event" } + fn nondet_short_accesses(&self) -> bool { + // We always read and write exactly one `u64`. 
+ false + } + fn close<'tcx>( self, _communicate_allowed: bool, diff --git a/src/tools/miri/src/shims/unix/linux_like/sync.rs b/src/tools/miri/src/shims/unix/linux_like/sync.rs index 9fad74c0241..5f032c52dee 100644 --- a/src/tools/miri/src/shims/unix/linux_like/sync.rs +++ b/src/tools/miri/src/shims/unix/linux_like/sync.rs @@ -1,5 +1,5 @@ use crate::concurrency::sync::FutexRef; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::*; struct LinuxFutex { diff --git a/src/tools/miri/src/shims/unix/linux_like/syscall.rs b/src/tools/miri/src/shims/unix/linux_like/syscall.rs index d3534e6e1bc..106e6c448d0 100644 --- a/src/tools/miri/src/shims/unix/linux_like/syscall.rs +++ b/src/tools/miri/src/shims/unix/linux_like/syscall.rs @@ -3,7 +3,7 @@ use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::env::EvalContextExt; use crate::shims::unix::linux_like::eventfd::EvalContextExt as _; use crate::shims::unix::linux_like::sync::futex; @@ -16,7 +16,7 @@ pub fn syscall<'tcx>( args: &[OpTy<'tcx>], dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { - let ([op], varargs) = ecx.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + let ([op], varargs) = ecx.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; // The syscall variadic function is legal to call with more arguments than needed, // extra arguments are simply ignored. The important check is that when we use an // argument, we have to also check all arguments *before* it to ensure that they diff --git a/src/tools/miri/src/shims/unix/macos/foreign_items.rs b/src/tools/miri/src/shims/unix/macos/foreign_items.rs index 23303718091..297d903c6ba 100644 --- a/src/tools/miri/src/shims/unix/macos/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/macos/foreign_items.rs @@ -35,64 +35,67 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // errno "__error" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } // File related shims "close$NOCANCEL" => { - let [result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [result] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.close(result)?; this.write_scalar(result, dest)?; } "stat" | "stat64" | "stat$INODE64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat64" | "lstat$INODE64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat64" | "fstat$INODE64" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "opendir$INODE64" => { - let [name] = this.check_shim(abi, CanonAbi::C, link_name, 
args)?; + let [name] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.opendir(name)?; this.write_scalar(result, dest)?; } "readdir_r" | "readdir_r$INODE64" => { - let [dirp, entry, result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp, entry, result] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_readdir_r(dirp, entry, result)?; this.write_scalar(result, dest)?; } "realpath$DARWIN_EXTSN" => { - let [path, resolved_path] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, resolved_path] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.realpath(path, resolved_path)?; this.write_scalar(result, dest)?; } "ioctl" => { let ([fd_num, cmd], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.ioctl(fd_num, cmd, varargs)?; this.write_scalar(result, dest)?; } // Environment related shims "_NSGetEnviron" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let environ = this.machine.env_vars.unix().environ(); this.write_pointer(environ, dest)?; } // Random data generation "CCRandomGenerateBytes" => { - let [bytes, count] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [bytes, count] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let bytes = this.read_pointer(bytes)?; let count = this.read_target_usize(count)?; let success = this.eval_libc_i32("kCCSuccess"); @@ -102,28 +105,29 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time related shims "mach_absolute_time" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.mach_absolute_time()?; this.write_scalar(result, dest)?; } "mach_timebase_info" => { - let [info] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [info] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.mach_timebase_info(info)?; this.write_scalar(result, dest)?; } // Access to command-line arguments "_NSGetArgc" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_pointer(this.machine.argc.expect("machine must be initialized"), dest)?; } "_NSGetArgv" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_pointer(this.machine.argv.expect("machine must be initialized"), dest)?; } "_NSGetExecutablePath" => { - let [buf, bufsize] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [buf, bufsize] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.check_no_isolation("`_NSGetExecutablePath`")?; let buf_ptr = this.read_pointer(buf)?; @@ -148,7 +152,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Thread-local storage "_tlv_atexit" => { - let [dtor, data] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dtor, data] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let dtor = this.read_pointer(dtor)?; let dtor = this.get_ptr_fn(dtor)?.as_instance()?; let data = this.read_scalar(data)?; @@ -158,13 +163,13 @@ pub trait EvalContextExt<'tcx>: 
crate::MiriInterpCxExt<'tcx> { // Querying system information "pthread_get_stackaddr_np" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(thread)?; let stack_addr = Scalar::from_uint(this.machine.stack_addr, this.pointer_size()); this.write_scalar(stack_addr, dest)?; } "pthread_get_stacksize_np" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(thread)?; let stack_size = Scalar::from_uint(this.machine.stack_size, this.pointer_size()); this.write_scalar(stack_size, dest)?; @@ -172,7 +177,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "pthread_setname_np" => { - let [name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [name] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The real implementation has logic in two places: // * in userland at https://github.com/apple-oss-distributions/libpthread/blob/c032e0b076700a0a47db75528a282b8d3a06531a/src/pthread.c#L1178-L1200, @@ -199,7 +204,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The function's behavior isn't portable between platforms. // In case of macOS, a truncated name (due to a too small buffer) @@ -223,7 +229,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_threadid_np" => { - let [thread, tid_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, tid_ptr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.apple_pthread_threadip_np(thread, tid_ptr)?; this.write_scalar(res, dest)?; } @@ -231,7 +238,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "os_sync_wait_on_address" => { let [addr_op, value_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -243,7 +250,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wait_on_address_with_deadline" => { let [addr_op, value_op, size_op, flags_op, clock_op, timeout_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -255,7 +262,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wait_on_address_with_timeout" => { let [addr_op, value_op, size_op, flags_op, clock_op, timeout_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -267,36 +274,36 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wake_by_address_any" => { let [addr_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wake_by_address( addr_op, size_op, flags_op, /* all */ false, dest, )?; } "os_sync_wake_by_address_all" => 
{ let [addr_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wake_by_address( addr_op, size_op, flags_op, /* all */ true, dest, )?; } "os_unfair_lock_lock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_lock(lock_op)?; } "os_unfair_lock_trylock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_trylock(lock_op, dest)?; } "os_unfair_lock_unlock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_unlock(lock_op)?; } "os_unfair_lock_assert_owner" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_assert_owner(lock_op)?; } "os_unfair_lock_assert_not_owner" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_assert_not_owner(lock_op)?; } diff --git a/src/tools/miri/src/shims/unix/solarish/foreign_items.rs b/src/tools/miri/src/shims/unix/solarish/foreign_items.rs index e3d15b89be6..d7033a65fe2 100644 --- a/src/tools/miri/src/shims/unix/solarish/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/solarish/foreign_items.rs @@ -27,32 +27,34 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // epoll, eventfd (NOT available on Solaris!) 
"epoll_create1" => { this.assert_target_os("illumos", "epoll_create1"); - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { this.assert_target_os("illumos", "epoll_ctl"); - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { this.assert_target_os("illumos", "epoll_wait"); let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { this.assert_target_os("illumos", "eventfd"); - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // THREAD_NAME_MAX allows a thread name of 31+1 length // https://github.com/illumos/illumos-gate/blob/7671517e13b8123748eda4ef1ee165c6d9dba7fe/usr/src/uts/common/sys/thread.h#L613 let max_len = 32; @@ -70,7 +72,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // See https://illumos.org/man/3C/pthread_getname_np for the error codes. 
let res = match this.pthread_getname_np( this.read_scalar(thread)?, @@ -87,22 +90,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // File related shims "stat" | "stat64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat64" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "readdir" => { - let [dirp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.linux_solarish_readdir64("dirent", dirp)?; this.write_scalar(result, dest)?; } @@ -110,20 +113,20 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Sockets and pipes "__xnet_socketpair" => { let [domain, type_, protocol, sv] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.socketpair(domain, type_, protocol, sv)?; this.write_scalar(result, dest)?; } // Miscellaneous "___errno" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } "stack_getbounds" => { - let [stack] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [stack] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let stack = this.deref_pointer_as(stack, this.libc_ty_layout("stack_t"))?; this.write_int_fields_named( @@ -141,7 +144,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "pset_info" => { - let [pset, tpe, cpus, list] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pset, tpe, cpus, list] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // We do not need to handle the current process cpu mask, available_parallelism // implementation pass null anyway. We only care for the number of // cpus. 
@@ -170,7 +174,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "__sysconf_xpg7" => { - let [val] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.sysconf(val)?; this.write_scalar(result, dest)?; } diff --git a/src/tools/miri/src/shims/unix/sync.rs b/src/tools/miri/src/shims/unix/sync.rs index e20e3b79c3b..5ad4fd501a6 100644 --- a/src/tools/miri/src/shims/unix/sync.rs +++ b/src/tools/miri/src/shims/unix/sync.rs @@ -297,14 +297,13 @@ fn condattr_clock_offset<'tcx>(ecx: &MiriInterpCx<'tcx>) -> InterpResult<'tcx, u fn condattr_get_clock_id<'tcx>( ecx: &MiriInterpCx<'tcx>, attr_ptr: &OpTy<'tcx>, -) -> InterpResult<'tcx, i32> { +) -> InterpResult<'tcx, Scalar> { ecx.deref_pointer_and_read( attr_ptr, condattr_clock_offset(ecx)?, ecx.libc_ty_layout("pthread_condattr_t"), ecx.machine.layouts.i32, - )? - .to_i32() + ) } fn condattr_set_clock_id<'tcx>( @@ -321,20 +320,6 @@ fn condattr_set_clock_id<'tcx>( ) } -/// Translates the clock from what is stored in pthread_condattr_t to our enum. -fn condattr_translate_clock_id<'tcx>( - ecx: &MiriInterpCx<'tcx>, - raw_id: i32, -) -> InterpResult<'tcx, ClockId> { - interp_ok(if raw_id == ecx.eval_libc_i32("CLOCK_REALTIME") { - ClockId::Realtime - } else if raw_id == ecx.eval_libc_i32("CLOCK_MONOTONIC") { - ClockId::Monotonic - } else { - throw_unsup_format!("unsupported clock id: {raw_id}"); - }) -} - // # pthread_cond_t // We store some data directly inside the type, ignoring the platform layout: // - init: u32 @@ -363,22 +348,16 @@ fn cond_init_offset<'tcx>(ecx: &MiriInterpCx<'tcx>) -> InterpResult<'tcx, Size> interp_ok(offset) } -#[derive(Debug, Clone, Copy)] -enum ClockId { - Realtime, - Monotonic, -} - #[derive(Debug, Clone)] struct PthreadCondvar { condvar_ref: CondvarRef, - clock: ClockId, + clock: TimeoutClock, } fn cond_create<'tcx>( ecx: &mut MiriInterpCx<'tcx>, cond_ptr: &OpTy<'tcx>, - clock: ClockId, + clock: TimeoutClock, ) -> InterpResult<'tcx, PthreadCondvar> { let cond = ecx.deref_pointer_as(cond_ptr, ecx.libc_ty_layout("pthread_cond_t"))?; let data = PthreadCondvar { condvar_ref: CondvarRef::new(), clock }; @@ -407,7 +386,10 @@ where throw_unsup_format!("unsupported static initializer used for `pthread_cond_t`"); } // This used the static initializer. The clock there is always CLOCK_REALTIME. 
- interp_ok(PthreadCondvar { condvar_ref: CondvarRef::new(), clock: ClockId::Realtime }) + interp_ok(PthreadCondvar { + condvar_ref: CondvarRef::new(), + clock: TimeoutClock::RealTime, + }) }, ) } @@ -742,11 +724,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_mut(); - let clock_id = this.read_scalar(clock_id_op)?.to_i32()?; - if clock_id == this.eval_libc_i32("CLOCK_REALTIME") - || clock_id == this.eval_libc_i32("CLOCK_MONOTONIC") - { - condattr_set_clock_id(this, attr_op, clock_id)?; + let clock_id = this.read_scalar(clock_id_op)?; + if this.parse_clockid(clock_id).is_some() { + condattr_set_clock_id(this, attr_op, clock_id.to_i32()?)?; } else { let einval = this.eval_libc_i32("EINVAL"); return interp_ok(Scalar::from_i32(einval)); @@ -764,7 +744,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let clock_id = condattr_get_clock_id(this, attr_op)?; this.write_scalar( - Scalar::from_i32(clock_id), + clock_id, &this.deref_pointer_as(clk_id_op, this.libc_ty_layout("clockid_t"))?, )?; @@ -799,13 +779,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let attr = this.read_pointer(attr_op)?; // Default clock if `attr` is null, and on macOS where there is no clock attribute. let clock_id = if this.ptr_is_null(attr)? || this.tcx.sess.target.os == "macos" { - this.eval_libc_i32("CLOCK_REALTIME") + this.eval_libc("CLOCK_REALTIME") } else { condattr_get_clock_id(this, attr_op)? }; - let clock_id = condattr_translate_clock_id(this, clock_id)?; + let Some(clock) = this.parse_clockid(clock_id) else { + // This is UB since this situation cannot arise when using pthread_condattr_setclock. + throw_ub_format!("pthread_cond_init: invalid attributes (unsupported clock)") + }; - cond_create(this, cond_op, clock_id)?; + cond_create(this, cond_op, clock)?; interp_ok(()) } @@ -870,18 +853,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(()); } }; - let timeout_clock = match data.clock { - ClockId::Realtime => { - this.check_no_isolation("`pthread_cond_timedwait` with `CLOCK_REALTIME`")?; - TimeoutClock::RealTime - } - ClockId::Monotonic => TimeoutClock::Monotonic, - }; + if data.clock == TimeoutClock::RealTime { + this.check_no_isolation("`pthread_cond_timedwait` with `CLOCK_REALTIME`")?; + } this.condvar_wait( data.condvar_ref, mutex_ref, - Some((timeout_clock, TimeoutAnchor::Absolute, duration)), + Some((data.clock, TimeoutAnchor::Absolute, duration)), Scalar::from_i32(0), this.eval_libc("ETIMEDOUT"), // retval_timeout dest.clone(), diff --git a/src/tools/miri/src/shims/unwind.rs b/src/tools/miri/src/shims/unwind.rs index ba0c50b54b4..0dd2b20487d 100644 --- a/src/tools/miri/src/shims/unwind.rs +++ b/src/tools/miri/src/shims/unwind.rs @@ -16,7 +16,6 @@ use rustc_abi::ExternAbi; use rustc_middle::mir; use rustc_target::spec::PanicStrategy; -use self::helpers::check_intrinsic_arg_count; use crate::*; /// Holds all of the relevant data for when unwinding hits a `try` frame. @@ -60,7 +59,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// Handles the `catch_unwind` intrinsic. fn handle_catch_unwind( &mut self, - args: &[OpTy<'tcx>], + try_fn: &OpTy<'tcx>, + data: &OpTy<'tcx>, + catch_fn: &OpTy<'tcx>, dest: &MPlaceTy<'tcx>, ret: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { @@ -78,7 +79,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // a pointer to `Box<dyn Any + Send + 'static>`. // Get all the arguments. 
- let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; let try_fn = this.read_pointer(try_fn)?; let data = this.read_immediate(data)?; let catch_fn = this.read_pointer(catch_fn)?; diff --git a/src/tools/miri/src/shims/wasi/foreign_items.rs b/src/tools/miri/src/shims/wasi/foreign_items.rs index 8d92d0f3381..bfcdbd8130d 100644 --- a/src/tools/miri/src/shims/wasi/foreign_items.rs +++ b/src/tools/miri/src/shims/wasi/foreign_items.rs @@ -23,12 +23,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Allocation "posix_memalign" => { - let [memptr, align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [memptr, align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.posix_memalign(memptr, align, size)?; this.write_scalar(result, dest)?; } "aligned_alloc" => { - let [align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.aligned_alloc(align, size)?; this.write_pointer(res, dest)?; } diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index 959abc0baca..7b13f1d9080 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -157,42 +157,44 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Environment related shims "GetEnvironmentVariableW" => { - let [name, buf, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [name, buf, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetEnvironmentVariableW(name, buf, size)?; this.write_scalar(result, dest)?; } "SetEnvironmentVariableW" => { - let [name, value] = this.check_shim(abi, sys_conv, link_name, args)?; + let [name, value] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.SetEnvironmentVariableW(name, value)?; this.write_scalar(result, dest)?; } "GetEnvironmentStringsW" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetEnvironmentStringsW()?; this.write_pointer(result, dest)?; } "FreeEnvironmentStringsW" => { - let [env_block] = this.check_shim(abi, sys_conv, link_name, args)?; + let [env_block] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.FreeEnvironmentStringsW(env_block)?; this.write_scalar(result, dest)?; } "GetCurrentDirectoryW" => { - let [size, buf] = this.check_shim(abi, sys_conv, link_name, args)?; + let [size, buf] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetCurrentDirectoryW(size, buf)?; this.write_scalar(result, dest)?; } "SetCurrentDirectoryW" => { - let [path] = this.check_shim(abi, sys_conv, link_name, args)?; + let [path] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.SetCurrentDirectoryW(path)?; this.write_scalar(result, dest)?; } "GetUserProfileDirectoryW" => { - let [token, buf, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [token, buf, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetUserProfileDirectoryW(token, buf, size)?; this.write_scalar(result, dest)?; } "GetCurrentProcessId" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = 
this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetCurrentProcessId()?; this.write_scalar(result, dest)?; } @@ -209,7 +211,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { n, byte_offset, key, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.NtWriteFile( handle, event, @@ -234,7 +236,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { n, byte_offset, key, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.NtReadFile( handle, event, @@ -250,7 +252,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetFullPathNameW" => { let [filename, size, buffer, filepart] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.check_no_isolation("`GetFullPathNameW`")?; let filename = this.read_pointer(filename)?; @@ -287,7 +289,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { creation_disposition, flags_and_attributes, template_file, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.CreateFileW( file_name, desired_access, @@ -300,18 +302,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(handle.to_scalar(this), dest)?; } "GetFileInformationByHandle" => { - let [handle, info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, info] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.GetFileInformationByHandle(handle, info)?; this.write_scalar(res, dest)?; } "DeleteFileW" => { - let [file_name] = this.check_shim(abi, sys_conv, link_name, args)?; + let [file_name] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.DeleteFileW(file_name)?; this.write_scalar(res, dest)?; } "SetFilePointerEx" => { let [file, distance_to_move, new_file_pointer, move_method] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.SetFilePointerEx(file, distance_to_move, new_file_pointer, move_method)?; this.write_scalar(res, dest)?; @@ -319,7 +321,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Allocation "HeapAlloc" => { - let [handle, flags, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, flags, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; let flags = this.read_scalar(flags)?.to_u32()?; let size = this.read_target_usize(size)?; @@ -341,7 +344,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(ptr, dest)?; } "HeapFree" => { - let [handle, flags, ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, flags, ptr] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; this.read_scalar(flags)?.to_u32()?; let ptr = this.read_pointer(ptr)?; @@ -354,7 +358,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "HeapReAlloc" => { let [handle, flags, old_ptr, size] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; this.read_scalar(flags)?.to_u32()?; let old_ptr = this.read_pointer(old_ptr)?; @@ -374,7 +378,7 @@ pub trait 
EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(new_ptr, dest)?; } "LocalFree" => { - let [ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; // "If the hMem parameter is NULL, LocalFree ignores the parameter and returns NULL." // (https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-localfree) @@ -386,17 +390,17 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // errno "SetLastError" => { - let [error] = this.check_shim(abi, sys_conv, link_name, args)?; + let [error] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let error = this.read_scalar(error)?; this.set_last_error(error)?; } "GetLastError" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let last_error = this.get_last_error()?; this.write_scalar(last_error, dest)?; } "RtlNtStatusToDosError" => { - let [status] = this.check_shim(abi, sys_conv, link_name, args)?; + let [status] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let status = this.read_scalar(status)?.to_u32()?; let err = match status { // STATUS_MEDIA_WRITE_PROTECTED => ERROR_WRITE_PROTECT @@ -418,7 +422,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Querying system information "GetSystemInfo" => { // Also called from `page_size` crate. - let [system_info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [system_info] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let system_info = this.deref_pointer_as(system_info, this.windows_ty_layout("SYSTEM_INFO"))?; // Initialize with `0`. @@ -441,19 +445,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // This just creates a key; Windows does not natively support TLS destructors. // Create key and return it. 
- let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = this.machine.tls.create_tls_key(None, dest.layout.size)?; this.write_scalar(Scalar::from_uint(key, dest.layout.size), dest)?; } "TlsGetValue" => { - let [key] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); let active_thread = this.active_thread(); let ptr = this.machine.tls.load_tls(key, active_thread, this)?; this.write_scalar(ptr, dest)?; } "TlsSetValue" => { - let [key, new_ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key, new_ptr] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); let active_thread = this.active_thread(); let new_data = this.read_scalar(new_ptr)?; @@ -463,7 +467,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_int(1, dest)?; } "TlsFree" => { - let [key] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); this.machine.tls.delete_tls_key(key)?; @@ -473,7 +477,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Access to command-line arguments "GetCommandLineW" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_pointer( this.machine.cmd_line.expect("machine must be initialized"), dest, @@ -483,29 +487,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time related shims "GetSystemTimeAsFileTime" | "GetSystemTimePreciseAsFileTime" => { #[allow(non_snake_case)] - let [LPFILETIME] = this.check_shim(abi, sys_conv, link_name, args)?; + let [LPFILETIME] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.GetSystemTimeAsFileTime(link_name.as_str(), LPFILETIME)?; } "QueryPerformanceCounter" => { #[allow(non_snake_case)] - let [lpPerformanceCount] = this.check_shim(abi, sys_conv, link_name, args)?; + let [lpPerformanceCount] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.QueryPerformanceCounter(lpPerformanceCount)?; this.write_scalar(result, dest)?; } "QueryPerformanceFrequency" => { #[allow(non_snake_case)] - let [lpFrequency] = this.check_shim(abi, sys_conv, link_name, args)?; + let [lpFrequency] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.QueryPerformanceFrequency(lpFrequency)?; this.write_scalar(result, dest)?; } "Sleep" => { - let [timeout] = this.check_shim(abi, sys_conv, link_name, args)?; + let [timeout] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.Sleep(timeout)?; } "CreateWaitableTimerExW" => { let [attributes, name, flags, access] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_pointer(attributes)?; this.read_pointer(name)?; this.read_scalar(flags)?.to_u32()?; @@ -519,27 +524,28 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "InitOnceBeginInitialize" => { let [ptr, flags, pending, context] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.InitOnceBeginInitialize(ptr, flags, pending, context, dest)?; } 
"InitOnceComplete" => { - let [ptr, flags, context] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, flags, context] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.InitOnceComplete(ptr, flags, context)?; this.write_scalar(result, dest)?; } "WaitOnAddress" => { let [ptr_op, compare_op, size_op, timeout_op] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WaitOnAddress(ptr_op, compare_op, size_op, timeout_op, dest)?; } "WakeByAddressSingle" => { - let [ptr_op] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr_op] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WakeByAddressSingle(ptr_op)?; } "WakeByAddressAll" => { - let [ptr_op] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr_op] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WakeByAddressAll(ptr_op)?; } @@ -547,7 +553,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Dynamic symbol loading "GetProcAddress" => { #[allow(non_snake_case)] - let [hModule, lpProcName] = this.check_shim(abi, sys_conv, link_name, args)?; + let [hModule, lpProcName] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(hModule)?; let name = this.read_c_str(this.read_pointer(lpProcName)?)?; if let Ok(name) = str::from_utf8(name) @@ -563,7 +570,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "CreateThread" => { let [security, stacksize, start, arg, flags, thread] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let thread_id = this.CreateThread(security, stacksize, start, arg, flags, thread)?; @@ -571,12 +578,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Handle::Thread(thread_id).to_scalar(this), dest)?; } "WaitForSingleObject" => { - let [handle, timeout] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, timeout] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WaitForSingleObject(handle, timeout, dest)?; } "GetCurrentProcess" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_scalar( Handle::Pseudo(PseudoHandle::CurrentProcess).to_scalar(this), @@ -584,7 +592,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "GetCurrentThread" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_scalar( Handle::Pseudo(PseudoHandle::CurrentThread).to_scalar(this), @@ -592,7 +600,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "SetThreadDescription" => { - let [handle, name] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, name] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "SetThreadDescription")?; let name = this.read_wide_str(this.read_pointer(name)?)?; @@ -607,7 +615,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u32(0), dest)?; } "GetThreadDescription" => { - let [handle, name_ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, name_ptr] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "GetThreadDescription")?; 
let name_ptr = this.deref_pointer_as(name_ptr, this.machine.layouts.mut_raw_ptr)?; // the pointer where we should store the ptr to the name @@ -630,7 +639,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "GetThreadId" => { - let [handle] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "GetThreadId")?; let thread = match handle { Handle::Thread(thread) => thread, @@ -641,7 +650,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u32(tid), dest)?; } "GetCurrentThreadId" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let thread = this.active_thread(); let tid = this.get_tid(thread); this.write_scalar(Scalar::from_u32(tid), dest)?; @@ -649,7 +658,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "ExitProcess" => { - let [code] = this.check_shim(abi, sys_conv, link_name, args)?; + let [code] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Windows technically uses u32, but we unify everything to a Unix-style i32. let code = this.read_scalar(code)?.to_i32()?; throw_machine_stop!(TerminationInfo::Exit { code, leak_check: false }); @@ -657,7 +666,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "SystemFunction036" => { // used by getrandom 0.1 // This is really 'RtlGenRandom'. - let [ptr, len] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_scalar(len)?.to_u32()?; this.gen_random(ptr, len.into())?; @@ -665,7 +674,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "ProcessPrng" => { // used by `std` - let [ptr, len] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; this.gen_random(ptr, len)?; @@ -674,7 +683,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "BCryptGenRandom" => { // used by getrandom 0.2 let [algorithm, ptr, len, flags] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let algorithm = this.read_scalar(algorithm)?; let algorithm = algorithm.to_target_usize(this)?; let ptr = this.read_pointer(ptr)?; @@ -708,7 +717,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetConsoleScreenBufferInfo" => { // `term` needs this, so we fake it. 
- let [console, buffer_info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [console, buffer_info] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(console)?; // FIXME: this should use deref_pointer_as, but CONSOLE_SCREEN_BUFFER_INFO is not in std this.deref_pointer(buffer_info)?; @@ -717,13 +727,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_null(dest)?; } "GetStdHandle" => { - let [which] = this.check_shim(abi, sys_conv, link_name, args)?; + let [which] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.GetStdHandle(which)?; this.write_scalar(res, dest)?; } "DuplicateHandle" => { let [src_proc, src_handle, target_proc, target_handle, access, inherit, options] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.DuplicateHandle( src_proc, src_handle, @@ -736,14 +746,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "CloseHandle" => { - let [handle] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ret = this.CloseHandle(handle)?; this.write_scalar(ret, dest)?; } "GetModuleFileNameW" => { - let [handle, filename, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, filename, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.check_no_isolation("`GetModuleFileNameW`")?; let handle = this.read_handle(handle, "GetModuleFileNameW")?; @@ -777,7 +788,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "FormatMessageW" => { let [flags, module, message_id, language_id, buffer, size, arguments] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let flags = this.read_scalar(flags)?.to_u32()?; let _module = this.read_pointer(module)?; // seems to contain a module name @@ -812,26 +823,28 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "GetProcessHeap" if this.frame_in_std() => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Just fake a HANDLE // It's fine to not use the Handle type here because its a stub this.write_int(1, dest)?; } "GetModuleHandleA" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_lpModuleName] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_lpModuleName] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // We need to return something non-null here to make `compat_fn!` work. this.write_int(1, dest)?; } "SetConsoleTextAttribute" if this.frame_in_std() => { #[allow(non_snake_case)] let [_hConsoleOutput, _wAttribute] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Pretend these does not exist / nothing happened, by returning zero. 
this.write_null(dest)?; } "GetConsoleMode" if this.frame_in_std() => { - let [console, mode] = this.check_shim(abi, sys_conv, link_name, args)?; + let [console, mode] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(console)?; this.deref_pointer_as(mode, this.machine.layouts.u32)?; // Indicate an error. @@ -839,25 +852,27 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetFileType" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_hFile] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_hFile] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Return unknown file type. this.write_null(dest)?; } "AddVectoredExceptionHandler" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_First, _Handler] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_First, _Handler] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Any non zero value works for the stdlib. This is just used for stack overflows anyway. this.write_int(1, dest)?; } "SetThreadStackGuarantee" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_StackSizeInBytes] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_StackSizeInBytes] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Any non zero value works for the stdlib. This is just used for stack overflows anyway. this.write_int(1, dest)?; } // this is only callable from std because we know that std ignores the return value "SwitchToThread" if this.frame_in_std() => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.yield_active_thread(); @@ -876,7 +891,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ); } // This function looks and behaves excatly like miri_start_unwind. - let [payload] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [payload] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } diff --git a/src/tools/miri/src/shims/windows/fs.rs b/src/tools/miri/src/shims/windows/fs.rs index 72e016c12e9..e4ec1b0130c 100644 --- a/src/tools/miri/src/shims/windows/fs.rs +++ b/src/tools/miri/src/shims/windows/fs.rs @@ -462,6 +462,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let io_status_info = this.project_field_named(&io_status_block, "Information")?; + // It seems like short writes are not a thing on Windows, so we don't truncate `count` here. + // FIXME: if we are on a Unix host, short host writes are still visible to the program! + let finish = { let io_status = io_status.clone(); let io_status_info = io_status_info.clone(); @@ -491,7 +494,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }} ) }; - desc.write(this.machine.communicate(), buf, count.try_into().unwrap(), this, finish)?; // Return status is written to `dest` and `io_status_block` on callback completion. @@ -556,6 +558,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let io_status_info = this.project_field_named(&io_status_block, "Information")?; + let fd = match handle { + Handle::File(fd) => fd, + _ => this.invalid_handle("NtWriteFile")?, + }; + + let Some(desc) = this.machine.fds.get(fd) else { this.invalid_handle("NtReadFile")? }; + + // It seems like short reads are not a thing on Windows, so we don't truncate `count` here. 
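The new NtWriteFile comment above notes that `count` is not truncated because Windows has no short writes, while a Unix host can still produce one. A minimal std illustration of what a "short write" means (hypothetical example, not Miri code):

use std::io::Write;

fn main() -> std::io::Result<()> {
    let mut out = std::io::stdout().lock();
    let buf = b"hello world";
    // On Unix, `write` may report fewer bytes written than requested; callers
    // are expected to loop. `write_all` performs that loop for you.
    let n = out.write(buf)?;
    assert!(n <= buf.len());
    out.write_all(&buf[n..])?;
    Ok(())
}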
+ // FIXME: if we are on a Unix host, short host reads are still visible to the program! + let finish = { let io_status = io_status.clone(); let io_status_info = io_status_info.clone(); @@ -585,14 +597,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }} ) }; - - let fd = match handle { - Handle::File(fd) => fd, - _ => this.invalid_handle("NtWriteFile")?, - }; - - let Some(desc) = this.machine.fds.get(fd) else { this.invalid_handle("NtReadFile")? }; - desc.read(this.machine.communicate(), buf, count.try_into().unwrap(), this, finish)?; // See NtWriteFile for commentary on this diff --git a/src/tools/miri/src/shims/x86/aesni.rs b/src/tools/miri/src/shims/x86/aesni.rs index 058ca24e730..fdd3e78c610 100644 --- a/src/tools/miri/src/shims/x86/aesni.rs +++ b/src/tools/miri/src/shims/x86/aesni.rs @@ -26,7 +26,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesdec_si128 "aesdec" | "aesdec.256" | "aesdec.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let key = aes::Block::from(key.to_le_bytes()); let mut state = aes::Block::from(state.to_le_bytes()); @@ -42,7 +43,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesdeclast_si128 "aesdeclast" | "aesdeclast.256" | "aesdeclast.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let mut state = aes::Block::from(state.to_le_bytes()); @@ -66,7 +68,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesenc_si128 "aesenc" | "aesenc.256" | "aesenc.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let key = aes::Block::from(key.to_le_bytes()); let mut state = aes::Block::from(state.to_le_bytes()); @@ -82,7 +85,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesenclast_si128 "aesenclast" | "aesenclast.256" | "aesenclast.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let mut state = aes::Block::from(state.to_le_bytes()); // `aes::hazmat::cipher_round` does the following operations: @@ -102,7 +106,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm_aesimc_si128 function. 
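The AES-NI shims above defer the round function to the `aes` crate. Assuming that crate with its `hazmat` feature enabled (the API the comments refer to), a rough sketch of an aesenc-style round on a 128-bit state; this is illustrative, not Miri's exact code:

use aes::Block;
use aes::hazmat::cipher_round;

// One AES encryption round on `state`, keyed by `key` (little-endian lane order,
// matching how the shims build `aes::Block` from the SIMD lanes).
fn aesenc(state: u128, key: u128) -> u128 {
    let mut block = Block::from(state.to_le_bytes());
    cipher_round(&mut block, &Block::from(key.to_le_bytes()));
    u128::from_le_bytes(block.into())
}

fn main() {
    println!("{:#034x}", aesenc(0x0123_4567_89ab_cdef_0011_2233_4455_6677, 0));
}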
// Performs the AES InvMixColumns operation on `op` "aesimc" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // Transmute to `u128` let op = op.transmute(this.machine.layouts.u128, this)?; let dest = dest.transmute(this.machine.layouts.u128, this)?; diff --git a/src/tools/miri/src/shims/x86/avx.rs b/src/tools/miri/src/shims/x86/avx.rs index 83d23d6ad36..269ce3b51b9 100644 --- a/src/tools/miri/src/shims/x86/avx.rs +++ b/src/tools/miri/src/shims/x86/avx.rs @@ -33,7 +33,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.ps.256" | "max.ps.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ps.256" => FloatBinOp::Min, @@ -45,7 +46,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement _mm256_min_pd and _mm256_max_pd functions. "min.pd.256" | "max.pd.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.pd.256" => FloatBinOp::Min, @@ -58,21 +60,23 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_round_ps function. // Rounds the elements of `op` according to `rounding`. "round.ps.256" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Single>(this, op, rounding, dest)?; } // Used to implement the _mm256_round_pd function. // Rounds the elements of `op` according to `rounding`. "round.pd.256" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Double>(this, op, rounding, dest)?; } // Used to implement _mm256_{rcp,rsqrt}_ps functions. // Performs the operations on all components of `op`. "rcp.ps.256" | "rsqrt.ps.256" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ps.256" => FloatUnaryOp::Rcp, @@ -84,7 +88,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm256_dp_ps function. "dp.ps.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; conditional_dot_product(this, left, right, imm, dest)?; } @@ -92,7 +97,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add/subtract adjacent floating point values // in `left` and `right`. 
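The min/max shims above stress that the x86 instructions differ from the IEEE operations: when either operand is NaN, or when -0.0 is compared with +0.0, the second operand is returned. A scalar sketch of that lane rule (just the semantics, not Miri's implementation):

// Scalar model of the x86 MINPS/MINSS lane operation.
fn x86_min(a: f32, b: f32) -> f32 {
    if a < b { a } else { b }
}

fn main() {
    assert_eq!(x86_min(f32::NAN, 1.0), 1.0);
    assert!(x86_min(1.0, f32::NAN).is_nan());
    assert_eq!(x86_min(-0.0, 0.0).to_bits(), 0.0f32.to_bits()); // +0.0, the second operand
}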
"hadd.ps.256" | "hadd.pd.256" | "hsub.ps.256" | "hsub.pd.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "hadd.ps.256" | "hadd.pd.256" => mir::BinOp::Add, @@ -107,7 +113,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and `right`. For each component, returns 0 if false or u32::MAX // if true. "cmp.ps.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -119,7 +126,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and `right`. For each component, returns 0 if false or u64::MAX // if true. "cmp.pd.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -130,7 +138,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and _mm256_cvttpd_epi32 functions. // Converts packed f32/f64 to packed i32. "cvt.ps2dq.256" | "cvtt.ps2dq.256" | "cvt.pd2dq.256" | "cvtt.pd2dq.256" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let rnd = match unprefixed_name { // "current SSE rounding mode", assume nearest @@ -148,7 +156,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // sequence of 4-element arrays, and we shuffle each of these arrays, where // `control` determines which element of the current `data` array is written. "vpermilvar.ps" | "vpermilvar.ps.256" => { - let [data, control] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [data, control] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (data, data_len) = this.project_to_simd(data)?; let (control, control_len) = this.project_to_simd(control)?; @@ -181,7 +190,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // where `right` determines which element of the current `left` array is // written. "vpermilvar.pd" | "vpermilvar.pd.256" => { - let [data, control] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [data, control] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (data, data_len) = this.project_to_simd(data)?; let (control, control_len) = this.project_to_simd(control)?; @@ -213,7 +223,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // For each 128-bit element of `dest`, copies one from `left`, `right` or // zero, according to `imm`. "vperm2f128.ps.256" | "vperm2f128.pd.256" | "vperm2f128.si.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(dest.layout, left.layout); assert_eq!(dest.layout, right.layout); @@ -256,7 +267,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is loaded from `ptr.wrapping_add(i)`, otherwise zero is // loaded. 
"maskload.ps" | "maskload.pd" | "maskload.ps.256" | "maskload.pd.256" => { - let [ptr, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_load(this, ptr, mask, dest)?; } @@ -266,7 +277,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is stored into `ptr.wapping_add(i)`. // Unlike SSE2's _mm_maskmoveu_si128, these are not non-temporal stores. "maskstore.ps" | "maskstore.pd" | "maskstore.ps.256" | "maskstore.pd.256" => { - let [ptr, mask, value] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask, value] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_store(this, ptr, mask, value)?; } @@ -276,7 +288,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the data crosses a cache line, but for Miri this is just a regular // unaligned read. "ldu.dq.256" => { - let [src_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [src_ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let src_ptr = this.read_pointer(src_ptr)?; let dest = dest.force_mplace(this)?; @@ -288,7 +300,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Tests `op & mask == 0`, `op & mask == mask` or // `op & mask != 0 && op & mask != mask` "ptestz.256" | "ptestc.256" | "ptestnzc.256" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (all_zero, masked_set) = test_bits_masked(this, op, mask)?; let res = match unprefixed_name { @@ -311,7 +323,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "vtestz.pd.256" | "vtestc.pd.256" | "vtestnzc.pd.256" | "vtestz.pd" | "vtestc.pd" | "vtestnzc.pd" | "vtestz.ps.256" | "vtestc.ps.256" | "vtestnzc.ps.256" | "vtestz.ps" | "vtestc.ps" | "vtestnzc.ps" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (direct, negated) = test_high_bits_masked(this, op, mask)?; let res = match unprefixed_name { @@ -333,7 +345,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // compiler, making these functions no-ops. // The only thing that needs to be ensured is the correct calling convention. - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; } _ => return interp_ok(EmulateItemResult::NotSupported), } diff --git a/src/tools/miri/src/shims/x86/avx2.rs b/src/tools/miri/src/shims/x86/avx2.rs index 49d5977078b..ca80c0eba1e 100644 --- a/src/tools/miri/src/shims/x86/avx2.rs +++ b/src/tools/miri/src/shims/x86/avx2.rs @@ -28,7 +28,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_abs_epi{8,16,32} functions. // Calculates the absolute value of packed 8/16/32-bit integers. "pabs.b" | "pabs.w" | "pabs.d" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; int_abs(this, op, dest)?; } @@ -36,7 +36,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add / add with saturation / subtract adjacent 16/32-bit // integer values in `left` and `right`. 
"phadd.w" | "phadd.sw" | "phadd.d" | "phsub.w" | "phsub.sw" | "phsub.d" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (which, saturating) = match unprefixed_name { "phadd.w" | "phadd.d" => (mir::BinOp::Add, false), @@ -57,7 +58,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { | "gather.d.pd.256" | "gather.q.pd" | "gather.q.pd.256" | "gather.d.ps" | "gather.d.ps.256" | "gather.q.ps" | "gather.q.ps.256" => { let [src, slice, offsets, mask, scale] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(dest.layout, src.layout); @@ -114,7 +115,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // intermediate signed 32-bit integers. Horizontally add adjacent pairs of // intermediate 32-bit integers, and pack the results in `dest`. "pmadd.wd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -150,7 +152,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the saturating sum of the products with indices `2*i` and `2*i+1` // produces the output at index `i`. "pmadd.ub.sw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -184,7 +187,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is loaded from `ptr.wrapping_add(i)`, otherwise zero is // loaded. "maskload.d" | "maskload.q" | "maskload.d.256" | "maskload.q.256" => { - let [ptr, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_load(this, ptr, mask, dest)?; } @@ -194,7 +197,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is stored into `ptr.wapping_add(i)`. // Unlike SSE2's _mm_maskmoveu_si128, these are not non-temporal stores. "maskstore.d" | "maskstore.q" | "maskstore.d.256" | "maskstore.q.256" => { - let [ptr, mask, value] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask, value] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_store(this, ptr, mask, value)?; } @@ -205,7 +209,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // offsets specified in `imm`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_mpsadbw_epu8 "mpsadbw" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mpsadbw(this, left, right, imm, dest)?; } @@ -216,7 +221,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // 1 and then taking the bits `1..=16`. 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_mulhrs_epi16 "pmul.hr.sw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pmulhrsw(this, left, right, dest)?; } @@ -224,7 +230,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit integer vectors to a single 8-bit integer // vector with signed saturation. "packsswb" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packsswb(this, left, right, dest)?; } @@ -232,7 +239,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 32-bit integer vectors to a single 16-bit integer // vector with signed saturation. "packssdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packssdw(this, left, right, dest)?; } @@ -240,7 +248,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit signed integer vectors to a single 8-bit // unsigned integer vector with saturation. "packuswb" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packuswb(this, left, right, dest)?; } @@ -248,7 +257,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Concatenates two 32-bit signed integer vectors and converts // the result to a 16-bit unsigned integer vector with saturation. "packusdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packusdw(this, left, right, dest)?; } @@ -257,7 +267,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles `left` using the three low bits of each element of `right` // as indices. "permd" | "permps" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -277,7 +288,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_permute2x128_si256 function. // Shuffles 128-bit blocks of `a` and `b` using `imm` as pattern. "vperm2i128" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(left.layout.size.bits(), 256); assert_eq!(right.layout.size.bits(), 256); @@ -314,7 +326,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // in `dest`. 
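The pmul.hr.sw comment above describes the shift-and-round sequence applied to each 32-bit product. A scalar reference of that per-lane formula (illustrative only):

// Widen to i32, multiply, shift right by 14, add the rounding bit, keep bits 1..=16.
fn pmulhrsw(a: i16, b: i16) -> i16 {
    let t = (a as i32) * (b as i32);
    (((t >> 14) + 1) >> 1) as i16
}

fn main() {
    assert_eq!(pmulhrsw(0x4000, 0x4000), 0x2000); // 0.5 * 0.5 = 0.25 in Q15
    assert_eq!(pmulhrsw(i16::MIN, i16::MIN), i16::MIN); // the one overflow case wraps
}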
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_sad_epu8 "psad.bw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -346,7 +359,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles bytes from `left` using `right` as pattern. // Each 128-bit block is shuffled independently. "pshuf.b" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -377,7 +391,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is writen to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. "psign.b" | "psign.w" | "psign.d" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; psign(this, left, right, dest)?; } @@ -391,7 +406,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is copied to remaining bits. "psll.w" | "psrl.w" | "psra.w" | "psll.d" | "psrl.d" | "psra.d" | "psll.q" | "psrl.q" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psll.w" | "psll.d" | "psll.q" => ShiftOp::Left, @@ -406,7 +422,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // (except _mm{,256}_srav_epi64, which are not available in AVX2). "psllv.d" | "psllv.d.256" | "psllv.q" | "psllv.q.256" | "psrlv.d" | "psrlv.d.256" | "psrlv.q" | "psrlv.q.256" | "psrav.d" | "psrav.d.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psllv.d" | "psllv.d.256" | "psllv.q" | "psllv.q.256" => ShiftOp::Left, diff --git a/src/tools/miri/src/shims/x86/bmi.rs b/src/tools/miri/src/shims/x86/bmi.rs index 80b1b2e16e6..140e31cc513 100644 --- a/src/tools/miri/src/shims/x86/bmi.rs +++ b/src/tools/miri/src/shims/x86/bmi.rs @@ -35,7 +35,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_scalar(left)?; let right = this.read_scalar(right)?; diff --git a/src/tools/miri/src/shims/x86/gfni.rs b/src/tools/miri/src/shims/x86/gfni.rs index f83ce560c84..9a98a80d6dc 100644 --- a/src/tools/miri/src/shims/x86/gfni.rs +++ b/src/tools/miri/src/shims/x86/gfni.rs @@ -31,14 +31,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // See `affine_transform` for details. 
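The psad.bw shim above sums absolute byte differences into a wide output lane. A scalar reference for one 8-byte group (illustrative only):

// Sum of |left[i] - right[i]| over eight u8 pairs, zero-extended to 64 bits.
fn psadbw(left: [u8; 8], right: [u8; 8]) -> u64 {
    left.iter()
        .zip(right.iter())
        .map(|(&l, &r)| l.abs_diff(r) as u64)
        .sum()
}

fn main() {
    assert_eq!(psadbw([0, 1, 2, 3, 4, 5, 6, 7], [7, 6, 5, 4, 3, 2, 1, 0]), 32);
}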
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8affine_ "vgf2p8affineqb.128" | "vgf2p8affineqb.256" | "vgf2p8affineqb.512" => { - let [left, right, imm8] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm8] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; affine_transform(this, left, right, imm8, dest, /* inverse */ false)?; } // Used to implement the `_mm{, 256, 512}_gf2p8affineinv_epi64_epi8` functions. // See `affine_transform` for details. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8affineinv "vgf2p8affineinvqb.128" | "vgf2p8affineinvqb.256" | "vgf2p8affineinvqb.512" => { - let [left, right, imm8] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm8] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; affine_transform(this, left, right, imm8, dest, /* inverse */ true)?; } // Used to implement the `_mm{, 256, 512}_gf2p8mul_epi8` functions. @@ -47,7 +49,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // polynomial representation with the reduction polynomial x^8 + x^4 + x^3 + x + 1. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8mul "vgf2p8mulb.128" | "vgf2p8mulb.256" | "vgf2p8mulb.512" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; let (dest, dest_len) = this.project_to_simd(dest)?; diff --git a/src/tools/miri/src/shims/x86/mod.rs b/src/tools/miri/src/shims/x86/mod.rs index fbfe459711e..3324b7b024a 100644 --- a/src/tools/miri/src/shims/x86/mod.rs +++ b/src/tools/miri/src/shims/x86/mod.rs @@ -45,7 +45,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [cb_in, a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cb_in, a, b] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let op = if unprefixed_name.starts_with("add") { mir::BinOp::AddWithOverflow } else { @@ -67,7 +68,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if is_u64 && this.tcx.sess.target.arch != "x86_64" { return interp_ok(EmulateItemResult::NotSupported); } - let [c_in, a, b, out] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [c_in, a, b, out] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let out = this.deref_pointer_as( out, if is_u64 { this.machine.layouts.u64 } else { this.machine.layouts.u32 }, @@ -84,7 +86,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the instruction behaves like a no-op, so it is always safe to call the // intrinsic. "sse2.pause" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // Only exhibit the spin-loop hint behavior when SSE2 is enabled. 
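The vgf2p8mulb comment above names the reduction polynomial x^8 + x^4 + x^3 + x + 1. A scalar reference of byte multiplication in that field (illustrative only):

// Carry-less multiply in GF(2^8), reduced by the AES polynomial 0x11B.
fn gf2p8mul(mut a: u8, mut b: u8) -> u8 {
    let mut r = 0u8;
    while b != 0 {
        if b & 1 != 0 {
            r ^= a;
        }
        let carry = a & 0x80 != 0;
        a <<= 1;
        if carry {
            a ^= 0x1B; // "subtract" (xor) the reduction polynomial
        }
        b >>= 1;
    }
    r
}

fn main() {
    assert_eq!(gf2p8mul(0x53, 0xCA), 0x01); // 0x53 and 0xCA are inverses in this field
}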
if this.tcx.sess.unstable_target_features.contains(&Symbol::intern("sse2")) { this.yield_active_thread(); @@ -103,7 +105,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { len = 8; } - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pclmulqdq(this, left, right, imm, dest, len)?; } diff --git a/src/tools/miri/src/shims/x86/sha.rs b/src/tools/miri/src/shims/x86/sha.rs index d37fad3e6c7..00fe58119e4 100644 --- a/src/tools/miri/src/shims/x86/sha.rs +++ b/src/tools/miri/src/shims/x86/sha.rs @@ -53,7 +53,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match unprefixed_name { // Used to implement the _mm_sha256rnds2_epu32 function. "256rnds2" => { - let [a, b, k] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b, k] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; @@ -74,7 +74,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm_sha256msg1_epu32 function. "256msg1" => { - let [a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; @@ -92,7 +92,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm_sha256msg2_epu32 function. "256msg2" => { - let [a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; diff --git a/src/tools/miri/src/shims/x86/sse.rs b/src/tools/miri/src/shims/x86/sse.rs index 1ec15d609c6..6d8def5b53f 100644 --- a/src/tools/miri/src/shims/x86/sse.rs +++ b/src/tools/miri/src/shims/x86/sse.rs @@ -34,7 +34,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Performs the operations on the first component of `left` and // `right` and copies the remaining components from `left`. "min.ss" | "max.ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ss" => FloatBinOp::Min, @@ -50,7 +51,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.ps" | "max.ps" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ps" => FloatBinOp::Min, @@ -64,7 +66,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Performs the operations on the first component of `op` and // copies the remaining components from `op`. "rcp.ss" | "rsqrt.ss" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ss" => FloatUnaryOp::Rcp, @@ -77,7 +79,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement _mm_{sqrt,rcp,rsqrt}_ps functions. 
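The pclmulqdq shim above performs carry-less multiplication. A scalar reference of one 64x64 -> 128-bit carry-less product (illustrative only):

// Like schoolbook multiplication, but partial products are combined with XOR.
fn clmul(a: u64, b: u64) -> u128 {
    let mut acc = 0u128;
    for i in 0..64 {
        if (b >> i) & 1 == 1 {
            acc ^= (a as u128) << i;
        }
    }
    acc
}

fn main() {
    assert_eq!(clmul(0b101, 0b11), 0b1111); // (x^2 + 1)(x + 1) = x^3 + x^2 + x + 1
}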
// Performs the operations on all components of `op`. "rcp.ps" | "rsqrt.ps" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ps" => FloatUnaryOp::Rcp, @@ -96,7 +98,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_ss are SSE functions // with hard-coded operations. "cmp.ss" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -112,7 +115,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_ps are SSE functions // with hard-coded operations. "cmp.ps" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -125,7 +129,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "comieq.ss" | "comilt.ss" | "comile.ss" | "comigt.ss" | "comige.ss" | "comineq.ss" | "ucomieq.ss" | "ucomilt.ss" | "ucomile.ss" | "ucomigt.ss" | "ucomige.ss" | "ucomineq.ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -153,7 +158,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cvtss_si64 and _mm_cvttss_si64 functions. // Converts the first component of `op` from f32 to i32/i64. "cvtss2si" | "cvttss2si" | "cvtss2si64" | "cvttss2si64" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, _) = this.project_to_simd(op)?; let op = this.read_immediate(&this.project_index(&op, 0)?)?; @@ -181,7 +186,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // are copied from `left`. // https://www.felixcloutier.com/x86/cvtsi2ss "cvtsi2ss" | "cvtsi642ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (dest, dest_len) = this.project_to_simd(dest)?; diff --git a/src/tools/miri/src/shims/x86/sse2.rs b/src/tools/miri/src/shims/x86/sse2.rs index d6052f83077..8f53adfb5ec 100644 --- a/src/tools/miri/src/shims/x86/sse2.rs +++ b/src/tools/miri/src/shims/x86/sse2.rs @@ -41,7 +41,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // intermediate signed 32-bit integers. Horizontally add adjacent pairs of // intermediate 32-bit integers, and pack the results in `dest`. 
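The cvtss2si/cvttss2si shims above convert the first f32 lane to an integer, either truncating or rounding (the non-truncating form uses the "current SSE rounding mode", which this diff elsewhere assumes is nearest-even). A scalar sketch of the two behaviors that does not model the hardware's overflow result (illustrative only):

fn cvttss2si(x: f32) -> i32 {
    x as i32 // CVTTSS2SI truncates toward zero
}

fn cvtss2si(x: f32) -> i32 {
    x.round_ties_even() as i32 // assumed round-to-nearest-even
}

fn main() {
    assert_eq!(cvttss2si(2.5), 2);
    assert_eq!(cvtss2si(2.5), 2); // ties go to the even neighbor
    assert_eq!(cvtss2si(3.5), 4);
}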
"pmadd.wd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -79,7 +80,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_sad_epu8 "psad.bw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -117,7 +119,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is copied to remaining bits. "psll.w" | "psrl.w" | "psra.w" | "psll.d" | "psrl.d" | "psra.d" | "psll.q" | "psrl.q" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psll.w" | "psll.d" | "psll.q" => ShiftOp::Left, @@ -132,7 +135,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and _mm_cvttpd_epi32 functions. // Converts packed f32/f64 to packed i32. "cvtps2dq" | "cvttps2dq" | "cvtpd2dq" | "cvttpd2dq" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op_len, _) = op.layout.ty.simd_size_and_type(*this.tcx); let (dest_len, _) = dest.layout.ty.simd_size_and_type(*this.tcx); @@ -169,7 +172,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit integer vectors to a single 8-bit integer // vector with signed saturation. "packsswb.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packsswb(this, left, right, dest)?; } @@ -177,7 +181,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit signed integer vectors to a single 8-bit // unsigned integer vector with saturation. "packuswb.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packuswb(this, left, right, dest)?; } @@ -185,7 +190,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 32-bit integer vectors to a single 16-bit integer // vector with signed saturation. "packssdw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packssdw(this, left, right, dest)?; } @@ -195,7 +201,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.sd" | "max.sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.sd" => FloatBinOp::Min, @@ -211,7 +218,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. 
"min.pd" | "max.pd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.pd" => FloatBinOp::Min, @@ -230,7 +238,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_sd are SSE2 functions // with hard-coded operations. "cmp.sd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -246,7 +255,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_pd are SSE2 functions // with hard-coded operations. "cmp.pd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -259,7 +269,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "comieq.sd" | "comilt.sd" | "comile.sd" | "comigt.sd" | "comige.sd" | "comineq.sd" | "ucomieq.sd" | "ucomilt.sd" | "ucomile.sd" | "ucomigt.sd" | "ucomige.sd" | "ucomineq.sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -287,7 +298,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cvtsd_si64 and _mm_cvttsd_si64 functions. // Converts the first component of `op` from f64 to i32/i64. "cvtsd2si" | "cvttsd2si" | "cvtsd2si64" | "cvttsd2si64" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, _) = this.project_to_simd(op)?; let op = this.read_immediate(&this.project_index(&op, 0)?)?; @@ -313,7 +324,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts the first f64/f32 from `right` to f32/f64 and copies // the remaining elements from `left` "cvtsd2ss" | "cvtss2sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, _) = this.project_to_simd(right)?; diff --git a/src/tools/miri/src/shims/x86/sse3.rs b/src/tools/miri/src/shims/x86/sse3.rs index ebf3cb5c3ee..0fd8c3bc389 100644 --- a/src/tools/miri/src/shims/x86/sse3.rs +++ b/src/tools/miri/src/shims/x86/sse3.rs @@ -26,7 +26,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add/subtract adjacent floating point values // in `left` and `right`. 
"hadd.ps" | "hadd.pd" | "hsub.ps" | "hsub.pd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "hadd.ps" | "hadd.pd" => mir::BinOp::Add, @@ -42,7 +43,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the data crosses a cache line, but for Miri this is just a regular // unaligned read. "ldu.dq" => { - let [src_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [src_ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let src_ptr = this.read_pointer(src_ptr)?; let dest = dest.force_mplace(this)?; diff --git a/src/tools/miri/src/shims/x86/sse41.rs b/src/tools/miri/src/shims/x86/sse41.rs index 6797039cf56..7736b5e443d 100644 --- a/src/tools/miri/src/shims/x86/sse41.rs +++ b/src/tools/miri/src/shims/x86/sse41.rs @@ -28,7 +28,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // bits `4..=5` if `imm`, and `i`th bit specifies whether element // `i` is zeroed. "insertps" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -63,7 +64,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Concatenates two 32-bit signed integer vectors and converts // the result to a 16-bit unsigned integer vector with saturation. "packusdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packusdw(this, left, right, dest)?; } @@ -73,7 +75,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // products, and conditionally stores the sum in `dest` using the low // 4 bits of `imm`. "dpps" | "dppd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; conditional_dot_product(this, left, right, imm, dest)?; } @@ -81,14 +84,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // functions. Rounds the first element of `right` according to `rounding` // and copies the remaining elements from `left`. "round.ss" => { - let [left, right, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_first::<rustc_apfloat::ieee::Single>(this, left, right, rounding, dest)?; } // Used to implement the _mm_floor_ps, _mm_ceil_ps and _mm_round_ps // functions. Rounds the elements of `op` according to `rounding`. "round.ps" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Single>(this, op, rounding, dest)?; } @@ -96,14 +101,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // functions. Rounds the first element of `right` according to `rounding` // and copies the remaining elements from `left`. 
"round.sd" => { - let [left, right, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_first::<rustc_apfloat::ieee::Double>(this, left, right, rounding, dest)?; } // Used to implement the _mm_floor_pd, _mm_ceil_pd and _mm_round_pd // functions. Rounds the elements of `op` according to `rounding`. "round.pd" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Double>(this, op, rounding, dest)?; } @@ -111,7 +118,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Find the minimum unsinged 16-bit integer in `op` and // returns its value and position. "phminposuw" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, op_len) = this.project_to_simd(op)?; let (dest, dest_len) = this.project_to_simd(dest)?; @@ -145,7 +152,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // offsets specified in `imm`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_mpsadbw_epu8 "mpsadbw" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mpsadbw(this, left, right, imm, dest)?; } @@ -154,7 +162,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Tests `(op & mask) == 0`, `(op & mask) == mask` or // `(op & mask) != 0 && (op & mask) != mask` "ptestz" | "ptestc" | "ptestnzc" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (all_zero, masked_set) = test_bits_masked(this, op, mask)?; let res = match unprefixed_name { diff --git a/src/tools/miri/src/shims/x86/sse42.rs b/src/tools/miri/src/shims/x86/sse42.rs index 7e1e1482ef4..72c5039a12d 100644 --- a/src/tools/miri/src/shims/x86/sse42.rs +++ b/src/tools/miri/src/shims/x86/sse42.rs @@ -222,7 +222,8 @@ fn deconstruct_args<'tcx>( }; if is_explicit { - let [str1, len1, str2, len2, imm] = ecx.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, len1, str2, len2, imm] = + ecx.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = ecx.read_scalar(imm)?.to_u8()?; let default_len = default_len::<u32>(imm); @@ -235,7 +236,7 @@ fn deconstruct_args<'tcx>( interp_ok((str1, str2, Some((len1, len2)), imm)) } else { - let [str1, str2, imm] = ecx.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, str2, imm] = ecx.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = ecx.read_scalar(imm)?.to_u8()?; let array_layout = array_layout_fn(ecx, imm)?; @@ -385,7 +386,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // search for a null terminator (see `deconstruct_args` for more details). 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#ig_expand=924,925 "pcmpistriz128" | "pcmpistris128" => { - let [str1, str2, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, str2, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = this.read_scalar(imm)?.to_u8()?; let str = if unprefixed_name == "pcmpistris128" { str1 } else { str2 }; @@ -405,7 +407,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // than 16 for byte-sized operands or 8 for word-sized operands. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#ig_expand=1046,1047 "pcmpestriz128" | "pcmpestris128" => { - let [_, len1, _, len2, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, len1, _, len2, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let len = if unprefixed_name == "pcmpestris128" { len1 } else { len2 }; let len = this.read_scalar(len)?.to_i32()?; let imm = this.read_scalar(imm)?.to_u8()?; @@ -432,7 +435,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_scalar(left)?; let right = this.read_scalar(right)?; diff --git a/src/tools/miri/src/shims/x86/ssse3.rs b/src/tools/miri/src/shims/x86/ssse3.rs index 310d6b8f765..52ad6bd4419 100644 --- a/src/tools/miri/src/shims/x86/ssse3.rs +++ b/src/tools/miri/src/shims/x86/ssse3.rs @@ -25,7 +25,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm_abs_epi{8,16,32} functions. // Calculates the absolute value of packed 8/16/32-bit integers. "pabs.b.128" | "pabs.w.128" | "pabs.d.128" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; int_abs(this, op, dest)?; } @@ -33,7 +33,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles bytes from `left` using `right` as pattern. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_shuffle_epi8 "pshuf.b.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -62,7 +63,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // integer values in `left` and `right`. "phadd.w.128" | "phadd.sw.128" | "phadd.d.128" | "phsub.w.128" | "phsub.sw.128" | "phsub.d.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (which, saturating) = match unprefixed_name { "phadd.w.128" | "phadd.d.128" => (mir::BinOp::Add, false), @@ -81,7 +83,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // produces the output at index `i`. 
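The pabs shims above compute lane-wise absolute values, including the two's-complement edge case where the most negative value has no positive counterpart. A scalar reference (illustrative only):

fn pabsw(x: i16) -> i16 {
    // PABSW leaves 0x8000 unchanged, i.e. it wraps rather than saturating.
    x.wrapping_abs()
}

fn main() {
    assert_eq!(pabsw(-5), 5);
    assert_eq!(pabsw(i16::MIN), i16::MIN);
}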
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_maddubs_epi16 "pmadd.ub.sw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -116,7 +119,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // 1 and then taking the bits `1..=16`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_mulhrs_epi16 "pmul.hr.sw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pmulhrsw(this, left, right, dest)?; } @@ -126,7 +130,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is writen to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. "psign.b.128" | "psign.w.128" | "psign.d.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; psign(this, left, right, dest)?; } diff --git a/src/tools/miri/test_dependencies/Cargo.lock b/src/tools/miri/tests/deps/Cargo.lock index 276c518e74f..4b783ebdc4e 100644 --- a/src/tools/miri/test_dependencies/Cargo.lock +++ b/src/tools/miri/tests/deps/Cargo.lock @@ -13,15 +13,15 @@ dependencies = [ [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" dependencies = [ "addr2line", "cfg-if", @@ -29,20 +29,20 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets", + "windows-targets 0.52.6", ] [[package]] name = "bitflags" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytes" @@ -52,18 +52,18 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" [[package]] name = "errno" -version = "0.3.11" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e" +checksum = 
"778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -85,22 +85,22 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73fea8450eea4bac3940448fb7ae50d91f034f941199fcd9d909a5a07aa455f0" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", @@ -116,9 +116,20 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "hermit-abi" -version = "0.3.9" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "io-uring" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" +dependencies = [ + "bitflags", + "cfg-if", + "libc", +] [[package]] name = "js-sys" @@ -132,15 +143,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.171" +version = "0.2.174" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] name = "linux-raw-sys" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe7db12097d22ec582439daf8618b8fdd1a7bef6270e9af3b1ebcd30893cf413" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] name = "log" @@ -150,28 +161,28 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "miniz_oxide" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] [[package]] name = "mio" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", ] [[package]] @@ -180,21 +191,21 @@ version = "0.1.0" dependencies = [ "cfg-if", "getrandom 0.1.16", - "getrandom 0.2.15", 
- "getrandom 0.3.2", + "getrandom 0.2.16", + "getrandom 0.3.3", "libc", "num_cpus", "page_size", "tempfile", "tokio", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ "hermit-abi", "libc", @@ -233,9 +244,9 @@ checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "proc-macro2" -version = "1.0.94" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" dependencies = [ "unicode-ident", ] @@ -251,43 +262,49 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" [[package]] name = "rustix" -version = "1.0.5" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" dependencies = [ "libc", ] [[package]] +name = "slab" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dc19736151f35336d325007ac991178d504a119863a2fcb3758cdb5e52c50d" + +[[package]] name = "socket2" -version = "0.5.9" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f5fd57c80058a56cf5c777ab8a126398ece8e442983605d280a44ce79d0edef" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" dependencies = [ "libc", "windows-sys 0.52.0", @@ -295,9 +312,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.100" +version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" dependencies = [ "proc-macro2", "quote", @@ -306,12 +323,12 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.19.1" +version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7437ac7763b9b123ccf33c338a5cc1bac6f69b45a136c19bdd8a65e3916435bf" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - 
"getrandom 0.3.2", + "getrandom 0.3.3", "once_cell", "rustix", "windows-sys 0.59.0", @@ -319,16 +336,18 @@ dependencies = [ [[package]] name = "tokio" -version = "1.44.2" +version = "1.46.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6b88822cbe49de4185e3a4cbf8321dd487cf5fe0c5c65695fef6346371e9c48" +checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" dependencies = [ "backtrace", "bytes", + "io-uring", "libc", "mio", "pin-project-lite", "signal-hook-registry", + "slab", "socket2", "tokio-macros", "windows-sys 0.52.0", @@ -359,9 +378,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -457,7 +476,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", ] [[package]] @@ -466,7 +485,16 @@ version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.2", ] [[package]] @@ -475,14 +503,30 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", ] [[package]] @@ -492,48 +536,96 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] name = "wit-bindgen-rt" version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" diff --git a/src/tools/miri/test_dependencies/Cargo.toml b/src/tools/miri/tests/deps/Cargo.toml index 35555723f5d..d85723f0915 100644 --- a/src/tools/miri/test_dependencies/Cargo.toml +++ b/src/tools/miri/tests/deps/Cargo.toml @@ -25,7 +25,7 @@ page_size = "0.6" tokio = { version = "1", features = ["macros", "rt-multi-thread", "time", "net", "fs", "sync", "signal", "io-util"] } [target.'cfg(windows)'.dependencies] -windows-sys = { version = "0.59", features = [ +windows-sys = { version = "0.60", features = [ "Win32_Foundation", "Win32_System_Threading", "Win32_Storage_FileSystem", diff --git a/src/tools/miri/test_dependencies/src/main.rs 
b/src/tools/miri/tests/deps/src/main.rs index f328e4d9d04..f328e4d9d04 100644 --- a/src/tools/miri/test_dependencies/src/main.rs +++ b/src/tools/miri/tests/deps/src/main.rs diff --git a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs index 314ce90cfb5..f6ec5be61bb 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs @@ -10,6 +10,9 @@ use std::convert::TryInto; use std::thread; use std::thread::spawn; +#[path = "../../utils/libc.rs"] +mod libc_utils; + #[track_caller] fn check_epoll_wait<const N: usize>(epfd: i32, expected_notifications: &[(u32, u64)]) { let epoll_event = libc::epoll_event { events: 0, u64: 0 }; @@ -69,12 +72,12 @@ fn main() { unsafe { VAL_ONE = 41 }; let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds_a[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds_a[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); unsafe { VAL_TWO = 51 }; - let res = unsafe { libc::write(fds_b[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds_b[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); thread::yield_now(); diff --git a/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs b/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs index 1dc334486c3..e2fd6463a11 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs @@ -10,6 +10,9 @@ use std::mem::MaybeUninit; #[path = "../../utils/mod.rs"] mod utils; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { let path = utils::prepare_with_content("fail-libc-read-and-uninit-premature-eof.txt", &[1u8, 2, 3]); @@ -18,8 +21,9 @@ fn main() { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); let mut buf: MaybeUninit<[u8; 4]> = std::mem::MaybeUninit::uninit(); - // Read 4 bytes from a 3-byte file. - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4), 3); + // Read as much as we can from a 3-byte file. + let res = libc_utils::read_all(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4); + assert!(res == 3); buf.assume_init(); //~ERROR: encountered uninitialized memory, but expected an integer assert_eq!(libc::close(fd), 0); } diff --git a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs index f6f2e2b9312..054cb812d9e 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs @@ -75,9 +75,10 @@ fn main() { }); let thread3 = spawn(move || { + // Just a single write, so we only wake up one of them. let data = "abcde".as_bytes().as_ptr(); let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; - assert_eq!(res, 5); + assert!(res > 0 && res <= 5); }); thread1.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs index b3839859500..0fecfb8f663 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs @@ -4,6 +4,7 @@ // test_race depends on a deterministic schedule. 
//@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock +//@require-annotations-for-level: error use std::thread; @@ -22,24 +23,26 @@ fn main() { assert_eq!(res, 0); let thread1 = thread::spawn(move || { // Let this thread block on read. - let mut buf: [u8; 3] = [0; 3]; + let mut buf: [u8; 1] = [0; 1]; let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf, "abc".as_bytes()); + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(&buf, "a".as_bytes()); }); let thread2 = thread::spawn(move || { // Let this thread block on read. - let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - //~^ERROR: deadlocked - assert_eq!(res, 3); - assert_eq!(&buf, "abc".as_bytes()); + let mut buf: [u8; 1] = [0; 1]; + let res = unsafe { + libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + //~^ERROR: deadlock + }; + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(&buf, "a".as_bytes()); }); let thread3 = thread::spawn(move || { // Unblock thread1 by writing something. - let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - assert_eq!(res, 3); + let data = "a".as_bytes(); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + assert_eq!(res, data.len().cast_signed()); }); thread1.join().unwrap(); thread2.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr index 9f19a60e6ae..99d242ec7da 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr @@ -23,8 +23,8 @@ error: the evaluated program deadlocked error: the evaluated program deadlocked --> tests/fail-dep/libc/socketpair_block_read_twice.rs:LL:CC | -LL | let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - | ^ this thread got stuck here +LL | libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + | ^ this thread got stuck here | = note: BACKTRACE on thread `unnamed-ID`: = note: inside closure at tests/fail-dep/libc/socketpair_block_read_twice.rs:LL:CC diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs index 7d84d87ebbb..048938c091e 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs @@ -4,16 +4,20 @@ // test_race depends on a deterministic schedule. //@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock +//@require-annotations-for-level: error use std::thread; +#[path = "../../utils/libc.rs"] +mod libc_utils; + // Test the behaviour of a thread being blocked on write, get unblocked, then blocked again. // The expected execution is // 1. Thread 1 blocks. // 2. Thread 2 blocks. // 3. Thread 3 unblocks both thread 1 and thread 2. -// 4. Thread 1 reads. +// 4. Thread 1 writes. // 5. Thread 2's `write` can never complete -> deadlocked. fn main() { let mut fds = [-1, -1]; @@ -21,27 +25,28 @@ fn main() { assert_eq!(res, 0); let arr1: [u8; 212992] = [1; 212992]; // Exhaust the space in the buffer so the subsequent write will block. 
- let res = unsafe { libc::write(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; + let res = + unsafe { libc_utils::write_all(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; assert_eq!(res, 212992); let thread1 = thread::spawn(move || { - let data = "abc".as_bytes().as_ptr(); + let data = "a".as_bytes(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - assert_eq!(res, 3); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + assert_eq!(res, data.len().cast_signed()); }); let thread2 = thread::spawn(move || { - let data = "abc".as_bytes().as_ptr(); + let data = "a".as_bytes(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - //~^ERROR: deadlocked - assert_eq!(res, 3); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + //~^ERROR: deadlock + assert_eq!(res, data.len().cast_signed()); }); let thread3 = thread::spawn(move || { // Unblock thread1 by freeing up some space. - let mut buf: [u8; 3] = [0; 3]; + let mut buf: [u8; 1] = [0; 1]; let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(buf, [1, 1, 1]); + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(buf, [1]); }); thread1.join().unwrap(); thread2.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr index b29cd70f35e..f766500d331 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr @@ -23,8 +23,8 @@ error: the evaluated program deadlocked error: the evaluated program deadlocked --> tests/fail-dep/libc/socketpair_block_write_twice.rs:LL:CC | -LL | let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - | ^ this thread got stuck here +LL | let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + | ^ this thread got stuck here | = note: BACKTRACE on thread `unnamed-ID`: = note: inside closure at tests/fail-dep/libc/socketpair_block_write_twice.rs:LL:CC diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs index 4468eb299f3..26f2e73dd75 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs @@ -17,5 +17,5 @@ fn main() { // These two types have the same size but are still not compatible. 
let g = unsafe { std::mem::transmute::<fn(S), fn(A)>(f) }; - g(Default::default()) //~ ERROR: calling a function with argument of type S passing data of type [i32; 4] + g(Default::default()) //~ ERROR: type S passing argument of type [i32; 4] } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr index cabefa8bee9..f793abb0b62 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type S passing data of type [i32; 4] +error: Undefined Behavior: calling a function whose parameter #1 has type S passing argument of type [i32; 4] --> tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs:LL:CC | LL | g(Default::default()) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs index a1fda329e8d..0cca4a13233 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn(f32), fn(i32)>(f) }; - g(42) //~ ERROR: calling a function with argument of type f32 passing data of type i32 + g(42) //~ ERROR: type f32 passing argument of type i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr index 52cc48d58ce..3651fc9b3f7 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type f32 passing data of type i32 +error: Undefined Behavior: calling a function whose parameter #1 has type f32 passing argument of type i32 --> tests/fail/function_pointers/abi_mismatch_int_vs_float.rs:LL:CC | LL | g(42) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs index f0ea5ccfe0f..053a4a5f284 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn(*const [i32]), fn(*const i32)>(f) }; - g(&42 as *const i32) //~ ERROR: calling a function with argument of type *const [i32] passing data of type *const i32 + g(&42 as *const i32) //~ ERROR: type *const [i32] passing argument of type *const i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr index 2fbb0408c59..88345a0688c 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type *const [i32] passing data of type *const i32 +error: Undefined Behavior: calling a function whose parameter #1 has type *const [i32] passing argument of type *const i32 --> 
tests/fail/function_pointers/abi_mismatch_raw_pointer.rs:LL:CC | LL | g(&42 as *const i32) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs index c5900489b4c..f3dffcc4e86 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs @@ -12,5 +12,5 @@ fn main() { let fnptr: fn(S2) = callee; let fnptr: fn(S1) = unsafe { std::mem::transmute(fnptr) }; fnptr(S1(NonZero::new(1).unwrap())); - //~^ ERROR: calling a function with argument of type S2 passing data of type S1 + //~^ ERROR: type S2 passing argument of type S1 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr index 2c1ac0ee702..47658395132 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type S2 passing data of type S1 +error: Undefined Behavior: calling a function whose parameter #1 has type S2 passing argument of type S1 --> tests/fail/function_pointers/abi_mismatch_repr_C.rs:LL:CC | LL | fnptr(S1(NonZero::new(1).unwrap())); diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs index 0fdab49b94b..05b645cf75a 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs @@ -5,5 +5,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn() -> u32, fn()>(f) }; - g() //~ ERROR: calling a function with return type u32 passing return place of type () + g() //~ ERROR: type u32 passing return place of type () } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs index 20384f0965b..ca43c06008f 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn((i32, i32)), fn(i32)>(f) }; - g(42) //~ ERROR: calling a function with argument of type (i32, i32) passing data of type i32 + g(42) //~ ERROR: type (i32, i32) passing argument of type i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr index e45ad12ec05..2ed9ac2e6da 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type (i32, i32) passing data of type i32 +error: Undefined Behavior: calling a function whose parameter #1 has type (i32, i32) passing argument of type i32 --> tests/fail/function_pointers/abi_mismatch_simple.rs:LL:CC | LL | g(42) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs index 80f357b61ba..dedcaac6142 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs +++ 
b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs @@ -7,5 +7,5 @@ fn main() { // These two vector types have the same size but are still not compatible. let g = unsafe { std::mem::transmute::<fn(simd::u32x8), fn(simd::u64x4)>(f) }; - g(Default::default()) //~ ERROR: calling a function with argument of type std::simd::Simd<u32, 8> passing data of type std::simd::Simd<u64, 4> + g(Default::default()) //~ ERROR: type std::simd::Simd<u32, 8> passing argument of type std::simd::Simd<u64, 4> } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr index bad2495cb39..b13e8d936db 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type std::simd::Simd<u32, 8> passing data of type std::simd::Simd<u64, 4> +error: Undefined Behavior: calling a function whose parameter #1 has type std::simd::Simd<u32, 8> passing argument of type std::simd::Simd<u64, 4> --> tests/fail/function_pointers/abi_mismatch_vector.rs:LL:CC | LL | g(Default::default()) diff --git a/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs new file mode 100644 index 00000000000..1e10f682e71 --- /dev/null +++ b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs @@ -0,0 +1,39 @@ +unsafe extern "C" fn ctor() -> i32 { + //~^ERROR: calling a function with return type i32 passing return place of type () + 0 +} + +#[rustfmt::skip] +macro_rules! ctor { + ($ident:ident = $ctor:ident) => { + #[cfg_attr( + all(any( + target_os = "linux", + target_os = "android", + target_os = "dragonfly", + target_os = "freebsd", + target_os = "haiku", + target_os = "illumos", + target_os = "netbsd", + target_os = "openbsd", + target_os = "solaris", + target_os = "none", + target_family = "wasm", + )), + link_section = ".init_array" + )] + #[cfg_attr(windows, link_section = ".CRT$XCU")] + #[cfg_attr( + any(target_os = "macos", target_os = "ios"), + // We do not set the `mod_init_funcs` flag here since ctor/inventory also do not do + // that. See <https://github.com/rust-lang/miri/pull/4459#discussion_r2200115629>. + link_section = "__DATA,__mod_init_func" + )] + #[used] + static $ident: unsafe extern "C" fn() -> i32 = $ctor; + }; +} + +ctor! 
{ CTOR = ctor } + +fn main() {} diff --git a/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr new file mode 100644 index 00000000000..664bfbd32db --- /dev/null +++ b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr @@ -0,0 +1,12 @@ +error: Undefined Behavior: calling a function with return type i32 passing return place of type () + | + = note: Undefined Behavior occurred here + = note: (no span available) + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = help: this means these two types are not *guaranteed* to be ABI-compatible across all targets + = help: if you think this code should be accepted anyway, please report an issue with Miri + = note: BACKTRACE: + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs b/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs index eb8de04dcc4..77699776aea 100644 --- a/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs +++ b/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs @@ -16,6 +16,6 @@ fn main() { } as u32; let _ = unsafe { close(fd); - //~^ ERROR: calling a function with argument of type i32 passing data of type u32 + //~^ ERROR: type i32 passing argument of type u32 }; } diff --git a/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr b/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr index ce00b624a42..ec27fd5ebb8 100644 --- a/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr +++ b/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 +error: Undefined Behavior: calling a function whose parameter #1 has type i32 passing argument of type u32 --> tests/fail/shims/input_arg_mismatch.rs:LL:CC | LL | close(fd); diff --git a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs index 6df132d3255..36bd1e99cfb 100644 --- a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs +++ b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs @@ -6,7 +6,7 @@ fn main() { // the error should point to `become g(x)`, // but tail calls mess up the backtrace it seems like... 
f(0); - //~^ error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 + //~^ error: type i32 passing argument of type u32 } fn f(x: u32) { diff --git a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr index fbb0d3d565d..cabea5df85d 100644 --- a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr +++ b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 +error: Undefined Behavior: calling a function whose parameter #1 has type i32 passing argument of type u32 --> tests/fail/tail_calls/signature-mismatch-arg.rs:LL:CC | LL | f(0); diff --git a/src/tools/miri/tests/genmc/pass/test_cxx_build.rs b/src/tools/miri/tests/genmc/pass/test_cxx_build.rs new file mode 100644 index 00000000000..f621bd9114f --- /dev/null +++ b/src/tools/miri/tests/genmc/pass/test_cxx_build.rs @@ -0,0 +1,8 @@ +//@compile-flags: -Zmiri-genmc + +#![no_main] + +#[unsafe(no_mangle)] +fn miri_start(_argc: isize, _argv: *const *const u8) -> isize { + 0 +} diff --git a/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr b/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr new file mode 100644 index 00000000000..4b7aa824bd1 --- /dev/null +++ b/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr @@ -0,0 +1,5 @@ +warning: borrow tracking has been disabled, it is not (yet) supported in GenMC mode. +C++: GenMC handle created! +Miri: GenMC handle creation successful! +C++: GenMC handle destroyed! +Miri: Dropping GenMC handle successful! diff --git a/src/tools/miri/tests/panic/mir-validation.stderr b/src/tools/miri/tests/panic/mir-validation.stderr index dc70d129da3..f801ac907e6 100644 --- a/src/tools/miri/tests/panic/mir-validation.stderr +++ b/src/tools/miri/tests/panic/mir-validation.stderr @@ -1,11 +1,15 @@ +error: internal compiler error: compiler/rustc_mir_transform/src/validate.rs:LL:CC: broken MIR in Item(DefId) (after phase change to runtime-optimized) at bb0[1]: + place (*(_2.0: *mut i32)) has deref as a later projection (it is only permitted as the first projection) + --> tests/panic/mir-validation.rs:LL:CC + | +LL | *(tuple.0) = 1; + | ^^^^^^^^^^^^^^ + thread 'rustc' panicked at compiler/rustc_mir_transform/src/validate.rs:LL:CC: -broken MIR in Item(DefId) (after phase change to runtime-optimized) at bb0[1]: -place (*(_2.0: *mut i32)) has deref as a later projection (it is only permitted as the first projection) +Box<dyn Any> stack backtrace: -error: the compiler unexpectedly panicked. this is a bug. - @@ -20,3 +24,5 @@ LL | extern "rust-call" fn call_once(self, args: Args) -> Self::Output; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs index 54ebfa9d198..c97206487a1 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -6,6 +6,9 @@ use std::convert::TryInto; use std::thread; use std::thread::spawn; +#[path = "../../utils/libc.rs"] +mod libc_utils; + // This is a set of testcases for blocking epoll. 
fn main() { @@ -97,7 +100,7 @@ fn test_epoll_block_then_unblock() { let thread1 = spawn(move || { thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 10); @@ -130,7 +133,7 @@ fn test_notification_after_timeout() { // Trigger epoll notification after timeout. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Check the result of the notification. diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs index dc3ab2828fa..7130790b86d 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs @@ -2,6 +2,9 @@ use std::convert::TryInto; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_epoll_socketpair(); test_epoll_socketpair_both_sides(); @@ -64,7 +67,7 @@ fn test_epoll_socketpair() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register fd[1] with EPOLLIN|EPOLLOUT|EPOLLET|EPOLLRDHUP @@ -85,7 +88,7 @@ fn test_epoll_socketpair() { // Write some more to fd[0]. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // This did not change the readiness of fd[1]. And yet, we're seeing the event reported @@ -153,7 +156,7 @@ fn test_epoll_ctl_del() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register fd[1] with EPOLLIN|EPOLLOUT|EPOLLET @@ -182,7 +185,7 @@ fn test_two_epoll_instance() { // Write to the socketpair. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register one side of the socketpair with EPOLLIN | EPOLLOUT | EPOLLET. @@ -224,7 +227,7 @@ fn test_two_same_fd_in_same_epoll_instance() { // Write to the socketpair. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -243,7 +246,7 @@ fn test_epoll_eventfd() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Create an epoll instance. @@ -282,7 +285,7 @@ fn test_epoll_socketpair_both_sides() { // Write to fds[1]. 
let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -297,7 +300,8 @@ fn test_epoll_socketpair_both_sides() { // Read from fds[0]. let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "abcde".as_bytes()); @@ -325,7 +329,7 @@ fn test_closed_fd() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Close the eventfd. @@ -371,7 +375,8 @@ fn test_not_fully_closed_fd() { // Write to the eventfd instance to produce notification. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(newfd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = + unsafe { libc_utils::write_all(newfd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Close the dupped fd. @@ -391,7 +396,7 @@ fn test_event_overwrite() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Create an epoll instance. @@ -445,7 +450,7 @@ fn test_socketpair_read() { // Write 5 bytes to fds[1]. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -460,7 +465,8 @@ fn test_socketpair_read() { // Read 3 bytes from fds[0]. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 3); assert_eq!(buf, "abc".as_bytes()); @@ -478,7 +484,8 @@ fn test_socketpair_read() { // Read until the buffer is empty. let mut buf: [u8; 2] = [0; 2]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 2); assert_eq!(buf, "de".as_bytes()); @@ -510,8 +517,9 @@ fn test_no_notification_for_unregister_flag() { // Write to fd[1]. let data = "abcde".as_bytes().as_ptr(); - let res: i32 = - unsafe { libc::write(fds[1], data as *const libc::c_void, 5).try_into().unwrap() }; + let res: i32 = unsafe { + libc_utils::write_all(fds[1], data as *const libc::c_void, 5).try_into().unwrap() + }; assert_eq!(res, 5); // Check result from epoll_wait. 
Since we didn't register EPOLLIN flag, the notification won't @@ -546,7 +554,7 @@ fn test_socketpair_epollerr() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Close fds[1]. @@ -717,6 +725,6 @@ fn test_issue_3858() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); } diff --git a/src/tools/miri/tests/pass-dep/libc/libc-fs.rs b/src/tools/miri/tests/pass-dep/libc/libc-fs.rs index 0ff48c389e8..86cf2a041f0 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-fs.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-fs.rs @@ -14,6 +14,9 @@ use std::path::PathBuf; #[path = "../../utils/mod.rs"] mod utils; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_dup(); test_dup_stdout_stderr(); @@ -74,8 +77,8 @@ fn test_dup_stdout_stderr() { unsafe { let new_stdout = libc::fcntl(1, libc::F_DUPFD, 0); let new_stderr = libc::fcntl(2, libc::F_DUPFD, 0); - libc::write(new_stdout, bytes.as_ptr() as *const libc::c_void, bytes.len()); - libc::write(new_stderr, bytes.as_ptr() as *const libc::c_void, bytes.len()); + libc_utils::write_all(new_stdout, bytes.as_ptr() as *const libc::c_void, bytes.len()); + libc_utils::write_all(new_stderr, bytes.as_ptr() as *const libc::c_void, bytes.len()); } } @@ -92,16 +95,24 @@ fn test_dup() { let new_fd2 = libc::dup2(fd, 8); let mut first_buf = [0u8; 4]; - libc::read(fd, first_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&first_buf, b"dup "); + let first_len = libc::read(fd, first_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(first_len > 0); + let first_len = first_len as usize; + assert_eq!(first_buf[..first_len], bytes[..first_len]); + let remaining_bytes = &bytes[first_len..]; let mut second_buf = [0u8; 4]; - libc::read(new_fd, second_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&second_buf, b"and "); + let second_len = libc::read(new_fd, second_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(second_len > 0); + let second_len = second_len as usize; + assert_eq!(second_buf[..second_len], remaining_bytes[..second_len]); + let remaining_bytes = &remaining_bytes[second_len..]; let mut third_buf = [0u8; 4]; - libc::read(new_fd2, third_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&third_buf, b"dup2"); + let third_len = libc::read(new_fd2, third_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(third_len > 0); + let third_len = third_len as usize; + assert_eq!(third_buf[..third_len], remaining_bytes[..third_len]); } } @@ -145,7 +156,7 @@ fn test_ftruncate<T: From<i32>>( let bytes = b"hello"; let path = utils::prepare("miri_test_libc_fs_ftruncate.txt"); let mut file = File::create(&path).unwrap(); - file.write(bytes).unwrap(); + file.write_all(bytes).unwrap(); file.sync_all().unwrap(); assert_eq!(file.metadata().unwrap().len(), 5); @@ -402,10 +413,10 @@ fn test_read_and_uninit() { unsafe { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); - let mut buf: MaybeUninit<[u8; 2]> = std::mem::MaybeUninit::uninit(); - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 2), 2); + let mut buf: MaybeUninit<u8> = std::mem::MaybeUninit::uninit(); + 
assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 1), 1); let buf = buf.assume_init(); - assert_eq!(buf, [1, 2]); + assert_eq!(buf, 1); assert_eq!(libc::close(fd), 0); } remove_file(&path).unwrap(); @@ -413,14 +424,22 @@ fn test_read_and_uninit() { { // We test that if we requested to read 4 bytes, but actually read 3 bytes, then // 3 bytes (not 4) will be overwritten, and remaining byte will be left as-is. - let path = utils::prepare_with_content("pass-libc-read-and-uninit-2.txt", &[1u8, 2, 3]); + let data = [1u8, 2, 3]; + let path = utils::prepare_with_content("pass-libc-read-and-uninit-2.txt", &data); let cpath = CString::new(path.clone().into_os_string().into_encoded_bytes()).unwrap(); unsafe { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); let mut buf = [42u8; 5]; - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4), 3); - assert_eq!(buf, [1, 2, 3, 42, 42]); + let res = libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4); + assert!(res > 0 && res < 4); + for i in 0..buf.len() { + assert_eq!( + buf[i], + if i < res as usize { data[i] } else { 42 }, + "wrong result at pos {i}" + ); + } assert_eq!(libc::close(fd), 0); } remove_file(&path).unwrap(); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs index bc755af864c..ffbcf633b98 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs @@ -2,6 +2,10 @@ // test_race depends on a deterministic schedule. //@compile-flags: -Zmiri-deterministic-concurrency use std::thread; + +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_pipe(); test_pipe_threaded(); @@ -26,21 +30,29 @@ fn test_pipe() { // Read size == data available in buffer. let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf3: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) + }; assert_eq!(res, 5); assert_eq!(buf3, "12345".as_bytes()); // Read size > data available in buffer. - let data = "123".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 3) }; + let data = "123".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf4: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[0], buf4.as_mut_ptr().cast(), buf4.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf4[0..3], "123".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf4[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. 
+ let res = unsafe { libc_utils::read_all(fds[0], buf4[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } } fn test_pipe_threaded() { @@ -51,7 +63,7 @@ fn test_pipe_threaded() { let thread1 = thread::spawn(move || { let mut buf: [u8; 5] = [0; 5]; let res: i64 = unsafe { - libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -60,7 +72,7 @@ fn test_pipe_threaded() { }); thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); thread1.join().unwrap(); @@ -68,11 +80,12 @@ fn test_pipe_threaded() { let thread2 = thread::spawn(move || { thread::yield_now(); let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "12345".as_bytes()); thread2.join().unwrap(); @@ -90,7 +103,7 @@ fn test_race() { // write() from the main thread will occur before the read() here // because preemption is disabled and the main thread yields after write(). let res: i32 = unsafe { - libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -101,7 +114,7 @@ fn test_race() { }); unsafe { VAL = 1 }; let data = "a".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 1) }; assert_eq!(res, 1); thread::yield_now(); thread1.join().unwrap(); @@ -186,11 +199,12 @@ fn test_pipe_fcntl_threaded() { // the socket is now "non-blocking", the shim needs to deal correctly // with threads that were blocked before the socket was made non-blocking. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); // The `read` below will block. - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; thread1.join().unwrap(); assert_eq!(res, 5); } diff --git a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs index c36f6b11224..9c211ffbdbe 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs @@ -6,6 +6,10 @@ #![allow(static_mut_refs)] use std::thread; + +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_socketpair(); test_socketpair_threaded(); @@ -22,54 +26,71 @@ fn test_socketpair() { // Read size == data available in buffer. 
let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "abcde".as_bytes()); // Read size > data available in buffer. - let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let data = "abc".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[0], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf2: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[1], buf2.as_mut_ptr().cast(), buf2.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf2[0..3], "abc".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf2[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. + let res = unsafe { libc_utils::read_all(fds[1], buf2[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } // Test read and write from another direction. // Read size == data available in buffer. let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf3: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) + }; assert_eq!(res, 5); assert_eq!(buf3, "12345".as_bytes()); // Read size > data available in buffer. - let data = "123".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 3) }; + let data = "123".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf4: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[0], buf4.as_mut_ptr().cast(), buf4.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf4[0..3], "123".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf4[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. + let res = unsafe { libc_utils::read_all(fds[0], buf4[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } // Test when happens when we close one end, with some data in the buffer. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); unsafe { libc::close(fds[0]) }; // Reading the other end should return that data, then EOF. let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 3); assert_eq!(&buf[0..3], "123".as_bytes()); - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 0); // 0-sized read: EOF. 
// Writing the other end should emit EPIPE. - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 1) }; assert_eq!(res, -1); assert_eq!(std::io::Error::last_os_error().raw_os_error(), Some(libc::EPIPE)); } @@ -82,7 +103,7 @@ fn test_socketpair_threaded() { let thread1 = thread::spawn(move || { let mut buf: [u8; 5] = [0; 5]; let res: i64 = unsafe { - libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -91,7 +112,7 @@ fn test_socketpair_threaded() { }); thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); thread1.join().unwrap(); @@ -99,11 +120,12 @@ fn test_socketpair_threaded() { let thread2 = thread::spawn(move || { thread::yield_now(); let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "12345".as_bytes()); thread2.join().unwrap(); @@ -119,7 +141,7 @@ fn test_race() { // write() from the main thread will occur before the read() here // because preemption is disabled and the main thread yields after write(). let res: i32 = unsafe { - libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -130,7 +152,7 @@ fn test_race() { }); unsafe { VAL = 1 }; let data = "a".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 1) }; assert_eq!(res, 1); thread::yield_now(); thread1.join().unwrap(); @@ -144,14 +166,16 @@ fn test_blocking_read() { let thread1 = thread::spawn(move || { // Let this thread block on read. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + }; assert_eq!(res, 3); assert_eq!(&buf, "abc".as_bytes()); }); let thread2 = thread::spawn(move || { // Unblock thread1 by doing writing something. let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 3) }; assert_eq!(res, 3); }); thread1.join().unwrap(); @@ -165,18 +189,21 @@ fn test_blocking_write() { assert_eq!(res, 0); let arr1: [u8; 212992] = [1; 212992]; // Exhaust the space in the buffer so the subsequent write will block. 
- let res = unsafe { libc::write(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; + let res = + unsafe { libc_utils::write_all(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; assert_eq!(res, 212992); let thread1 = thread::spawn(move || { let data = "abc".as_bytes().as_ptr(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 3) }; assert_eq!(res, 3); }); let thread2 = thread::spawn(move || { // Unblock thread1 by freeing up some space. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + }; assert_eq!(res, 3); assert_eq!(buf, [1, 1, 1]); }); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-time.rs b/src/tools/miri/tests/pass-dep/libc/libc-time.rs index e8957846ad5..9e9fadfca9e 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-time.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-time.rs @@ -336,7 +336,7 @@ fn test_nanosleep() { let remainder = ptr::null_mut::<libc::timespec>(); let is_error = unsafe { libc::nanosleep(&duration_zero, remainder) }; assert_eq!(is_error, 0); - assert!(start_test_sleep.elapsed() < Duration::from_millis(10)); + assert!(start_test_sleep.elapsed() < Duration::from_millis(100)); let start_test_sleep = Instant::now(); let duration_100_millis = libc::timespec { tv_sec: 0, tv_nsec: 1_000_000_000 / 10 }; @@ -390,7 +390,7 @@ mod test_clock_nanosleep { ) }; assert_eq!(error, 0); - assert!(start_test_sleep.elapsed() < Duration::from_millis(10)); + assert!(start_test_sleep.elapsed() < Duration::from_millis(100)); let start_test_sleep = Instant::now(); let hunderd_millis_after_start = add_100_millis(timespec_now(libc::CLOCK_MONOTONIC)); @@ -417,7 +417,7 @@ mod test_clock_nanosleep { libc::clock_nanosleep(libc::CLOCK_MONOTONIC, NO_FLAGS, &duration_zero, remainder) }; assert_eq!(error, 0); - assert!(start_test_sleep.elapsed() < Duration::from_millis(10)); + assert!(start_test_sleep.elapsed() < Duration::from_millis(100)); let start_test_sleep = Instant::now(); let duration_100_millis = libc::timespec { tv_sec: 0, tv_nsec: 1_000_000_000 / 10 }; diff --git a/src/tools/miri/tests/pass/const-addrs.rs b/src/tools/miri/tests/pass/const-addrs.rs index af68b28b2b8..0d1531c73cd 100644 --- a/src/tools/miri/tests/pass/const-addrs.rs +++ b/src/tools/miri/tests/pass/const-addrs.rs @@ -1,14 +1,10 @@ -// The const fn interpreter creates a new AllocId every time it evaluates any const. -// If we do that in Miri, repeatedly evaluating a const causes unbounded memory use -// we need to keep track of the base address for that AllocId, and the allocation is never -// deallocated. -// In Miri we explicitly store previously-assigned AllocIds for each const and ensure -// that we only hand out a finite number of AllocIds per const. -// MIR inlining will put every evaluation of the const we're repeatedly evaluating into the same -// stack frame, breaking this test. +// The interpreter used to create a new AllocId every time it evaluates any const. +// This caused unbounded memory use in Miri. +// This test verifies that we only create a bounded amount of addresses for any given const. +// In practice, the interpreter always returns the same address, but we *do not guarantee* that. 
//@compile-flags: -Zinline-mir=no -const EVALS: usize = 256; +const EVALS: usize = 64; use std::collections::HashSet; fn main() { @@ -16,10 +12,8 @@ fn main() { for _ in 0..EVALS { addrs.insert(const_addr()); } - // Check that the const allocation has multiple base addresses - assert!(addrs.len() > 1); - // But also that we get a limited number of unique base addresses - assert!(addrs.len() < EVALS); + // Check that we always return the same base address for the const allocation. + assert_eq!(addrs.len(), 1); // Check that within a call we always produce the same address let mut prev = 0; diff --git a/src/tools/miri/tests/pass/fn_align.rs b/src/tools/miri/tests/pass/fn_align.rs index 28f92995880..9752d033458 100644 --- a/src/tools/miri/tests/pass/fn_align.rs +++ b/src/tools/miri/tests/pass/fn_align.rs @@ -1,15 +1,19 @@ //@compile-flags: -Zmin-function-alignment=8 + +// FIXME(rust-lang/rust#82232, rust-lang/rust#143834): temporarily renamed to mitigate `#[align]` +// nameres ambiguity +#![feature(rustc_attrs)] #![feature(fn_align)] // When a function uses `align(N)`, the function address should be a multiple of `N`. -#[align(256)] +#[rustc_align(256)] fn foo() {} -#[align(16)] +#[rustc_align(16)] fn bar() {} -#[align(4)] +#[rustc_align(4)] fn baz() {} fn main() { diff --git a/src/tools/miri/tests/pass/intrinsics/portable-simd.rs b/src/tools/miri/tests/pass/intrinsics/portable-simd.rs index 726d4c01cc3..e2cd08733af 100644 --- a/src/tools/miri/tests/pass/intrinsics/portable-simd.rs +++ b/src/tools/miri/tests/pass/intrinsics/portable-simd.rs @@ -349,12 +349,15 @@ fn simd_mask() { // Non-power-of-2 multi-byte mask. #[repr(simd, packed)] #[allow(non_camel_case_types)] - #[derive(Copy, Clone, Debug, PartialEq)] + #[derive(Copy, Clone)] struct i32x10([i32; 10]); impl i32x10 { fn splat(x: i32) -> Self { Self([x; 10]) } + fn into_array(self) -> [i32; 10] { + unsafe { std::mem::transmute(self) } + } } unsafe { let mask = i32x10([!0, !0, 0, !0, 0, 0, !0, 0, !0, 0]); @@ -377,19 +380,22 @@ fn simd_mask() { i32x10::splat(!0), // yes i32x10::splat(0), // no ); - assert_eq!(selected1, mask); - assert_eq!(selected2, mask); + assert_eq!(selected1.into_array(), mask.into_array()); + assert_eq!(selected2.into_array(), mask.into_array()); } // Test for a mask where the next multiple of 8 is not a power of two. 
#[repr(simd, packed)] #[allow(non_camel_case_types)] - #[derive(Copy, Clone, Debug, PartialEq)] + #[derive(Copy, Clone)] struct i32x20([i32; 20]); impl i32x20 { fn splat(x: i32) -> Self { Self([x; 20]) } + fn into_array(self) -> [i32; 20] { + unsafe { std::mem::transmute(self) } + } } unsafe { let mask = i32x20([!0, !0, 0, !0, 0, 0, !0, 0, !0, 0, 0, 0, 0, !0, !0, !0, !0, !0, !0, !0]); @@ -419,8 +425,8 @@ fn simd_mask() { i32x20::splat(!0), // yes i32x20::splat(0), // no ); - assert_eq!(selected1, mask); - assert_eq!(selected2, mask); + assert_eq!(selected1.into_array(), mask.into_array()); + assert_eq!(selected2.into_array(), mask.into_array()); } } @@ -708,12 +714,12 @@ fn simd_ops_non_pow2() { let x = SimdPacked([1u32; 3]); let y = SimdPacked([2u32; 3]); let z = unsafe { intrinsics::simd_add(x, y) }; - assert_eq!({ z.0 }, [3u32; 3]); + assert_eq!(unsafe { *(&raw const z).cast::<[u32; 3]>() }, [3u32; 3]); let x = SimdPadded([1u32; 3]); let y = SimdPadded([2u32; 3]); let z = unsafe { intrinsics::simd_add(x, y) }; - assert_eq!(z.0, [3u32; 3]); + assert_eq!(unsafe { *(&raw const z).cast::<[u32; 3]>() }, [3u32; 3]); } fn main() { diff --git a/src/tools/miri/tests/pass/intrinsics/type-id.rs b/src/tools/miri/tests/pass/intrinsics/type-id.rs new file mode 100644 index 00000000000..123fdbdc9ce --- /dev/null +++ b/src/tools/miri/tests/pass/intrinsics/type-id.rs @@ -0,0 +1,19 @@ +use std::any::{Any, TypeId}; + +fn main() { + let t1 = TypeId::of::<u64>(); + let t2 = TypeId::of::<u64>(); + assert_eq!(t1, t2); + let t3 = TypeId::of::<usize>(); + assert_ne!(t1, t3); + + let _ = format!("{t1:?}"); // test that we can debug-print + + let b = Box::new(0u64) as Box<dyn Any>; + assert_eq!(*b.downcast_ref::<u64>().unwrap(), 0); + assert!(b.downcast_ref::<usize>().is_none()); + + // Get the first pointer chunk and try to make it a ZST ref. + // This used to trigger an error because TypeId allocs got misclassified as "LiveData". + let _raw_chunk = unsafe { (&raw const t1).cast::<&()>().read() }; +} diff --git a/src/tools/miri/tests/pass/shims/ctor.rs b/src/tools/miri/tests/pass/shims/ctor.rs index b997d2386b8..a0fcdb1081e 100644 --- a/src/tools/miri/tests/pass/shims/ctor.rs +++ b/src/tools/miri/tests/pass/shims/ctor.rs @@ -2,13 +2,13 @@ use std::sync::atomic::{AtomicUsize, Ordering}; static COUNT: AtomicUsize = AtomicUsize::new(0); -unsafe extern "C" fn ctor() { - COUNT.fetch_add(1, Ordering::Relaxed); +unsafe extern "C" fn ctor<const N: usize>() { + COUNT.fetch_add(N, Ordering::Relaxed); } #[rustfmt::skip] macro_rules! ctor { - ($ident:ident = $ctor:ident) => { + ($ident:ident: $ty:ty = $ctor:expr) => { #[cfg_attr( all(any( target_os = "linux", @@ -33,14 +33,13 @@ macro_rules! ctor { link_section = "__DATA,__mod_init_func" )] #[used] - static $ident: unsafe extern "C" fn() = $ctor; + static $ident: $ty = $ctor; }; } -ctor! { CTOR1 = ctor } -ctor! { CTOR2 = ctor } -ctor! { CTOR3 = ctor } +ctor! { CTOR1: unsafe extern "C" fn() = ctor::<1> } +ctor! 
{ CTOR2: [unsafe extern "C" fn(); 2] = [ctor::<2>, ctor::<3>] } fn main() { - assert_eq!(COUNT.load(Ordering::Relaxed), 3, "ctors did not run"); + assert_eq!(COUNT.load(Ordering::Relaxed), 6, "ctors did not run"); } diff --git a/src/tools/miri/tests/pass/shims/fs.rs b/src/tools/miri/tests/pass/shims/fs.rs index 9d5725773e6..e7f11c54704 100644 --- a/src/tools/miri/tests/pass/shims/fs.rs +++ b/src/tools/miri/tests/pass/shims/fs.rs @@ -17,6 +17,10 @@ mod utils; fn main() { test_path_conversion(); test_file(); + // Partial reads/writes are apparently not a thing on Windows. + if cfg!(not(windows)) { + test_file_partial_reads_writes(); + } test_file_create_new(); test_metadata(); test_seek(); @@ -53,7 +57,7 @@ fn test_file() { file.write(&mut []).unwrap(); assert_eq!(file.metadata().unwrap().len(), 0); - file.write(bytes).unwrap(); + file.write_all(bytes).unwrap(); assert_eq!(file.metadata().unwrap().len(), bytes.len() as u64); // Test opening, reading and closing a file. let mut file = File::open(&path).unwrap(); @@ -66,10 +70,36 @@ fn test_file() { assert!(!file.is_terminal()); + // Writing to a file opened for reading should error (and not stop interpretation). std does not + // categorize the error so we don't check for details. + file.write(&[]).unwrap_err(); + // Removing file should succeed. remove_file(&path).unwrap(); } +fn test_file_partial_reads_writes() { + let path = utils::prepare_with_content("miri_test_fs_file.txt", b"abcdefg"); + + // Ensure we sometimes do incomplete writes. + let got_short_write = (0..16).any(|_| { + let _ = remove_file(&path); // FIXME(win, issue #4483): errors if the file already exists + let mut file = File::create(&path).unwrap(); + file.write(&[0; 4]).unwrap() != 4 + }); + assert!(got_short_write); + // Ensure we sometimes do incomplete reads. 
+ let got_short_read = (0..16).any(|_| { + let mut file = File::open(&path).unwrap(); + let mut buf = [0; 4]; + file.read(&mut buf).unwrap() != 4 + }); + assert!(got_short_read); + + // Clean up + remove_file(&path).unwrap(); +} + fn test_file_clone() { let bytes = b"Hello, World!\n"; let path = utils::prepare_with_content("miri_test_fs_file_clone.txt", bytes); diff --git a/src/tools/miri/tests/pass/shims/pipe.rs b/src/tools/miri/tests/pass/shims/pipe.rs index c47feb8774a..4915e54c533 100644 --- a/src/tools/miri/tests/pass/shims/pipe.rs +++ b/src/tools/miri/tests/pass/shims/pipe.rs @@ -4,8 +4,8 @@ use std::io::{Read, Write, pipe}; fn main() { let (mut ping_rx, mut ping_tx) = pipe().unwrap(); - ping_tx.write(b"hello").unwrap(); + ping_tx.write_all(b"hello").unwrap(); let mut buf: [u8; 5] = [0; 5]; - ping_rx.read(&mut buf).unwrap(); + ping_rx.read_exact(&mut buf).unwrap(); assert_eq!(&buf, "hello".as_bytes()); } diff --git a/src/tools/miri/tests/ui.rs b/src/tools/miri/tests/ui.rs index 43f855d57dd..73fbe2cc020 100644 --- a/src/tools/miri/tests/ui.rs +++ b/src/tools/miri/tests/ui.rs @@ -13,7 +13,8 @@ use ui_test::custom_flags::edition::Edition; use ui_test::dependencies::DependencyBuilder; use ui_test::per_test_config::TestConfig; use ui_test::spanned::Spanned; -use ui_test::{CommandBuilder, Config, Format, Match, ignore_output_conflict, status_emitter}; +use ui_test::status_emitter::StatusEmitter; +use ui_test::{CommandBuilder, Config, Match, ignore_output_conflict}; #[derive(Copy, Clone, Debug)] enum Mode { @@ -141,7 +142,7 @@ fn miri_config( envs: vec![("RUSTFLAGS".into(), None)], ..CommandBuilder::cargo() }, - crate_manifest_path: Path::new("test_dependencies").join("Cargo.toml"), + crate_manifest_path: Path::new("tests/deps").join("Cargo.toml"), build_std: None, bless_lockfile: bless, }, @@ -216,10 +217,7 @@ fn run_tests( // This could be used to overwrite the `Config` on a per-test basis. |_, _| {}, // No GHA output as that would also show in the main rustc repo. - match args.format { - Format::Terse => status_emitter::Text::quiet(), - Format::Pretty => status_emitter::Text::verbose(), - }, + Box::<dyn StatusEmitter>::from(args.format), ) } @@ -335,11 +333,25 @@ fn main() -> Result<()> { ui(Mode::Panic, "tests/panic", &target, WithDependencies, tmpdir.path())?; ui(Mode::Fail, "tests/fail", &target, WithoutDependencies, tmpdir.path())?; ui(Mode::Fail, "tests/fail-dep", &target, WithDependencies, tmpdir.path())?; - if cfg!(unix) && target == host { + if cfg!(all(unix, feature = "native-lib")) && target == host { ui(Mode::Pass, "tests/native-lib/pass", &target, WithoutDependencies, tmpdir.path())?; ui(Mode::Fail, "tests/native-lib/fail", &target, WithoutDependencies, tmpdir.path())?; } + // We only enable GenMC tests when the `genmc` feature is enabled, but also only on platforms we support: + // FIXME(genmc,macos): Add `target_os = "macos"` once `https://github.com/dtolnay/cxx/issues/1535` is fixed. + // FIXME(genmc,cross-platform): remove `host == target` check once cross-platform support with GenMC is possible. 
+ if cfg!(all( + feature = "genmc", + target_os = "linux", + target_pointer_width = "64", + target_endian = "little" + )) && host == target + { + ui(Mode::Pass, "tests/genmc/pass", &target, WithDependencies, tmpdir.path())?; + ui(Mode::Fail, "tests/genmc/fail", &target, WithDependencies, tmpdir.path())?; + } + Ok(()) } diff --git a/src/tools/miri/tests/utils/fs.rs b/src/tools/miri/tests/utils/fs.rs index 7340908626f..7d75b3fced3 100644 --- a/src/tools/miri/tests/utils/fs.rs +++ b/src/tools/miri/tests/utils/fs.rs @@ -1,6 +1,6 @@ use std::ffi::OsString; -use std::fs; use std::path::PathBuf; +use std::{fs, io}; use super::miri_extern; diff --git a/src/tools/miri/tests/utils/libc.rs b/src/tools/miri/tests/utils/libc.rs new file mode 100644 index 00000000000..1a3cd067c04 --- /dev/null +++ b/src/tools/miri/tests/utils/libc.rs @@ -0,0 +1,44 @@ +//! Utils that need libc. +#![allow(dead_code)] + +pub unsafe fn read_all( + fd: libc::c_int, + buf: *mut libc::c_void, + count: libc::size_t, +) -> libc::ssize_t { + assert!(count > 0); + let mut read_so_far = 0; + while read_so_far < count { + let res = libc::read(fd, buf.add(read_so_far), count - read_so_far); + if res < 0 { + return res; + } + if res == 0 { + // EOF + break; + } + read_so_far += res as libc::size_t; + } + return read_so_far as libc::ssize_t; +} + +pub unsafe fn write_all( + fd: libc::c_int, + buf: *const libc::c_void, + count: libc::size_t, +) -> libc::ssize_t { + assert!(count > 0); + let mut written_so_far = 0; + while written_so_far < count { + let res = libc::write(fd, buf.add(written_so_far), count - written_so_far); + if res < 0 { + return res; + } + if res == 0 { + // EOF? + break; + } + written_so_far += res as libc::size_t; + } + return written_so_far as libc::ssize_t; +} diff --git a/src/tools/miri/tests/x86_64-unknown-kernel.json b/src/tools/miri/tests/x86_64-unknown-kernel.json index 8da67d3a1c6..a5eaceb4f68 100644 --- a/src/tools/miri/tests/x86_64-unknown-kernel.json +++ b/src/tools/miri/tests/x86_64-unknown-kernel.json @@ -2,7 +2,7 @@ "llvm-target": "x86_64-unknown-none", "target-endian": "little", "target-pointer-width": "64", - "target-c-int-width": "32", + "target-c-int-width": 32, "data-layout": "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128", "arch": "x86_64", "os": "none", diff --git a/src/tools/miri/triagebot.toml b/src/tools/miri/triagebot.toml index 60e80c3f673..a0ce9f80024 100644 --- a/src/tools/miri/triagebot.toml +++ b/src/tools/miri/triagebot.toml @@ -16,7 +16,13 @@ allow-unauthenticated = [ # Enables assigning users to issues and PRs. [assign] warn_non_default_branch = true -contributing_url = "https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md" +contributing_url = "https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#pr-review-process" +[assign.custom_welcome_messages] +welcome-message = "(unused)" +welcome-message-no-reviewer = """ +Thank you for contributing to Miri! +Please remember to not force-push to the PR branch except when you need to rebase due to a conflict or when the reviewer asks you for it. 
+""" [no-merges] exclude_titles = ["Rustup"] @@ -38,6 +44,12 @@ remove = [] add = ["S-waiting-on-author"] unless = ["S-blocked", "S-waiting-on-team", "S-waiting-on-review"] +[autolabel."S-waiting-on-review"] +new_pr = true + +[autolabel."S-waiting-on-author"] +new_draft = true + # Automatically close and reopen PRs made by bots to run CI on them [bot-pull-requests] diff --git a/src/tools/miropt-test-tools/Cargo.toml b/src/tools/miropt-test-tools/Cargo.toml index 09b4c7d16dc..3eb5020968d 100644 --- a/src/tools/miropt-test-tools/Cargo.toml +++ b/src/tools/miropt-test-tools/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miropt-test-tools" version = "0.1.0" -edition = "2021" +edition = "2024" [dependencies] diff --git a/src/tools/miropt-test-tools/src/lib.rs b/src/tools/miropt-test-tools/src/lib.rs index 41b53d2ad0e..10769c9c8ab 100644 --- a/src/tools/miropt-test-tools/src/lib.rs +++ b/src/tools/miropt-test-tools/src/lib.rs @@ -34,7 +34,7 @@ fn output_file_suffix(testfile: &Path, bit_width: u32, panic_strategy: PanicStra let mut suffix = String::new(); if each_bit_width { - suffix.push_str(&format!(".{}bit", bit_width)); + suffix.push_str(&format!(".{bit_width}bit")); } if each_panic_strategy { match panic_strategy { @@ -51,7 +51,7 @@ pub fn files_for_miropt_test( panic_strategy: PanicStrategy, ) -> MiroptTest { let mut out = Vec::new(); - let test_file_contents = fs::read_to_string(&testfile).unwrap(); + let test_file_contents = fs::read_to_string(testfile).unwrap(); let test_dir = testfile.parent().unwrap(); let test_crate = testfile.file_stem().unwrap().to_str().unwrap().replace('-', "_"); @@ -76,10 +76,10 @@ pub fn files_for_miropt_test( if test_name.ends_with(".diff") { let trimmed = test_name.trim_end_matches(".diff"); - passes.push(trimmed.split('.').last().unwrap().to_owned()); - let test_against = format!("{}.after.mir", trimmed); - from_file = format!("{}.before.mir", trimmed); - expected_file = format!("{}{}.diff", trimmed, suffix); + passes.push(trimmed.split('.').next_back().unwrap().to_owned()); + let test_against = format!("{trimmed}.after.mir"); + from_file = format!("{trimmed}.before.mir"); + expected_file = format!("{trimmed}{suffix}.diff"); assert!(test_names.next().is_none(), "two mir pass names specified for MIR diff"); to_file = Some(test_against); } else if let Some(first_pass) = test_names.next() { @@ -92,10 +92,9 @@ pub fn files_for_miropt_test( } assert!(test_names.next().is_none(), "three mir pass names specified for MIR diff"); - expected_file = - format!("{}{}.{}-{}.diff", test_name, suffix, first_pass, second_pass); - let second_file = format!("{}.{}.mir", test_name, second_pass); - from_file = format!("{}.{}.mir", test_name, first_pass); + expected_file = format!("{test_name}{suffix}.{first_pass}-{second_pass}.diff"); + let second_file = format!("{test_name}.{second_pass}.mir"); + from_file = format!("{test_name}.{first_pass}.mir"); to_file = Some(second_file); } else { // Allow-list for file extensions that can be produced by MIR dumps. 
@@ -112,7 +111,7 @@ pub fn files_for_miropt_test( ) } - expected_file = format!("{}{}.{}", test_name_wo_ext, suffix, test_name_ext); + expected_file = format!("{test_name_wo_ext}{suffix}.{test_name_ext}"); from_file = test_name.to_string(); assert!(test_names.next().is_none(), "two mir pass names specified for MIR dump"); to_file = None; @@ -123,7 +122,7 @@ pub fn files_for_miropt_test( ); }; if !expected_file.starts_with(&test_crate) { - expected_file = format!("{}.{}", test_crate, expected_file); + expected_file = format!("{test_crate}.{expected_file}"); } let expected_file = test_dir.join(expected_file); diff --git a/src/tools/opt-dist/src/environment.rs b/src/tools/opt-dist/src/environment.rs index d41dc80e6b2..2cae0785f33 100644 --- a/src/tools/opt-dist/src/environment.rs +++ b/src/tools/opt-dist/src/environment.rs @@ -28,6 +28,8 @@ pub struct Environment { run_tests: bool, fast_try_build: bool, build_llvm: bool, + #[builder(default)] + stage0_root: Option<Utf8PathBuf>, } impl Environment { @@ -48,7 +50,7 @@ impl Environment { } pub fn build_artifacts(&self) -> Utf8PathBuf { - self.build_root().join("build").join(&self.host_tuple) + self.build_root().join(&self.host_tuple) } pub fn artifact_dir(&self) -> Utf8PathBuf { @@ -56,17 +58,11 @@ impl Environment { } pub fn cargo_stage_0(&self) -> Utf8PathBuf { - self.build_artifacts() - .join("stage0") - .join("bin") - .join(format!("cargo{}", executable_extension())) + self.stage0().join("bin").join(format!("cargo{}", executable_extension())) } pub fn rustc_stage_0(&self) -> Utf8PathBuf { - self.build_artifacts() - .join("stage0") - .join("bin") - .join(format!("rustc{}", executable_extension())) + self.stage0().join("bin").join(format!("rustc{}", executable_extension())) } pub fn rustc_stage_2(&self) -> Utf8PathBuf { @@ -116,6 +112,10 @@ impl Environment { pub fn build_llvm(&self) -> bool { self.build_llvm } + + pub fn stage0(&self) -> Utf8PathBuf { + self.stage0_root.clone().unwrap_or_else(|| self.build_artifacts().join("stage0")) + } } /// What is the extension of binary executables on this platform? diff --git a/src/tools/opt-dist/src/exec.rs b/src/tools/opt-dist/src/exec.rs index 56eff2ca2a7..a8d4c93d160 100644 --- a/src/tools/opt-dist/src/exec.rs +++ b/src/tools/opt-dist/src/exec.rs @@ -99,7 +99,7 @@ pub struct Bootstrap { impl Bootstrap { pub fn build(env: &Environment) -> Self { - let metrics_path = env.build_root().join("build").join("metrics.json"); + let metrics_path = env.build_root().join("metrics.json"); let cmd = cmd(&[ env.python_binary(), env.checkout_path().join("x.py").as_str(), @@ -119,7 +119,7 @@ impl Bootstrap { } pub fn dist(env: &Environment, dist_args: &[String]) -> Self { - let metrics_path = env.build_root().join("build").join("metrics.json"); + let metrics_path = env.build_root().join("metrics.json"); let args = dist_args.iter().map(|arg| arg.as_str()).collect::<Vec<_>>(); let cmd = cmd(&args).env("RUST_BACKTRACE", "full"); let mut cmd = add_shared_x_flags(env, cmd); diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs index 7857f196626..19706b4a4f0 100644 --- a/src/tools/opt-dist/src/main.rs +++ b/src/tools/opt-dist/src/main.rs @@ -62,7 +62,7 @@ enum EnvironmentCmd { python: String, /// Directory where artifacts (like PGO profiles or rustc-perf) of this workflow - /// will be stored. + /// will be stored. Relative to `checkout_dir` #[arg(long, default_value = "opt-artifacts")] artifact_dir: Utf8PathBuf, @@ -102,6 +102,15 @@ enum EnvironmentCmd { /// Will be LLVM built during the run? 
#[arg(long, default_value_t = true, action(clap::ArgAction::Set))] build_llvm: bool, + + /// Set build artifacts dir. Relative to `checkout_dir`, should point to the directory set + /// in bootstrap.toml via `build.build-dir` option + #[arg(long, default_value = "build")] + build_dir: Utf8PathBuf, + + /// Path to custom stage0 root + #[arg(long)] + stage0_root: Option<Utf8PathBuf>, }, /// Perform an optimized build on Linux CI, from inside Docker. LinuxCi { @@ -138,14 +147,16 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec<String>)> shared, run_tests, build_llvm, + build_dir, + stage0_root, } => { let env = EnvironmentBuilder::default() .host_tuple(target_triple) .python_binary(python) .checkout_dir(checkout_dir.clone()) .host_llvm_dir(llvm_dir) - .artifact_dir(artifact_dir) - .build_dir(checkout_dir) + .artifact_dir(checkout_dir.join(artifact_dir)) + .build_dir(checkout_dir.join(build_dir)) .prebuilt_rustc_perf(rustc_perf_checkout_dir) .shared_llvm(llvm_shared) .use_bolt(use_bolt) @@ -154,6 +165,7 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec<String>)> .run_tests(run_tests) .fast_try_build(is_fast_try_build) .build_llvm(build_llvm) + .stage0_root(stage0_root) .build()?; (env, shared.build_args) @@ -171,7 +183,7 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec<String>)> .checkout_dir(checkout_dir.clone()) .host_llvm_dir(Utf8PathBuf::from("/rustroot")) .artifact_dir(Utf8PathBuf::from("/tmp/tmp-multistage/opt-artifacts")) - .build_dir(checkout_dir.join("obj")) + .build_dir(checkout_dir.join("obj").join("build")) .shared_llvm(true) // FIXME: Enable bolt for aarch64 once it's fixed upstream. Broken as of December 2024. .use_bolt(!is_aarch64) @@ -194,7 +206,7 @@ fn create_environment(args: Args) -> anyhow::Result<(Environment, Vec<String>)> .checkout_dir(checkout_dir.clone()) .host_llvm_dir(checkout_dir.join("citools").join("clang-rust")) .artifact_dir(checkout_dir.join("opt-artifacts")) - .build_dir(checkout_dir) + .build_dir(checkout_dir.join("build")) .shared_llvm(false) .use_bolt(false) .skipped_tests(vec![]) diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs index 2d2aab86eda..d5121b8c786 100644 --- a/src/tools/opt-dist/src/tests.rs +++ b/src/tools/opt-dist/src/tests.rs @@ -13,7 +13,7 @@ pub fn run_tests(env: &Environment) -> anyhow::Result<()> { // and then use that extracted rustc as a stage0 compiler. // Then we run a subset of tests using that compiler, to have a basic smoke test which checks // whether the optimization pipeline hasn't broken something. 
- let build_dir = env.build_root().join("build"); + let build_dir = env.build_root(); let dist_dir = build_dir.join("dist"); let unpacked_dist_dir = build_dir.join("unpacked-dist"); std::fs::create_dir_all(&unpacked_dist_dir)?; @@ -79,6 +79,7 @@ lld = false rustc = "{rustc}" cargo = "{cargo}" local-rebuild = true +compiletest-allow-stage0=true [target.{host_triple}] llvm-config = "{llvm_config}" @@ -100,8 +101,8 @@ llvm-config = "{llvm_config}" env.host_tuple(), "--stage", "0", - "tests/assembly", - "tests/codegen", + "tests/assembly-llvm", + "tests/codegen-llvm", "tests/codegen-units", "tests/incremental", "tests/mir-opt", @@ -117,7 +118,6 @@ llvm-config = "{llvm_config}" args.extend(["--skip", test_path]); } cmd(&args) - .env("COMPILETEST_FORCE_STAGE0", "1") // Also run dist-only tests .env("COMPILETEST_ENABLE_DIST_TESTS", "1") .run() diff --git a/src/tools/run-make-support/src/artifact_names.rs b/src/tools/run-make-support/src/artifact_names.rs index a889b30e145..a2bb1186944 100644 --- a/src/tools/run-make-support/src/artifact_names.rs +++ b/src/tools/run-make-support/src/artifact_names.rs @@ -35,6 +35,8 @@ pub fn dynamic_lib_extension() -> &'static str { "dylib" } else if target.contains("windows") { "dll" + } else if target.contains("aix") { + "a" } else { "so" } diff --git a/src/tools/run-make-support/src/external_deps/llvm.rs b/src/tools/run-make-support/src/external_deps/llvm.rs index 9a6e35da3fe..939160d9f41 100644 --- a/src/tools/run-make-support/src/external_deps/llvm.rs +++ b/src/tools/run-make-support/src/external_deps/llvm.rs @@ -60,6 +60,12 @@ pub fn llvm_pdbutil() -> LlvmPdbutil { LlvmPdbutil::new() } +/// Construct a new `llvm-as` invocation. This assumes that `llvm-as` is available +/// at `$LLVM_BIN_DIR/llvm-as`. +pub fn llvm_as() -> LlvmAs { + LlvmAs::new() +} + /// Construct a new `llvm-dis` invocation. This assumes that `llvm-dis` is available /// at `$LLVM_BIN_DIR/llvm-dis`. pub fn llvm_dis() -> LlvmDis { @@ -135,6 +141,13 @@ pub struct LlvmPdbutil { cmd: Command, } +/// A `llvm-as` invocation builder. +#[derive(Debug)] +#[must_use] +pub struct LlvmAs { + cmd: Command, +} + /// A `llvm-dis` invocation builder. #[derive(Debug)] #[must_use] @@ -158,6 +171,7 @@ crate::macros::impl_common_helpers!(LlvmNm); crate::macros::impl_common_helpers!(LlvmBcanalyzer); crate::macros::impl_common_helpers!(LlvmDwarfdump); crate::macros::impl_common_helpers!(LlvmPdbutil); +crate::macros::impl_common_helpers!(LlvmAs); crate::macros::impl_common_helpers!(LlvmDis); crate::macros::impl_common_helpers!(LlvmObjcopy); @@ -441,6 +455,22 @@ impl LlvmObjcopy { } } +impl LlvmAs { + /// Construct a new `llvm-as` invocation. This assumes that `llvm-as` is available + /// at `$LLVM_BIN_DIR/llvm-as`. + pub fn new() -> Self { + let llvm_as = llvm_bin_dir().join("llvm-as"); + let cmd = Command::new(llvm_as); + Self { cmd } + } + + /// Provide an input file. + pub fn input<P: AsRef<Path>>(&mut self, path: P) -> &mut Self { + self.cmd.arg(path.as_ref()); + self + } +} + impl LlvmDis { /// Construct a new `llvm-dis` invocation. This assumes that `llvm-dis` is available /// at `$LLVM_BIN_DIR/llvm-dis`. diff --git a/src/tools/run-make-support/src/external_deps/rustc.rs b/src/tools/run-make-support/src/external_deps/rustc.rs index 1ea549ca7ea..60d3366ee98 100644 --- a/src/tools/run-make-support/src/external_deps/rustc.rs +++ b/src/tools/run-make-support/src/external_deps/rustc.rs @@ -52,13 +52,20 @@ impl Rustc { // `rustc` invocation constructor methods /// Construct a new `rustc` invocation. 
This will automatically set the library - /// search path as `-L cwd()` and also the compilation target. + /// search path as `-L cwd()`, configure the compilation target and enable + /// dynamic linkage by default on musl hosts. /// Use [`bare_rustc`] to avoid this. #[track_caller] pub fn new() -> Self { let mut cmd = setup_common(); cmd.arg("-L").arg(cwd()); + // FIXME: On musl hosts, we currently default to static linkage, while + // for running run-make tests, we rely on dynamic linkage by default + if std::env::var("IS_MUSL_HOST").is_ok_and(|i| i == "1") { + cmd.arg("-Ctarget-feature=-crt-static"); + } + // Automatically default to cross-compilation Self { cmd, target: Some(target()) } } @@ -166,6 +173,12 @@ impl Rustc { self } + /// This flag enables LTO in the specified form. + pub fn lto(&mut self, option: &str) -> &mut Self { + self.cmd.arg(format!("-Clto={option}")); + self + } + /// This flag defers LTO optimizations to the linker. pub fn linker_plugin_lto(&mut self, option: &str) -> &mut Self { self.cmd.arg(format!("-Clinker-plugin-lto={option}")); diff --git a/src/tools/run-make-support/src/lib.rs b/src/tools/run-make-support/src/lib.rs index 29cd6c4ad15..b7d89b130c6 100644 --- a/src/tools/run-make-support/src/lib.rs +++ b/src/tools/run-make-support/src/lib.rs @@ -63,8 +63,9 @@ pub use crate::external_deps::clang::{Clang, clang}; pub use crate::external_deps::htmldocck::htmldocck; pub use crate::external_deps::llvm::{ self, LlvmAr, LlvmBcanalyzer, LlvmDis, LlvmDwarfdump, LlvmFilecheck, LlvmNm, LlvmObjcopy, - LlvmObjdump, LlvmProfdata, LlvmReadobj, llvm_ar, llvm_bcanalyzer, llvm_dis, llvm_dwarfdump, - llvm_filecheck, llvm_nm, llvm_objcopy, llvm_objdump, llvm_profdata, llvm_readobj, + LlvmObjdump, LlvmProfdata, LlvmReadobj, llvm_ar, llvm_as, llvm_bcanalyzer, llvm_dis, + llvm_dwarfdump, llvm_filecheck, llvm_nm, llvm_objcopy, llvm_objdump, llvm_profdata, + llvm_readobj, }; pub use crate::external_deps::python::python_command; pub use crate::external_deps::rustc::{self, Rustc, bare_rustc, rustc, rustc_path}; diff --git a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml new file mode 100644 index 00000000000..2a842f3b311 --- /dev/null +++ b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml @@ -0,0 +1,20 @@ +name: rustc-pull + +on: + workflow_dispatch: + schedule: + # Run at 04:00 UTC every Monday and Thursday + - cron: '0 4 * * 1,4' + +jobs: + pull: + if: github.repository == 'rust-lang/rust-analyzer' + uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main + with: + zulip-stream-id: 185405 + zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com" + pr-base-branch: master + branch-name: rustc-pull + secrets: + zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index e55cd80943d..7d03300c221 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -396,15 +396,6 @@ dependencies = [ ] [[package]] -name = "directories" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] name = "dirs" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1268,7 +1259,7 @@ dependencies = [ "expect-test", "intern", "parser", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 
0.123.0", "rustc-hash 2.1.1", "smallvec", "span", @@ -1504,7 +1495,7 @@ dependencies = [ "drop_bomb", "edition", "expect-test", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.123.0", "rustc-literal-escaper", "stdx", "tracing", @@ -1614,7 +1605,7 @@ dependencies = [ "object", "paths", "proc-macro-test", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.123.0", "span", "syntax-bridge", "tt", @@ -1688,6 +1679,7 @@ dependencies = [ "serde_json", "span", "stdx", + "temp-dir", "toolchain", "tracing", "triomphe", @@ -1756,9 +1748,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.121.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee51482d1c9d3e538acda8cce723db8eea1a81540544bf362bf4c3d841b2329" +checksum = "f18c877575c259d127072e9bfc41d985202262fb4d6bfdae3d1252147c2562c2" dependencies = [ "bitflags 2.9.1", "ra-ap-rustc_hashes", @@ -1768,18 +1760,18 @@ dependencies = [ [[package]] name = "ra-ap-rustc_hashes" -version = "0.121.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19c8f1e0c28e24e1b4c55dc08058c6c9829df2204497d4034259f491d348c204" +checksum = "2439ed1df3472443133b66949f81080dff88089b42f825761455463709ee1cad" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.121.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33f429cec6b92fa2c7243883279fb29dd233fdc3e94099aff32aa91aa87f50" +checksum = "57a24fe0be21be1f8ebc21dcb40129214fb4cefb0f2753f3d46b6dbe656a1a45" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1787,9 +1779,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.121.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9b55910dbe1fe7ef34bdc1d1bcb41e99b377eb680ea58a1218d95d6b4152257" +checksum = "844a27ddcad0116facae2df8e741fd788662cf93dc13029cd864f2b8013b81f9" dependencies = [ "proc-macro2", "quote", @@ -1808,20 +1800,31 @@ dependencies = [ ] [[package]] +name = "ra-ap-rustc_lexer" +version = "0.123.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b734cfcb577d09877799a22742f1bd398be6c00bc428d9de56d48d11ece5771" +dependencies = [ + "memchr", + "unicode-properties", + "unicode-xid", +] + +[[package]] name = "ra-ap-rustc_parse_format" version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97" dependencies = [ - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.121.0", "rustc-literal-escaper", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.121.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe21a3542980d56d2435e96c2720773cac1c63fd4db666417e414729da192eb3" +checksum = "75b0ee1f059b9dea0818c6c7267478926eee95ba4c7dcf89c8db32fa165d3904" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.1.1", @@ -2283,6 +2286,12 @@ dependencies = [ ] [[package]] +name = "temp-dir" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83176759e9416cf81ee66cb6508dbfe9c96f20b8b56265a39917551c23c70964" + +[[package]] name = "tenthash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2581,7 +2590,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "intern", - "ra-ap-rustc_lexer", + "ra-ap-rustc_lexer 0.123.0", "stdx", 
"text-size", ] @@ -3094,7 +3103,6 @@ name = "xtask" version = "0.1.0" dependencies = [ "anyhow", - "directories", "edition", "either", "flate2", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 41fa06a76a7..e7cf0212bf2 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -89,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.121", default-features = false } +ra-ap-rustc_lexer = { version = "0.123", default-features = false } ra-ap-rustc_parse_format = { version = "0.121", default-features = false } -ra-ap-rustc_index = { version = "0.121", default-features = false } -ra-ap-rustc_abi = { version = "0.121", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.121", default-features = false } +ra-ap-rustc_index = { version = "0.123", default-features = false } +ra-ap-rustc_abi = { version = "0.123", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.123", default-features = false } # local crates that aren't published to crates.io. These should not have versions. @@ -156,6 +156,7 @@ smallvec = { version = "1.15.1", features = [ "const_generics", ] } smol_str = "0.3.2" +temp-dir = "0.1.16" text-size = "1.1.1" tracing = "0.1.41" tracing-tree = "0.4.0" diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 8c9393bcc93..0bf4fbdfbd6 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -30,6 +30,7 @@ pub type ProcMacroPaths = pub enum ProcMacroLoadingError { Disabled, FailedToBuild, + ExpectedProcMacroArtifact, MissingDylibPath, NotYetBuilt, NoProcMacros, @@ -39,7 +40,8 @@ impl ProcMacroLoadingError { pub fn is_hard_error(&self) -> bool { match self { ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false, - ProcMacroLoadingError::FailedToBuild + ProcMacroLoadingError::ExpectedProcMacroArtifact + | ProcMacroLoadingError::FailedToBuild | ProcMacroLoadingError::MissingDylibPath | ProcMacroLoadingError::NoProcMacros | ProcMacroLoadingError::ProcMacroSrvError(_) => true, @@ -51,10 +53,16 @@ impl Error for ProcMacroLoadingError {} impl fmt::Display for ProcMacroLoadingError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { + ProcMacroLoadingError::ExpectedProcMacroArtifact => { + write!(f, "proc-macro crate did not build proc-macro artifact") + } ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"), ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"), ProcMacroLoadingError::MissingDylibPath => { - write!(f, "proc-macro crate build data is missing a dylib path") + write!( + f, + "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system." 
+ ) } ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"), ProcMacroLoadingError::NoProcMacros => { diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs index 0ec082dfa7f..aed00aa9fc4 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs @@ -68,6 +68,11 @@ impl CfgExpr { next_cfg_expr(&mut tt.iter()).unwrap_or(CfgExpr::Invalid) } + #[cfg(feature = "tt")] + pub fn parse_from_iter<S: Copy>(tt: &mut tt::iter::TtIter<'_, S>) -> CfgExpr { + next_cfg_expr(tt).unwrap_or(CfgExpr::Invalid) + } + /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates. pub fn fold(&self, query: &dyn Fn(&CfgAtom) -> bool) -> Option<bool> { match self { @@ -96,7 +101,14 @@ fn next_cfg_expr<S: Copy>(it: &mut tt::iter::TtIter<'_, S>) -> Option<CfgExpr> { }; let ret = match it.peek() { - Some(TtElement::Leaf(tt::Leaf::Punct(punct))) if punct.char == '=' => { + Some(TtElement::Leaf(tt::Leaf::Punct(punct))) + // Don't consume on e.g. `=>`. + if punct.char == '=' + && (punct.spacing == tt::Spacing::Alone + || it.remaining().flat_tokens().get(1).is_none_or(|peek2| { + !matches!(peek2, tt::TokenTree::Leaf(tt::Leaf::Punct(_))) + })) => + { match it.remaining().flat_tokens().get(1) { Some(tt::TokenTree::Leaf(tt::Leaf::Literal(literal))) => { it.next(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs index d3dfc05eb29..5695ab7ed00 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs @@ -16,7 +16,7 @@ use std::{ use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name}; +use hir_expand::{InFile, MacroCallId, mod_path::ModPath, name::Name}; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashMap; use smallvec::SmallVec; @@ -281,7 +281,6 @@ struct FormatTemplate { #[derive(Debug, Eq, PartialEq)] pub enum ExpressionStoreDiagnostics { InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, - MacroError { node: InFile<MacroCallPtr>, err: ExpandError }, UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath }, UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String }, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 4e877748ca2..abd1382801d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -960,38 +960,29 @@ impl ExprCollector<'_> { impl_trait_lower_fn: ImplTraitLowerFn<'_>, ) -> TypeBound { match node.kind() { - ast::TypeBoundKind::PathType(path_type) => { + ast::TypeBoundKind::PathType(binder, path_type) => { + let binder = match binder.and_then(|it| it.generic_param_list()) { + Some(gpl) => gpl + .lifetime_params() + .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<.text()))) + .collect(), + None => ThinVec::default(), + }; let m = match node.question_mark_token() { Some(_) => TraitBoundModifier::Maybe, None => TraitBoundModifier::None, }; self.lower_path_type(&path_type, impl_trait_lower_fn) .map(|p| { - TypeBound::Path(self.alloc_path(p, AstPtr::new(&path_type).upcast()), m) + let path = self.alloc_path(p, 
AstPtr::new(&path_type).upcast()); + if binder.is_empty() { + TypeBound::Path(path, m) + } else { + TypeBound::ForLifetime(binder, path) + } }) .unwrap_or(TypeBound::Error) } - ast::TypeBoundKind::ForType(for_type) => { - let lt_refs = match for_type.generic_param_list() { - Some(gpl) => gpl - .lifetime_params() - .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<.text()))) - .collect(), - None => ThinVec::default(), - }; - let path = for_type.ty().and_then(|ty| match &ty { - ast::Type::PathType(path_type) => { - self.lower_path_type(path_type, impl_trait_lower_fn).map(|p| (p, ty)) - } - _ => None, - }); - match path { - Some((p, ty)) => { - TypeBound::ForLifetime(lt_refs, self.alloc_path(p, AstPtr::new(&ty))) - } - None => TypeBound::Error, - } - } ast::TypeBoundKind::Use(gal) => TypeBound::Use( gal.use_bound_generic_args() .map(|p| match p { @@ -1981,13 +1972,7 @@ impl ExprCollector<'_> { return collector(self, None); } }; - if record_diagnostics { - if let Some(err) = res.err { - self.store - .diagnostics - .push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err }); - } - } + // No need to push macro and parsing errors as they'll be recreated from `macro_calls()`. match res.value { Some((mark, expansion)) => { @@ -1997,10 +1982,6 @@ impl ExprCollector<'_> { self.store.expansions.insert(macro_call_ptr, macro_file); } - if record_diagnostics { - // FIXME: Report parse errors here - } - let id = collector(self, expansion.map(|it| it.tree())); self.expander.exit(mark); id diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs index 02a1d274fb5..c570df42b2f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs @@ -180,17 +180,18 @@ impl GenericParamsCollector { continue; }; - let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| { - // Higher-Ranked Trait Bounds - param_list - .lifetime_params() - .map(|lifetime_param| { - lifetime_param - .lifetime() - .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text())) - }) - .collect() - }); + let lifetimes: Option<Box<_>> = + pred.for_binder().and_then(|it| it.generic_param_list()).map(|param_list| { + // Higher-Ranked Trait Bounds + param_list + .lifetime_params() + .map(|lifetime_param| { + lifetime_param + .lifetime() + .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text())) + }) + .collect() + }); for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { self.lower_type_bound_as_predicate(ec, bound, lifetimes.as_deref(), target); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs index 19c7ce0ce04..55e738b58bd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs @@ -27,7 +27,7 @@ pub enum Path { } // This type is being used a lot, make sure it doesn't grow unintentionally. 
-#[cfg(target_arch = "x86_64")] +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] const _: () = { assert!(size_of::<Path>() == 24); assert!(size_of::<Option<Path>>() == 24); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index eacc3f3cedf..da0f058a9cb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -148,7 +148,7 @@ pub enum TypeRef { Error, } -#[cfg(target_arch = "x86_64")] +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] const _: () = assert!(size_of::<TypeRef>() == 24); pub type TypeRefId = Idx<TypeRef>; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 1c3af47d522..eeaf865338b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -550,3 +550,51 @@ fn main() { "\"hello\""; } "##]], ); } + +#[test] +fn cfg_select() { + check( + r#" +#[rustc_builtin_macro] +pub macro cfg_select($($tt:tt)*) {} + +cfg_select! { + false => { fn false_1() {} } + any(false, true) => { fn true_1() {} } +} + +cfg_select! { + false => { fn false_2() {} } + _ => { fn true_2() {} } +} + +cfg_select! { + false => { fn false_3() {} } +} + +cfg_select! { + false +} + +cfg_select! { + false => +} + + "#, + expect![[r#" +#[rustc_builtin_macro] +pub macro cfg_select($($tt:tt)*) {} + +fn true_1() {} + +fn true_2() {} + +/* error: none of the predicates in this `cfg_select` evaluated to true */ + +/* error: expected `=>` after cfg expression */ + +/* error: expected a token tree after `=>` */ + + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 60fbc660652..4a9af01091f 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -127,6 +127,7 @@ register_builtin! 
{ (asm, Asm) => asm_expand, (global_asm, GlobalAsm) => global_asm_expand, (naked_asm, NakedAsm) => naked_asm_expand, + (cfg_select, CfgSelect) => cfg_select_expand, (cfg, Cfg) => cfg_expand, (core_panic, CorePanic) => panic_expand, (std_panic, StdPanic) => panic_expand, @@ -355,6 +356,71 @@ fn naked_asm_expand( ExpandResult::ok(expanded) } +fn cfg_select_expand( + db: &dyn ExpandDatabase, + id: MacroCallId, + tt: &tt::TopSubtree, + span: Span, +) -> ExpandResult<tt::TopSubtree> { + let loc = db.lookup_intern_macro_call(id); + let cfg_options = loc.krate.cfg_options(db); + + let mut iter = tt.iter(); + let mut expand_to = None; + while let Some(next) = iter.peek() { + let active = if let tt::TtElement::Leaf(tt::Leaf::Ident(ident)) = next + && ident.sym == sym::underscore + { + iter.next(); + true + } else { + cfg_options.check(&CfgExpr::parse_from_iter(&mut iter)) != Some(false) + }; + match iter.expect_glued_punct() { + Ok(it) if it.len() == 2 && it[0].char == '=' && it[1].char == '>' => {} + _ => { + let err_span = iter.peek().map(|it| it.first_span()).unwrap_or(span); + return ExpandResult::new( + tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), + ExpandError::other(err_span, "expected `=>` after cfg expression"), + ); + } + } + let expand_to_if_active = match iter.next() { + Some(tt::TtElement::Subtree(_, tt)) => tt.remaining(), + _ => { + let err_span = iter.peek().map(|it| it.first_span()).unwrap_or(span); + return ExpandResult::new( + tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), + ExpandError::other(err_span, "expected a token tree after `=>`"), + ); + } + }; + + if expand_to.is_none() && active { + expand_to = Some(expand_to_if_active); + } + } + match expand_to { + Some(expand_to) => { + let mut builder = tt::TopSubtreeBuilder::new(tt::Delimiter { + kind: tt::DelimiterKind::Invisible, + open: span, + close: span, + }); + builder.extend_with_tt(expand_to); + ExpandResult::ok(builder.build()) + } + None => ExpandResult::new( + tt::TopSubtree::empty(tt::DelimSpan::from_single(span)), + ExpandError::other( + span, + "none of the predicates in this `cfg_select` evaluated to true", + ), + ), + } +} + fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 5ae6bf6dffd..cc531f076dd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -175,8 +175,9 @@ impl ExprValidator { }); } - let receiver_ty = self.infer[*receiver].clone(); - checker.prev_receiver_ty = Some(receiver_ty); + if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) { + checker.prev_receiver_ty = Some(receiver_ty.clone()); + } } } @@ -187,7 +188,9 @@ impl ExprValidator { arms: &[MatchArm], db: &dyn HirDatabase, ) { - let scrut_ty = &self.infer[scrutinee_expr]; + let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else { + return; + }; if scrut_ty.contains_unknown() { return; } @@ -200,7 +203,7 @@ impl ExprValidator { // Note: Skipping the entire diagnostic rather than just not including a faulty match arm is // preferred to avoid the chance of false positives. 
for arm in arms { - let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else { + let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else { return; }; if pat_ty.contains_unknown() { @@ -328,7 +331,7 @@ impl ExprValidator { continue; } let Some(initializer) = initializer else { continue }; - let ty = &self.infer[initializer]; + let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue }; if ty.contains_unknown() { continue; } @@ -433,44 +436,44 @@ impl ExprValidator { Statement::Expr { expr, .. } => Some(*expr), _ => None, }); - if let Some(last_then_expr) = last_then_expr { - let last_then_expr_ty = &self.infer[last_then_expr]; - if last_then_expr_ty.is_never() { - // Only look at sources if the then branch diverges and we have an else branch. - let source_map = db.body_with_source_map(self.owner).1; - let Ok(source_ptr) = source_map.expr_syntax(id) else { - return; - }; - let root = source_ptr.file_syntax(db); - let either::Left(ast::Expr::IfExpr(if_expr)) = - source_ptr.value.to_node(&root) - else { + if let Some(last_then_expr) = last_then_expr + && let Some(last_then_expr_ty) = + self.infer.type_of_expr_with_adjust(last_then_expr) + && last_then_expr_ty.is_never() + { + // Only look at sources if the then branch diverges and we have an else branch. + let source_map = db.body_with_source_map(self.owner).1; + let Ok(source_ptr) = source_map.expr_syntax(id) else { + return; + }; + let root = source_ptr.file_syntax(db); + let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root) + else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_expr_stmt_or_stmt_list = + parent.as_ref().is_some_and(|node| { + ast::ExprStmt::can_cast(node.kind()) + | ast::StmtList::can_cast(node.kind()) + }); + if has_parent_expr_stmt_or_stmt_list { + // Only emit diagnostic if parent or direct ancestor is either + // an expr stmt or a stmt list. + break; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Bail if parent is neither an if expr, an expr stmt nor a stmt list. return; }; - let mut top_if_expr = if_expr; - loop { - let parent = top_if_expr.syntax().parent(); - let has_parent_expr_stmt_or_stmt_list = - parent.as_ref().is_some_and(|node| { - ast::ExprStmt::can_cast(node.kind()) - | ast::StmtList::can_cast(node.kind()) - }); - if has_parent_expr_stmt_or_stmt_list { - // Only emit diagnostic if parent or direct ancestor is either - // an expr stmt or a stmt list. - break; - } - let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { - // Bail if parent is neither an if expr, an expr stmt nor a stmt list. - return; - }; - // Check parent if expr. - top_if_expr = parent_if_expr; - } - - self.diagnostics - .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) + // Check parent if expr. 
+ top_if_expr = parent_if_expr; } + + self.diagnostics + .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index e880438e3a7..7c39afa0ef8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -561,6 +561,32 @@ impl InferenceResult { ExprOrPatId::PatId(id) => self.type_of_pat.get(id), } } + pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<&Ty> { + match self.expr_adjustments.get(&id).and_then(|adjustments| { + adjustments + .iter() + .filter(|adj| { + // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 + !matches!( + adj, + Adjustment { + kind: Adjust::NeverToAny, + target, + } if target.is_never() + ) + }) + .next_back() + }) { + Some(adjustment) => Some(&adjustment.target), + None => self.type_of_expr.get(id), + } + } + pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<&Ty> { + match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) { + adjusted @ Some(_) => adjusted, + None => self.type_of_pat.get(id), + } + } pub fn is_erroneous(&self) -> bool { self.has_errors && self.type_of_expr.iter().count() == 0 } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index 236f316366d..3f310c26ec1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -85,16 +85,6 @@ pub fn layout_of_adt_query( let d = db.const_eval_discriminant(e.enum_variants(db).variants[id.0].0).ok()?; Some((id, d)) }), - // FIXME: The current code for niche-filling relies on variant indices - // instead of actual discriminants, so enums with - // explicit discriminants (RFC #2363) would misbehave and we should disable - // niche optimization for them. 
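The diagnostics changes above switch from indexing `self.infer[expr]` to the new `type_of_expr_with_adjust`/`type_of_pat_with_adjust` accessors, which return `None` when inference produced no entry and otherwise yield the type after the last non-`NeverToAny` adjustment. A minimal caller sketch, assuming a validator holding an `infer: InferenceResult` field (illustrative, mirroring the pattern used in this patch):

// Bail out instead of panicking when the expression has no inferred type,
// and work with the post-adjustment type so coercions are respected.
let Some(ty) = self.infer.type_of_expr_with_adjust(expr) else { return };
if ty.contains_unknown() {
    return;
}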
- // The code that do it in rustc: - // repr.inhibit_enum_layout_opt() || def - // .variants() - // .iter_enumerated() - // .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32())) - repr.inhibit_enum_layout_opt(), !matches!(def, AdtId::EnumId(..)) && variants .iter() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index f32b6af4d85..d61e7de6672 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -590,9 +590,14 @@ impl<'a> TyLoweringContext<'a> { .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); let pointee_sized = LangItem::PointeeSized .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) { + let destruct = LangItem::Destruct + .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); + let hir_trait_id = trait_ref.hir_trait_id(); + if meta_sized.is_some_and(|it| it == hir_trait_id) + || destruct.is_some_and(|it| it == hir_trait_id) + { // Ignore this bound - } else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) { + } else if pointee_sized.is_some_and(|it| it == hir_trait_id) { // Regard this as `?Sized` bound ctx.ty_ctx().unsized_types.insert(self_ty); } else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 238753e12e4..c4c17a93c9c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2349,3 +2349,37 @@ fn test() { "#]], ); } + +#[test] +fn rust_destruct_option_clone() { + check_types( + r#" +//- minicore: option, drop +fn test(o: &Option<i32>) { + o.my_clone(); + //^^^^^^^^^^^^ Option<i32> +} +pub trait MyClone: Sized { + fn my_clone(&self) -> Self; +} +impl<T> const MyClone for Option<T> +where + T: ~const MyClone + ~const Destruct, +{ + fn my_clone(&self) -> Self { + match self { + Some(x) => Some(x.my_clone()), + None => None, + } + } +} +impl const MyClone for i32 { + fn my_clone(&self) -> Self { + *self + } +} +#[lang = "destruct"] +pub trait Destruct {} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 1b2b76999f7..4ddb04b24f7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1922,10 +1922,6 @@ impl DefWithBody { Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints); } - source_map - .macro_calls() - .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc)); - expr_store_diagnostics(db, acc, &source_map); let infer = db.infer(self.into()); @@ -2130,9 +2126,9 @@ impl DefWithBody { } } -fn expr_store_diagnostics( - db: &dyn HirDatabase, - acc: &mut Vec<AnyDiagnostic<'_>>, +fn expr_store_diagnostics<'db>( + db: &'db dyn HirDatabase, + acc: &mut Vec<AnyDiagnostic<'db>>, source_map: &ExpressionStoreSourceMap, ) { for diag in source_map.diagnostics() { @@ -2140,30 +2136,6 @@ fn expr_store_diagnostics( ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => { InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } - ExpressionStoreDiagnostics::MacroError { node, err } => { - let RenderedExpandError { message, error, kind } = err.render_to_string(db); - - let editioned_file_id = EditionedFileId::from_span(db, 
err.span().anchor.file_id); - let precise_location = if editioned_file_id == node.file_id { - Some( - err.span().range - + db.ast_id_map(editioned_file_id.into()) - .get_erased(err.span().anchor.ast_id) - .text_range() - .start(), - ) - } else { - None - }; - MacroError { - node: (node).map(|it| it.into()), - precise_location, - message, - error, - kind, - } - .into() - } ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { macro_call: (*node).map(|ast_ptr| ast_ptr.into()), precise_location: None, @@ -2182,6 +2154,10 @@ fn expr_store_diagnostics( } }); } + + source_map + .macro_calls() + .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc)); } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Function { diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index ecc6e5f3d03..0b554a9d4e3 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -441,7 +441,7 @@ impl<'db> SourceAnalyzer<'db> { ) -> Option<GenericSubstitution<'db>> { let body = self.store()?; if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] { - let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?; + let (adt, subst) = infer.type_of_expr_with_adjust(object_expr)?.as_adt()?; return Some(GenericSubstitution::new( adt.into(), subst.clone(), @@ -1780,10 +1780,3 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H let ctx = span_map.span_at(name.value.text_range().start()).ctx; HygieneId::new(ctx.opaque_and_semitransparent(db)) } - -fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> { - match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) { - Some(adjustment) => Some(&adjustment.target), - None => Some(&infer[id]), - } -} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 9f9d21923ff..ab183ac7089 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -2,6 +2,7 @@ use hir::HasSource; use syntax::{ Edition, ast::{self, AstNode, make}, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{ @@ -147,45 +148,78 @@ fn add_missing_impl_members_inner( let target = impl_def.syntax().text_range(); acc.add(AssistId::quick_fix(assist_id), label, target, |edit| { - let new_impl_def = edit.make_mut(impl_def.clone()); - let first_new_item = add_trait_assoc_items_to_impl( + let new_item = add_trait_assoc_items_to_impl( &ctx.sema, ctx.config, &missing_items, trait_, - &new_impl_def, + &impl_def, &target_scope, ); + let Some((first_new_item, other_items)) = new_item.split_first() else { + return; + }; + + let mut first_new_item = if let DefaultMethods::No = mode + && let ast::AssocItem::Fn(func) = &first_new_item + && let Some(body) = try_gen_trait_body( + ctx, + func, + trait_ref, + &impl_def, + target_scope.krate().edition(ctx.sema.db), + ) + && let Some(func_body) = func.body() + { + let mut func_editor = SyntaxEditor::new(first_new_item.syntax().clone_subtree()); + func_editor.replace(func_body.syntax(), body.syntax()); + ast::AssocItem::cast(func_editor.finish().new_root().clone()) + } else { + Some(first_new_item.clone()) 
+ }; + + let new_assoc_items = first_new_item + .clone() + .into_iter() + .chain(other_items.iter().cloned()) + .map(either::Either::Right) + .collect::<Vec<_>>(); + + let mut editor = edit.make_editor(impl_def.syntax()); + if let Some(assoc_item_list) = impl_def.assoc_item_list() { + let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect(); + assoc_item_list.add_items(&mut editor, items); + } else { + let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update(); + editor.insert_all( + Position::after(impl_def.syntax()), + vec![make::tokens::whitespace(" ").into(), assoc_item_list.syntax().clone().into()], + ); + first_new_item = assoc_item_list.assoc_items().next(); + } + if let Some(cap) = ctx.config.snippet_cap { let mut placeholder = None; if let DefaultMethods::No = mode { - if let ast::AssocItem::Fn(func) = &first_new_item { - if try_gen_trait_body( - ctx, - func, - trait_ref, - &impl_def, - target_scope.krate().edition(ctx.sema.db), - ) - .is_none() + if let Some(ast::AssocItem::Fn(func)) = &first_new_item { + if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - { - if m.syntax().text() == "todo!()" { - placeholder = Some(m); - } - } + placeholder = Some(m); } } } if let Some(macro_call) = placeholder { - edit.add_placeholder_snippet(cap, macro_call); - } else { - edit.add_tabstop_before(cap, first_new_item); + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(macro_call.syntax(), placeholder); + } else if let Some(first_new_item) = first_new_item { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(first_new_item.syntax(), tabstop); }; }; + edit.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -195,7 +229,7 @@ fn try_gen_trait_body( trait_ref: hir::TraitRef<'_>, impl_def: &ast::Impl, edition: Edition, -) -> Option<()> { +) -> Option<ast::BlockExpr> { let trait_path = make::ext::ident_path( &trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(), ); @@ -322,7 +356,7 @@ impl Foo for S { } #[test] - fn test_impl_def_without_braces() { + fn test_impl_def_without_braces_macro() { check_assist( add_missing_impl_members, r#" @@ -341,6 +375,33 @@ impl Foo for S { } #[test] + fn test_impl_def_without_braces_tabstop_first_item() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + type Output; + fn foo(&self); +} +struct S; +impl Foo for S { $0 }"#, + r#" +trait Foo { + type Output; + fn foo(&self); +} +struct S; +impl Foo for S { + $0type Output; + + fn foo(&self) { + todo!() + } +}"#, + ); + } + + #[test] fn fill_in_type_params_1() { check_assist( add_missing_impl_members, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index bcd06c1ef72..d7b7e8d9cad 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -228,8 +228,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> closure_body, Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))), ) - .indent(mcall.indent_level()) - .clone_for_update(); + .indent(mcall.indent_level()); editor.replace(mcall.syntax().clone(), if_expr.syntax().clone()); 
editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 71a61f2db00..2ea032fb62b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -13,7 +13,6 @@ use syntax::{ edit::{AstNodeEdit, IndentLevel}, make, }, - ted, }; use crate::{ @@ -117,7 +116,7 @@ fn if_expr_to_guarded_return( then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?; - let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update(); + let then_block_items = then_block.dedent(IndentLevel(1)); let end_of_then = then_block_items.syntax().last_child_or_token()?; let end_of_then = if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) { @@ -132,7 +131,6 @@ fn if_expr_to_guarded_return( "Convert to guarded return", target, |edit| { - let if_expr = edit.make_mut(if_expr); let if_indent_level = IndentLevel::from_node(if_expr.syntax()); let replacement = match if_let_pat { None => { @@ -143,7 +141,7 @@ fn if_expr_to_guarded_return( let cond = invert_boolean_expression_legacy(cond_expr); make::expr_if(cond, then_branch, None).indent(if_indent_level) }; - new_expr.syntax().clone_for_update() + new_expr.syntax().clone() } Some(pat) => { // If-let. @@ -154,7 +152,7 @@ fn if_expr_to_guarded_return( ast::make::tail_only_block_expr(early_expression), ); let let_else_stmt = let_else_stmt.indent(if_indent_level); - let_else_stmt.syntax().clone_for_update() + let_else_stmt.syntax().clone() } }; @@ -168,8 +166,9 @@ fn if_expr_to_guarded_return( .take_while(|i| *i != end_of_then), ) .collect(); - - ted::replace_with_many(if_expr.syntax(), then_statements) + let mut editor = edit.make_editor(if_expr.syntax()); + editor.replace_with_many(if_expr.syntax(), then_statements); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -214,7 +213,6 @@ fn let_stmt_to_guarded_return( "Convert to guarded return", target, |edit| { - let let_stmt = edit.make_mut(let_stmt); let let_indent_level = IndentLevel::from_node(let_stmt.syntax()); let replacement = { @@ -225,10 +223,11 @@ fn let_stmt_to_guarded_return( ast::make::tail_only_block_expr(early_expression), ); let let_else_stmt = let_else_stmt.indent(let_indent_level); - let_else_stmt.syntax().clone_for_update() + let_else_stmt.syntax().clone() }; - - ted::replace(let_stmt.syntax(), replacement) + let mut editor = edit.make_editor(let_stmt.syntax()); + editor.replace(let_stmt.syntax(), replacement); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 54699a9454f..cdc0e967101 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -8,8 +8,7 @@ use syntax::{ AstNode, AstToken, NodeOrToken, SyntaxKind::WHITESPACE, T, - ast::{self, make}, - ted, + ast::{self, make, syntax_factory::SyntaxFactory}, }; // Assist: extract_expressions_from_format_string @@ -58,8 +57,6 @@ pub(crate) fn extract_expressions_from_format_string( "Extract format expressions", 
tt.syntax().text_range(), |edit| { - let tt = edit.make_mut(tt); - // Extract existing arguments in macro let tokens = tt.token_trees_and_tokens().collect_vec(); @@ -131,8 +128,10 @@ pub(crate) fn extract_expressions_from_format_string( } // Insert new args - let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update(); - ted::replace(tt.syntax(), new_tt.syntax()); + let make = SyntaxFactory::with_mappings(); + let new_tt = make.token_tree(tt_delimiter, new_tt_bits); + let mut editor = edit.make_editor(tt.syntax()); + editor.replace(tt.syntax(), new_tt.syntax()); if let Some(cap) = ctx.config.snippet_cap { // Add placeholder snippets over placeholder args @@ -145,15 +144,19 @@ pub(crate) fn extract_expressions_from_format_string( }; if stdx::always!(placeholder.kind() == T![_]) { - edit.add_placeholder_snippet_token(cap, placeholder); + let annotation = edit.make_placeholder_snippet(cap); + editor.add_annotation(placeholder, annotation); } } // Add the final tabstop after the format literal if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) { - edit.add_tabstop_after_token(cap, literal); + let annotation = edit.make_tabstop_after(cap); + editor.add_annotation(literal, annotation); } } + editor.add_mappings(make.finish_with_mappings()); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index b9c42285d25..9095b1825f5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -16,8 +16,9 @@ use syntax::{ SyntaxKind::*, SyntaxNode, T, ast::{ - self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel, - edit_in_place::Indent, make, + self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, + edit::{AstNodeEdit, IndentLevel}, + make, }, match_ast, ted, }; @@ -110,20 +111,30 @@ pub(crate) fn extract_struct_from_enum_variant( let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); // resolve GenericArg in field_list to actual type - let field_list = field_list.clone_for_update(); - if let Some((target_scope, source_scope)) = + let field_list = if let Some((target_scope, source_scope)) = ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax())) { - PathTransform::generic_transformation(&target_scope, &source_scope) - .apply(field_list.syntax()); - } + let field_list = field_list.reset_indent(); + let field_list = + PathTransform::generic_transformation(&target_scope, &source_scope) + .apply(field_list.syntax()); + match_ast! 
{ + match field_list { + ast::RecordFieldList(field_list) => Either::Left(field_list), + ast::TupleFieldList(field_list) => Either::Right(field_list), + _ => unreachable!(), + } + } + } else { + field_list.clone_for_update() + }; let def = create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); let enum_ast = variant.parent_enum(); let indent = enum_ast.indent_level(); - def.reindent_to(indent); + let def = def.indent(indent); ted::insert_all( ted::Position::before(enum_ast.syntax()), @@ -279,7 +290,7 @@ fn create_struct_def( field_list.clone().into() } }; - field_list.reindent_to(IndentLevel::single()); + let field_list = field_list.indent(IndentLevel::single()); let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 31e84e9adcf..db2d316d58e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -7,7 +7,9 @@ use syntax::{ NodeOrToken, SyntaxKind, SyntaxNode, T, algo::ancestors_at_offset, ast::{ - self, AstNode, edit::IndentLevel, edit_in_place::Indent, make, + self, AstNode, + edit::{AstNodeEdit, IndentLevel}, + make, syntax_factory::SyntaxFactory, }, syntax_editor::Position, @@ -253,12 +255,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`. editor.replace(expr_replace, name_expr.syntax()); make.block_expr([new_stmt], Some(to_wrap.clone())) - }; + } + // fixup indentation of block + .indent_with_mapping(indent_to, &make); editor.replace(to_wrap.syntax(), block.syntax()); - - // fixup indentation of block - block.indent(indent_to); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index ca66cb69dcc..60638980760 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -114,9 +114,13 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let source_scope = ctx.sema.scope(v.syntax()); let target_scope = ctx.sema.scope(strukt.syntax()); if let (Some(s), Some(t)) = (source_scope, target_scope) { - PathTransform::generic_transformation(&t, &s).apply(v.syntax()); + ast::Fn::cast( + PathTransform::generic_transformation(&t, &s).apply(v.syntax()), + ) + .unwrap_or(v) + } else { + v } - v } None => return, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 848c63810a4..e96250f3c50 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -255,7 +255,6 @@ fn generate_impl( delegee: &Delegee, edition: Edition, ) -> Option<ast::Impl> { - let delegate: ast::Impl; let db = ctx.db(); let ast_strukt = &strukt.strukt; let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string())); @@ -266,7 +265,7 @@ fn generate_impl( let bound_def = 
ctx.sema.source(delegee.to_owned())?.value; let bound_params = bound_def.generic_param_list(); - delegate = make::impl_trait( + let delegate = make::impl_trait( delegee.is_unsafe(db), bound_params.clone(), bound_params.map(|params| params.to_generic_args()), @@ -304,7 +303,7 @@ fn generate_impl( let target_scope = ctx.sema.scope(strukt.strukt.syntax())?; let source_scope = ctx.sema.scope(bound_def.syntax())?; let transform = PathTransform::generic_transformation(&target_scope, &source_scope); - transform.apply(delegate.syntax()); + ast::Impl::cast(transform.apply(delegate.syntax())) } Delegee::Impls(trait_, old_impl) => { let old_impl = ctx.sema.source(old_impl.to_owned())?.value; @@ -358,20 +357,28 @@ fn generate_impl( // 2.3) Instantiate generics with `transform_impl`, this step also // remove unused params. - let mut trait_gen_args = old_impl.trait_()?.generic_arg_list(); - if let Some(trait_args) = &mut trait_gen_args { - *trait_args = trait_args.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, trait_args.syntax())?; - } + let trait_gen_args = old_impl.trait_()?.generic_arg_list().and_then(|trait_args| { + let trait_args = &mut trait_args.clone_for_update(); + if let Some(new_args) = transform_impl( + ctx, + ast_strukt, + &old_impl, + &transform_args, + trait_args.clone_subtree(), + ) { + *trait_args = new_args.clone_subtree(); + Some(new_args) + } else { + None + } + }); let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args()); - let path_type = make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?; - + let path_type = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type)?; // 3) Generate delegate trait impl - delegate = make::impl_trait( + let delegate = make::impl_trait( trait_.is_unsafe(db), trait_gen_params, trait_gen_args, @@ -385,7 +392,6 @@ fn generate_impl( None, ) .clone_for_update(); - // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths let qualified_path_type = make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?)); @@ -398,7 +404,7 @@ fn generate_impl( .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) { let item = item.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?; + let item = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item)?; let assoc = process_assoc_item(item, qualified_path_type.clone(), field_name)?; delegate_assoc_items.add_item(assoc); @@ -408,19 +414,18 @@ fn generate_impl( if let Some(wc) = delegate.where_clause() { remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc); } + Some(delegate) } } - - Some(delegate) } -fn transform_impl( +fn transform_impl<N: ast::AstNode>( ctx: &AssistContext<'_>, strukt: &ast::Struct, old_impl: &ast::Impl, args: &Option<GenericArgList>, - syntax: &syntax::SyntaxNode, -) -> Option<()> { + syntax: N, +) -> Option<N> { let source_scope = ctx.sema.scope(old_impl.self_ty()?.syntax())?; let target_scope = ctx.sema.scope(strukt.syntax())?; let hir_old_impl = ctx.sema.to_impl_def(old_impl)?; @@ -437,8 +442,7 @@ fn transform_impl( }, ); - transform.apply(syntax); - Some(()) + N::cast(transform.apply(syntax.syntax())) } fn remove_instantiated_params( @@ -570,9 +574,7 @@ where let scope = ctx.sema.scope(item.syntax())?; let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone()); - 
transform.apply(item.syntax()); - - Some(item) + N::cast(transform.apply(item.syntax())) } fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> { @@ -767,7 +769,7 @@ fn func_assoc_item( ) .clone_for_update(); - Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update())) + Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)))) } fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs index c7b97dcd231..55a09c5d775 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_deref.rs @@ -10,7 +10,7 @@ use syntax::{ use crate::{ AssistId, assist_context::{AssistContext, Assists, SourceChangeBuilder}, - utils::generate_trait_impl_text, + utils::generate_trait_impl_text_intransitive, }; // Assist: generate_deref @@ -150,7 +150,7 @@ fn generate_edit( ), }; let strukt_adt = ast::Adt::Struct(strukt); - let deref_impl = generate_trait_impl_text( + let deref_impl = generate_trait_impl_text_intransitive( &strukt_adt, &trait_path.display(db, edition).to_string(), &impl_code, @@ -228,6 +228,28 @@ impl core::ops::Deref for B { } #[test] + fn test_generate_record_deref_with_generic() { + check_assist( + generate_deref, + r#" +//- minicore: deref +struct A<T>($0T); +"#, + r#" +struct A<T>(T); + +impl<T> core::ops::Deref for A<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} +"#, + ); + } + + #[test] fn test_generate_record_deref_short_path() { check_assist( generate_deref, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 78ae815dc87..3290a70e1c6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -743,17 +743,30 @@ fn fn_generic_params( let where_preds: Vec<ast::WherePred> = where_preds.into_iter().map(|it| it.node.clone_for_update()).collect(); - // 4. Rewrite paths - if let Some(param) = generic_params.first() { - let source_scope = ctx.sema.scope(param.syntax())?; - let target_scope = ctx.sema.scope(&target.parent())?; - if source_scope.module() != target_scope.module() { + let (generic_params, where_preds): (Vec<ast::GenericParam>, Vec<ast::WherePred>) = + if let Some(param) = generic_params.first() + && let source_scope = ctx.sema.scope(param.syntax())? + && let target_scope = ctx.sema.scope(&target.parent())? + && source_scope.module() != target_scope.module() + { + // 4. 
Rewrite paths let transform = PathTransform::generic_transformation(&target_scope, &source_scope); let generic_params = generic_params.iter().map(|it| it.syntax()); let where_preds = where_preds.iter().map(|it| it.syntax()); - transform.apply_all(generic_params.chain(where_preds)); - } - } + transform + .apply_all(generic_params.chain(where_preds)) + .into_iter() + .filter_map(|it| { + if let Some(it) = ast::GenericParam::cast(it.clone()) { + Some(either::Either::Left(it)) + } else { + ast::WherePred::cast(it).map(either::Either::Right) + } + }) + .partition_map(|it| it) + } else { + (generic_params, where_preds) + }; let generic_param_list = make::generic_param_list(generic_params); let where_clause = diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 14601ca0207..31cadcf5ea8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,12 +1,17 @@ use syntax::{ - ast::{self, AstNode, HasName, edit_in_place::Indent, make}, + ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make}, syntax_editor::{Position, SyntaxEditor}, }; -use crate::{AssistContext, AssistId, Assists, utils}; +use crate::{ + AssistContext, AssistId, Assists, + utils::{self, DefaultMethods, IgnoreAssocItems}, +}; -fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) { +fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) { let indent = nominal.indent_level(); + + impl_.indent(indent); editor.insert_all( Position::after(nominal.syntax()), vec![ @@ -120,6 +125,126 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> ) } +// Assist: generate_impl_trait +// +// Adds this trait impl for a type. 
+// +// ``` +// trait $0Foo { +// fn foo(&self) -> i32; +// } +// ``` +// -> +// ``` +// trait Foo { +// fn foo(&self) -> i32; +// } +// +// impl Foo for ${1:_} { +// fn foo(&self) -> i32 { +// $0todo!() +// } +// } +// ``` +pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let name = ctx.find_node_at_offset::<ast::Name>()?; + let trait_ = ast::Trait::cast(name.syntax().parent()?)?; + let target_scope = ctx.sema.scope(trait_.syntax())?; + let hir_trait = ctx.sema.to_def(&trait_)?; + + let target = trait_.syntax().text_range(); + acc.add( + AssistId::generate("generate_impl_trait"), + format!("Generate `{name}` impl for type"), + target, + |edit| { + let mut editor = edit.make_editor(trait_.syntax()); + + let holder_arg = ast::GenericArg::TypeArg(make::type_arg(make::ty_placeholder())); + let missing_items = utils::filter_assoc_items( + &ctx.sema, + &hir_trait.items(ctx.db()), + DefaultMethods::No, + IgnoreAssocItems::DocHiddenAttrPresent, + ); + + let trait_gen_args = trait_.generic_param_list().map(|list| { + make::generic_arg_list(list.generic_params().map(|_| holder_arg.clone())) + }); + + let make_impl_ = |body| { + make::impl_trait( + trait_.unsafe_token().is_some(), + None, + trait_gen_args.clone(), + None, + None, + false, + make::ty(&name.text()), + make::ty_placeholder(), + None, + None, + body, + ) + .clone_for_update() + }; + + let impl_ = if missing_items.is_empty() { + make_impl_(None) + } else { + let impl_ = make_impl_(None); + let assoc_items = utils::add_trait_assoc_items_to_impl( + &ctx.sema, + ctx.config, + &missing_items, + hir_trait, + &impl_, + &target_scope, + ); + let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect(); + let assoc_item_list = make::assoc_item_list(Some(assoc_items)); + make_impl_(Some(assoc_item_list)) + }; + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) { + for generic in generics.generic_args() { + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(generic.syntax(), placeholder); + } + } + + if let Some(ty) = impl_.self_ty() { + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(ty.syntax(), placeholder); + } + + if let Some(expr) = + impl_.assoc_item_list().and_then(|it| it.assoc_items().find_map(extract_expr)) + { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(expr.syntax(), tabstop); + } else if let Some(l_curly) = + impl_.assoc_item_list().and_then(|it| it.l_curly_token()) + { + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); + } + } + + insert_impl(&mut editor, &impl_, &trait_); + edit.add_file_edits(ctx.vfs_file_id(), editor); + }, + ) +} + +fn extract_expr(item: ast::AssocItem) -> Option<ast::Expr> { + let ast::AssocItem::Fn(f) = item else { + return None; + }; + f.body()?.tail_expr() +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_target}; @@ -492,4 +617,209 @@ mod tests { "#, ); } + + #[test] + fn test_add_impl_trait() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo { + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + "#, + r#" + trait Foo { + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + + impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_use_generic() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo<T> { + fn 
foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + "#, + r#" + trait Foo<T> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + + impl Foo<${1:_}> for ${2:_} { + fn foo(&self) -> _ { + $0todo!() + } + } + "#, + ); + check_assist( + generate_impl_trait, + r#" + trait $0Foo<T, U> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + "#, + r#" + trait Foo<T, U> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + + impl Foo<${1:_}, ${2:_}> for ${3:_} { + fn foo(&self) -> _ { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_docs() { + check_assist( + generate_impl_trait, + r#" + /// foo + trait $0Foo { + /// foo method + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + "#, + r#" + /// foo + trait Foo { + /// foo method + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + + impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_assoc_types() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + "#, + r#" + trait Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + + impl Foo for ${1:_} { + type Output; + + fn foo(&self) -> Self::Output { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_indent() { + check_assist( + generate_impl_trait, + r#" + mod foo { + mod bar { + trait $0Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + } + } + "#, + r#" + mod foo { + mod bar { + trait Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + + impl Foo for ${1:_} { + type Output; + + fn foo(&self) -> Self::Output { + $0todo!() + } + } + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_empty() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo {} + "#, + r#" + trait Foo {} + + impl Foo for ${1:_} {$0} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index 4ddab2cfad0..9c4bcdd4030 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -94,7 +94,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> })?; let _ = process_ref_mut(&fn_); - let assoc_list = make::assoc_item_list().clone_for_update(); + let assoc_list = make::assoc_item_list(None).clone_for_update(); ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax()); impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_)); @@ -134,6 +134,9 @@ fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option if trait_ == famous.core_borrow_Borrow().as_ref() { return Some("BorrowMut"); } + if trait_ == famous.core_ops_Deref().as_ref() { + return Some("DerefMut"); + } None } @@ -142,6 +145,7 @@ fn process_method_name(name: ast::Name) -> Option<(ast::Name, &'static str)> { "index" => "index_mut", "as_ref" => "as_mut", "borrow" => "borrow_mut", + "deref" => "deref_mut", _ => return None, }; Some((name, new_name)) @@ -260,6 +264,39 @@ impl core::convert::AsRef<i32> for Foo { } "#, ); + + check_assist( + generate_mut_trait_impl, + r#" +//- minicore: deref +struct Foo(i32); + +impl core::ops::Deref$0 for Foo { + type Target = i32; + + fn deref(&self) -> &Self::Target { + 
&self.0 + } +} +"#, + r#" +struct Foo(i32); + +$0impl core::ops::DerefMut for Foo { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl core::ops::Deref for Foo { + type Target = i32; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} +"#, + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 51c2f65e025..5bda1226cda 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -4,12 +4,12 @@ use ide_db::{ }; use syntax::{ ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make}, - ted, + syntax_editor::Position, }; use crate::{ AssistContext, AssistId, Assists, - utils::{find_struct_impl, generate_impl}, + utils::{find_struct_impl, generate_impl_with_item}, }; // Assist: generate_new @@ -149,7 +149,53 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option .clone_for_update(); fn_.indent(1.into()); - if let Some(cap) = ctx.config.snippet_cap { + let mut editor = builder.make_editor(strukt.syntax()); + + // Get the node for set annotation + let contain_fn = if let Some(impl_def) = impl_def { + fn_.indent(impl_def.indent_level()); + + if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token()) + { + editor.insert_all( + Position::after(l_curly), + vec![ + make::tokens::whitespace(&format!("\n{}", impl_def.indent_level() + 1)) + .into(), + fn_.syntax().clone().into(), + make::tokens::whitespace("\n").into(), + ], + ); + fn_.syntax().clone() + } else { + let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let list = make::assoc_item_list(Some(items)); + editor.insert(Position::after(impl_def.syntax()), list.syntax()); + list.syntax().clone() + } + } else { + // Generate a new impl to add the method to + let indent_level = strukt.indent_level(); + let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let list = make::assoc_item_list(Some(body)); + let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)); + + impl_def.indent(strukt.indent_level()); + + // Insert it after the adt + editor.insert_all( + Position::after(strukt.syntax()), + vec![ + make::tokens::whitespace(&format!("\n\n{indent_level}")).into(), + impl_def.syntax().clone().into(), + ], + ); + impl_def.syntax().clone() + }; + + if let Some(fn_) = contain_fn.descendants().find_map(ast::Fn::cast) + && let Some(cap) = ctx.config.snippet_cap + { match strukt.kind() { StructKind::Tuple(_) => { let struct_args = fn_ @@ -168,8 +214,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) { if let Some(fn_pat) = fn_param.pat() { let fn_pat = fn_pat.syntax().clone(); - builder - .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]); + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation_all(vec![struct_arg, fn_pat], placeholder) } } } @@ -179,36 +225,12 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option // Add a tabstop before the name if let Some(name) = fn_.name() { - builder.add_tabstop_before(cap, name); + let tabstop_before = builder.make_tabstop_before(cap); + editor.add_annotation(name.syntax(), tabstop_before); } } - // Get the mutable version of the impl to modify - let 
impl_def = if let Some(impl_def) = impl_def { - fn_.indent(impl_def.indent_level()); - builder.make_mut(impl_def) - } else { - // Generate a new impl to add the method to - let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone())); - let indent_level = strukt.indent_level(); - fn_.indent(indent_level); - - // Insert it after the adt - let strukt = builder.make_mut(strukt.clone()); - - ted::insert_all_raw( - ted::Position::after(strukt.syntax()), - vec![ - make::tokens::whitespace(&format!("\n\n{indent_level}")).into(), - impl_def.syntax().clone().into(), - ], - ); - - impl_def - }; - - // Add the `new` method at the start of the impl - impl_def.get_or_create_assoc_item_list().add_item_at_start(fn_.into()); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 154b502e1bf..92a4bd35b3e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -3,7 +3,7 @@ use ide_db::assists::AssistId; use syntax::{ AstNode, SyntaxKind, T, ast::{ - self, HasGenericParams, HasName, + self, HasGenericParams, HasName, HasVisibility, edit_in_place::{HasVisibilityEdit, Indent}, make, }, @@ -164,6 +164,12 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ /// `E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + if let Some(vis) = has_vis.visibility() + && let Some(token) = vis.syntax().next_sibling_or_token() + && token.kind() == SyntaxKind::WHITESPACE + { + ted::remove(token); + } has_vis.set_visibility(None); } } @@ -333,11 +339,11 @@ impl F$0oo { struct Foo; trait NewTrait { - fn a_func() -> Option<()>; + fn a_func() -> Option<()>; } impl NewTrait for Foo { - fn a_func() -> Option<()> { + fn a_func() -> Option<()> { Some(()) } }"#, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index b7b8bc604a5..1549b414dcc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -537,8 +537,13 @@ fn inline( if let Some(generic_arg_list) = generic_arg_list.clone() { if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) { - PathTransform::function_call(target, source, function, generic_arg_list) - .apply(body.syntax()); + body.reindent_to(IndentLevel(0)); + if let Some(new_body) = ast::BlockExpr::cast( + PathTransform::function_call(target, source, function, generic_arg_list) + .apply(body.syntax()), + ) { + body = new_body; + } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 806c8fba9ea..45bb6ce9129 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -5,12 +5,12 @@ use syntax::{ SyntaxKind::WHITESPACE, T, ast::{self, AstNode, HasName, make}, - ted::{self, Position}, + 
syntax_editor::{Position, SyntaxEditor}, }; use crate::{ AssistConfig, AssistId, - assist_context::{AssistContext, Assists, SourceChangeBuilder}, + assist_context::{AssistContext, Assists}, utils::{ DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl, @@ -126,98 +126,56 @@ fn add_assist( let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`"); acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| { - let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax()); + let insert_after = Position::after(adt.syntax()); let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false); - let impl_def_with_items = impl_def_from_trait( + let impl_def = impl_def_from_trait( &ctx.sema, ctx.config, adt, &annotated_name, trait_, replace_trait_path, + impl_is_unsafe, ); - update_attribute(builder, old_derives, old_tree, old_trait_path, attr); - let trait_path = make::ty_path(replace_trait_path.clone()); + let mut editor = builder.make_editor(attr.syntax()); + update_attribute(&mut editor, old_derives, old_tree, old_trait_path, attr); - match (ctx.config.snippet_cap, impl_def_with_items) { - (None, None) => { - let impl_def = generate_trait_impl(adt, trait_path); - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } + let trait_path = make::ty_path(replace_trait_path.clone()); - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (None, Some((impl_def, _))) => { - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (Some(cap), None) => { - let impl_def = generate_trait_impl(adt, trait_path); - - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } + let (impl_def, first_assoc_item) = if let Some(impl_def) = impl_def { + ( + impl_def.clone(), + impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()), + ) + } else { + (generate_trait_impl(impl_is_unsafe, adt, trait_path), None) + }; - if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + if let Some(cap) = ctx.config.snippet_cap { + if let Some(first_assoc_item) = first_assoc_item { + if let ast::AssocItem::Fn(ref func) = first_assoc_item + && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" { - builder.add_tabstop_after_token(cap, l_curly); - } - - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (Some(cap), Some((impl_def, first_assoc_item))) => { - let mut added_snippet = false; - - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } - - if let ast::AssocItem::Fn(ref func) = first_assoc_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { - if m.syntax().text() == "todo!()" { - // Make the `todo!()` a placeholder - builder.add_placeholder_snippet(cap, m); - added_snippet = true; - } - } - } - - if !added_snippet { + // Make the `todo!()` a placeholder + builder.add_placeholder_snippet(cap, m); + } else { // If we haven't already added 
a snippet, add a tabstop before the generated function builder.add_tabstop_before(cap, first_assoc_item); } - - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); + } else if let Some(l_curly) = + impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + { + builder.add_tabstop_after_token(cap, l_curly); } - }; + } + + editor.insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -228,7 +186,8 @@ fn impl_def_from_trait( annotated_name: &ast::Name, trait_: Option<hir::Trait>, trait_path: &ast::Path, -) -> Option<(ast::Impl, ast::AssocItem)> { + impl_is_unsafe: bool, +) -> Option<ast::Impl> { let trait_ = trait_?; let target_scope = sema.scope(annotated_name.syntax())?; @@ -245,21 +204,43 @@ fn impl_def_from_trait( if trait_items.is_empty() { return None; } - let impl_def = generate_trait_impl(adt, make::ty_path(trait_path.clone())); + let impl_def = generate_trait_impl(impl_is_unsafe, adt, make::ty_path(trait_path.clone())); - let first_assoc_item = + let assoc_items = add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope); + let assoc_item_list = if let Some((first, other)) = + assoc_items.split_first().map(|(first, other)| (first.clone_subtree(), other)) + { + let first_item = if let ast::AssocItem::Fn(ref func) = first + && let Some(body) = gen_trait_fn_body(func, trait_path, adt, None) + && let Some(func_body) = func.body() + { + let mut editor = SyntaxEditor::new(first.syntax().clone()); + editor.replace(func_body.syntax(), body.syntax()); + ast::AssocItem::cast(editor.finish().new_root().clone()) + } else { + Some(first.clone()) + }; + let items = first_item + .into_iter() + .chain(other.iter().cloned()) + .map(either::Either::Right) + .collect(); + make::assoc_item_list(Some(items)) + } else { + make::assoc_item_list(None) + } + .clone_for_update(); - // Generate a default `impl` function body for the derived trait. 
- if let ast::AssocItem::Fn(ref func) = first_assoc_item { - let _ = gen_trait_fn_body(func, trait_path, adt, None); - }; - - Some((impl_def, first_assoc_item)) + let impl_def = impl_def.clone_subtree(); + let mut editor = SyntaxEditor::new(impl_def.syntax().clone()); + editor.replace(impl_def.assoc_item_list()?.syntax(), assoc_item_list.syntax()); + let impl_def = ast::Impl::cast(editor.finish().new_root().clone())?; + Some(impl_def) } fn update_attribute( - builder: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, old_derives: &[ast::Path], old_tree: &ast::TokenTree, old_trait_path: &ast::Path, @@ -272,8 +253,6 @@ fn update_attribute( let has_more_derives = !new_derives.is_empty(); if has_more_derives { - let old_tree = builder.make_mut(old_tree.clone()); - // Make the paths into flat lists of tokens in a vec let tt = new_derives.iter().map(|path| path.syntax().clone()).map(|node| { node.descendants_with_tokens() @@ -288,18 +267,17 @@ fn update_attribute( let tt = tt.collect::<Vec<_>>(); let new_tree = make::token_tree(T!['('], tt).clone_for_update(); - ted::replace(old_tree.syntax(), new_tree.syntax()); + editor.replace(old_tree.syntax(), new_tree.syntax()); } else { // Remove the attr and any trailing whitespace - let attr = builder.make_mut(attr.clone()); if let Some(line_break) = attr.syntax().next_sibling_or_token().filter(|t| t.kind() == WHITESPACE) { - ted::remove(line_break) + editor.delete(line_break) } - ted::remove(attr.syntax()) + editor.delete(attr.syntax()) } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index cde0d875e0d..4682c047323 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -302,6 +302,7 @@ mod handlers { generate_function::generate_function, generate_impl::generate_impl, generate_impl::generate_trait_impl, + generate_impl::generate_impl_trait, generate_is_empty_from_len::generate_is_empty_from_len, generate_mut_trait_impl::generate_mut_trait_impl, generate_new::generate_new, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index fc1c6928ff3..91348be97eb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1881,6 +1881,29 @@ impl<T: Clone> Ctx<T> {$0} } #[test] +fn doctest_generate_impl_trait() { + check_doc_test( + "generate_impl_trait", + r#####" +trait $0Foo { + fn foo(&self) -> i32; +} +"#####, + r#####" +trait Foo { + fn foo(&self) -> i32; +} + +impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } +} +"#####, + ) +} + +#[test] fn doctest_generate_is_empty_from_len() { check_doc_test( "generate_is_empty_from_len", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 2c8cb6e4d91..15c7a6a3fc2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -23,10 +23,11 @@ use syntax::{ ast::{ self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, edit::{AstNodeEdit, IndentLevel}, - edit_in_place::{AttrsOwnerEdit, Indent, Removable}, + edit_in_place::{AttrsOwnerEdit, Removable}, make, syntax_factory::SyntaxFactory, }, + syntax_editor::SyntaxEditor, ted, }; @@ -178,6 +179,7 @@ pub fn filter_assoc_items( /// 
[`filter_assoc_items()`]), clones each item for update and applies path transformation to it, /// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got /// inserted. +#[must_use] pub fn add_trait_assoc_items_to_impl( sema: &Semantics<'_, RootDatabase>, config: &AssistConfig, @@ -185,71 +187,66 @@ pub fn add_trait_assoc_items_to_impl( trait_: hir::Trait, impl_: &ast::Impl, target_scope: &hir::SemanticsScope<'_>, -) -> ast::AssocItem { +) -> Vec<ast::AssocItem> { let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1; - let items = original_items.iter().map(|InFile { file_id, value: original_item }| { - let cloned_item = { - if let Some(macro_file) = file_id.macro_file() { - let span_map = sema.db.expansion_span_map(macro_file); - let item_prettified = prettify_macro_expansion( - sema.db, - original_item.syntax().clone(), - &span_map, - target_scope.krate().into(), - ); - if let Some(formatted) = ast::AssocItem::cast(item_prettified) { - return formatted; - } else { - stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); + original_items + .iter() + .map(|InFile { file_id, value: original_item }| { + let mut cloned_item = { + if let Some(macro_file) = file_id.macro_file() { + let span_map = sema.db.expansion_span_map(macro_file); + let item_prettified = prettify_macro_expansion( + sema.db, + original_item.syntax().clone(), + &span_map, + target_scope.krate().into(), + ); + if let Some(formatted) = ast::AssocItem::cast(item_prettified) { + return formatted; + } else { + stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); + } } + original_item.clone_for_update() } - original_item.clone_for_update() - }; - - if let Some(source_scope) = sema.scope(original_item.syntax()) { - // FIXME: Paths in nested macros are not handled well. See - // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test. - let transform = - PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone()); - transform.apply(cloned_item.syntax()); - } - cloned_item.remove_attrs_and_docs(); - cloned_item.reindent_to(new_indent_level); - cloned_item - }); - - let assoc_item_list = impl_.get_or_create_assoc_item_list(); - - let mut first_item = None; - for item in items { - first_item.get_or_insert_with(|| item.clone()); - match &item { - ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { - let body = AstNodeEdit::indent( - &make::block_expr( - None, - Some(match config.expr_fill_default { - ExprFillDefaultMode::Todo => make::ext::expr_todo(), - ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), - ExprFillDefaultMode::Default => make::ext::expr_todo(), - }), - ), - new_indent_level, - ); - ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax()) + .reset_indent(); + + if let Some(source_scope) = sema.scope(original_item.syntax()) { + // FIXME: Paths in nested macros are not handled well. See + // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test. 
+ let transform = + PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone()); + cloned_item = ast::AssocItem::cast(transform.apply(cloned_item.syntax())).unwrap(); } - ast::AssocItem::TypeAlias(type_alias) => { - if let Some(type_bound_list) = type_alias.type_bound_list() { - type_bound_list.remove() + cloned_item.remove_attrs_and_docs(); + cloned_item + }) + .map(|item| { + match &item { + ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { + let body = AstNodeEdit::indent( + &make::block_expr( + None, + Some(match config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }), + ), + IndentLevel::single(), + ); + ted::replace(fn_.get_or_create_body().syntax(), body.syntax()); } + ast::AssocItem::TypeAlias(type_alias) => { + if let Some(type_bound_list) = type_alias.type_bound_list() { + type_bound_list.remove() + } + } + _ => {} } - _ => {} - } - - assoc_item_list.add_item(item) - } - - first_item.unwrap() + AstNodeEdit::indent(&item, new_indent_level) + }) + .collect() } pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { @@ -334,7 +331,7 @@ fn invert_special_case(make: &SyntaxFactory, expr: &ast::Expr) -> Option<ast::Ex fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> { match expr { ast::Expr::BinExpr(bin) => { - let bin = bin.clone_for_update(); + let bin = bin.clone_subtree(); let op_token = bin.op_token()?; let rev_token = match op_token.kind() { T![==] => T![!=], @@ -350,8 +347,9 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> { ); } }; - ted::replace(op_token, make::token(rev_token)); - Some(bin.into()) + let mut bin_editor = SyntaxEditor::new(bin.syntax().clone()); + bin_editor.replace(op_token, make::token(rev_token)); + ast::Expr::cast(bin_editor.finish().new_root().clone()) } ast::Expr::MethodCallExpr(mce) => { let receiver = mce.receiver()?; @@ -567,6 +565,7 @@ pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String { /// /// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`. // FIXME: migrate remaining uses to `generate_trait_impl` +#[allow(dead_code)] pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String { generate_impl_text_inner(adt, Some(trait_text), true, code) } @@ -663,16 +662,23 @@ fn generate_impl_text_inner( /// Generates the corresponding `impl Type {}` including type and lifetime /// parameters. +pub(crate) fn generate_impl_with_item( + adt: &ast::Adt, + body: Option<ast::AssocItemList>, +) -> ast::Impl { + generate_impl_inner(false, adt, None, true, body) +} + pub(crate) fn generate_impl(adt: &ast::Adt) -> ast::Impl { - generate_impl_inner(adt, None, true) + generate_impl_inner(false, adt, None, true, None) } /// Generates the corresponding `impl <trait> for Type {}` including type /// and lifetime parameters, with `<trait>` appended to `impl`'s generic parameters' bounds. /// /// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`. 
-pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { - generate_impl_inner(adt, Some(trait_), true) +pub(crate) fn generate_trait_impl(is_unsafe: bool, adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { + generate_impl_inner(is_unsafe, adt, Some(trait_), true, None) } /// Generates the corresponding `impl <trait> for Type {}` including type @@ -680,13 +686,15 @@ pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Imp /// /// This is useful for traits like `From<T>`, since `impl<T> From<T> for U<T>` doesn't require `T: From<T>`. pub(crate) fn generate_trait_impl_intransitive(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { - generate_impl_inner(adt, Some(trait_), false) + generate_impl_inner(false, adt, Some(trait_), false, None) } fn generate_impl_inner( + is_unsafe: bool, adt: &ast::Adt, trait_: Option<ast::Type>, trait_is_transitive: bool, + body: Option<ast::AssocItemList>, ) -> ast::Impl { // Ensure lifetime params are before type & const params let generic_params = adt.generic_param_list().map(|generic_params| { @@ -726,7 +734,7 @@ fn generate_impl_inner( let impl_ = match trait_ { Some(trait_) => make::impl_trait( - false, + is_unsafe, None, None, generic_params, @@ -736,9 +744,9 @@ fn generate_impl_inner( ty, None, adt.where_clause(), - None, + body, ), - None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), None), + None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), body), } .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs index c58bdd9e8ed..87e90e85193 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -1,10 +1,7 @@ //! This module contains functions to generate default trait impl function bodies where possible. use hir::TraitRef; -use syntax::{ - ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make}, - ted, -}; +use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make}; /// Generate custom trait bodies without default implementation where possible. /// @@ -18,21 +15,33 @@ pub(crate) fn gen_trait_fn_body( trait_path: &ast::Path, adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>, -) -> Option<()> { +) -> Option<ast::BlockExpr> { + let _ = func.body()?; match trait_path.segment()?.name_ref()?.text().as_str() { - "Clone" => gen_clone_impl(adt, func), - "Debug" => gen_debug_impl(adt, func), - "Default" => gen_default_impl(adt, func), - "Hash" => gen_hash_impl(adt, func), - "PartialEq" => gen_partial_eq(adt, func, trait_ref), - "PartialOrd" => gen_partial_ord(adt, func, trait_ref), + "Clone" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "clone")); + gen_clone_impl(adt) + } + "Debug" => gen_debug_impl(adt), + "Default" => gen_default_impl(adt), + "Hash" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "hash")); + gen_hash_impl(adt) + } + "PartialEq" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "eq")); + gen_partial_eq(adt, trait_ref) + } + "PartialOrd" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp")); + gen_partial_ord(adt, trait_ref) + } _ => None, } } /// Generate a `Clone` impl based on the fields and members of the target type. 
-fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "clone")); +fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_clone_call(target: ast::Expr) -> ast::Expr { let method = make::name_ref("clone"); make::expr_method_call(target, method, make::arg_list(None)).into() @@ -139,12 +148,11 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } }; let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } /// Generate a `Debug` impl based on the fields and members of the target type. -fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { +fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { let annotated_name = adt.name()?; match adt { // `Debug` cannot be derived for unions, so no default impl can be provided. @@ -248,8 +256,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let body = make::block_expr(None, Some(match_expr.into())); let body = body.indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } ast::Adt::Struct(strukt) => { @@ -296,14 +303,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let method = make::name_ref("finish"); let expr = make::expr_method_call(expr, method, make::arg_list(None)).into(); let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } } } /// Generate a `Debug` impl based on the fields and members of the target type. -fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { +fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_default_call() -> Option<ast::Expr> { let fn_name = make::ext::path_from_idents(["Default", "default"])?; Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into()) @@ -342,15 +348,13 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } }; let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } } } /// Generate a `Hash` impl based on the fields and members of the target type. -fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "hash")); +fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_hash_call(target: ast::Expr) -> ast::Stmt { let method = make::name_ref("hash"); let arg = make::expr_path(make::ext::ident_path("state")); @@ -400,13 +404,11 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } /// Generate a `PartialEq` impl based on the fields and members of the target type. 
-fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "eq")); +fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> { fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> { match expr { Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)), @@ -595,12 +597,10 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_> }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } -fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp")); +fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> { fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> { let mut arms = vec![]; @@ -686,8 +686,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_ }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } fn make_discriminant() -> Option<ast::Expr> { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 975c2f02259..bcf8c0ec527 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -276,7 +276,7 @@ fn get_transformed_assoc_item( let assoc_item = assoc_item.clone_for_update(); // FIXME: Paths in nested macros are not handled well. See // `macro_generated_assoc_item2` test. - transform.apply(assoc_item.syntax()); + let assoc_item = ast::AssocItem::cast(transform.apply(assoc_item.syntax()))?; assoc_item.remove_attrs_and_docs(); Some(assoc_item) } @@ -301,7 +301,7 @@ fn get_transformed_fn( let fn_ = fn_.clone_for_update(); // FIXME: Paths in nested macros are not handled well. See // `macro_generated_assoc_item2` test. 
- transform.apply(fn_.syntax()); + let fn_ = ast::Fn::cast(transform.apply(fn_.syntax()))?; fn_.remove_attrs_and_docs(); match async_ { AsyncSugaring::Desugar => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 0ab880bcfe7..b7432d89c7b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -12,15 +12,16 @@ use span::Edition; use syntax::{ NodeOrToken, SyntaxNode, ast::{self, AstNode, HasGenericArgs, make}, - ted, + syntax_editor::{self, SyntaxEditor}, }; -#[derive(Default)] +#[derive(Default, Debug)] struct AstSubsts { types_and_consts: Vec<TypeOrConst>, lifetimes: Vec<ast::LifetimeArg>, } +#[derive(Debug)] enum TypeOrConst { Either(ast::TypeArg), // indistinguishable type or const param Const(ast::ConstArg), @@ -128,15 +129,18 @@ impl<'a> PathTransform<'a> { } } - pub fn apply(&self, syntax: &SyntaxNode) { + #[must_use] + pub fn apply(&self, syntax: &SyntaxNode) -> SyntaxNode { self.build_ctx().apply(syntax) } - pub fn apply_all<'b>(&self, nodes: impl IntoIterator<Item = &'b SyntaxNode>) { + #[must_use] + pub fn apply_all<'b>( + &self, + nodes: impl IntoIterator<Item = &'b SyntaxNode>, + ) -> Vec<SyntaxNode> { let ctx = self.build_ctx(); - for node in nodes { - ctx.apply(node); - } + nodes.into_iter().map(|node| ctx.apply(&node.clone())).collect() } fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode { @@ -236,7 +240,7 @@ impl<'a> PathTransform<'a> { Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?)) }) .collect(); - let ctx = Ctx { + let mut ctx = Ctx { type_substs, const_substs, lifetime_substs, @@ -272,42 +276,75 @@ fn preorder_rev(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> { } impl Ctx<'_> { - fn apply(&self, item: &SyntaxNode) { + fn apply(&self, item: &SyntaxNode) -> SyntaxNode { // `transform_path` may update a node's parent and that would break the // tree traversal. Thus all paths in the tree are collected into a vec // so that such operation is safe. - let paths = preorder_rev(item).filter_map(ast::Path::cast).collect::<Vec<_>>(); - for path in paths { - self.transform_path(path); - } - - preorder_rev(item).filter_map(ast::Lifetime::cast).for_each(|lifetime| { + let item = self.transform_path(item).clone_subtree(); + let mut editor = SyntaxEditor::new(item.clone()); + preorder_rev(&item).filter_map(ast::Lifetime::cast).for_each(|lifetime| { if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) { - ted::replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax()); + editor + .replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax()); } }); + + editor.finish().new_root().clone() } - fn transform_default_values(&self, defaulted_params: Vec<DefaultedParam>) { + fn transform_default_values(&mut self, defaulted_params: Vec<DefaultedParam>) { // By now the default values are simply copied from where they are declared // and should be transformed. As any value is allowed to refer to previous // generic (both type and const) parameters, they should be all iterated left-to-right. 
for param in defaulted_params { - let value = match param { - Either::Left(k) => self.type_substs.get(&k).unwrap().syntax(), - Either::Right(k) => self.const_substs.get(&k).unwrap(), + let value = match ¶m { + Either::Left(k) => self.type_substs.get(k).unwrap().syntax(), + Either::Right(k) => self.const_substs.get(k).unwrap(), }; // `transform_path` may update a node's parent and that would break the // tree traversal. Thus all paths in the tree are collected into a vec // so that such operation is safe. - let paths = preorder_rev(value).filter_map(ast::Path::cast).collect::<Vec<_>>(); - for path in paths { - self.transform_path(path); + let new_value = self.transform_path(value); + match param { + Either::Left(k) => { + self.type_substs.insert(k, ast::Type::cast(new_value.clone()).unwrap()); + } + Either::Right(k) => { + self.const_substs.insert(k, new_value.clone()); + } } } } - fn transform_path(&self, path: ast::Path) -> Option<()> { + fn transform_path(&self, path: &SyntaxNode) -> SyntaxNode { + fn find_child_paths(root_path: &SyntaxNode) -> Vec<ast::Path> { + let mut result = Vec::new(); + for child in root_path.children() { + if let Some(child_path) = ast::Path::cast(child.clone()) { + result.push(child_path); + } else { + result.extend(find_child_paths(&child)); + } + } + result + } + let root_path = path.clone_subtree(); + let result = find_child_paths(&root_path); + let mut editor = SyntaxEditor::new(root_path.clone()); + for sub_path in result { + let new = self.transform_path(sub_path.syntax()); + editor.replace(sub_path.syntax(), new); + } + let update_sub_item = editor.finish().new_root().clone().clone_subtree(); + let item = find_child_paths(&update_sub_item); + let mut editor = SyntaxEditor::new(update_sub_item); + for sub_path in item { + self.transform_path_(&mut editor, &sub_path); + } + editor.finish().new_root().clone() + } + + fn transform_path_(&self, editor: &mut SyntaxEditor, path: &ast::Path) -> Option<()> { if path.qualifier().is_some() { return None; } @@ -319,8 +356,7 @@ impl Ctx<'_> { // don't try to qualify sole `self` either, they are usually locals, but are returned as modules due to namespace clashing return None; } - - let resolution = self.source_scope.speculative_resolve(&path)?; + let resolution = self.source_scope.speculative_resolve(path)?; match resolution { hir::PathResolution::TypeParam(tp) => { @@ -360,12 +396,12 @@ impl Ctx<'_> { let segment = make::path_segment_ty(subst.clone(), trait_ref); let qualified = make::path_from_segments(std::iter::once(segment), false); - ted::replace(path.syntax(), qualified.clone_for_update().syntax()); + editor.replace(path.syntax(), qualified.clone_for_update().syntax()); } else if let Some(path_ty) = ast::PathType::cast(parent) { let old = path_ty.syntax(); if old.parent().is_some() { - ted::replace(old, subst.clone_subtree().clone_for_update().syntax()); + editor.replace(old, subst.clone_subtree().clone_for_update().syntax()); } else { // Some `path_ty` has no parent, especially ones made for default value // of type parameters. 
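
The hunks above (and the `update_attribute`/`add_trait_assoc_items_to_impl` changes earlier in this commit) replace in-place `ted` mutations with `SyntaxEditor`'s collect-then-edit pattern: the nodes to change are gathered first, edits are queued against a detached `clone_subtree()` copy, and the rewritten tree is obtained from `finish().new_root()`. The following is a minimal illustrative sketch of that shape, not code from the commit; it assumes only the `syntax` crate items that appear in these hunks (`SyntaxEditor`, `clone_subtree`, `replace`, `finish`, `new_root`), while the `rewrite_paths` helper and its closure parameter are invented for the example.

    use syntax::{
        SyntaxNode,
        ast::{self, AstNode},
        syntax_editor::SyntaxEditor,
    };

    // Hypothetical helper showing the collect-then-edit shape used in this patch.
    fn rewrite_paths(
        root: &SyntaxNode,
        rewrite: impl Fn(&ast::Path) -> Option<SyntaxNode>,
    ) -> SyntaxNode {
        // Work on a detached copy so queued edits cannot disturb an unrelated parent tree.
        let root = root.clone_subtree();
        // Collect the nodes of interest up front; edits are only applied by `finish()`,
        // so the traversal never observes a half-mutated tree.
        let paths: Vec<ast::Path> = root.descendants().filter_map(ast::Path::cast).collect();
        let mut editor = SyntaxEditor::new(root.clone());
        for path in paths {
            if let Some(new_node) = rewrite(&path) {
                editor.replace(path.syntax(), new_node);
            }
        }
        // The original tree is untouched; the edited tree comes back as a new root.
        editor.finish().new_root().clone()
    }

Because the editor applies all queued edits at once, collecting the `ast::Path` nodes before rewriting sidesteps the parent-invalidation problem the existing comment warns about, which is why the patch can drop the eager `ted::replace` calls.
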
@@ -377,13 +413,13 @@ impl Ctx<'_> { } let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?; let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?; - ted::replace_all( + editor.replace_all( start..=end, new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(), ); } } else { - ted::replace( + editor.replace( path.syntax(), subst.clone_subtree().clone_for_update().syntax(), ); @@ -409,17 +445,28 @@ impl Ctx<'_> { }; let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); + let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree()); if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(segment) = res.segment() { - let old = segment.get_or_create_generic_arg_list(); - ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update()) + if let Some(old) = segment.generic_arg_list() { + res_editor.replace( + old.syntax(), + args.clone_subtree().syntax().clone_for_update(), + ) + } else { + res_editor.insert( + syntax_editor::Position::last_child_of(segment.syntax()), + args.clone_subtree().syntax().clone_for_update(), + ); + } } } - ted::replace(path.syntax(), res.syntax()) + let res = res_editor.finish().new_root().clone(); + editor.replace(path.syntax().clone(), res); } hir::PathResolution::ConstParam(cp) => { if let Some(subst) = self.const_substs.get(&cp) { - ted::replace(path.syntax(), subst.clone_subtree().clone_for_update()); + editor.replace(path.syntax(), subst.clone_subtree().clone_for_update()); } } hir::PathResolution::SelfType(imp) => { @@ -456,13 +503,13 @@ impl Ctx<'_> { mod_path_to_ast(&found_path, self.target_edition).qualifier() { let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); - ted::replace(path.syntax(), res.syntax()); + editor.replace(path.syntax(), res.syntax()); return Some(()); } } } - ted::replace(path.syntax(), ast_ty.syntax()); + editor.replace(path.syntax(), ast_ty.syntax()); } hir::PathResolution::Local(_) | hir::PathResolution::Def(_) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 4efb83ba323..9cf0bcf9190 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -531,7 +531,7 @@ impl<'a> FindUsages<'a> { node.token_at_offset(offset) .find(|it| { // `name` is stripped of raw ident prefix. See the comment on name retrieval below. - it.text().trim_start_matches("r#") == name + it.text().trim_start_matches('\'').trim_start_matches("r#") == name }) .into_iter() .flat_map(move |token| { @@ -938,7 +938,12 @@ impl<'a> FindUsages<'a> { }) }; // We need to search without the `r#`, hence `as_str` access. - self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.as_str().to_smolstr()) + // We strip `'` from lifetimes and labels as otherwise they may not match with raw-escaped ones, + // e.g. if we search `'foo` we won't find `'r#foo`. 
+ self.def + .name(sema.db) + .or_else(self_kw_refs) + .map(|it| it.as_str().trim_start_matches('\'').to_smolstr()) } }; let name = match &name { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs index f20b6dea122..e31367f3b14 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -131,4 +131,28 @@ fn foo(v: Enum<()>) { "#, ); } + + #[test] + fn regression_20259() { + check_diagnostics( + r#" +//- minicore: deref +use core::ops::Deref; + +struct Foo<T>(T); + +impl<T> Deref for Foo<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +fn test(x: Foo<(i32, bool)>) { + let (_a, _b): &(i32, bool) = &x; +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index c081796d078..1901bcc797e 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -48,7 +48,6 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { let mut res = vec![]; let mut visited_comments = FxHashSet::default(); let mut visited_nodes = FxHashSet::default(); - let mut merged_fn_bodies = FxHashSet::default(); // regions can be nested, here is a LIFO buffer let mut region_starts: Vec<TextSize> = vec![]; @@ -73,15 +72,16 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { continue; } - if let Some(body) = fn_node.body() { + if fn_node.body().is_some() { + // Get the actual start of the function (excluding doc comments) + let fn_start = fn_node + .fn_token() + .map(|token| token.text_range().start()) + .unwrap_or(node.text_range().start()); res.push(Fold { - range: TextRange::new( - node.text_range().start(), - node.text_range().end(), - ), + range: TextRange::new(fn_start, node.text_range().end()), kind: FoldKind::Function, }); - merged_fn_bodies.insert(body.syntax().text_range()); continue; } } @@ -690,4 +690,21 @@ type Foo<T, U> = foo<fold arglist>< "#, ) } + + #[test] + fn test_fold_doc_comments_with_multiline_paramlist_function() { + check( + r#" +<fold comment>/// A very very very very very very very very very very very very very very very +/// very very very long description</fold> +<fold function>fn foo<fold arglist>( + very_long_parameter_name: u32, + another_very_long_parameter_name: u32, + third_very_long_param: u32, +)</fold> <fold block>{ + todo!() +}</fold></fold> +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs index 0069452e7b9..49fec0a793c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -77,17 +77,18 @@ pub(super) fn fn_ptr_hints( return None; } - let parent_for_type = func + let parent_for_binder = func .syntax() .ancestors() .skip(1) .take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE)) - .find_map(ast::ForType::cast); + .find_map(ast::ForType::cast) + .and_then(|it| it.for_binder()); let param_list = func.param_list()?; - let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list()); + let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list()); 
let ret_type = func.ret_type(); - let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token()); + let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token()); hints_( acc, ctx, @@ -143,15 +144,16 @@ pub(super) fn fn_path_hints( // FIXME: Support general path types let (param_list, ret_type) = func.path().as_ref().and_then(path_as_fn)?; - let parent_for_type = func + let parent_for_binder = func .syntax() .ancestors() .skip(1) .take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE)) - .find_map(ast::ForType::cast); + .find_map(ast::ForType::cast) + .and_then(|it| it.for_binder()); - let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list()); - let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token()); + let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list()); + let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token()); hints_( acc, ctx, diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index fe874bc99b4..86b88a17c75 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -3088,4 +3088,42 @@ fn main() { "#]], ); } + + #[test] + fn raw_labels_and_lifetimes() { + check( + r#" +fn foo<'r#fn>(s: &'r#fn str) { + let _a: &'r#fn str = s; + let _b: &'r#fn str; + 'r#break$0: { + break 'r#break; + } +} + "#, + expect![[r#" + 'r#break Label FileId(0) 87..96 87..95 + + FileId(0) 113..121 + "#]], + ); + check( + r#" +fn foo<'r#fn$0>(s: &'r#fn str) { + let _a: &'r#fn str = s; + let _b: &'r#fn str; + 'r#break: { + break 'r#break; + } +} + "#, + expect![[r#" + 'r#fn LifetimeParam FileId(0) 7..12 + + FileId(0) 18..23 + FileId(0) 44..49 + FileId(0) 72..77 + "#]], + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index fb84e8e6b47..a07c647c2cb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -12,6 +12,7 @@ use ide_db::{ source_change::SourceChangeBuilder, }; use itertools::Itertools; +use std::fmt::Write; use stdx::{always, never}; use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; @@ -459,35 +460,22 @@ fn rename_self_to_param( } fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> { - fn target_type_name(impl_def: &ast::Impl) -> Option<String> { - if let Some(ast::Type::PathType(p)) = impl_def.self_ty() { - return Some(p.path()?.segment()?.name_ref()?.text().to_string()); - } - None - } + let mut replacement_text = new_name; + replacement_text.push_str(": "); - match self_param.syntax().ancestors().find_map(ast::Impl::cast) { - Some(impl_def) => { - let type_name = target_type_name(&impl_def)?; + if self_param.amp_token().is_some() { + replacement_text.push('&'); + } + if let Some(lifetime) = self_param.lifetime() { + write!(replacement_text, "{lifetime} ").unwrap(); + } + if self_param.amp_token().and(self_param.mut_token()).is_some() { + replacement_text.push_str("mut "); + } - let mut replacement_text = new_name; - replacement_text.push_str(": "); - match (self_param.amp_token(), self_param.mut_token()) { - (Some(_), None) => replacement_text.push('&'), - (Some(_), Some(_)) => replacement_text.push_str("&mut "), - (_, _) => (), - }; - replacement_text.push_str(type_name.as_str()); + replacement_text.push_str("Self"); - 
Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) - } - None => { - cov_mark::hit!(rename_self_outside_of_methods); - let mut replacement_text = new_name; - replacement_text.push_str(": _"); - Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) - } - } + Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) } #[cfg(test)] @@ -2069,7 +2057,7 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: &mut Foo) -> i32 { + fn f(foo: &mut Self) -> i32 { foo.i } } @@ -2095,7 +2083,33 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: Foo) -> i32 { + fn f(foo: Self) -> i32 { + foo.i + } +} +"#, + ); + } + + #[test] + fn test_owned_self_to_parameter_with_lifetime() { + cov_mark::check!(rename_self_to_param); + check( + "foo", + r#" +struct Foo<'a> { i: &'a i32 } + +impl<'a> Foo<'a> { + fn f(&'a $0self) -> i32 { + self.i + } +} +"#, + r#" +struct Foo<'a> { i: &'a i32 } + +impl<'a> Foo<'a> { + fn f(foo: &'a Self) -> i32 { foo.i } } @@ -2105,7 +2119,6 @@ impl Foo { #[test] fn test_self_outside_of_methods() { - cov_mark::check!(rename_self_outside_of_methods); check( "foo", r#" @@ -2114,7 +2127,7 @@ fn f($0self) -> i32 { } "#, r#" -fn f(foo: _) -> i32 { +fn f(foo: Self) -> i32 { foo.i } "#, @@ -2159,7 +2172,7 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: &Foo) -> i32 { + fn f(foo: &Self) -> i32 { let self_var = 1; foo.i } diff --git a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs index 1ccd20c25e9..4780743c4d9 100644 --- a/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs +++ b/src/tools/rust-analyzer/crates/intern/src/symbol/symbols.rs @@ -156,6 +156,7 @@ define_symbols! { cfg_attr, cfg_eval, cfg, + cfg_select, char, clone, Clone, diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs index 76656567e7f..ed8a91c39c0 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs @@ -572,9 +572,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { // test closure_binder // fn main() { for<'a> || (); } if p.at(T![for]) { - let b = p.start(); types::for_binder(p); - b.complete(p, CLOSURE_BINDER); } // test const_closure // fn main() { let cl = const || _ = 0; } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index 55c5dc400b9..cb1b59f6497 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -13,7 +13,7 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) { // test_err generic_param_list_recover // fn f<T: Clone,, U:, V>() {} -fn generic_param_list(p: &mut Parser<'_>) { +pub(super) fn generic_param_list(p: &mut Parser<'_>) { assert!(p.at(T![<])); let m = p.start(); delimited( @@ -147,7 +147,15 @@ fn type_bound(p: &mut Parser<'_>) -> bool { let has_paren = p.eat(T!['(']); match p.current() { LIFETIME_IDENT => lifetime(p), - T![for] => types::for_type(p, false), + // test for_binder_bound + // fn foo<T: for<'a> [const] async Trait>() {} + T![for] => { + types::for_binder(p); + if path_type_bound(p).is_err() { + m.abandon(p); + return false; + } + } // test precise_capturing // fn captures<'a: 'a, 'b: 'b, T>() -> impl 
Sized + use<'b, T, Self> {} @@ -180,44 +188,8 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); types::for_type(p, false) } - current => { - match current { - T![?] => p.bump_any(), - T![~] => { - p.bump_any(); - p.expect(T![const]); - } - T!['['] => { - p.bump_any(); - p.expect(T![const]); - p.expect(T![']']); - } - // test const_trait_bound - // const fn foo(_: impl const Trait) {} - T![const] => { - p.bump_any(); - } - // test async_trait_bound - // fn async_foo(_: impl async Fn(&i32)) {} - T![async] => { - p.bump_any(); - } - _ => (), - } - if paths::is_use_path_start(p) { - types::path_type_bounds(p, false); - // test_err type_bounds_macro_call_recovery - // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {} - if p.at(T![!]) { - let m = p.start(); - p.bump(T![!]); - p.error("unexpected `!` in type path, macro calls are not allowed here"); - if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) { - items::token_tree(p); - } - m.complete(p, ERROR); - } - } else { + _ => { + if path_type_bound(p).is_err() { m.abandon(p); return false; } @@ -231,6 +203,43 @@ fn type_bound(p: &mut Parser<'_>) -> bool { true } +fn path_type_bound(p: &mut Parser<'_>) -> Result<(), ()> { + if p.eat(T![~]) { + p.expect(T![const]); + } else if p.eat(T!['[']) { + // test maybe_const_trait_bound + // const fn foo(_: impl [const] Trait) {} + p.expect(T![const]); + p.expect(T![']']); + } else { + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + p.eat(T![const]); + } + // test async_trait_bound + // fn async_foo(_: impl async Fn(&i32)) {} + p.eat(T![async]); + p.eat(T![?]); + + if paths::is_use_path_start(p) { + types::path_type_bounds(p, false); + // test_err type_bounds_macro_call_recovery + // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! 
+ T!{}> {} + if p.at(T![!]) { + let m = p.start(); + p.bump(T![!]); + p.error("unexpected `!` in type path, macro calls are not allowed here"); + if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) { + items::token_tree(p); + } + m.complete(p, ERROR); + } + Ok(()) + } else { + Err(()) + } +} + // test where_clause // fn foo() // where diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs index 908440b5d05..a7e97c5f850 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs @@ -249,13 +249,14 @@ fn fn_ptr_type(p: &mut Parser<'_>) { } pub(super) fn for_binder(p: &mut Parser<'_>) { - assert!(p.at(T![for])); + let m = p.start(); p.bump(T![for]); if p.at(T![<]) { - generic_params::opt_generic_param_list(p); + generic_params::generic_param_list(p); } else { p.error("expected `<`"); } + m.complete(p, FOR_BINDER); } // test for_type diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 12a13caa4d9..3a8041d2df9 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -185,7 +185,6 @@ pub enum SyntaxKind { BREAK_EXPR, CALL_EXPR, CAST_EXPR, - CLOSURE_BINDER, CLOSURE_EXPR, CONST, CONST_ARG, @@ -203,6 +202,7 @@ pub enum SyntaxKind { FN_PTR_TYPE, FORMAT_ARGS_ARG, FORMAT_ARGS_EXPR, + FOR_BINDER, FOR_EXPR, FOR_TYPE, GENERIC_ARG_LIST, @@ -358,7 +358,6 @@ impl SyntaxKind { | BREAK_EXPR | CALL_EXPR | CAST_EXPR - | CLOSURE_BINDER | CLOSURE_EXPR | CONST | CONST_ARG @@ -376,6 +375,7 @@ impl SyntaxKind { | FN_PTR_TYPE | FORMAT_ARGS_ARG | FORMAT_ARGS_EXPR + | FOR_BINDER | FOR_EXPR | FOR_TYPE | GENERIC_ARG_LIST diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index cef7b0ee239..c642e1a3354 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -253,6 +253,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs"); } #[test] + fn for_binder_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/for_binder_bound.rs"); + } + #[test] fn for_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/for_expr.rs"); } #[test] fn for_range_from() { @@ -402,6 +406,10 @@ mod ok { #[test] fn match_guard() { run_and_expect_no_errors("test_data/parser/inline/ok/match_guard.rs"); } #[test] + fn maybe_const_trait_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/maybe_const_trait_bound.rs"); + } + #[test] fn metas() { run_and_expect_no_errors("test_data/parser/inline/ok/metas.rs"); } #[test] fn method_call_expr() { diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast index 025c12e4c2a..2fd172539e4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast @@ -37,7 +37,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND L_PAREN "(" - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -45,18 +45,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT 
"'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" R_PAREN ")" R_ANGLE ">" PARAM_LIST @@ -124,7 +124,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND L_PAREN "(" - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -132,18 +132,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" R_PAREN ")" ERROR R_ANGLE ">" @@ -186,7 +186,7 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -243,13 +243,14 @@ SOURCE_FILE PAREN_TYPE L_PAREN "(" FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast index 674c8d536ca..3768a55d530 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast @@ -12,13 +12,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE " " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE "\n" BLOCK_EXPR STMT_LIST diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast index cb4fb1642d9..9c4ee6f712a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast @@ -8,13 +8,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " REF_TYPE AMP "&" @@ -37,13 +38,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " TUPLE_TYPE L_PAREN "(" @@ -70,13 +72,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + 
GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " SLICE_TYPE L_BRACK "[" @@ -97,22 +100,24 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" @@ -164,31 +169,34 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + WHITESPACE " " + FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'c" + LIFETIME_IDENT "'b" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'c" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast index c04dbe1ea0a..c96ccf7c7f1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast @@ -14,7 +14,7 @@ SOURCE_FILE WHITESPACE " " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast index dcc66dc1e2b..6578809cb0e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast @@ -103,7 +103,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND_LIST TYPE_BOUND - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -111,12 +111,12 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Path" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" SEMICOLON ";" WHITESPACE "\n" TYPE_ALIAS diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast new file mode 100644 index 00000000000..17dbbf30a7b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast @@ -0,0 +1,45 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + L_BRACK "[" + CONST_KW "const" + R_BRACK "]" + 
WHITESPACE " " + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs new file mode 100644 index 00000000000..427cf558710 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs @@ -0,0 +1 @@ +fn foo<T: for<'a> [const] async Trait>() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast index 7600457a9b8..58623058cae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast @@ -8,13 +8,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" @@ -39,13 +40,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE UNSAFE_KW "unsafe" @@ -86,13 +88,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast index ea401d224e6..bf24a579124 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast @@ -202,7 +202,7 @@ SOURCE_FILE WHITESPACE "\n " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -223,7 +223,7 @@ SOURCE_FILE WHITESPACE "\n " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast new file mode 100644 index 00000000000..8d12f814c2a --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast @@ -0,0 +1,36 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + L_BRACK "[" + CONST_KW "const" + R_BRACK "]" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" 
+ WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs new file mode 100644 index 00000000000..e1da9206098 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl [const] Trait) {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast index 30a2842e538..6afa0613f39 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast @@ -11,13 +11,14 @@ SOURCE_FILE TYPE_BOUND_LIST TYPE_BOUND FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast index 56e2d1095d2..cb296153c8f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast @@ -29,10 +29,11 @@ SOURCE_FILE TYPE_BOUND QUESTION "?" 
FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast index 0cc365efbe6..b10b953f2fb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast @@ -18,13 +18,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast index 86f6af97c73..dcaf58f7f98 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast @@ -36,7 +36,7 @@ SOURCE_FILE PLUS "+" WHITESPACE " " TYPE_BOUND - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -44,18 +44,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'de" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Deserialize" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'de" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Deserialize" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'de" + R_ANGLE ">" WHITESPACE " " PLUS "+" WHITESPACE " " diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast index 8bf1090f9cf..5cef4dff062 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast @@ -18,13 +18,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH @@ -81,13 +82,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " REF_TYPE AMP "&" @@ -135,13 +137,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PAREN_TYPE L_PAREN "(" @@ -206,13 +209,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - 
R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " SLICE_TYPE L_BRACK "[" @@ -276,13 +280,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH @@ -349,22 +354,24 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 27fe9f79bbc..0dbb309a62a 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -20,6 +20,7 @@ semver.workspace = true serde_json.workspace = true serde.workspace = true serde_derive.workspace = true +temp-dir.workspace = true tracing.workspace = true triomphe.workspace = true la-arena.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 499caa622c4..5bea74bed7e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -16,11 +16,13 @@ use la_arena::ArenaMap; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize as _; +use stdx::never; use toolchain::Tool; use crate::{ CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot, - TargetKind, utf8_stdout, + TargetKind, cargo_config_file::make_lockfile_copy, + cargo_workspace::MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH, utf8_stdout, }; /// Output of the build script and proc-macro building steps for a workspace. @@ -30,6 +32,15 @@ pub struct WorkspaceBuildScripts { error: Option<String>, } +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum ProcMacroDylibPath { + Path(AbsPathBuf), + DylibNotFound, + NotProcMacro, + #[default] + NotBuilt, +} + /// Output of the build script and proc-macro building step for a concrete package. #[derive(Debug, Clone, Default, PartialEq, Eq)] pub(crate) struct BuildScriptOutput { @@ -43,7 +54,7 @@ pub(crate) struct BuildScriptOutput { /// Directory where a build script might place its output. pub(crate) out_dir: Option<AbsPathBuf>, /// Path to the proc-macro library file if this package exposes proc-macros. 
- pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, + pub(crate) proc_macro_dylib_path: ProcMacroDylibPath, } impl BuildScriptOutput { @@ -51,7 +62,10 @@ impl BuildScriptOutput { self.cfgs.is_empty() && self.envs.is_empty() && self.out_dir.is_none() - && self.proc_macro_dylib_path.is_none() + && matches!( + self.proc_macro_dylib_path, + ProcMacroDylibPath::NotBuilt | ProcMacroDylibPath::NotProcMacro + ) } } @@ -67,7 +81,7 @@ impl WorkspaceBuildScripts { let current_dir = workspace.workspace_root(); let allowed_features = workspace.workspace_features(); - let cmd = Self::build_command( + let (_guard, cmd) = Self::build_command( config, &allowed_features, workspace.manifest_path(), @@ -88,7 +102,7 @@ impl WorkspaceBuildScripts { ) -> io::Result<Vec<WorkspaceBuildScripts>> { assert_eq!(config.invocation_strategy, InvocationStrategy::Once); - let cmd = Self::build_command( + let (_guard, cmd) = Self::build_command( config, &Default::default(), // This is not gonna be used anyways, so just construct a dummy here @@ -126,6 +140,8 @@ impl WorkspaceBuildScripts { |package, cb| { if let Some(&(package, workspace)) = by_id.get(package) { cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); + } else { + never!("Received compiler message for unknown package: {}", package); } }, progress, @@ -140,12 +156,9 @@ impl WorkspaceBuildScripts { if tracing::enabled!(tracing::Level::INFO) { for (idx, workspace) in workspaces.iter().enumerate() { for package in workspace.packages() { - let package_build_data = &mut res[idx].outputs[package]; + let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - workspace[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,); } } } @@ -198,10 +211,33 @@ impl WorkspaceBuildScripts { let path = dir_entry.path(); let extension = path.extension()?; if extension == std::env::consts::DLL_EXTENSION { - let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); - let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) - .ok()?; - return Some((name, path)); + let name = path + .file_stem()? + .to_str()? + .split_once('-')? 
+ .0 + .trim_start_matches("lib") + .to_owned(); + let path = match Utf8PathBuf::from_path_buf(path) { + Ok(path) => path, + Err(path) => { + tracing::warn!( + "Proc-macro dylib path contains non-UTF8 characters: {:?}", + path.display() + ); + return None; + } + }; + return match AbsPathBuf::try_from(path) { + Ok(path) => Some((name, path)), + Err(path) => { + tracing::error!( + "proc-macro dylib path is not absolute: {:?}", + path + ); + None + } + }; } } None @@ -209,28 +245,24 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package - .targets - .iter() - .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) - { - if let Some((_, path)) = proc_macro_dylibs - .iter() - .find(|(name, _)| *name.trim_start_matches("lib") == package.name) - { - bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + bs.outputs[p].proc_macro_dylib_path = + if package.targets.iter().any(|&it| { + matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }) + }) { + match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) { + Some((_, path)) => ProcMacroDylibPath::Path(path.clone()), + _ => ProcMacroDylibPath::DylibNotFound, + } + } else { + ProcMacroDylibPath::NotProcMacro } - } } if tracing::enabled!(tracing::Level::INFO) { for package in rustc.packages() { let package_build_data = &bs.outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - rustc[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,); } } } @@ -263,6 +295,12 @@ impl WorkspaceBuildScripts { |package, cb| { if let Some(&package) = by_id.get(package) { cb(&workspace[package].name, &mut outputs[package]); + } else { + never!( + "Received compiler message for unknown package: {}\n {}", + package, + by_id.keys().join(", ") + ); } }, progress, @@ -272,10 +310,7 @@ impl WorkspaceBuildScripts { for package in workspace.packages() { let package_build_data = &outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - workspace[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,); } } } @@ -348,15 +383,23 @@ impl WorkspaceBuildScripts { progress(format!( "building compile-time-deps: proc-macro {name} built" )); - if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro) + if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt { + data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro; + } + if !matches!(data.proc_macro_dylib_path, ProcMacroDylibPath::Path(_)) + && message + .target + .kind + .contains(&cargo_metadata::TargetKind::ProcMacro) { - // Skip rmeta file - if let Some(filename) = - message.filenames.iter().find(|file| is_dylib(file)) - { - let filename = AbsPath::assert(filename); - data.proc_macro_dylib_path = Some(filename.to_owned()); - } + data.proc_macro_dylib_path = + match message.filenames.iter().find(|file| is_dylib(file)) { + Some(filename) => { + let filename = AbsPath::assert(filename); + ProcMacroDylibPath::Path(filename.to_owned()) + } + None => ProcMacroDylibPath::DylibNotFound, + }; } }); } @@ -393,14 +436,15 @@ impl WorkspaceBuildScripts { current_dir: &AbsPath, sysroot: &Sysroot, toolchain: Option<&semver::Version>, - ) -> io::Result<Command> { + ) -> io::Result<(Option<temp_dir::TempDir>, Command)> { match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { let mut cmd = 
toolchain::command(program, current_dir, &config.extra_env); cmd.args(args); - Ok(cmd) + Ok((None, cmd)) } _ => { + let mut requires_unstable_options = false; let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); @@ -416,7 +460,19 @@ impl WorkspaceBuildScripts { if let Some(target) = &config.target { cmd.args(["--target", target]); } - + let mut temp_dir_guard = None; + if toolchain + .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH) + { + let lockfile_path = + <_ as AsRef<Utf8Path>>::as_ref(manifest_path).with_extension("lock"); + if let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile_path) { + requires_unstable_options = true; + temp_dir_guard = Some(temp_dir); + cmd.arg("--lockfile-path"); + cmd.arg(target_lockfile.as_str()); + } + } match &config.features { CargoFeatures::All => { cmd.arg("--all-features"); @@ -438,6 +494,7 @@ impl WorkspaceBuildScripts { } if manifest_path.is_rust_manifest() { + requires_unstable_options = true; cmd.arg("-Zscript"); } @@ -457,8 +514,7 @@ impl WorkspaceBuildScripts { toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION); if cargo_comp_time_deps_available { - cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); - cmd.arg("-Zunstable-options"); + requires_unstable_options = true; cmd.arg("--compile-time-deps"); // we can pass this unconditionally, because we won't actually build the // binaries, and as such, this will succeed even on targets without libtest @@ -481,7 +537,11 @@ impl WorkspaceBuildScripts { cmd.env("RA_RUSTC_WRAPPER", "1"); } } - Ok(cmd) + if requires_unstable_options { + cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); + cmd.arg("-Zunstable-options"); + } + Ok((temp_dir_guard, cmd)) } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs index 7966f74df30..a1e7ed09232 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs @@ -1,4 +1,5 @@ //! 
Read `.cargo/config.toml` as a JSON object +use paths::{Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use toolchain::Tool; @@ -32,3 +33,24 @@ pub(crate) fn read( Some(json) } + +pub(crate) fn make_lockfile_copy( + lockfile_path: &Utf8Path, +) -> Option<(temp_dir::TempDir, Utf8PathBuf)> { + let temp_dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?; + let target_lockfile = temp_dir.path().join("Cargo.lock").try_into().ok()?; + match std::fs::copy(lockfile_path, &target_lockfile) { + Ok(_) => { + tracing::debug!("Copied lock file from `{}` to `{}`", lockfile_path, target_lockfile); + Some((temp_dir, target_lockfile)) + } + // lockfile does not yet exist, so we can just create a new one in the temp dir + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((temp_dir, target_lockfile)), + Err(e) => { + tracing::warn!( + "Failed to copy lock file from `{lockfile_path}` to `{target_lockfile}`: {e}", + ); + None + } + } +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index daadcd9d79a..e613fd590c7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -15,16 +15,18 @@ use span::Edition; use stdx::process::spawn_with_streaming_output; use toolchain::Tool; +use crate::cargo_config_file::make_lockfile_copy; use crate::{CfgOverrides, InvocationStrategy}; use crate::{ManifestPath, Sysroot}; -const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version { - major: 1, - minor: 82, - patch: 0, - pre: semver::Prerelease::EMPTY, - build: semver::BuildMetadata::EMPTY, -}; +pub(crate) const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = + semver::Version { + major: 1, + minor: 82, + patch: 0, + pre: semver::Prerelease::EMPTY, + build: semver::BuildMetadata::EMPTY, + }; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// workspace. It pretty closely mirrors `cargo metadata` output. 
@@ -245,7 +247,7 @@ pub enum TargetKind { } impl TargetKind { - fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind { + pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind { for kind in kinds { return match kind { cargo_metadata::TargetKind::Bin => TargetKind::Bin, @@ -552,7 +554,10 @@ impl CargoWorkspace { pub(crate) struct FetchMetadata { command: cargo_metadata::MetadataCommand, + #[expect(dead_code)] + manifest_path: ManifestPath, lockfile_path: Option<Utf8PathBuf>, + #[expect(dead_code)] kind: &'static str, no_deps: bool, no_deps_result: anyhow::Result<cargo_metadata::Metadata>, @@ -596,25 +601,22 @@ impl FetchMetadata { } command.current_dir(current_dir); - let mut needs_nightly = false; let mut other_options = vec![]; // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually // the only relevant flags for metadata here are unstable ones, so we pass those along // but nothing else let mut extra_args = config.extra_args.iter(); while let Some(arg) = extra_args.next() { - if arg == "-Z" { - if let Some(arg) = extra_args.next() { - needs_nightly = true; - other_options.push("-Z".to_owned()); - other_options.push(arg.to_owned()); - } + if arg == "-Z" + && let Some(arg) = extra_args.next() + { + other_options.push("-Z".to_owned()); + other_options.push(arg.to_owned()); } } let mut lockfile_path = None; if cargo_toml.is_rust_manifest() { - needs_nightly = true; other_options.push("-Zscript".to_owned()); } else if config .toolchain_version @@ -632,10 +634,6 @@ impl FetchMetadata { command.other_options(other_options.clone()); - if needs_nightly { - command.env("RUSTC_BOOTSTRAP", "1"); - } - // Pre-fetch basic metadata using `--no-deps`, which: // - avoids fetching registries like crates.io, // - skips dependency resolution and does not modify lockfiles, @@ -655,7 +653,15 @@ impl FetchMetadata { } .with_context(|| format!("Failed to run `{cargo_command:?}`")); - Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options } + Self { + manifest_path: cargo_toml.clone(), + command, + lockfile_path, + kind: config.kind, + no_deps, + no_deps_result, + other_options, + } } pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> { @@ -672,40 +678,34 @@ impl FetchMetadata { locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> { - let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } = - self; + _ = target_dir; + let Self { + mut command, + manifest_path: _, + lockfile_path, + kind: _, + no_deps, + no_deps_result, + mut other_options, + } = self; if no_deps { return no_deps_result.map(|m| (m, None)); } let mut using_lockfile_copy = false; - // The manifest is a rust file, so this means its a script manifest - if let Some(lockfile) = lockfile_path { - let target_lockfile = - target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock"); - match std::fs::copy(&lockfile, &target_lockfile) { - Ok(_) => { - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - // There exists no lockfile yet - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) => { - tracing::warn!( - "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}", - ); - } - } + let mut 
_temp_dir_guard; + if let Some(lockfile) = lockfile_path + && let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile) + { + _temp_dir_guard = temp_dir; + other_options.push("--lockfile-path".to_owned()); + other_options.push(target_lockfile.to_string()); + using_lockfile_copy = true; } - if using_lockfile_copy { + if using_lockfile_copy || other_options.iter().any(|it| it.starts_with("-Z")) { + command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); other_options.push("-Zunstable-options".to_owned()); - command.env("RUSTC_BOOTSTRAP", "1"); } // No need to lock it if we copied the lockfile, we won't modify the original after all/ // This way cargo cannot error out on us if the lockfile requires updating. @@ -714,13 +714,11 @@ impl FetchMetadata { } command.other_options(other_options); - // FIXME: Fetching metadata is a slow process, as it might require - // calling crates.io. We should be reporting progress here, but it's - // unclear whether cargo itself supports it. progress("cargo metadata: started".to_owned()); let res = (|| -> anyhow::Result<(_, _)> { let mut errored = false; + tracing::debug!("Running `{:?}`", command.cargo_command()); let output = spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| { errored = errored || line.starts_with("error") || line.starts_with("warning"); diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 3bf3d06e6b1..d39781b1506 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -59,7 +59,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashSet; pub use crate::{ - build_dependencies::WorkspaceBuildScripts, + build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts}, cargo_workspace::{ CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustLibSource, Target, TargetData, TargetKind, @@ -139,21 +139,22 @@ impl ProjectManifest { } fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> { - if path.file_name().unwrap_or_default() == target_file_name { - if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) { - return Some(manifest); - } + if path.file_name().unwrap_or_default() == target_file_name + && let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) + { + return Some(manifest); } let mut curr = Some(path); while let Some(path) = curr { let candidate = path.join(target_file_name); - if fs::metadata(&candidate).is_ok() { - if let Ok(manifest) = ManifestPath::try_from(candidate) { - return Some(manifest); - } + if fs::metadata(&candidate).is_ok() + && let Ok(manifest) = ManifestPath::try_from(candidate) + { + return Some(manifest); } + curr = path.parent(); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 9781c46737d..c0a5009afba 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -143,12 +143,11 @@ impl Sysroot { Some(root) => { // special case rustc, we can look that up directly in the sysroot's bin folder // as it should never invoke another cargo binary - if let Tool::Rustc = tool { - if let Some(path) = + if let Tool::Rustc = tool + && let Some(path) = probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into()) - { - 
return toolchain::command(path, current_dir, envs); - } + { + return toolchain::command(path, current_dir, envs); } let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs); @@ -291,29 +290,26 @@ impl Sysroot { pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) { self.workspace = workspace; - if self.error.is_none() { - if let Some(src_root) = &self.rust_lib_src_root { - let has_core = match &self.workspace { - RustLibSrcWorkspace::Workspace(ws) => { - ws.packages().any(|p| ws[p].name == "core") - } - RustLibSrcWorkspace::Json(project_json) => project_json - .crates() - .filter_map(|(_, krate)| krate.display_name.clone()) - .any(|name| name.canonical_name().as_str() == "core"), - RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), - RustLibSrcWorkspace::Empty => true, + if self.error.is_none() + && let Some(src_root) = &self.rust_lib_src_root + { + let has_core = match &self.workspace { + RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), + RustLibSrcWorkspace::Json(project_json) => project_json + .crates() + .filter_map(|(_, krate)| krate.display_name.clone()) + .any(|name| name.canonical_name().as_str() == "core"), + RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), + RustLibSrcWorkspace::Empty => true, + }; + if !has_core { + let var_note = if env::var_os("RUST_SRC_PATH").is_some() { + " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)" + } else { + ", try running `rustup component add rust-src` to possibly fix this" }; - if !has_core { - let var_note = if env::var_os("RUST_SRC_PATH").is_some() { - " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)" - } else { - ", try running `rustup component add rust-src` to possibly fix this" - }; - self.error = Some(format!( - "sysroot at `{src_root}` is missing a `core` library{var_note}", - )); - } + self.error = + Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",)); } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs index 6e06e88bf7a..ab69c8e0e4a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs @@ -65,6 +65,7 @@ fn rustc_print_cfg( let (sysroot, current_dir) = match config { QueryConfig::Cargo(sysroot, cargo_toml, _) => { let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); + cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS); if let Some(target) = target { cmd.args(["--target", target]); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 677f29e3c60..5b36e10fd69 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -24,7 +24,7 @@ use crate::{ CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package, ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, - build_dependencies::BuildScriptOutput, + build_dependencies::{BuildScriptOutput, ProcMacroDylibPath}, cargo_config_file, cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, 
RustLibSource}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, @@ -424,12 +424,12 @@ impl ProjectWorkspace { sysroot.set_workspace(loaded_sysroot); } - if !cargo.requires_rustc_private() { - if let Err(e) = &mut rustc { - // We don't need the rustc sources here, - // so just discard the error. - _ = e.take(); - } + if !cargo.requires_rustc_private() + && let Err(e) = &mut rustc + { + // We don't need the rustc sources here, + // so just discard the error. + _ = e.take(); } Ok(ProjectWorkspace { @@ -1163,17 +1163,15 @@ fn project_json_to_crate_graph( crate = display_name.as_ref().map(|name| name.canonical_name().as_str()), "added root to crate graph" ); - if *is_proc_macro { - if let Some(path) = proc_macro_dylib_path.clone() { - let node = Ok(( - display_name - .as_ref() - .map(|it| it.canonical_name().as_str().to_owned()) - .unwrap_or_else(|| format!("crate{}", idx.0)), - path, - )); - proc_macros.insert(crate_graph_crate_id, node); - } + if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() { + let node = Ok(( + display_name + .as_ref() + .map(|it| it.canonical_name().as_str().to_owned()) + .unwrap_or_else(|| format!("crate{}", idx.0)), + path, + )); + proc_macros.insert(crate_graph_crate_id, node); } (idx, crate_graph_crate_id) }, @@ -1318,16 +1316,17 @@ fn cargo_to_crate_graph( public_deps.add_to_crate_graph(crate_graph, from); // Add dep edge of all targets to the package's lib target - if let Some((to, name)) = lib_tgt.clone() { - if to != from && kind != TargetKind::BuildScript { - // (build script can not depend on its library target) - - // For root projects with dashes in their name, - // cargo metadata does not do any normalization, - // so we do it ourselves currently - let name = CrateName::normalize_dashes(&name); - add_dep(crate_graph, from, name, to); - } + if let Some((to, name)) = lib_tgt.clone() + && to != from + && kind != TargetKind::BuildScript + { + // (build script can not depend on its library target) + + // For root projects with dashes in their name, + // cargo metadata does not do any normalization, + // so we do it ourselves currently + let name = CrateName::normalize_dashes(&name); + add_dep(crate_graph, from, name, to); } } } @@ -1638,9 +1637,19 @@ fn add_target_crate_root( let proc_macro = match build_data { Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => { match proc_macro_dylib_path { - Some(path) => Ok((cargo_name.to_owned(), path.clone())), - None if has_errors => Err(ProcMacroLoadingError::FailedToBuild), - None => Err(ProcMacroLoadingError::MissingDylibPath), + ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())), + ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt), + ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound + if has_errors => + { + Err(ProcMacroLoadingError::FailedToBuild) + } + ProcMacroDylibPath::NotProcMacro => { + Err(ProcMacroLoadingError::ExpectedProcMacroArtifact) + } + ProcMacroDylibPath::DylibNotFound => { + Err(ProcMacroLoadingError::MissingDylibPath) + } } } None => Err(ProcMacroLoadingError::NotYetBuilt), @@ -1905,7 +1914,8 @@ fn cargo_target_dir( meta.manifest_path(manifest); // `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve. 
// So we can use it to get `target_directory` before copying lockfiles - let mut other_options = vec!["--no-deps".to_owned()]; + meta.no_deps(); + let mut other_options = vec![]; if manifest.is_rust_manifest() { meta.env("RUSTC_BOOTSTRAP", "1"); other_options.push("-Zscript".to_owned()); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index fc89f486f84..4f75d14834c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -656,22 +656,26 @@ impl flags::AnalysisStats { let mut sw = self.stop_watch(); let mut all = 0; let mut fail = 0; - for &body in bodies { - if matches!(body, DefWithBody::Variant(_)) { + for &body_id in bodies { + if matches!(body_id, DefWithBody::Variant(_)) { + continue; + } + let module = body_id.module(db); + if !self.should_process(db, body_id, module) { continue; } + all += 1; - let Err(e) = db.mir_body(body.into()) else { + let Err(e) = db.mir_body(body_id.into()) else { continue; }; if verbosity.is_spammy() { - let full_name = body - .module(db) + let full_name = module .path_to_root(db) .into_iter() .rev() .filter_map(|it| it.name(db)) - .chain(Some(body.name(db).unwrap_or_else(Name::missing))) + .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) .map(|it| it.display(db, Edition::LATEST).to_string()) .join("::"); bar.println(format!("Mir body for {full_name} failed due {e:?}")); @@ -727,26 +731,9 @@ impl flags::AnalysisStats { let name = body_id.name(db).unwrap_or_else(Name::missing); let module = body_id.module(db); let display_target = module.krate().to_display_target(db); - let full_name = move || { - module - .krate() - .display_name(db) - .map(|it| it.canonical_name().as_str().to_owned()) - .into_iter() - .chain( - module - .path_to_root(db) - .into_iter() - .filter_map(|it| it.name(db)) - .rev() - .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db, Edition::LATEST).to_string()), - ) - .join("::") - }; if let Some(only_name) = self.only.as_deref() { if name.display(db, Edition::LATEST).to_string() != only_name - && full_name() != only_name + && full_name(db, body_id, module) != only_name { continue; } @@ -763,12 +750,17 @@ impl flags::AnalysisStats { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file.file_id(db)); let syntax_range = src.text_range(); - format!("processing: {} ({} {:?})", full_name(), path, syntax_range) + format!( + "processing: {} ({} {:?})", + full_name(db, body_id, module), + path, + syntax_range + ) } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } }; if verbosity.is_spammy() { @@ -781,9 +773,11 @@ impl flags::AnalysisStats { Ok(inference_result) => inference_result, Err(p) => { if let Some(s) = p.downcast_ref::<&str>() { - eprintln!("infer panicked for {}: {}", full_name(), s); + eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s); } else if let Some(s) = p.downcast_ref::<String>() { - eprintln!("infer panicked for {}: {}", full_name(), s); + eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s); + } else { + eprintln!("infer panicked for {}", full_name(db, body_id, module)); } panics += 1; bar.inc(1); @@ -890,7 +884,7 @@ impl 
flags::AnalysisStats { if verbosity.is_spammy() { bar.println(format!( "In {}: {} exprs, {} unknown, {} partial", - full_name(), + full_name(db, body_id, module), num_exprs - previous_exprs, num_exprs_unknown - previous_unknown, num_exprs_partially_unknown - previous_partially_unknown @@ -993,7 +987,7 @@ impl flags::AnalysisStats { if verbosity.is_spammy() { bar.println(format!( "In {}: {} pats, {} unknown, {} partial", - full_name(), + full_name(db, body_id, module), num_pats - previous_pats, num_pats_unknown - previous_unknown, num_pats_partially_unknown - previous_partially_unknown @@ -1049,34 +1043,8 @@ impl flags::AnalysisStats { bar.tick(); for &body_id in bodies { let module = body_id.module(db); - let full_name = move || { - module - .krate() - .display_name(db) - .map(|it| it.canonical_name().as_str().to_owned()) - .into_iter() - .chain( - module - .path_to_root(db) - .into_iter() - .filter_map(|it| it.name(db)) - .rev() - .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db, Edition::LATEST).to_string()), - ) - .join("::") - }; - if let Some(only_name) = self.only.as_deref() { - if body_id - .name(db) - .unwrap_or_else(Name::missing) - .display(db, Edition::LATEST) - .to_string() - != only_name - && full_name() != only_name - { - continue; - } + if !self.should_process(db, body_id, module) { + continue; } let msg = move || { if verbosity.is_verbose() { @@ -1090,12 +1058,17 @@ impl flags::AnalysisStats { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file.file_id(db)); let syntax_range = src.text_range(); - format!("processing: {} ({} {:?})", full_name(), path, syntax_range) + format!( + "processing: {} ({} {:?})", + full_name(db, body_id, module), + path, + syntax_range + ) } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } }; if verbosity.is_spammy() { @@ -1205,11 +1178,42 @@ impl flags::AnalysisStats { eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len()); } + fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool { + if let Some(only_name) = self.only.as_deref() { + let name = body_id.name(db).unwrap_or_else(Name::missing); + + if name.display(db, Edition::LATEST).to_string() != only_name + && full_name(db, body_id, module) != only_name + { + return false; + } + } + true + } + fn stop_watch(&self) -> StopWatch { StopWatch::start() } } +fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String { + module + .krate() + .display_name(db) + .map(|it| it.canonical_name().as_str().to_owned()) + .into_iter() + .chain( + module + .path_to_root(db) + .into_iter() + .filter_map(|it| it.name(db)) + .rev() + .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) + .map(|it| it.display(db, Edition::LATEST).to_string()), + ) + .join("::") +} + fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String { let src = match sm.expr_syntax(expr_id) { Ok(s) => s, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 51d4c29aa74..9456fd8809b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -2162,6 +2162,7 @@ impl Config { extra_test_bin_args: 
self.runnables_extraTestBinaryArgs(source_root).clone(), extra_env: self.extra_env(source_root).clone(), target_dir: self.target_dir_from_config(source_root), + set_test: true, } } @@ -2219,6 +2220,7 @@ impl Config { extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(), extra_env: self.check_extra_env(source_root), target_dir: self.target_dir_from_config(source_root), + set_test: *self.cfg_setTest(source_root), }, ansi_color_output: self.color_diagnostic_output(), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 91d37bd7c9e..512ce0b9de3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -31,6 +31,7 @@ pub(crate) enum InvocationStrategy { pub(crate) struct CargoOptions { pub(crate) target_tuples: Vec<String>, pub(crate) all_targets: bool, + pub(crate) set_test: bool, pub(crate) no_default_features: bool, pub(crate) all_features: bool, pub(crate) features: Vec<String>, @@ -54,7 +55,13 @@ impl CargoOptions { cmd.args(["--target", target.as_str()]); } if self.all_targets { - cmd.arg("--all-targets"); + if self.set_test { + cmd.arg("--all-targets"); + } else { + // No --benches unfortunately, as this implies --tests (see https://github.com/rust-lang/cargo/issues/6454), + // and users setting `cfg.seTest = false` probably prefer disabling benches than enabling tests. + cmd.args(["--lib", "--bins", "--examples"]); + } } if self.all_features { cmd.arg("--all-features"); @@ -104,7 +111,18 @@ impl fmt::Display for FlycheckConfig { match self { FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"), FlycheckConfig::CustomCommand { command, args, .. } => { - write!(f, "{command} {}", args.join(" ")) + // Don't show `my_custom_check --foo $saved_file` literally to the user, as it + // looks like we've forgotten to substitute $saved_file. + // + // Instead, show `my_custom_check --foo ...`. The + // actual path is often too long to be worth showing + // in the IDE (e.g. in the VS Code status bar). + let display_args = args + .iter() + .map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg }) + .collect::<Vec<_>>(); + + write!(f, "{command} {}", display_args.join(" ")) } } } diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 4cbc88cfb5e..6d8a360d715 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -101,7 +101,7 @@ WhereClause = 'where' predicates:(WherePred (',' WherePred)* ','?) WherePred = - ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList? + ForBinder? (Lifetime | Type) ':' TypeBoundList? //*************************// @@ -534,10 +534,10 @@ FieldExpr = Attr* Expr '.' NameRef ClosureExpr = - Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType? + Attr* ForBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType? body:Expr -ClosureBinder = +ForBinder = 'for' GenericParamList IfExpr = @@ -658,7 +658,7 @@ FnPtrType = 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType? ForType = - 'for' GenericParamList Type + ForBinder Type ImplTraitType = 'impl' TypeBoundList @@ -671,7 +671,7 @@ TypeBoundList = TypeBound = Lifetime -| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type +| ForBinder? ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? 
Type | 'use' UseBoundGenericArgs UseBoundGenericArgs = diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs index d787fd076fc..a9aeeedb654 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -393,8 +393,7 @@ where let pred = predicates.next().unwrap(); let mut bounds = pred.type_bound_list().unwrap().bounds(); - assert!(pred.for_token().is_none()); - assert!(pred.generic_param_list().is_none()); + assert!(pred.for_binder().is_none()); assert_eq!("T", pred.ty().unwrap().syntax().text().to_string()); assert_bound("Clone", bounds.next()); assert_bound("Copy", bounds.next()); @@ -432,8 +431,10 @@ where let pred = predicates.next().unwrap(); let mut bounds = pred.type_bound_list().unwrap().bounds(); - assert!(pred.for_token().is_some()); - assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string()); + assert_eq!( + "<'a>", + pred.for_binder().unwrap().generic_param_list().unwrap().syntax().text().to_string() + ); assert_eq!("F", pred.ty().unwrap().syntax().text().to_string()); assert_bound("Fn(&'a str)", bounds.next()); } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs index 37cb4a434f3..d97fdec524f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -6,9 +6,12 @@ use std::{fmt, iter, ops}; use crate::{ AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, ast::{self, AstNode, make}, + syntax_editor::{SyntaxEditor, SyntaxMappingBuilder}, ted, }; +use super::syntax_factory::SyntaxFactory; + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct IndentLevel(pub u8); @@ -95,6 +98,24 @@ impl IndentLevel { } } + pub(super) fn clone_increase_indent(self, node: &SyntaxNode) -> SyntaxNode { + let node = node.clone_subtree(); + let mut editor = SyntaxEditor::new(node.clone()); + let tokens = node + .preorder_with_tokens() + .filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }) + .filter_map(ast::Whitespace::cast) + .filter(|ws| ws.text().contains('\n')); + for ws in tokens { + let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); + editor.replace(ws.syntax(), &new_ws); + } + editor.finish().new_root().clone() + } + pub(super) fn decrease_indent(self, node: &SyntaxNode) { let tokens = node.preorder_with_tokens().filter_map(|event| match event { rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), @@ -111,36 +132,54 @@ impl IndentLevel { } } } + + pub(super) fn clone_decrease_indent(self, node: &SyntaxNode) -> SyntaxNode { + let node = node.clone_subtree(); + let mut editor = SyntaxEditor::new(node.clone()); + let tokens = node + .preorder_with_tokens() + .filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }) + .filter_map(ast::Whitespace::cast) + .filter(|ws| ws.text().contains('\n')); + for ws in tokens { + let new_ws = + make::tokens::whitespace(&ws.syntax().text().replace(&format!("\n{self}"), "\n")); + editor.replace(ws.syntax(), &new_ws); + } + editor.finish().new_root().clone() + } } fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { iter::successors(Some(token), |token| token.prev_token()) } -/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`. 
pub trait AstNodeEdit: AstNode + Clone + Sized { fn indent_level(&self) -> IndentLevel { IndentLevel::from_node(self.syntax()) } #[must_use] fn indent(&self, level: IndentLevel) -> Self { - fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { - let res = node.clone_subtree().clone_for_update(); - level.increase_indent(&res); - res.clone_subtree() + Self::cast(level.clone_increase_indent(self.syntax())).unwrap() + } + #[must_use] + fn indent_with_mapping(&self, level: IndentLevel, make: &SyntaxFactory) -> Self { + let new_node = self.indent(level); + if let Some(mut mapping) = make.mappings() { + let mut builder = SyntaxMappingBuilder::new(new_node.syntax().clone()); + for (old, new) in self.syntax().children().zip(new_node.syntax().children()) { + builder.map_node(old, new); + } + builder.finish(&mut mapping); } - - Self::cast(indent_inner(self.syntax(), level)).unwrap() + new_node } #[must_use] fn dedent(&self, level: IndentLevel) -> Self { - fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { - let res = node.clone_subtree().clone_for_update(); - level.decrease_indent(&res); - res.clone_subtree() - } - - Self::cast(dedent_inner(self.syntax(), level)).unwrap() + Self::cast(level.clone_decrease_indent(self.syntax())).unwrap() } #[must_use] fn reset_indent(&self) -> Self { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index e902516471d..28b543ea706 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -644,7 +644,7 @@ impl Removable for ast::Use { impl ast::Impl { pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList { if self.assoc_item_list().is_none() { - let assoc_item_list = make::assoc_item_list().clone_for_update(); + let assoc_item_list = make::assoc_item_list(None).clone_for_update(); ted::append_child(self.syntax(), assoc_item_list.syntax()); } self.assoc_item_list().unwrap() diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 2b862465420..ceb2866ebcd 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -377,22 +377,13 @@ impl CastExpr { #[inline] pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) } } -pub struct ClosureBinder { - pub(crate) syntax: SyntaxNode, -} -impl ClosureBinder { - #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } -} pub struct ClosureExpr { pub(crate) syntax: SyntaxNode, } impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { #[inline] - pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } #[inline] @@ -615,6 +606,15 @@ impl FnPtrType { #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } +pub struct ForBinder { + pub(crate) syntax: SyntaxNode, +} +impl ForBinder { + #[inline] + pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] + 
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } +} pub struct ForExpr { pub(crate) syntax: SyntaxNode, } @@ -632,11 +632,9 @@ pub struct ForType { } impl ForType { #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } pub struct FormatArgsArg { pub(crate) syntax: SyntaxNode, @@ -1766,6 +1764,8 @@ pub struct TypeBound { } impl TypeBound { #[inline] + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } @@ -1938,13 +1938,11 @@ pub struct WherePred { impl ast::HasTypeBounds for WherePred {} impl WherePred { #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } pub struct WhileExpr { pub(crate) syntax: SyntaxNode, @@ -3239,42 +3237,6 @@ impl fmt::Debug for CastExpr { f.debug_struct("CastExpr").field("syntax", &self.syntax).finish() } } -impl AstNode for ClosureBinder { - #[inline] - fn kind() -> SyntaxKind - where - Self: Sized, - { - CLOSURE_BINDER - } - #[inline] - fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER } - #[inline] - fn cast(syntax: SyntaxNode) -> Option<Self> { - if Self::can_cast(syntax.kind()) { - Some(Self { syntax }) - } else { - None - } - } - #[inline] - fn syntax(&self) -> &SyntaxNode { &self.syntax } -} -impl hash::Hash for ClosureBinder { - fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); } -} -impl Eq for ClosureBinder {} -impl PartialEq for ClosureBinder { - fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax } -} -impl Clone for ClosureBinder { - fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } } -} -impl fmt::Debug for ClosureBinder { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("ClosureBinder").field("syntax", &self.syntax).finish() - } -} impl AstNode for ClosureExpr { #[inline] fn kind() -> SyntaxKind @@ -3815,6 +3777,42 @@ impl fmt::Debug for FnPtrType { f.debug_struct("FnPtrType").field("syntax", &self.syntax).finish() } } +impl AstNode for ForBinder { + #[inline] + fn kind() -> SyntaxKind + where + Self: Sized, + { + FOR_BINDER + } + #[inline] + fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_BINDER } + #[inline] + fn cast(syntax: SyntaxNode) -> Option<Self> { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + #[inline] + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} +impl hash::Hash for ForBinder { + fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); } +} +impl Eq for ForBinder {} +impl PartialEq for ForBinder { + fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax } +} +impl Clone for ForBinder { + fn clone(&self) -> Self { Self { syntax: 
self.syntax.clone() } } +} +impl fmt::Debug for ForBinder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ForBinder").field("syntax", &self.syntax).finish() + } +} impl AstNode for ForExpr { #[inline] fn kind() -> SyntaxKind @@ -10146,11 +10144,6 @@ impl std::fmt::Display for CastExpr { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for ClosureBinder { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for ClosureExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -10226,6 +10219,11 @@ impl std::fmt::Display for FnPtrType { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for ForBinder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for ForExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index d67f24fda96..2a7b51c3c24 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -229,8 +229,18 @@ pub fn ty_fn_ptr<I: Iterator<Item = Param>>( } } -pub fn assoc_item_list() -> ast::AssocItemList { - ast_from_text("impl C for D {}") +pub fn assoc_item_list( + body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, +) -> ast::AssocItemList { + let is_break_braces = body.is_some(); + let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() }; + let body_indent = if is_break_braces { " ".to_owned() } else { String::new() }; + + let body = match body { + Some(bd) => bd.iter().map(|elem| elem.to_string()).join("\n\n "), + None => String::new(), + }; + ast_from_text(&format!("impl C for D {{{body_newline}{body_indent}{body}{body_newline}}}")) } fn merge_gen_params( @@ -273,7 +283,7 @@ pub fn impl_( generic_args: Option<ast::GenericArgList>, path_type: ast::Type, where_clause: Option<ast::WhereClause>, - body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, + body: Option<ast::AssocItemList>, ) -> ast::Impl { let gen_args = generic_args.map_or_else(String::new, |it| it.to_string()); @@ -281,20 +291,13 @@ pub fn impl_( let body_newline = if where_clause.is_some() && body.is_none() { "\n".to_owned() } else { String::new() }; - let where_clause = match where_clause { Some(pr) => format!("\n{pr}\n"), None => " ".to_owned(), }; - let body = match body { - Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), - None => String::new(), - }; - - ast_from_text(&format!( - "impl{gen_params} {path_type}{gen_args}{where_clause}{{{body_newline}{body}}}" - )) + let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); + ast_from_text(&format!("impl{gen_params} {path_type}{gen_args}{where_clause}{body}")) } pub fn impl_trait( @@ -308,7 +311,7 @@ pub fn impl_trait( ty: ast::Type, trait_where_clause: Option<ast::WhereClause>, ty_where_clause: Option<ast::WhereClause>, - body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, + body: Option<ast::AssocItemList>, ) -> ast::Impl { let is_unsafe = if is_unsafe { "unsafe " } else { "" }; @@ -330,13 +333,10 @@ pub fn impl_trait( let where_clause = merge_where_clause(ty_where_clause, trait_where_clause) .map_or_else(|| " 
".to_owned(), |wc| format!("\n{wc}\n")); - let body = match body { - Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), - None => String::new(), - }; + let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); ast_from_text(&format!( - "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}" + "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}" )) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index f5530c5fffd..62a7d4df2cf 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -805,9 +805,7 @@ impl ast::SelfParam { #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum TypeBoundKind { /// Trait - PathType(ast::PathType), - /// for<'a> ... - ForType(ast::ForType), + PathType(Option<ast::ForBinder>, ast::PathType), /// use Use(ast::UseBoundGenericArgs), /// 'a @@ -817,9 +815,7 @@ pub enum TypeBoundKind { impl ast::TypeBound { pub fn kind(&self) -> TypeBoundKind { if let Some(path_type) = support::children(self.syntax()).next() { - TypeBoundKind::PathType(path_type) - } else if let Some(for_type) = support::children(self.syntax()).next() { - TypeBoundKind::ForType(for_type) + TypeBoundKind::PathType(self.for_binder(), path_type) } else if let Some(args) = self.use_bound_generic_args() { TypeBoundKind::Use(args) } else if let Some(lifetime) = self.lifetime() { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs index 7142e4f6e1b..f3ae7544cc3 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs @@ -38,7 +38,7 @@ impl SyntaxFactory { self.mappings.as_ref().map(|mappings| mappings.take()).unwrap_or_default() } - fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> { + pub(crate) fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> { self.mappings.as_ref().map(|it| it.borrow_mut()) } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 3fa584850f7..5107754b182 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -5,7 +5,7 @@ //! 
[`SyntaxEditor`]: https://github.com/dotnet/roslyn/blob/43b0b05cc4f492fd5de00f6f6717409091df8daa/src/Workspaces/Core/Portable/Editing/SyntaxEditor.cs use std::{ - fmt, + fmt, iter, num::NonZeroU32, ops::RangeInclusive, sync::atomic::{AtomicU32, Ordering}, @@ -41,6 +41,15 @@ impl SyntaxEditor { self.annotations.push((element.syntax_element(), annotation)) } + pub fn add_annotation_all( + &mut self, + elements: Vec<impl Element>, + annotation: SyntaxAnnotation, + ) { + self.annotations + .extend(elements.into_iter().map(|e| e.syntax_element()).zip(iter::repeat(annotation))); + } + pub fn merge(&mut self, mut other: SyntaxEditor) { debug_assert!( self.root == other.root || other.root.ancestors().any(|node| node == self.root), diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs index d66ea8aa28c..840e7697979 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs @@ -92,6 +92,42 @@ fn get_or_insert_comma_after(editor: &mut SyntaxEditor, syntax: &SyntaxNode) -> } } +impl ast::AssocItemList { + /// Adds a new associated item after all of the existing associated items. + /// + /// Attention! This function does align the first line of `item` with respect to `self`, + /// but it does _not_ change indentation of other lines (if any). + pub fn add_items(&self, editor: &mut SyntaxEditor, items: Vec<ast::AssocItem>) { + let (indent, position, whitespace) = match self.assoc_items().last() { + Some(last_item) => ( + IndentLevel::from_node(last_item.syntax()), + Position::after(last_item.syntax()), + "\n\n", + ), + None => match self.l_curly_token() { + Some(l_curly) => { + normalize_ws_between_braces(editor, self.syntax()); + (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") + } + None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"), + }, + }; + + let elements: Vec<SyntaxElement> = items + .into_iter() + .enumerate() + .flat_map(|(i, item)| { + let whitespace = if i != 0 { "\n\n" } else { whitespace }; + vec![ + make::tokens::whitespace(&format!("{whitespace}{indent}")).into(), + item.syntax().clone().into(), + ] + }) + .collect(); + editor.insert_all(position, elements); + } +} + impl ast::VariantList { pub fn add_variant(&self, editor: &mut SyntaxEditor, variant: &ast::Variant) { let make = SyntaxFactory::without_mappings(); diff --git a/src/tools/rust-analyzer/crates/tt/src/iter.rs b/src/tools/rust-analyzer/crates/tt/src/iter.rs index 3246156f1cb..2e89d762a0e 100644 --- a/src/tools/rust-analyzer/crates/tt/src/iter.rs +++ b/src/tools/rust-analyzer/crates/tt/src/iter.rs @@ -217,6 +217,17 @@ pub enum TtElement<'a, S> { Subtree(&'a Subtree<S>, TtIter<'a, S>), } +impl<S: Copy + fmt::Debug> fmt::Debug for TtElement<'_, S> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Leaf(leaf) => f.debug_tuple("Leaf").field(leaf).finish(), + Self::Subtree(subtree, inner) => { + f.debug_tuple("Subtree").field(subtree).field(inner).finish() + } + } + } +} + impl<S: Copy> TtElement<'_, S> { #[inline] pub fn first_span(&self) -> S { diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/README.md b/src/tools/rust-analyzer/docs/book/src/contributing/README.md index beb94cdfc41..57c7a9c5996 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/README.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/README.md @@ -252,18 +252,8 @@ 
Release steps:
 4. Commit & push the changelog.
 5. Run `cargo xtask publish-release-notes <CHANGELOG>` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry.
 6. Tweet.
-7. Make a new branch and run `cargo xtask rustc-pull`, open a PR, and merge it.
-   This will pull any changes from `rust-lang/rust` into `rust-analyzer`.
-8. Switch to `master`, pull, then run `cargo xtask rustc-push --rust-path ../rust-rust-analyzer --rust-fork matklad/rust`.
-   Replace `matklad/rust` with your own fork of `rust-lang/rust`.
-   You can use the token to authenticate when you get prompted for a password, since `josh` will push over HTTPS, not SSH.
-   This will push the `rust-analyzer` changes to your fork.
-   You can then open a PR against `rust-lang/rust`.
-
-Note: besides the `rust-rust-analyzer` clone, the Josh cache (stored under `~/.cache/rust-analyzer-josh`) will contain a bare clone of `rust-lang/rust`.
-This currently takes about 3.5 GB.
-
-This [HackMD](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg) has details about how `josh` syncs work.
+7. Perform a subtree [pull](#performing-a-pull).
+8. Perform a subtree [push](#performing-a-push).
 
 If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
 If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
@@ -288,3 +278,43 @@ There are two sets of people with extra permissions:
   If you don't feel like reviewing for whatever reason, someone else will pick the review up (but please speak up if you don't feel like it)!
 * The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors]([https://github.com/orgs/rust-analyzer/teams/triage](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml)).
   This team has general triaging permissions allowing to label, close and re-open issues.
+
+## Synchronizing subtree changes
+`rust-analyzer` is a [josh](https://josh-project.github.io/josh/intro.html) subtree of the [rust-lang/rust](https://github.com/rust-lang/rust)
+repository. We use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync) tool to perform synchronization between these two
+repositories. You can find documentation of the tool [here](https://github.com/rust-lang/josh-sync).
+
+You can install the synchronization tool using the following commands:
+```
+cargo install --locked --git https://github.com/rust-lang/josh-sync
+```
+
+Both pulls (synchronizing changes from rust-lang/rust into rust-analyzer) and pushes (synchronizing
+changes from rust-analyzer into rust-lang/rust) are performed from this repository.
+
+Usually we first perform a pull, wait for it to be merged, and then perform a push.
+
+### Performing a pull
+1) Checkout a new branch that will be used to create a PR against rust-analyzer
+2) Run the pull command
+   ```
+   rustc-josh-sync pull
+   ```
+3) Push the branch to your fork of `rust-analyzer` and create a PR
+   - If you have the `gh` CLI installed, `rustc-josh-sync` can create the PR for you.
+
+### Performing a push
+
+Wait for the previous pull to be merged.
+ +1) Switch to `master` and pull +2) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account + ``` + rustc-josh-sync push <branch-name> <gh-username> + ``` + - The push will ask you to download a checkout of the `rust-lang/rust` repository. + - If you get prompted for a password, see [this](https://github.com/rust-lang/josh-sync?tab=readme-ov-file#git-peculiarities). +3) Create a PR from `<branch-name>` into `rust-lang/rust` + +> Besides the `rust` checkout, the Josh cache (stored under `~/.cache/rustc-josh`) will contain a bare clone of `rust-lang/rust`. This currently takes several GBs. diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json index 57d67a69b2e..534c24be52e 100644 --- a/src/tools/rust-analyzer/editors/code/package-lock.json +++ b/src/tools/rust-analyzer/editors/code/package-lock.json @@ -3336,15 +3336,16 @@ } }, "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index d2dc740c09b..3b1b0768d3c 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -8,10 +8,9 @@ import type { Disposable } from "vscode"; export type RunnableEnvCfgItem = { mask?: string; - env: Record<string, string>; + env: { [key: string]: { toString(): string } | null }; platform?: string | string[]; }; -export type RunnableEnvCfg = Record<string, string> | RunnableEnvCfgItem[]; type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector }; @@ -261,18 +260,13 @@ export class Config { return this.get<boolean | undefined>("testExplorer"); } - runnablesExtraEnv(label: string): Record<string, string> | undefined { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const item = this.get<any>("runnables.extraEnv") ?? this.get<any>("runnableEnv"); - if (!item) return undefined; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const fixRecord = (r: Record<string, any>) => { - for (const key in r) { - if (typeof r[key] !== "string") { - r[key] = String(r[key]); - } - } - }; + runnablesExtraEnv(label: string): Env { + const serverEnv = this.serverExtraEnv; + let extraEnv = + this.get< + RunnableEnvCfgItem[] | { [key: string]: { toString(): string } | null } | null + >("runnables.extraEnv") ?? 
{}; + if (!extraEnv) return serverEnv; const platform = process.platform; const checkPlatform = (it: RunnableEnvCfgItem) => { @@ -283,19 +277,25 @@ export class Config { return true; }; - if (item instanceof Array) { + if (extraEnv instanceof Array) { const env = {}; - for (const it of item) { + for (const it of extraEnv) { const masked = !it.mask || new RegExp(it.mask).test(label); if (masked && checkPlatform(it)) { Object.assign(env, it.env); } } - fixRecord(env); - return env; + extraEnv = env; } - fixRecord(item); - return item; + const runnableExtraEnv = substituteVariablesInEnv( + Object.fromEntries( + Object.entries(extraEnv).map(([k, v]) => [ + k, + typeof v === "string" ? v : v?.toString(), + ]), + ), + ); + return { ...runnableExtraEnv, ...serverEnv }; } get restartServerOnConfigChange() { diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index adb75c23c70..24f8d908730 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -6,7 +6,14 @@ import type * as ra from "./lsp_ext"; import { Cargo } from "./toolchain"; import type { Ctx } from "./ctx"; import { createTaskFromRunnable, prepareEnv } from "./run"; -import { execute, isCargoRunnableArgs, unwrapUndefinable, log, normalizeDriveLetter } from "./util"; +import { + execute, + isCargoRunnableArgs, + unwrapUndefinable, + log, + normalizeDriveLetter, + Env, +} from "./util"; import type { Config } from "./config"; // Here we want to keep track on everything that's currently running @@ -206,10 +213,7 @@ type SourceFileMap = { destination: string; }; -async function discoverSourceFileMap( - env: Record<string, string>, - cwd: string, -): Promise<SourceFileMap | undefined> { +async function discoverSourceFileMap(env: Env, cwd: string): Promise<SourceFileMap | undefined> { const sysroot = env["RUSTC_TOOLCHAIN"]; if (sysroot) { // let's try to use the default toolchain @@ -232,7 +236,7 @@ type PropertyFetcher<Config, Input, Key extends keyof Config> = ( type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = { executableProperty: keyof DebugConfig; - environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>; + environmentProperty: PropertyFetcher<DebugConfig, Env, keyof DebugConfig>; runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>; sourceFileMapProperty?: keyof DebugConfig; type: Type; @@ -276,7 +280,7 @@ const knownEngines: { "environment", Object.entries(env).map((entry) => ({ name: entry[0], - value: entry[1], + value: entry[1] ?? 
"", })), ], runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [ @@ -304,10 +308,7 @@ const knownEngines: { }, }; -async function getDebugExecutable( - runnableArgs: ra.CargoRunnableArgs, - env: Record<string, string>, -): Promise<string> { +async function getDebugExecutable(runnableArgs: ra.CargoRunnableArgs, env: Env): Promise<string> { const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env); const executable = await cargo.executableFromArgs(runnableArgs); @@ -328,7 +329,7 @@ function getDebugConfig( runnable: ra.Runnable, runnableArgs: ra.CargoRunnableArgs, executable: string, - env: Record<string, string>, + env: Env, sourceFileMap?: Record<string, string>, ): vscode.DebugConfiguration { const { @@ -380,14 +381,14 @@ type CodeLldbDebugConfig = { args: string[]; sourceMap: Record<string, string> | undefined; sourceLanguages: ["rust"]; - env: Record<string, string>; + env: Env; } & BaseDebugConfig<"lldb">; type NativeDebugConfig = { target: string; // See https://github.com/WebFreak001/code-debug/issues/359 arguments: string; - env: Record<string, string>; + env: Env; valuesFormatting: "prettyPrinters"; } & BaseDebugConfig<"gdb">; diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 95166c427b2..87c1d529f7e 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -7,7 +7,7 @@ import type { CtxInit } from "./ctx"; import { makeDebugConfig } from "./debug"; import type { Config } from "./config"; import type { LanguageClient } from "vscode-languageclient/node"; -import { log, unwrapUndefinable, type RustEditor } from "./util"; +import { Env, log, unwrapUndefinable, type RustEditor } from "./util"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }, @@ -122,11 +122,8 @@ export class RunnableQuickPick implements vscode.QuickPickItem { } } -export function prepareBaseEnv( - inheritEnv: boolean, - base?: Record<string, string>, -): Record<string, string> { - const env: Record<string, string> = { RUST_BACKTRACE: "short" }; +export function prepareBaseEnv(inheritEnv: boolean, base?: Env): Env { + const env: Env = { RUST_BACKTRACE: "short" }; if (inheritEnv) { Object.assign(env, process.env); } @@ -136,11 +133,7 @@ export function prepareBaseEnv( return env; } -export function prepareEnv( - inheritEnv: boolean, - runnableEnv?: Record<string, string>, - runnableEnvCfg?: Record<string, string>, -): Record<string, string> { +export function prepareEnv(inheritEnv: boolean, runnableEnv?: Env, runnableEnvCfg?: Env): Env { const env = prepareBaseEnv(inheritEnv, runnableEnv); if (runnableEnvCfg) { diff --git a/src/tools/rust-analyzer/editors/code/src/tasks.ts b/src/tools/rust-analyzer/editors/code/src/tasks.ts index 730ec6d1e90..eb0748a704b 100644 --- a/src/tools/rust-analyzer/editors/code/src/tasks.ts +++ b/src/tools/rust-analyzer/editors/code/src/tasks.ts @@ -1,6 +1,7 @@ import * as vscode from "vscode"; import type { Config } from "./config"; import * as toolchain from "./toolchain"; +import { Env } from "./util"; // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and // our configuration should be compatible with it so use the same key. 
@@ -117,8 +118,8 @@ export async function buildRustTask( export async function targetToExecution( definition: TaskDefinition, options?: { - env?: { [key: string]: string }; cwd?: string; + env?: Env; }, cargo?: string, ): Promise<vscode.ProcessExecution | vscode.ShellExecution> { @@ -131,7 +132,12 @@ export async function targetToExecution( command = definition.command; args = definition.args || []; } - return new vscode.ProcessExecution(command, args, options); + return new vscode.ProcessExecution(command, args, { + cwd: options?.cwd, + env: Object.fromEntries( + Object.entries(options?.env ?? {}).map(([key, value]) => [key, value ?? ""]), + ), + }); } export function activateTaskProvider(config: Config): vscode.Disposable { diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index a859ce6ff00..06f75a8f8d6 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -3,7 +3,7 @@ import * as os from "os"; import * as path from "path"; import * as readline from "readline"; import * as vscode from "vscode"; -import { log, memoizeAsync, unwrapUndefinable } from "./util"; +import { Env, log, memoizeAsync, unwrapUndefinable } from "./util"; import type { CargoRunnableArgs } from "./lsp_ext"; interface CompilationArtifact { @@ -37,7 +37,7 @@ interface CompilerMessage { export class Cargo { constructor( readonly rootFolder: string, - readonly env: Record<string, string>, + readonly env: Env, ) {} // Made public for testing purposes @@ -156,7 +156,7 @@ export class Cargo { /** Mirrors `toolchain::cargo()` implementation */ // FIXME: The server should provide this -export function cargoPath(env?: Record<string, string>): Promise<string> { +export function cargoPath(env?: Env): Promise<string> { if (env?.["RUSTC_TOOLCHAIN"]) { return Promise.resolve("cargo"); } diff --git a/src/tools/rust-analyzer/josh-sync.toml b/src/tools/rust-analyzer/josh-sync.toml new file mode 100644 index 00000000000..51ff0d71e71 --- /dev/null +++ b/src/tools/rust-analyzer/josh-sync.toml @@ -0,0 +1,2 @@ +repo = "rust-analyzer" +filter = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer" diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 57ff326ce5a..2178caf6396 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -a9fb6103b05c6ad6eee6bed4c0bb5a2e8e1024c6 +733dab558992d902d6d17576de1da768094e2cf3 diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml index 2201b5a5e7c..27fdb672455 100644 --- a/src/tools/rust-analyzer/triagebot.toml +++ b/src/tools/rust-analyzer/triagebot.toml @@ -17,6 +17,7 @@ exclude_titles = [ # exclude syncs from subtree in rust-lang/rust "sync from downstream", "Sync from rust", "sync from rust", + "Rustc pull update", ] labels = ["has-merge-commits", "S-waiting-on-author"] @@ -27,3 +28,6 @@ labels = ["has-merge-commits", "S-waiting-on-author"] # Prevents mentions in commits to avoid users being spammed [no-mentions] + +# Automatically close and reopen PRs made by bots to run CI on them +[bot-pull-requests] diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml index 8cd5811c0a6..9d8a1956d0a 100644 --- a/src/tools/rust-analyzer/xtask/Cargo.toml +++ b/src/tools/rust-analyzer/xtask/Cargo.toml @@ -8,7 +8,6 @@ rust-version.workspace = true 
[dependencies] anyhow.workspace = true -directories = "6.0" flate2 = "1.1.2" write-json = "0.1.4" xshell.workspace = true diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index 2fd471b35c7..72f6215d4c3 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -59,20 +59,6 @@ xflags::xflags! { optional --dry-run } - cmd rustc-pull { - /// rustc commit to pull. - optional --commit refspec: String - } - - cmd rustc-push { - /// rust local path, e.g. `../rust-rust-analyzer`. - required --rust-path rust_path: String - /// rust fork name, e.g. `matklad/rust`. - required --rust-fork rust_fork: String - /// branch name. - optional --branch branch: String - } - cmd dist { /// Use mimalloc allocator for server optional --mimalloc @@ -121,8 +107,6 @@ pub enum XtaskCmd { Install(Install), FuzzTests(FuzzTests), Release(Release), - RustcPull(RustcPull), - RustcPush(RustcPush), Dist(Dist), PublishReleaseNotes(PublishReleaseNotes), Metrics(Metrics), @@ -152,18 +136,6 @@ pub struct Release { } #[derive(Debug)] -pub struct RustcPull { - pub commit: Option<String>, -} - -#[derive(Debug)] -pub struct RustcPush { - pub rust_path: String, - pub rust_fork: String, - pub branch: Option<String>, -} - -#[derive(Debug)] pub struct Dist { pub mimalloc: bool, pub jemalloc: bool, diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs index aaa8d0e1d4d..c5ad49cdcea 100644 --- a/src/tools/rust-analyzer/xtask/src/main.rs +++ b/src/tools/rust-analyzer/xtask/src/main.rs @@ -42,8 +42,6 @@ fn main() -> anyhow::Result<()> { flags::XtaskCmd::Install(cmd) => cmd.run(sh), flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh), flags::XtaskCmd::Release(cmd) => cmd.run(sh), - flags::XtaskCmd::RustcPull(cmd) => cmd.run(sh), - flags::XtaskCmd::RustcPush(cmd) => cmd.run(sh), flags::XtaskCmd::Dist(cmd) => cmd.run(sh), flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh), flags::XtaskCmd::Metrics(cmd) => cmd.run(sh), diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs index e41f4ceb435..d06a25c8929 100644 --- a/src/tools/rust-analyzer/xtask/src/release.rs +++ b/src/tools/rust-analyzer/xtask/src/release.rs @@ -1,12 +1,5 @@ mod changelog; -use std::process::{Command, Stdio}; -use std::thread; -use std::time::Duration; - -use anyhow::{Context as _, bail}; -use directories::ProjectDirs; -use stdx::JodChild; use xshell::{Shell, cmd}; use crate::{date_iso, flags, is_release_tag, project_root}; @@ -59,171 +52,3 @@ impl flags::Release { Ok(()) } } - -// git sync implementation adapted from https://github.com/rust-lang/miri/blob/62039ac/miri-script/src/commands.rs -impl flags::RustcPull { - pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { - sh.change_dir(project_root()); - let commit = self.commit.map(Result::Ok).unwrap_or_else(|| { - let rust_repo_head = - cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?; - rust_repo_head - .split_whitespace() - .next() - .map(|front| front.trim().to_owned()) - .ok_or_else(|| anyhow::format_err!("Could not obtain Rust repo HEAD from remote.")) - })?; - // Make sure the repo is clean. - if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() { - bail!("working directory must be clean before running `cargo xtask pull`"); - } - // This should not add any new root commits. So count those before and after merging. 
- let num_roots = || -> anyhow::Result<u32> { - Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count") - .read() - .context("failed to determine the number of root commits")? - .parse::<u32>()?) - }; - let num_roots_before = num_roots()?; - // Make sure josh is running. - let josh = start_josh()?; - - // Update rust-version file. As a separate commit, since making it part of - // the merge has confused the heck out of josh in the past. - // We pass `--no-verify` to avoid running any git hooks that might exist, - // in case they dirty the repository. - sh.write_file("rust-version", format!("{commit}\n"))?; - const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rust-lang/rust"; - cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}") - .run() - .context("FAILED to commit rust-version file, something went wrong")?; - - // Fetch given rustc commit. - cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git") - .run() - .inspect_err(|_| { - // Try to un-do the previous `git commit`, to leave the repo in the state we found it it. - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - }) - .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; - - // Merge the fetched commit. - const MERGE_COMMIT_MESSAGE: &str = "Merge from rust-lang/rust"; - cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}") - .run() - .context("FAILED to merge new commits, something went wrong")?; - - // Check that the number of roots did not increase. - if num_roots()? != num_roots_before { - bail!("Josh created a new root commit. This is probably not the history you want."); - } - - drop(josh); - Ok(()) - } -} - -impl flags::RustcPush { - pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { - let branch = self.branch.as_deref().unwrap_or("sync-from-ra"); - let rust_path = self.rust_path; - let rust_fork = self.rust_fork; - - sh.change_dir(project_root()); - let base = sh.read_file("rust-version")?.trim().to_owned(); - // Make sure the repo is clean. - if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() { - bail!("working directory must be clean before running `cargo xtask push`"); - } - // Make sure josh is running. - let josh = start_josh()?; - - // Find a repo we can do our preparation in. - sh.change_dir(rust_path); - - // Prepare the branch. Pushing works much better if we use as base exactly - // the commit that we pulled from last time, so we use the `rust-version` - // file to find out which commit that would be. - println!("Preparing {rust_fork} (base: {base})..."); - if cmd!(sh, "git fetch https://github.com/{rust_fork} {branch}") - .ignore_stderr() - .read() - .is_ok() - { - bail!( - "The branch `{branch}` seems to already exist in `https://github.com/{rust_fork}`. Please delete it and try again." - ); - } - cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?; - cmd!(sh, "git push https://github.com/{rust_fork} {base}:refs/heads/{branch}") - .ignore_stdout() - .ignore_stderr() // silence the "create GitHub PR" message - .run()?; - println!(); - - // Do the actual push. 
- sh.change_dir(project_root()); - println!("Pushing rust-analyzer changes..."); - cmd!( - sh, - "git push http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git HEAD:{branch}" - ) - .run()?; - println!(); - - // Do a round-trip check to make sure the push worked as expected. - cmd!( - sh, - "git fetch http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git {branch}" - ) - .ignore_stderr() - .read()?; - let head = cmd!(sh, "git rev-parse HEAD").read()?; - let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; - if head != fetch_head { - bail!( - "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ - Expected {head}, got {fetch_head}." - ); - } - println!( - "Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:" - ); - // https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master - let fork_path = rust_fork.replace('/', ":"); - println!( - " https://github.com/rust-lang/rust/compare/{fork_path}:{branch}?quick_pull=1&title=Subtree+update+of+rust-analyzer&body=r?+@ghost" - ); - - drop(josh); - Ok(()) - } -} - -/// Used for rustc syncs. -const JOSH_FILTER: &str = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer"; -const JOSH_PORT: &str = "42042"; - -fn start_josh() -> anyhow::Result<impl Drop> { - // Determine cache directory. - let local_dir = { - let user_dirs = ProjectDirs::from("org", "rust-lang", "rust-analyzer-josh").unwrap(); - user_dirs.cache_dir().to_owned() - }; - - // Start josh, silencing its output. - let mut cmd = Command::new("josh-proxy"); - cmd.arg("--local").arg(local_dir); - cmd.arg("--remote").arg("https://github.com"); - cmd.arg("--port").arg(JOSH_PORT); - cmd.arg("--no-background"); - cmd.stdout(Stdio::null()); - cmd.stderr(Stdio::null()); - let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?; - // Give it some time so hopefully the port is open. (100ms was not enough.) 
- thread::sleep(Duration::from_millis(200)); - - Ok(JodChild(josh)) -} diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock index 27798d6aeb0..5f30c75732c 100644 --- a/src/tools/rustbook/Cargo.lock +++ b/src/tools/rustbook/Cargo.lock @@ -19,9 +19,9 @@ dependencies = [ [[package]] name = "ammonia" -version = "4.1.0" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ada2ee439075a3e70b6992fce18ac4e407cd05aea9ca3f75d2c0b0c20bbb364" +checksum = "d6b346764dd0814805de8abf899fe03065bcee69bb1a4771c785817e39f3978f" dependencies = [ "cssparser", "html5ever", @@ -156,9 +156,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "cc" -version = "1.2.29" +version = "1.2.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c1599538de2394445747c8cf7935946e3cc27e9625f889d979bfb2aaf569362" +checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7" dependencies = [ "shlex", ] @@ -185,9 +185,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b6887a1d8685cebccf115538db5c0efe625ccac9696ad45c409d96566e910f" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" dependencies = [ "clap_builder", "clap_derive", @@ -195,9 +195,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0c66c08ce9f0c698cbce5c0279d0bb6ac936d8674174fe48f736533b964f59e" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" dependencies = [ "anstream", "anstyle", @@ -208,18 +208,18 @@ dependencies = [ [[package]] name = "clap_complete" -version = "4.5.54" +version = "4.5.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad5b1b4de04fead402672b48897030eec1f3bfe1550776322f59f6d6e6a5677" +checksum = "a5abde44486daf70c5be8b8f8f1b66c49f86236edf6fa2abadb4d961c4c6229a" dependencies = [ "clap", ] [[package]] name = "clap_derive" -version = "4.5.40" +version = "4.5.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c7947ae4cc3d851207c1adb5b5e260ff0cca11446b1d6d1423788e442257ce" +checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491" dependencies = [ "heck", "proc-macro2", @@ -256,9 +256,9 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] @@ -582,12 +582,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "html5ever" -version = "0.31.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953cbbe631aae7fc0a112702ad5d3aaf09da38beaf45ea84610d6e1c358f569c" +checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4" dependencies = [ "log", - "mac", "markup5ever", "match_token", ] @@ -863,9 +862,9 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" -version = "0.16.2" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2e4cd8c02f18a011991a039855480c64d74291c5792fcc160d55d77dc4de4a39" +checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3" dependencies = [ "log", "tendril", @@ -874,9 +873,9 @@ dependencies = [ [[package]] name = "match_token" -version = "0.1.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" +checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", @@ -1344,9 +1343,9 @@ checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" [[package]] name = "redox_syscall" -version = "0.5.13" +version = "0.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" +checksum = "7251471db004e509f4e75a62cca9435365b5ec7bcdff530d612ac7c87c44a792" dependencies = [ "bitflags 2.9.1", ] @@ -1394,15 +1393,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" dependencies = [ "bitflags 2.9.1", "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -1460,9 +1459,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" dependencies = [ "itoa", "memchr", @@ -2103,9 +2102,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" dependencies = [ "memchr", ] diff --git a/src/tools/rustc-perf b/src/tools/rustc-perf -Subproject 6a70166b92a1b1560cb3cf056427b011b2a1f2b +Subproject dde879cf1087cb34a32287bd8ccc4d545bb9fee diff --git a/src/tools/rustdoc-gui-test/src/main.rs b/src/tools/rustdoc-gui-test/src/main.rs index 5b86bea8932..42feae8c208 100644 --- a/src/tools/rustdoc-gui-test/src/main.rs +++ b/src/tools/rustdoc-gui-test/src/main.rs @@ -65,10 +65,8 @@ fn main() -> Result<(), ()> { } } - // FIXME(binarycat): once we get package.json in version control, this should be updated to install via that instead - let local_node_modules = - npm::install_one(&config.out_dir, &config.npm, "browser-ui-test", "0.21.1") - .expect("unable to install browser-ui-test"); + let local_node_modules = npm::install(&config.rust_src, &config.out_dir, &config.npm) + .expect("unable to install browser-ui-test"); let mut command = Command::new(&config.nodejs); diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js index f70fc917770..0baa179e16b 100644 --- a/src/tools/rustdoc-js/tester.js +++ b/src/tools/rustdoc-js/tester.js @@ -28,7 +28,14 @@ function readFile(filePath) { } function contentToDiffLine(key, value) { - return `"${key}": "${value}",`; + if (typeof value === "object" && !Array.isArray(value) && value !== null) { + const out = 
Object.entries(value) + .filter(([subKey, _]) => ["path", "name"].includes(subKey)) + .map(([subKey, subValue]) => `"${subKey}": ${JSON.stringify(subValue)}`) + .join(", "); + return `"${key}": ${out},`; + } + return `"${key}": ${JSON.stringify(value)},`; } function shouldIgnoreField(fieldName) { @@ -37,47 +44,61 @@ function shouldIgnoreField(fieldName) { fieldName === "proposeCorrectionTo"; } +function valueMapper(key, testOutput) { + const isAlias = testOutput["is_alias"]; + let value = testOutput[key]; + // To make our life easier, if there is a "parent" type, we add it to the path. + if (key === "path") { + if (testOutput["parent"] !== undefined) { + if (value.length > 0) { + value += "::" + testOutput["parent"]["name"]; + } else { + value = testOutput["parent"]["name"]; + } + } else if (testOutput["is_alias"]) { + value = valueMapper(key, testOutput["original"]); + } + } else if (isAlias && key === "alias") { + value = testOutput["name"]; + } else if (isAlias && ["name"].includes(key)) { + value = testOutput["original"][key]; + } + return value; +} + // This function is only called when no matching result was found and therefore will only display // the diff between the two items. -function betterLookingDiff(entry, data) { +function betterLookingDiff(expected, testOutput) { let output = " {\n"; - const spaces = " "; - for (const key in entry) { - if (!Object.prototype.hasOwnProperty.call(entry, key)) { + const spaces = " "; + for (const key in expected) { + if (!Object.prototype.hasOwnProperty.call(expected, key)) { continue; } - if (!data || !Object.prototype.hasOwnProperty.call(data, key)) { - output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n"; + if (!testOutput || !Object.prototype.hasOwnProperty.call(testOutput, key)) { + output += "-" + spaces + contentToDiffLine(key, expected[key]) + "\n"; continue; } - const value = data[key]; - if (value !== entry[key]) { - output += "-" + spaces + contentToDiffLine(key, entry[key]) + "\n"; + const value = valueMapper(key, testOutput); + if (value !== expected[key]) { + output += "-" + spaces + contentToDiffLine(key, expected[key]) + "\n"; output += "+" + spaces + contentToDiffLine(key, value) + "\n"; } else { - output += spaces + contentToDiffLine(key, value) + "\n"; + output += spaces + " " + contentToDiffLine(key, value) + "\n"; } } return output + " }"; } -function lookForEntry(entry, data) { - return data.findIndex(data_entry => { +function lookForEntry(expected, testOutput) { + return testOutput.findIndex(testOutputEntry => { let allGood = true; - for (const key in entry) { - if (!Object.prototype.hasOwnProperty.call(entry, key)) { + for (const key in expected) { + if (!Object.prototype.hasOwnProperty.call(expected, key)) { continue; } - let value = data_entry[key]; - // To make our life easier, if there is a "parent" type, we add it to the path. 
- if (key === "path" && data_entry["parent"] !== undefined) { - if (value.length > 0) { - value += "::" + data_entry["parent"]["name"]; - } else { - value = data_entry["parent"]["name"]; - } - } - if (value !== entry[key]) { + const value = valueMapper(key, testOutputEntry); + if (value !== expected[key]) { allGood = false; break; } diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml index d995106ae02..c1f27de7ed4 100644 --- a/src/tools/tidy/Cargo.toml +++ b/src/tools/tidy/Cargo.toml @@ -6,7 +6,7 @@ autobins = false [dependencies] build_helper = { path = "../../build_helper" } -cargo_metadata = "0.19" +cargo_metadata = "0.21" regex = "1" miropt-test-tools = { path = "../miropt-test-tools" } walkdir = "2" diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index f43f5eae9a5..858b058cb7d 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -73,7 +73,6 @@ pub(crate) const WORKSPACES: &[(&str, ExceptionList, Option<(&[&str], &[&str])>, // tidy-alphabetical-start ("compiler/rustc_codegen_gcc", EXCEPTIONS_GCC, None, &[]), ("src/bootstrap", EXCEPTIONS_BOOTSTRAP, None, &[]), - ("src/ci/docker/host-x86_64/test-various/uefi_qemu_test", EXCEPTIONS_UEFI_QEMU_TEST, None, &[]), ("src/tools/cargo", EXCEPTIONS_CARGO, None, &["src/tools/cargo"]), //("src/tools/miri/test-cargo-miri", &[], None), // FIXME uncomment once all deps are vendored //("src/tools/miri/test_dependencies", &[], None), // FIXME uncomment once all deps are vendored @@ -81,6 +80,7 @@ pub(crate) const WORKSPACES: &[(&str, ExceptionList, Option<(&[&str], &[&str])>, ("src/tools/rustbook", EXCEPTIONS_RUSTBOOK, None, &["src/doc/book", "src/doc/reference"]), ("src/tools/rustc-perf", EXCEPTIONS_RUSTC_PERF, None, &["src/tools/rustc-perf"]), ("src/tools/test-float-parse", EXCEPTIONS, None, &[]), + ("tests/run-make/uefi-qemu/uefi_qemu_test", EXCEPTIONS_UEFI_QEMU_TEST, None, &[]), // tidy-alphabetical-end ]; @@ -135,6 +135,7 @@ const EXCEPTIONS_CARGO: ExceptionList = &[ ("libz-rs-sys", "Zlib"), ("normalize-line-endings", "Apache-2.0"), ("openssl", "Apache-2.0"), + ("ring", "Apache-2.0 AND ISC"), ("ryu", "Apache-2.0 OR BSL-1.0"), // BSL is not acceptble, but we use it under Apache-2.0 ("similar", "Apache-2.0"), ("sized-chunks", "MPL-2.0+"), @@ -166,7 +167,7 @@ const EXCEPTIONS_RUSTC_PERF: ExceptionList = &[ ("brotli-decompressor", "BSD-3-Clause/MIT"), ("encoding_rs", "(Apache-2.0 OR MIT) AND BSD-3-Clause"), ("inferno", "CDDL-1.0"), - ("ring", NON_STANDARD_LICENSE), // see EXCEPTIONS_NON_STANDARD_LICENSE_DEPS for more. + ("option-ext", "MPL-2.0"), ("ryu", "Apache-2.0 OR BSL-1.0"), ("snap", "BSD-3-Clause"), ("subtle", "BSD-3-Clause"), @@ -225,20 +226,6 @@ const EXCEPTIONS_UEFI_QEMU_TEST: ExceptionList = &[ ("r-efi", "MIT OR Apache-2.0 OR LGPL-2.1-or-later"), // LGPL is not acceptable, but we use it under MIT OR Apache-2.0 ]; -/// Placeholder for non-standard license file. -const NON_STANDARD_LICENSE: &str = "NON_STANDARD_LICENSE"; - -/// These dependencies have non-standard licenses but are genenrally permitted. -const EXCEPTIONS_NON_STANDARD_LICENSE_DEPS: &[&str] = &[ - // `ring` is included because it is an optional dependency of `hyper`, - // which is a training data in rustc-perf for optimized build. - // The license of it is generally `ISC AND MIT AND OpenSSL`, - // though the `package.license` field is not set. 
- // - // See https://github.com/briansmith/ring/issues/902 - "ring", -]; - const PERMITTED_DEPS_LOCATION: &str = concat!(file!(), ":", line!()); /// Crates rustc is allowed to depend on. Avoid adding to the list if possible. @@ -378,6 +365,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[ "serde", "serde_derive", "serde_json", + "serde_path_to_error", "sha1", "sha2", "sharded-slab", @@ -597,7 +585,7 @@ pub fn check(root: &Path, cargo: &Path, bless: bool, bad: &mut bool) { .other_options(vec!["--locked".to_owned()]); let metadata = t!(cmd.exec()); - check_license_exceptions(&metadata, exceptions, bad); + check_license_exceptions(&metadata, workspace, exceptions, bad); if let Some((crates, permitted_deps)) = permitted_deps { check_permitted_dependencies(&metadata, workspace, permitted_deps, crates, bad); } @@ -631,8 +619,8 @@ fn check_proc_macro_dep_list(root: &Path, cargo: &Path, bless: bool, bad: &mut b proc_macro_deps.retain(|pkg| !is_proc_macro_pkg(&metadata[pkg])); let proc_macro_deps: HashSet<_> = - proc_macro_deps.into_iter().map(|dep| metadata[dep].name.clone()).collect(); - let expected = proc_macro_deps::CRATES.iter().map(|s| s.to_string()).collect::<HashSet<_>>(); + proc_macro_deps.into_iter().map(|dep| metadata[dep].name.as_ref()).collect(); + let expected = proc_macro_deps::CRATES.iter().copied().collect::<HashSet<_>>(); let needs_blessing = proc_macro_deps.difference(&expected).next().is_some() || expected.difference(&proc_macro_deps).next().is_some(); @@ -716,7 +704,7 @@ fn check_runtime_license_exceptions(metadata: &Metadata, bad: &mut bool) { // See https://github.com/rust-lang/rust/issues/62620 for more. // In general, these should never be added and this exception // should not be taken as precedent for any new target. - if pkg.name == "fortanix-sgx-abi" && pkg.license.as_deref() == Some("MPL-2.0") { + if *pkg.name == "fortanix-sgx-abi" && pkg.license.as_deref() == Some("MPL-2.0") { continue; } @@ -728,36 +716,38 @@ fn check_runtime_license_exceptions(metadata: &Metadata, bad: &mut bool) { /// Check that all licenses of tool dependencies are in the valid list in `LICENSES`. /// /// Packages listed in `exceptions` are allowed for tools. -fn check_license_exceptions(metadata: &Metadata, exceptions: &[(&str, &str)], bad: &mut bool) { +fn check_license_exceptions( + metadata: &Metadata, + workspace: &str, + exceptions: &[(&str, &str)], + bad: &mut bool, +) { // Validate the EXCEPTIONS list hasn't changed. for (name, license) in exceptions { // Check that the package actually exists. - if !metadata.packages.iter().any(|p| p.name == *name) { + if !metadata.packages.iter().any(|p| *p.name == *name) { tidy_error!( bad, - "could not find exception package `{}`\n\ + "could not find exception package `{}` in workspace `{workspace}`\n\ Remove from EXCEPTIONS list if it is no longer used.", name ); } // Check that the license hasn't changed. 
- for pkg in metadata.packages.iter().filter(|p| p.name == *name) { + for pkg in metadata.packages.iter().filter(|p| *p.name == *name) { match &pkg.license { None => { - if *license == NON_STANDARD_LICENSE - && EXCEPTIONS_NON_STANDARD_LICENSE_DEPS.contains(&pkg.name.as_str()) - { - continue; - } tidy_error!( bad, - "dependency exception `{}` does not declare a license expression", + "dependency exception `{}` in workspace `{workspace}` does not declare a license expression", pkg.id ); } Some(pkg_license) => { if pkg_license.as_str() != *license { - println!("dependency exception `{name}` license has changed"); + println!( + "dependency exception `{name}` license in workspace `{workspace}` has changed" + ); println!(" previously `{license}` now `{pkg_license}`"); println!(" update EXCEPTIONS for the new license"); *bad = true; @@ -781,12 +771,21 @@ fn check_license_exceptions(metadata: &Metadata, exceptions: &[(&str, &str)], ba let license = match &pkg.license { Some(license) => license, None => { - tidy_error!(bad, "dependency `{}` does not define a license expression", pkg.id); + tidy_error!( + bad, + "dependency `{}` in workspace `{workspace}` does not define a license expression", + pkg.id + ); continue; } }; if !LICENSES.contains(&license.as_str()) { - tidy_error!(bad, "invalid license `{}` in `{}`", license, pkg.id); + tidy_error!( + bad, + "invalid license `{}` for package `{}` in workspace `{workspace}`", + license, + pkg.id + ); } } } @@ -816,9 +815,9 @@ fn check_permitted_dependencies( let Ok(version) = Version::parse(version) else { return false; }; - pkg.name == name && pkg.version == version + *pkg.name == name && pkg.version == version } else { - pkg.name == permitted + *pkg.name == permitted } } if !deps.iter().any(|dep_id| compare(pkg_from_id(metadata, dep_id), permitted)) { @@ -866,7 +865,7 @@ fn check_permitted_dependencies( /// Finds a package with the given name. 
fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package { - let mut i = metadata.packages.iter().filter(|p| p.name == name); + let mut i = metadata.packages.iter().filter(|p| *p.name == name); let result = i.next().unwrap_or_else(|| panic!("could not find package `{name}` in package list")); assert!(i.next().is_none(), "more than one package found for `{name}`"); diff --git a/src/tools/tidy/src/ext_tool_checks.rs b/src/tools/tidy/src/extra_checks/mod.rs index 381ea44fd46..f90f716cd95 100644 --- a/src/tools/tidy/src/ext_tool_checks.rs +++ b/src/tools/tidy/src/extra_checks/mod.rs @@ -25,6 +25,8 @@ use std::{fmt, fs, io}; use crate::CiInfo; +mod rustdoc_js; + const MIN_PY_REV: (u32, u32) = (3, 9); const MIN_PY_REV_STR: &str = "≥3.9"; @@ -46,12 +48,25 @@ pub fn check( root_path: &Path, outdir: &Path, ci_info: &CiInfo, + librustdoc_path: &Path, + tools_path: &Path, + npm: &Path, bless: bool, extra_checks: Option<&str>, pos_args: &[String], bad: &mut bool, ) { - if let Err(e) = check_impl(root_path, outdir, ci_info, bless, extra_checks, pos_args) { + if let Err(e) = check_impl( + root_path, + outdir, + ci_info, + librustdoc_path, + tools_path, + npm, + bless, + extra_checks, + pos_args, + ) { tidy_error!(bad, "{e}"); } } @@ -60,6 +75,9 @@ fn check_impl( root_path: &Path, outdir: &Path, ci_info: &CiInfo, + librustdoc_path: &Path, + tools_path: &Path, + npm: &Path, bless: bool, extra_checks: Option<&str>, pos_args: &[String], @@ -68,7 +86,7 @@ fn check_impl( std::env::var("TIDY_PRINT_DIFF").is_ok_and(|v| v.eq_ignore_ascii_case("true") || v == "1"); // Split comma-separated args up - let lint_args = match extra_checks { + let mut lint_args = match extra_checks { Some(s) => s .strip_prefix("--extra-checks=") .unwrap() @@ -81,11 +99,7 @@ fn check_impl( }) .filter_map(|(res, src)| match res { Ok(arg) => { - if arg.is_inactive_auto(ci_info) { - None - } else { - Some(arg) - } + Some(arg) } Err(err) => { // only warn because before bad extra checks would be silently ignored. @@ -96,6 +110,11 @@ fn check_impl( .collect(), None => vec![], }; + if lint_args.iter().any(|ck| ck.auto) { + crate::files_modified_batch_filter(ci_info, &mut lint_args, |ck, path| { + ck.is_non_auto_or_matches(path) + }); + } macro_rules! extra_check { ($lang:ident, $kind:ident) => { @@ -108,6 +127,8 @@ fn check_impl( let shell_lint = extra_check!(Shell, Lint); let cpp_fmt = extra_check!(Cpp, Fmt); let spellcheck = extra_check!(Spellcheck, None); + let js_lint = extra_check!(Js, Lint); + let js_typecheck = extra_check!(Js, Typecheck); let mut py_path = None; @@ -275,6 +296,19 @@ fn check_impl( spellcheck_runner(&args)?; } + if js_lint || js_typecheck { + rustdoc_js::npm_install(root_path, outdir, npm)?; + } + + if js_lint { + rustdoc_js::lint(outdir, librustdoc_path, tools_path)?; + rustdoc_js::es_check(outdir, librustdoc_path)?; + } + + if js_typecheck { + rustdoc_js::typecheck(outdir, librustdoc_path)?; + } + Ok(()) } @@ -688,22 +722,26 @@ impl ExtraCheckArg { self.lang == lang && self.kind.map(|k| k == kind).unwrap_or(true) } - /// Returns `true` if this is an auto arg and the relevant files are not modified. 
- fn is_inactive_auto(&self, ci_info: &CiInfo) -> bool { + /// Returns `false` if this is an auto arg and the passed filename does not trigger the auto rule + fn is_non_auto_or_matches(&self, filepath: &str) -> bool { if !self.auto { - return false; + return true; } let ext = match self.lang { ExtraCheckLang::Py => ".py", ExtraCheckLang::Cpp => ".cpp", ExtraCheckLang::Shell => ".sh", + ExtraCheckLang::Js => ".js", ExtraCheckLang::Spellcheck => { - return !crate::files_modified(ci_info, |s| { - SPELLCHECK_DIRS.iter().any(|dir| Path::new(s).starts_with(dir)) - }); + for dir in SPELLCHECK_DIRS { + if Path::new(filepath).starts_with(dir) { + return true; + } + } + return false; } }; - !crate::files_modified(ci_info, |s| s.ends_with(ext)) + filepath.ends_with(ext) } fn has_supported_kind(&self) -> bool { @@ -717,6 +755,7 @@ impl ExtraCheckArg { ExtraCheckLang::Cpp => &[Fmt], ExtraCheckLang::Shell => &[Lint], ExtraCheckLang::Spellcheck => &[], + ExtraCheckLang::Js => &[Lint, Typecheck], }; supported_kinds.contains(&kind) } @@ -757,6 +796,7 @@ enum ExtraCheckLang { Shell, Cpp, Spellcheck, + Js, } impl FromStr for ExtraCheckLang { @@ -768,6 +808,7 @@ impl FromStr for ExtraCheckLang { "shell" => Self::Shell, "cpp" => Self::Cpp, "spellcheck" => Self::Spellcheck, + "js" => Self::Js, _ => return Err(ExtraCheckParseError::UnknownLang(s.to_string())), }) } @@ -777,6 +818,7 @@ impl FromStr for ExtraCheckLang { enum ExtraCheckKind { Lint, Fmt, + Typecheck, /// Never parsed, but used as a placeholder for /// langs that never have a specific kind. None, @@ -789,6 +831,7 @@ impl FromStr for ExtraCheckKind { Ok(match s { "lint" => Self::Lint, "fmt" => Self::Fmt, + "typecheck" => Self::Typecheck, _ => return Err(ExtraCheckParseError::UnknownKind(s.to_string())), }) } diff --git a/src/tools/tidy/src/extra_checks/rustdoc_js.rs b/src/tools/tidy/src/extra_checks/rustdoc_js.rs new file mode 100644 index 00000000000..7708b128e23 --- /dev/null +++ b/src/tools/tidy/src/extra_checks/rustdoc_js.rs @@ -0,0 +1,113 @@ +//! Tidy check to ensure that rustdoc templates didn't forget a `{# #}` to strip extra whitespace +//! characters. + +use std::ffi::OsStr; +use std::io; +use std::path::{Path, PathBuf}; +use std::process::{Child, Command}; + +use build_helper::npm; +use ignore::DirEntry; + +use crate::walk::walk_no_read; + +fn node_module_bin(outdir: &Path, name: &str) -> PathBuf { + outdir.join("node_modules/.bin").join(name) +} + +fn spawn_cmd(cmd: &mut Command) -> Result<Child, io::Error> { + cmd.spawn().map_err(|err| { + eprintln!("unable to run {cmd:?} due to {err:?}"); + err + }) +} + +/// install all js dependencies from package.json. 
+pub(super) fn npm_install(root_path: &Path, outdir: &Path, npm: &Path) -> Result<(), super::Error> { + npm::install(root_path, outdir, npm)?; + Ok(()) +} + +fn rustdoc_js_files(librustdoc_path: &Path) -> Vec<PathBuf> { + let mut files = Vec::new(); + walk_no_read( + &[&librustdoc_path.join("html/static/js")], + |path, is_dir| is_dir || path.extension().is_none_or(|ext| ext != OsStr::new("js")), + &mut |path: &DirEntry| { + files.push(path.path().into()); + }, + ); + return files; +} + +fn run_eslint(outdir: &Path, args: &[PathBuf], config_folder: PathBuf) -> Result<(), super::Error> { + let mut child = spawn_cmd( + Command::new(node_module_bin(outdir, "eslint")) + .arg("-c") + .arg(config_folder.join(".eslintrc.js")) + .args(args), + )?; + match child.wait() { + Ok(exit_status) => { + if exit_status.success() { + return Ok(()); + } + Err(super::Error::FailedCheck("eslint command failed")) + } + Err(error) => Err(super::Error::Generic(format!("eslint command failed: {error:?}"))), + } +} + +pub(super) fn lint( + outdir: &Path, + librustdoc_path: &Path, + tools_path: &Path, +) -> Result<(), super::Error> { + let files_to_check = rustdoc_js_files(librustdoc_path); + println!("Running eslint on rustdoc JS files"); + run_eslint(outdir, &files_to_check, librustdoc_path.join("html/static"))?; + + run_eslint(outdir, &[tools_path.join("rustdoc-js/tester.js")], tools_path.join("rustdoc-js"))?; + run_eslint( + outdir, + &[tools_path.join("rustdoc-gui/tester.js")], + tools_path.join("rustdoc-gui"), + )?; + Ok(()) +} + +pub(super) fn typecheck(outdir: &Path, librustdoc_path: &Path) -> Result<(), super::Error> { + // use npx to ensure correct version + let mut child = spawn_cmd( + Command::new(node_module_bin(outdir, "tsc")) + .arg("-p") + .arg(librustdoc_path.join("html/static/js/tsconfig.json")), + )?; + match child.wait() { + Ok(exit_status) => { + if exit_status.success() { + return Ok(()); + } + Err(super::Error::FailedCheck("tsc command failed")) + } + Err(error) => Err(super::Error::Generic(format!("tsc command failed: {error:?}"))), + } +} + +pub(super) fn es_check(outdir: &Path, librustdoc_path: &Path) -> Result<(), super::Error> { + let files_to_check = rustdoc_js_files(librustdoc_path); + let mut cmd = Command::new(node_module_bin(outdir, "es-check")); + cmd.arg("es2019"); + for f in files_to_check { + cmd.arg(f); + } + match spawn_cmd(&mut cmd)?.wait() { + Ok(exit_status) => { + if exit_status.success() { + return Ok(()); + } + Err(super::Error::FailedCheck("es-check command failed")) + } + Err(error) => Err(super::Error::Generic(format!("es-check command failed: {error:?}"))), + } +} diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index e83b47e1380..fb00b3a943f 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -331,11 +331,9 @@ fn collect_lang_features_in(features: &mut Features, base: &Path, file: &str, ba continue; } - if in_feature_group { - if let Some(doc_comment) = line.strip_prefix("///") { - doc_comments.push(doc_comment.trim().to_string()); - continue; - } + if in_feature_group && let Some(doc_comment) = line.strip_prefix("///") { + doc_comments.push(doc_comment.trim().to_string()); + continue; } let mut parts = line.split(','); @@ -465,19 +463,20 @@ fn get_and_check_lib_features( map_lib_features(base_src_path, &mut |res, file, line| match res { Ok((name, f)) => { let mut check_features = |f: &Feature, list: &Features, display: &str| { - if let Some(s) = list.get(name) { - if f.tracking_issue != s.tracking_issue && 
f.level != Status::Accepted { - tidy_error!( - bad, - "{}:{}: feature gate {} has inconsistent `issue`: \"{}\" mismatches the {} `issue` of \"{}\"", - file.display(), - line, - name, - f.tracking_issue_display(), - display, - s.tracking_issue_display(), - ); - } + if let Some(s) = list.get(name) + && f.tracking_issue != s.tracking_issue + && f.level != Status::Accepted + { + tidy_error!( + bad, + "{}:{}: feature gate {} has inconsistent `issue`: \"{}\" mismatches the {} `issue` of \"{}\"", + file.display(), + line, + name, + f.tracking_issue_display(), + display, + s.tracking_issue_display(), + ); } }; check_features(&f, lang_features, "corresponding lang feature"); diff --git a/src/tools/tidy/src/fluent_period.rs b/src/tools/tidy/src/fluent_period.rs index 85c1ef6166a..836b5699289 100644 --- a/src/tools/tidy/src/fluent_period.rs +++ b/src/tools/tidy/src/fluent_period.rs @@ -33,14 +33,14 @@ fn check_period(filename: &str, contents: &str, bad: &mut bool) { continue; } - if let Some(pat) = &m.value { - if let Some(PatternElement::TextElement { value }) = pat.elements.last() { - // We don't care about ellipses. - if value.ends_with(".") && !value.ends_with("...") { - let ll = find_line(contents, value); - let name = m.id.name; - tidy_error!(bad, "{filename}:{ll}: message `{name}` ends in a period"); - } + if let Some(pat) = &m.value + && let Some(PatternElement::TextElement { value }) = pat.elements.last() + { + // We don't care about ellipses. + if value.ends_with(".") && !value.ends_with("...") { + let ll = find_line(contents, value); + let name = m.id.name; + tidy_error!(bad, "{filename}:{ll}: message `{name}` ends in a period"); } } @@ -50,12 +50,13 @@ fn check_period(filename: &str, contents: &str, bad: &mut bool) { continue; } - if let Some(PatternElement::TextElement { value }) = attr.value.elements.last() { - if value.ends_with(".") && !value.ends_with("...") { - let ll = find_line(contents, value); - let name = attr.id.name; - tidy_error!(bad, "{filename}:{ll}: attr `{name}` ends in a period"); - } + if let Some(PatternElement::TextElement { value }) = attr.value.elements.last() + && value.ends_with(".") + && !value.ends_with("...") + { + let ll = find_line(contents, value); + let name = attr.id.name; + tidy_error!(bad, "{filename}:{ll}: attr `{name}` ends in a period"); } } } diff --git a/src/tools/tidy/src/fluent_used.rs b/src/tools/tidy/src/fluent_used.rs index 12fafd9a7ff..909bf482ddf 100644 --- a/src/tools/tidy/src/fluent_used.rs +++ b/src/tools/tidy/src/fluent_used.rs @@ -12,7 +12,7 @@ fn filter_used_messages( ) { // we don't just check messages never appear in Rust files, // because messages can be used as parts of other fluent messages in Fluent files, - // so we do checking messages appear only once in all Rust and Fluent files. + // so we check messages appear only once in all Rust and Fluent files. 
let matches = static_regex!(r"\w+").find_iter(contents); for name in matches { if let Some((name, filename)) = msgs_not_appeared_yet.remove_entry(name.as_str()) { diff --git a/src/tools/tidy/src/issues.txt b/src/tools/tidy/src/issues.txt index 77414bec82d..ee06707415f 100644 --- a/src/tools/tidy/src/issues.txt +++ b/src/tools/tidy/src/issues.txt @@ -1364,1230 +1364,6 @@ ui/infinite/issue-41731-infinite-macro-println.rs ui/intrinsics/issue-28575.rs ui/intrinsics/issue-84297-reifying-copy.rs ui/invalid/issue-114435-layout-type-err.rs -ui/issues/auxiliary/issue-11224.rs -ui/issues/auxiliary/issue-11508.rs -ui/issues/auxiliary/issue-11529.rs -ui/issues/auxiliary/issue-11680.rs -ui/issues/auxiliary/issue-12612-1.rs -ui/issues/auxiliary/issue-12612-2.rs -ui/issues/auxiliary/issue-12660-aux.rs -ui/issues/auxiliary/issue-13507.rs -ui/issues/auxiliary/issue-13620-1.rs -ui/issues/auxiliary/issue-13620-2.rs -ui/issues/auxiliary/issue-14344-1.rs -ui/issues/auxiliary/issue-14344-2.rs -ui/issues/auxiliary/issue-14422.rs -ui/issues/auxiliary/issue-15562.rs -ui/issues/auxiliary/issue-16643.rs -ui/issues/auxiliary/issue-16725.rs -ui/issues/auxiliary/issue-17662.rs -ui/issues/auxiliary/issue-18501.rs -ui/issues/auxiliary/issue-18514.rs -ui/issues/auxiliary/issue-18711.rs -ui/issues/auxiliary/issue-18913-1.rs -ui/issues/auxiliary/issue-18913-2.rs -ui/issues/auxiliary/issue-19293.rs -ui/issues/auxiliary/issue-20389.rs -ui/issues/auxiliary/issue-21202.rs -ui/issues/auxiliary/issue-2170-lib.rs -ui/issues/auxiliary/issue-2316-a.rs -ui/issues/auxiliary/issue-2316-b.rs -ui/issues/auxiliary/issue-2380.rs -ui/issues/auxiliary/issue-2414-a.rs -ui/issues/auxiliary/issue-2414-b.rs -ui/issues/auxiliary/issue-2472-b.rs -ui/issues/auxiliary/issue-25185-1.rs -ui/issues/auxiliary/issue-25185-2.rs -ui/issues/auxiliary/issue-2526.rs -ui/issues/auxiliary/issue-25467.rs -ui/issues/auxiliary/issue-2631-a.rs -ui/issues/auxiliary/issue-2723-a.rs -ui/issues/auxiliary/issue-29265.rs -ui/issues/auxiliary/issue-29485.rs -ui/issues/auxiliary/issue-3012-1.rs -ui/issues/auxiliary/issue-30123-aux.rs -ui/issues/auxiliary/issue-3136-a.rs -ui/issues/auxiliary/issue-31702-1.rs -ui/issues/auxiliary/issue-31702-2.rs -ui/issues/auxiliary/issue-34796-aux.rs -ui/issues/auxiliary/issue-36954.rs -ui/issues/auxiliary/issue-38190.rs -ui/issues/auxiliary/issue-38226-aux.rs -ui/issues/auxiliary/issue-3979-traits.rs -ui/issues/auxiliary/issue-41053.rs -ui/issues/auxiliary/issue-41549.rs -ui/issues/auxiliary/issue-42007-s.rs -ui/issues/auxiliary/issue-4208-cc.rs -ui/issues/auxiliary/issue-4545.rs -ui/issues/auxiliary/issue-48984-aux.rs -ui/issues/auxiliary/issue-49544.rs -ui/issues/auxiliary/issue-51798.rs -ui/issues/auxiliary/issue-52489.rs -ui/issues/auxiliary/issue-5518.rs -ui/issues/auxiliary/issue-5521.rs -ui/issues/auxiliary/issue-56943.rs -ui/issues/auxiliary/issue-57271-lib.rs -ui/issues/auxiliary/issue-5844-aux.rs -ui/issues/auxiliary/issue-7178.rs -ui/issues/auxiliary/issue-73112.rs -ui/issues/auxiliary/issue-7899.rs -ui/issues/auxiliary/issue-8044.rs -ui/issues/auxiliary/issue-8259.rs -ui/issues/auxiliary/issue-8401.rs -ui/issues/auxiliary/issue-9123.rs -ui/issues/auxiliary/issue-9155.rs -ui/issues/auxiliary/issue-9188.rs -ui/issues/auxiliary/issue-9906.rs -ui/issues/auxiliary/issue-9968.rs -ui/issues/issue-10228.rs -ui/issues/issue-10291.rs -ui/issues/issue-102964.rs -ui/issues/issue-10396.rs -ui/issues/issue-10412.rs -ui/issues/issue-10436.rs -ui/issues/issue-10456.rs -ui/issues/issue-10465.rs -ui/issues/issue-10545.rs 
-ui/issues/issue-10638.rs -ui/issues/issue-10656.rs -ui/issues/issue-106755.rs -ui/issues/issue-10683.rs -ui/issues/issue-10718.rs -ui/issues/issue-10734.rs -ui/issues/issue-10764.rs -ui/issues/issue-10767.rs -ui/issues/issue-10802.rs -ui/issues/issue-10806.rs -ui/issues/issue-10853.rs -ui/issues/issue-10877.rs -ui/issues/issue-10902.rs -ui/issues/issue-11004.rs -ui/issues/issue-11047.rs -ui/issues/issue-11085.rs -ui/issues/issue-11192.rs -ui/issues/issue-11205.rs -ui/issues/issue-11224.rs -ui/issues/issue-11267.rs -ui/issues/issue-11374.rs -ui/issues/issue-11382.rs -ui/issues/issue-11384.rs -ui/issues/issue-11508.rs -ui/issues/issue-11529.rs -ui/issues/issue-11552.rs -ui/issues/issue-11592.rs -ui/issues/issue-11677.rs -ui/issues/issue-11680.rs -ui/issues/issue-11681.rs -ui/issues/issue-11709.rs -ui/issues/issue-11740.rs -ui/issues/issue-11771.rs -ui/issues/issue-11820.rs -ui/issues/issue-11844.rs -ui/issues/issue-11869.rs -ui/issues/issue-11958.rs -ui/issues/issue-12033.rs -ui/issues/issue-12041.rs -ui/issues/issue-12127.rs -ui/issues/issue-12285.rs -ui/issues/issue-12567.rs -ui/issues/issue-12612.rs -ui/issues/issue-12660.rs -ui/issues/issue-12677.rs -ui/issues/issue-12729.rs -ui/issues/issue-12744.rs -ui/issues/issue-12860.rs -ui/issues/issue-12863.rs -ui/issues/issue-12909.rs -ui/issues/issue-12920.rs -ui/issues/issue-13027.rs -ui/issues/issue-13058.rs -ui/issues/issue-13105.rs -ui/issues/issue-13167.rs -ui/issues/issue-13202.rs -ui/issues/issue-13204.rs -ui/issues/issue-13214.rs -ui/issues/issue-13259-windows-tcb-trash.rs -ui/issues/issue-13264.rs -ui/issues/issue-13323.rs -ui/issues/issue-13359.rs -ui/issues/issue-13405.rs -ui/issues/issue-13407.rs -ui/issues/issue-13434.rs -ui/issues/issue-13446.rs -ui/issues/issue-13466.rs -ui/issues/issue-13482-2.rs -ui/issues/issue-13482.rs -ui/issues/issue-13497-2.rs -ui/issues/issue-13497.rs -ui/issues/issue-13507-2.rs -ui/issues/issue-13620.rs -ui/issues/issue-13665.rs -ui/issues/issue-13703.rs -ui/issues/issue-13763.rs -ui/issues/issue-13775.rs -ui/issues/issue-13808.rs -ui/issues/issue-13847.rs -ui/issues/issue-13867.rs -ui/issues/issue-14082.rs -ui/issues/issue-14091-2.rs -ui/issues/issue-14091.rs -ui/issues/issue-14092.rs -ui/issues/issue-14229.rs -ui/issues/issue-14254.rs -ui/issues/issue-14285.rs -ui/issues/issue-14308.rs -ui/issues/issue-14330.rs -ui/issues/issue-14344.rs -ui/issues/issue-14366.rs -ui/issues/issue-14382.rs -ui/issues/issue-14393.rs -ui/issues/issue-14399.rs -ui/issues/issue-14422.rs -ui/issues/issue-14541.rs -ui/issues/issue-14721.rs -ui/issues/issue-14821.rs -ui/issues/issue-14845.rs -ui/issues/issue-14853.rs -ui/issues/issue-14865.rs -ui/issues/issue-14875.rs -ui/issues/issue-14901.rs -ui/issues/issue-14915.rs -ui/issues/issue-14919.rs -ui/issues/issue-14959.rs -ui/issues/issue-15034.rs -ui/issues/issue-15043.rs -ui/issues/issue-15063.rs -ui/issues/issue-15094.rs -ui/issues/issue-15104.rs -ui/issues/issue-15129-rpass.rs -ui/issues/issue-15167.rs -ui/issues/issue-15189.rs -ui/issues/issue-15207.rs -ui/issues/issue-15260.rs -ui/issues/issue-15381.rs -ui/issues/issue-15444.rs -ui/issues/issue-15523-big.rs -ui/issues/issue-15523.rs -ui/issues/issue-15562.rs -ui/issues/issue-15571.rs -ui/issues/issue-15673.rs -ui/issues/issue-15734.rs -ui/issues/issue-15735.rs -ui/issues/issue-15756.rs -ui/issues/issue-15763.rs -ui/issues/issue-15774.rs -ui/issues/issue-15783.rs -ui/issues/issue-15793.rs -ui/issues/issue-15858.rs -ui/issues/issue-15896.rs -ui/issues/issue-15965.rs -ui/issues/issue-16048.rs -ui/issues/issue-16149.rs 
-ui/issues/issue-16151.rs -ui/issues/issue-16256.rs -ui/issues/issue-16278.rs -ui/issues/issue-16401.rs -ui/issues/issue-16441.rs -ui/issues/issue-16452.rs -ui/issues/issue-16492.rs -ui/issues/issue-16530.rs -ui/issues/issue-16560.rs -ui/issues/issue-16562.rs -ui/issues/issue-16596.rs -ui/issues/issue-16643.rs -ui/issues/issue-16648.rs -ui/issues/issue-16668.rs -ui/issues/issue-16671.rs -ui/issues/issue-16683.rs -ui/issues/issue-16725.rs -ui/issues/issue-16739.rs -ui/issues/issue-16745.rs -ui/issues/issue-16774.rs -ui/issues/issue-16783.rs -ui/issues/issue-16819.rs -ui/issues/issue-16922-rpass.rs -ui/issues/issue-16966.rs -ui/issues/issue-16994.rs -ui/issues/issue-17001.rs -ui/issues/issue-17033.rs -ui/issues/issue-17068.rs -ui/issues/issue-17121.rs -ui/issues/issue-17216.rs -ui/issues/issue-17252.rs -ui/issues/issue-17322.rs -ui/issues/issue-17336.rs -ui/issues/issue-17337.rs -ui/issues/issue-17351.rs -ui/issues/issue-17361.rs -ui/issues/issue-17373.rs -ui/issues/issue-17385.rs -ui/issues/issue-17405.rs -ui/issues/issue-17441.rs -ui/issues/issue-17450.rs -ui/issues/issue-17503.rs -ui/issues/issue-17546.rs -ui/issues/issue-17551.rs -ui/issues/issue-17651.rs -ui/issues/issue-17662.rs -ui/issues/issue-17732.rs -ui/issues/issue-17734.rs -ui/issues/issue-17740.rs -ui/issues/issue-17746.rs -ui/issues/issue-17758.rs -ui/issues/issue-17771.rs -ui/issues/issue-17800.rs -ui/issues/issue-17816.rs -ui/issues/issue-17877.rs -ui/issues/issue-17897.rs -ui/issues/issue-17904-2.rs -ui/issues/issue-17904.rs -ui/issues/issue-17905-2.rs -ui/issues/issue-17905.rs -ui/issues/issue-17933.rs -ui/issues/issue-17954.rs -ui/issues/issue-17959.rs -ui/issues/issue-17994.rs -ui/issues/issue-17999.rs -ui/issues/issue-18058.rs -ui/issues/issue-18088.rs -ui/issues/issue-18107.rs -ui/issues/issue-18110.rs -ui/issues/issue-18119.rs -ui/issues/issue-18159.rs -ui/issues/issue-18173.rs -ui/issues/issue-18183.rs -ui/issues/issue-18188.rs -ui/issues/issue-18232.rs -ui/issues/issue-18352.rs -ui/issues/issue-18353.rs -ui/issues/issue-18389.rs -ui/issues/issue-18423.rs -ui/issues/issue-18446-2.rs -ui/issues/issue-18446.rs -ui/issues/issue-18464.rs -ui/issues/issue-18501.rs -ui/issues/issue-18514.rs -ui/issues/issue-18532.rs -ui/issues/issue-18539.rs -ui/issues/issue-18566.rs -ui/issues/issue-18611.rs -ui/issues/issue-18685.rs -ui/issues/issue-18711.rs -ui/issues/issue-18767.rs -ui/issues/issue-18783.rs -ui/issues/issue-18809.rs -ui/issues/issue-18845.rs -ui/issues/issue-18859.rs -ui/issues/issue-18906.rs -ui/issues/issue-18913.rs -ui/issues/issue-18919.rs -ui/issues/issue-18952.rs -ui/issues/issue-18959.rs -ui/issues/issue-18988.rs -ui/issues/issue-19001.rs -ui/issues/issue-19037.rs -ui/issues/issue-19086.rs -ui/issues/issue-19097.rs -ui/issues/issue-19098.rs -ui/issues/issue-19100.rs -ui/issues/issue-19127.rs -ui/issues/issue-19135.rs -ui/issues/issue-19293.rs -ui/issues/issue-19367.rs -ui/issues/issue-19380.rs -ui/issues/issue-19398.rs -ui/issues/issue-19404.rs -ui/issues/issue-19479.rs -ui/issues/issue-19482.rs -ui/issues/issue-19499.rs -ui/issues/issue-19601.rs -ui/issues/issue-19631.rs -ui/issues/issue-19632.rs -ui/issues/issue-19692.rs -ui/issues/issue-19734.rs -ui/issues/issue-19811-escape-unicode.rs -ui/issues/issue-19850.rs -ui/issues/issue-19922.rs -ui/issues/issue-19982.rs -ui/issues/issue-19991.rs -ui/issues/issue-20009.rs -ui/issues/issue-20055-box-trait.rs -ui/issues/issue-20055-box-unsized-array.rs -ui/issues/issue-20162.rs -ui/issues/issue-20174.rs -ui/issues/issue-20186.rs -ui/issues/issue-20225.rs 
-ui/issues/issue-20261.rs -ui/issues/issue-20313-rpass.rs -ui/issues/issue-20313.rs -ui/issues/issue-20389.rs -ui/issues/issue-20396.rs -ui/issues/issue-20413.rs -ui/issues/issue-20414.rs -ui/issues/issue-20427.rs -ui/issues/issue-20433.rs -ui/issues/issue-20454.rs -ui/issues/issue-20544.rs -ui/issues/issue-20575.rs -ui/issues/issue-20644.rs -ui/issues/issue-20676.rs -ui/issues/issue-20714.rs -ui/issues/issue-2074.rs -ui/issues/issue-20772.rs -ui/issues/issue-20797.rs -ui/issues/issue-20803.rs -ui/issues/issue-20831-debruijn.rs -ui/issues/issue-20847.rs -ui/issues/issue-20939.rs -ui/issues/issue-20953.rs -ui/issues/issue-20971.rs -ui/issues/issue-21033.rs -ui/issues/issue-21140.rs -ui/issues/issue-21160.rs -ui/issues/issue-21174-2.rs -ui/issues/issue-21174.rs -ui/issues/issue-21177.rs -ui/issues/issue-21202.rs -ui/issues/issue-21245.rs -ui/issues/issue-21291.rs -ui/issues/issue-21306.rs -ui/issues/issue-21332.rs -ui/issues/issue-21361.rs -ui/issues/issue-21384.rs -ui/issues/issue-21400.rs -ui/issues/issue-21402.rs -ui/issues/issue-21449.rs -ui/issues/issue-2150.rs -ui/issues/issue-2151.rs -ui/issues/issue-21546.rs -ui/issues/issue-21554.rs -ui/issues/issue-21600.rs -ui/issues/issue-21622.rs -ui/issues/issue-21634.rs -ui/issues/issue-21655.rs -ui/issues/issue-2170-exe.rs -ui/issues/issue-21701.rs -ui/issues/issue-21763.rs -ui/issues/issue-21891.rs -ui/issues/issue-2190-1.rs -ui/issues/issue-21909.rs -ui/issues/issue-21922.rs -ui/issues/issue-21946.rs -ui/issues/issue-21950.rs -ui/issues/issue-21974.rs -ui/issues/issue-22008.rs -ui/issues/issue-22034.rs -ui/issues/issue-22036.rs -ui/issues/issue-2214.rs -ui/issues/issue-22258.rs -ui/issues/issue-22289.rs -ui/issues/issue-22312.rs -ui/issues/issue-22346.rs -ui/issues/issue-22356.rs -ui/issues/issue-22370.rs -ui/issues/issue-22403.rs -ui/issues/issue-22426.rs -ui/issues/issue-22434.rs -ui/issues/issue-22468.rs -ui/issues/issue-22471.rs -ui/issues/issue-22577.rs -ui/issues/issue-22599.rs -ui/issues/issue-22603.rs -ui/issues/issue-22629.rs -ui/issues/issue-22638.rs -ui/issues/issue-22644.rs -ui/issues/issue-22673.rs -ui/issues/issue-22684.rs -ui/issues/issue-22706.rs -ui/issues/issue-22777.rs -ui/issues/issue-22781.rs -ui/issues/issue-22789.rs -ui/issues/issue-2281-part1.rs -ui/issues/issue-22814.rs -ui/issues/issue-2284.rs -ui/issues/issue-22872.rs -ui/issues/issue-22874.rs -ui/issues/issue-2288.rs -ui/issues/issue-22886.rs -ui/issues/issue-22894.rs -ui/issues/issue-22992-2.rs -ui/issues/issue-22992.rs -ui/issues/issue-23024.rs -ui/issues/issue-23036.rs -ui/issues/issue-23041.rs -ui/issues/issue-23046.rs -ui/issues/issue-23073.rs -ui/issues/issue-2311-2.rs -ui/issues/issue-2311.rs -ui/issues/issue-2312.rs -ui/issues/issue-2316-c.rs -ui/issues/issue-23173.rs -ui/issues/issue-23189.rs -ui/issues/issue-23217.rs -ui/issues/issue-23253.rs -ui/issues/issue-23261.rs -ui/issues/issue-23281.rs -ui/issues/issue-23311.rs -ui/issues/issue-23336.rs -ui/issues/issue-23354-2.rs -ui/issues/issue-23354.rs -ui/issues/issue-23406.rs -ui/issues/issue-23433.rs -ui/issues/issue-23442.rs -ui/issues/issue-23477.rs -ui/issues/issue-23485.rs -ui/issues/issue-23491.rs -ui/issues/issue-23543.rs -ui/issues/issue-23544.rs -ui/issues/issue-23550.rs -ui/issues/issue-23589.rs -ui/issues/issue-23699.rs -ui/issues/issue-2380-b.rs -ui/issues/issue-2383.rs -ui/issues/issue-23891.rs -ui/issues/issue-23898.rs -ui/issues/issue-23958.rs -ui/issues/issue-23966.rs -ui/issues/issue-23992.rs -ui/issues/issue-24013.rs -ui/issues/issue-24036.rs -ui/issues/issue-24086.rs 
-ui/issues/issue-2414-c.rs -ui/issues/issue-24161.rs -ui/issues/issue-24227.rs -ui/issues/issue-2428.rs -ui/issues/issue-24308.rs -ui/issues/issue-24322.rs -ui/issues/issue-24352.rs -ui/issues/issue-24353.rs -ui/issues/issue-24357.rs -ui/issues/issue-24363.rs -ui/issues/issue-24365.rs -ui/issues/issue-24389.rs -ui/issues/issue-24424.rs -ui/issues/issue-24434.rs -ui/issues/issue-2445-b.rs -ui/issues/issue-2445.rs -ui/issues/issue-24533.rs -ui/issues/issue-24589.rs -ui/issues/issue-2463.rs -ui/issues/issue-24682.rs -ui/issues/issue-24687-embed-debuginfo/auxiliary/issue-24687-lib.rs -ui/issues/issue-24687-embed-debuginfo/auxiliary/issue-24687-mbcs-in-comments.rs -ui/issues/issue-2470-bounds-check-overflow.rs -ui/issues/issue-2472.rs -ui/issues/issue-24779.rs -ui/issues/issue-24819.rs -ui/issues/issue-2487-a.rs -ui/issues/issue-24945-repeat-dash-opts.rs -ui/issues/issue-24947.rs -ui/issues/issue-24954.rs -ui/issues/issue-2502.rs -ui/issues/issue-25076.rs -ui/issues/issue-25089.rs -ui/issues/issue-25145.rs -ui/issues/issue-25180.rs -ui/issues/issue-25185.rs -ui/issues/issue-2526-a.rs -ui/issues/issue-25279.rs -ui/issues/issue-25343.rs -ui/issues/issue-25368.rs -ui/issues/issue-25386.rs -ui/issues/issue-25394.rs -ui/issues/issue-25467.rs -ui/issues/issue-25497.rs -ui/issues/issue-2550.rs -ui/issues/issue-25515.rs -ui/issues/issue-25549-multiple-drop.rs -ui/issues/issue-25579.rs -ui/issues/issue-25679.rs -ui/issues/issue-25693.rs -ui/issues/issue-25746-bool-transmute.rs -ui/issues/issue-25757.rs -ui/issues/issue-25810.rs -ui/issues/issue-2590.rs -ui/issues/issue-25901.rs -ui/issues/issue-26056.rs -ui/issues/issue-26093.rs -ui/issues/issue-26095.rs -ui/issues/issue-26127.rs -ui/issues/issue-26186.rs -ui/issues/issue-26205.rs -ui/issues/issue-26217.rs -ui/issues/issue-26237.rs -ui/issues/issue-2631-b.rs -ui/issues/issue-2642.rs -ui/issues/issue-26468.rs -ui/issues/issue-26472.rs -ui/issues/issue-26484.rs -ui/issues/issue-26614.rs -ui/issues/issue-26619.rs -ui/issues/issue-26641.rs -ui/issues/issue-26646.rs -ui/issues/issue-26655.rs -ui/issues/issue-26709.rs -ui/issues/issue-26802.rs -ui/issues/issue-26805.rs -ui/issues/issue-26812.rs -ui/issues/issue-26948.rs -ui/issues/issue-26997.rs -ui/issues/issue-27008.rs -ui/issues/issue-27033.rs -ui/issues/issue-27042.rs -ui/issues/issue-27054-primitive-binary-ops.rs -ui/issues/issue-27078.rs -ui/issues/issue-2708.rs -ui/issues/issue-27105.rs -ui/issues/issue-2723-b.rs -ui/issues/issue-27240.rs -ui/issues/issue-27268.rs -ui/issues/issue-27281.rs -ui/issues/issue-27340.rs -ui/issues/issue-27401-dropflag-reinit.rs -ui/issues/issue-27433.rs -ui/issues/issue-27592.rs -ui/issues/issue-2761.rs -ui/issues/issue-27639.rs -ui/issues/issue-27697.rs -ui/issues/issue-27815.rs -ui/issues/issue-27842.rs -ui/issues/issue-27889.rs -ui/issues/issue-27942.rs -ui/issues/issue-27949.rs -ui/issues/issue-27997.rs -ui/issues/issue-28105.rs -ui/issues/issue-28109.rs -ui/issues/issue-28181.rs -ui/issues/issue-28279.rs -ui/issues/issue-28344.rs -ui/issues/issue-28433.rs -ui/issues/issue-28472.rs -ui/issues/issue-2848.rs -ui/issues/issue-2849.rs -ui/issues/issue-28498-must-work-ex1.rs -ui/issues/issue-28498-must-work-ex2.rs -ui/issues/issue-28498-ugeh-ex1.rs -ui/issues/issue-28550.rs -ui/issues/issue-28561.rs -ui/issues/issue-28568.rs -ui/issues/issue-28586.rs -ui/issues/issue-28600.rs -ui/issues/issue-28625.rs -ui/issues/issue-28776.rs -ui/issues/issue-28828.rs -ui/issues/issue-28839.rs -ui/issues/issue-28936.rs -ui/issues/issue-2895.rs -ui/issues/issue-28971.rs 
-ui/issues/issue-28983.rs -ui/issues/issue-28999.rs -ui/issues/issue-29030.rs -ui/issues/issue-29037.rs -ui/issues/issue-2904.rs -ui/issues/issue-29048.rs -ui/issues/issue-29053.rs -ui/issues/issue-29071-2.rs -ui/issues/issue-29071.rs -ui/issues/issue-29092.rs -ui/issues/issue-29147-rpass.rs -ui/issues/issue-29147.rs -ui/issues/issue-29265.rs -ui/issues/issue-29276.rs -ui/issues/issue-2935.rs -ui/issues/issue-29466.rs -ui/issues/issue-29485.rs -ui/issues/issue-2951.rs -ui/issues/issue-29516.rs -ui/issues/issue-29522.rs -ui/issues/issue-29540.rs -ui/issues/issue-29663.rs -ui/issues/issue-29668.rs -ui/issues/issue-29710.rs -ui/issues/issue-29723.rs -ui/issues/issue-29740.rs -ui/issues/issue-29743.rs -ui/issues/issue-29821.rs -ui/issues/issue-29857.rs -ui/issues/issue-29861.rs -ui/issues/issue-2989.rs -ui/issues/issue-29948.rs -ui/issues/issue-2995.rs -ui/issues/issue-30018-panic.rs -ui/issues/issue-30081.rs -ui/issues/issue-3012-2.rs -ui/issues/issue-30123.rs -ui/issues/issue-3021-b.rs -ui/issues/issue-3021-d.rs -ui/issues/issue-30236.rs -ui/issues/issue-30255.rs -ui/issues/issue-3026.rs -ui/issues/issue-3029.rs -ui/issues/issue-3037.rs -ui/issues/issue-30371.rs -ui/issues/issue-3038.rs -ui/issues/issue-30380.rs -ui/issues/issue-3052.rs -ui/issues/issue-30530.rs -ui/issues/issue-30589.rs -ui/issues/issue-30615.rs -ui/issues/issue-30756.rs -ui/issues/issue-30891.rs -ui/issues/issue-3091.rs -ui/issues/issue-31011.rs -ui/issues/issue-3109.rs -ui/issues/issue-3121.rs -ui/issues/issue-31267-additional.rs -ui/issues/issue-31267.rs -ui/issues/issue-31299.rs -ui/issues/issue-3136-b.rs -ui/issues/issue-3149.rs -ui/issues/issue-31511.rs -ui/issues/issue-3154.rs -ui/issues/issue-31702.rs -ui/issues/issue-31769.rs -ui/issues/issue-31776.rs -ui/issues/issue-31910.rs -ui/issues/issue-32004.rs -ui/issues/issue-32008.rs -ui/issues/issue-32086.rs -ui/issues/issue-3220.rs -ui/issues/issue-32292.rs -ui/issues/issue-32324.rs -ui/issues/issue-32326.rs -ui/issues/issue-32377.rs -ui/issues/issue-32389.rs -ui/issues/issue-32518.rs -ui/issues/issue-32655.rs -ui/issues/issue-32782.rs -ui/issues/issue-32797.rs -ui/issues/issue-32805.rs -ui/issues/issue-3290.rs -ui/issues/issue-32995-2.rs -ui/issues/issue-32995.rs -ui/issues/issue-33202.rs -ui/issues/issue-33241.rs -ui/issues/issue-33287.rs -ui/issues/issue-33293.rs -ui/issues/issue-33387.rs -ui/issues/issue-3344.rs -ui/issues/issue-33461.rs -ui/issues/issue-33504.rs -ui/issues/issue-33525.rs -ui/issues/issue-33571.rs -ui/issues/issue-33687.rs -ui/issues/issue-33770.rs -ui/issues/issue-3389.rs -ui/issues/issue-33941.rs -ui/issues/issue-34047.rs -ui/issues/issue-34074.rs -ui/issues/issue-34209.rs -ui/issues/issue-34229.rs -ui/issues/issue-3424.rs -ui/issues/issue-3429.rs -ui/issues/issue-34334.rs -ui/issues/issue-34349.rs -ui/issues/issue-34373.rs -ui/issues/issue-34418.rs -ui/issues/issue-34427.rs -ui/issues/issue-3447.rs -ui/issues/issue-34503.rs -ui/issues/issue-34569.rs -ui/issues/issue-34571.rs -ui/issues/issue-34751.rs -ui/issues/issue-3477.rs -ui/issues/issue-34780.rs -ui/issues/issue-34796.rs -ui/issues/issue-34839.rs -ui/issues/issue-3500.rs -ui/issues/issue-35139.rs -ui/issues/issue-3521-2.rs -ui/issues/issue-35241.rs -ui/issues/issue-35423.rs -ui/issues/issue-3556.rs -ui/issues/issue-35570.rs -ui/issues/issue-3559.rs -ui/issues/issue-35600.rs -ui/issues/issue-3574.rs -ui/issues/issue-35815.rs -ui/issues/issue-35976.rs -ui/issues/issue-35988.rs -ui/issues/issue-36023.rs -ui/issues/issue-36036-associated-type-layout.rs -ui/issues/issue-36075.rs 
-ui/issues/issue-3609.rs -ui/issues/issue-36116.rs -ui/issues/issue-36260.rs -ui/issues/issue-36278-prefix-nesting.rs -ui/issues/issue-36299.rs -ui/issues/issue-36379.rs -ui/issues/issue-36400.rs -ui/issues/issue-36474.rs -ui/issues/issue-3656.rs -ui/issues/issue-3668-non-constant-value-in-constant/issue-3668-2.rs -ui/issues/issue-3668-non-constant-value-in-constant/issue-3668.rs -ui/issues/issue-36744-bitcast-args-if-needed.rs -ui/issues/issue-36786-resolve-call.rs -ui/issues/issue-3680.rs -ui/issues/issue-36816.rs -ui/issues/issue-36836.rs -ui/issues/issue-36839.rs -ui/issues/issue-36856.rs -ui/issues/issue-36936.rs -ui/issues/issue-36954.rs -ui/issues/issue-3702-2.rs -ui/issues/issue-3702.rs -ui/issues/issue-37051.rs -ui/issues/issue-37109.rs -ui/issues/issue-37131.rs -ui/issues/issue-37311-type-length-limit/issue-37311.rs -ui/issues/issue-37510.rs -ui/issues/issue-3753.rs -ui/issues/issue-37534.rs -ui/issues/issue-37576.rs -ui/issues/issue-3763.rs -ui/issues/issue-37665.rs -ui/issues/issue-37686.rs -ui/issues/issue-37725.rs -ui/issues/issue-37733.rs -ui/issues/issue-3779.rs -ui/issues/issue-37884.rs -ui/issues/issue-38160.rs -ui/issues/issue-38190.rs -ui/issues/issue-38226.rs -ui/issues/issue-38381.rs -ui/issues/issue-38412.rs -ui/issues/issue-38437.rs -ui/issues/issue-38458.rs -ui/issues/issue-3847.rs -ui/issues/issue-38556.rs -ui/issues/issue-38727.rs -ui/issues/issue-3874.rs -ui/issues/issue-38763.rs -ui/issues/issue-38857.rs -ui/issues/issue-38875/auxiliary/issue-38875-b.rs -ui/issues/issue-38875/issue-38875.rs -ui/issues/issue-3888-2.rs -ui/issues/issue-38919.rs -ui/issues/issue-38942.rs -ui/issues/issue-3895.rs -ui/issues/issue-38954.rs -ui/issues/issue-38987.rs -ui/issues/issue-39089.rs -ui/issues/issue-39211.rs -ui/issues/issue-39367.rs -ui/issues/issue-39548.rs -ui/issues/issue-39687.rs -ui/issues/issue-39709.rs -ui/issues/issue-3979-2.rs -ui/issues/issue-3979-xcrate.rs -ui/issues/issue-3979.rs -ui/issues/issue-39808.rs -ui/issues/issue-39827.rs -ui/issues/issue-39848.rs -ui/issues/issue-3991.rs -ui/issues/issue-3993.rs -ui/issues/issue-39970.rs -ui/issues/issue-39984.rs -ui/issues/issue-40000.rs -ui/issues/issue-4025.rs -ui/issues/issue-40288-2.rs -ui/issues/issue-40288.rs -ui/issues/issue-40951.rs -ui/issues/issue-41053.rs -ui/issues/issue-41229-ref-str.rs -ui/issues/issue-41298.rs -ui/issues/issue-41479.rs -ui/issues/issue-41498.rs -ui/issues/issue-41549.rs -ui/issues/issue-41604.rs -ui/issues/issue-41652/auxiliary/issue-41652-b.rs -ui/issues/issue-41652/issue-41652.rs -ui/issues/issue-41677.rs -ui/issues/issue-41696.rs -ui/issues/issue-41726.rs -ui/issues/issue-41742.rs -ui/issues/issue-41744.rs -ui/issues/issue-41849-variance-req.rs -ui/issues/issue-41880.rs -ui/issues/issue-41888.rs -ui/issues/issue-41936-variance-coerce-unsized-cycle.rs -ui/issues/issue-41974.rs -ui/issues/issue-41998.rs -ui/issues/issue-42007.rs -ui/issues/issue-4208.rs -ui/issues/issue-42106.rs -ui/issues/issue-42148.rs -ui/issues/issue-42210.rs -ui/issues/issue-4228.rs -ui/issues/issue-42312.rs -ui/issues/issue-42453.rs -ui/issues/issue-42467.rs -ui/issues/issue-4252.rs -ui/issues/issue-42552.rs -ui/issues/issue-4265.rs -ui/issues/issue-42755.rs -ui/issues/issue-42796.rs -ui/issues/issue-42880.rs -ui/issues/issue-42956.rs -ui/issues/issue-43057.rs -ui/issues/issue-43205.rs -ui/issues/issue-43250.rs -ui/issues/issue-43291.rs -ui/issues/issue-4333.rs -ui/issues/issue-4335.rs -ui/issues/issue-43355.rs -ui/issues/issue-43357.rs -ui/issues/issue-43420-no-over-suggest.rs -ui/issues/issue-43424.rs 
-ui/issues/issue-43431.rs -ui/issues/issue-43483.rs -ui/issues/issue-43692.rs -ui/issues/issue-43806.rs -ui/issues/issue-43853.rs -ui/issues/issue-4387.rs -ui/issues/issue-43910.rs -ui/issues/issue-43923.rs -ui/issues/issue-43988.rs -ui/issues/issue-44023.rs -ui/issues/issue-44056.rs -ui/issues/issue-44078.rs -ui/issues/issue-44216-add-instant.rs -ui/issues/issue-44216-add-system-time.rs -ui/issues/issue-44216-sub-instant.rs -ui/issues/issue-44216-sub-system-time.rs -ui/issues/issue-44239.rs -ui/issues/issue-44247.rs -ui/issues/issue-44405.rs -ui/issues/issue-4464.rs -ui/issues/issue-44730.rs -ui/issues/issue-44851.rs -ui/issues/issue-4517.rs -ui/issues/issue-4541.rs -ui/issues/issue-4542.rs -ui/issues/issue-45425.rs -ui/issues/issue-4545.rs -ui/issues/issue-45510.rs -ui/issues/issue-45562.rs -ui/issues/issue-45697-1.rs -ui/issues/issue-45697.rs -ui/issues/issue-45730.rs -ui/issues/issue-45731.rs -ui/issues/issue-45801.rs -ui/issues/issue-45965.rs -ui/issues/issue-46069.rs -ui/issues/issue-46101.rs -ui/issues/issue-46302.rs -ui/issues/issue-46311.rs -ui/issues/issue-46332.rs -ui/issues/issue-46471-1.rs -ui/issues/issue-46472.rs -ui/issues/issue-46604.rs -ui/issues/issue-46756-consider-borrowing-cast-or-binexpr.rs -ui/issues/issue-46771.rs -ui/issues/issue-46855.rs -ui/issues/issue-46964.rs -ui/issues/issue-46983.rs -ui/issues/issue-47073-zero-padded-tuple-struct-indices.rs -ui/issues/issue-47094.rs -ui/issues/issue-47184.rs -ui/issues/issue-47309.rs -ui/issues/issue-4734.rs -ui/issues/issue-4735.rs -ui/issues/issue-4736.rs -ui/issues/issue-47364.rs -ui/issues/issue-47377.rs -ui/issues/issue-47380.rs -ui/issues/issue-47486.rs -ui/issues/issue-4759-1.rs -ui/issues/issue-4759.rs -ui/issues/issue-47638.rs -ui/issues/issue-47673.rs -ui/issues/issue-47703-1.rs -ui/issues/issue-47703-tuple.rs -ui/issues/issue-47703.rs -ui/issues/issue-47715.rs -ui/issues/issue-47722.rs -ui/issues/issue-48006.rs -ui/issues/issue-48131.rs -ui/issues/issue-48132.rs -ui/issues/issue-48159.rs -ui/issues/issue-48276.rs -ui/issues/issue-4830.rs -ui/issues/issue-48364.rs -ui/issues/issue-48728.rs -ui/issues/issue-4875.rs -ui/issues/issue-48984.rs -ui/issues/issue-49298.rs -ui/issues/issue-4935.rs -ui/issues/issue-49544.rs -ui/issues/issue-49632.rs -ui/issues/issue-4968.rs -ui/issues/issue-4972.rs -ui/issues/issue-49824.rs -ui/issues/issue-49854.rs -ui/issues/issue-49919.rs -ui/issues/issue-49934-errors.rs -ui/issues/issue-49934.rs -ui/issues/issue-49955.rs -ui/issues/issue-49973.rs -ui/issues/issue-50187.rs -ui/issues/issue-50411.rs -ui/issues/issue-50415.rs -ui/issues/issue-50442.rs -ui/issues/issue-50471.rs -ui/issues/issue-50518.rs -ui/issues/issue-50581.rs -ui/issues/issue-50582.rs -ui/issues/issue-50585.rs -ui/issues/issue-50600.rs -ui/issues/issue-50618.rs -ui/issues/issue-5062.rs -ui/issues/issue-5067.rs -ui/issues/issue-50688.rs -ui/issues/issue-50714.rs -ui/issues/issue-50761.rs -ui/issues/issue-50781.rs -ui/issues/issue-50802.rs -ui/issues/issue-50811.rs -ui/issues/issue-5100.rs -ui/issues/issue-51022.rs -ui/issues/issue-51044.rs -ui/issues/issue-51102.rs -ui/issues/issue-51116.rs -ui/issues/issue-51154.rs -ui/issues/issue-51515.rs -ui/issues/issue-51632-try-desugar-incompatible-types.rs -ui/issues/issue-51655.rs -ui/issues/issue-51714.rs -ui/issues/issue-51798.rs -ui/issues/issue-51874.rs -ui/issues/issue-51907.rs -ui/issues/issue-5192.rs -ui/issues/issue-51947.rs -ui/issues/issue-52049.rs -ui/issues/issue-52126-assign-op-invariance.rs -ui/issues/issue-52262.rs -ui/issues/issue-52489.rs 
-ui/issues/issue-52533.rs -ui/issues/issue-52717.rs -ui/issues/issue-5280.rs -ui/issues/issue-5315.rs -ui/issues/issue-5321-immediates-with-bare-self.rs -ui/issues/issue-53251.rs -ui/issues/issue-53275.rs -ui/issues/issue-53300.rs -ui/issues/issue-53333.rs -ui/issues/issue-53348.rs -ui/issues/issue-53419.rs -ui/issues/issue-53568.rs -ui/issues/issue-5358-1.rs -ui/issues/issue-53728.rs -ui/issues/issue-53843.rs -ui/issues/issue-54044.rs -ui/issues/issue-54062.rs -ui/issues/issue-54094.rs -ui/issues/issue-5439.rs -ui/issues/issue-54410.rs -ui/issues/issue-54462-mutable-noalias-correctness.rs -ui/issues/issue-54477-reduced-2.rs -ui/issues/issue-54696.rs -ui/issues/issue-5518.rs -ui/issues/issue-5521.rs -ui/issues/issue-55376.rs -ui/issues/issue-55380.rs -ui/issues/issue-5550.rs -ui/issues/issue-5554.rs -ui/issues/issue-55587.rs -ui/issues/issue-5572.rs -ui/issues/issue-55731.rs -ui/issues/issue-56128.rs -ui/issues/issue-56175.rs -ui/issues/issue-56199.rs -ui/issues/issue-56229.rs -ui/issues/issue-56237.rs -ui/issues/issue-5666.rs -ui/issues/issue-56806.rs -ui/issues/issue-56835.rs -ui/issues/issue-56870.rs -ui/issues/issue-5688.rs -ui/issues/issue-56943.rs -ui/issues/issue-5708.rs -ui/issues/issue-57156.rs -ui/issues/issue-57162.rs -ui/issues/issue-5718.rs -ui/issues/issue-57198-pass.rs -ui/issues/issue-57271.rs -ui/issues/issue-57399-self-return-impl-trait.rs -ui/issues/issue-5741.rs -ui/issues/issue-5754.rs -ui/issues/issue-57741-dereference-boxed-value/issue-57741-1.rs -ui/issues/issue-57741-dereference-boxed-value/issue-57741.rs -ui/issues/issue-57781.rs -ui/issues/issue-57924.rs -ui/issues/issue-58212.rs -ui/issues/issue-58375-monomorphize-default-impls.rs -ui/issues/issue-5844.rs -ui/issues/issue-58463.rs -ui/issues/issue-58712.rs -ui/issues/issue-58734.rs -ui/issues/issue-5883.rs -ui/issues/issue-5884.rs -ui/issues/issue-58857.rs -ui/issues/issue-5900.rs -ui/issues/issue-59020.rs -ui/issues/issue-5917.rs -ui/issues/issue-59326.rs -ui/issues/issue-59488.rs -ui/issues/issue-59494.rs -ui/issues/issue-5950.rs -ui/issues/issue-59756.rs -ui/issues/issue-5988.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997-enum.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997-struct.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997.rs -ui/issues/issue-60218.rs -ui/issues/issue-60622.rs -ui/issues/issue-60989.rs -ui/issues/issue-61106.rs -ui/issues/issue-61108.rs -ui/issues/issue-6117.rs -ui/issues/issue-6130.rs -ui/issues/issue-61475.rs -ui/issues/issue-6153.rs -ui/issues/issue-61623.rs -ui/issues/issue-61894.rs -ui/issues/issue-62480.rs -ui/issues/issue-6318.rs -ui/issues/issue-6344-let.rs -ui/issues/issue-6344-match.rs -ui/issues/issue-63983.rs -ui/issues/issue-64430.rs -ui/issues/issue-64559.rs -ui/issues/issue-64593.rs -ui/issues/issue-64792-bad-unicode-ctor.rs -ui/issues/issue-65131.rs -ui/issues/issue-65230.rs -ui/issues/issue-65462.rs -ui/issues/issue-6557.rs -ui/issues/issue-66308.rs -ui/issues/issue-66353.rs -ui/issues/issue-66667-function-cmp-cycle.rs -ui/issues/issue-66702-break-outside-loop-val.rs -ui/issues/issue-66706.rs -ui/issues/issue-66923-show-error-for-correct-call.rs -ui/issues/issue-67039-unsound-pin-partialeq.rs -ui/issues/issue-6738.rs -ui/issues/issue-67535.rs -ui/issues/issue-67552.rs -ui/issues/issue-68010-large-zst-consts.rs -ui/issues/issue-68696-catch-during-unwind.rs -ui/issues/issue-6892.rs -ui/issues/issue-68951.rs -ui/issues/issue-6898.rs -ui/issues/issue-69130.rs -ui/issues/issue-6919.rs -ui/issues/issue-69306.rs -ui/issues/issue-6936.rs 
-ui/issues/issue-69455.rs -ui/issues/issue-69602-type-err-during-codegen-ice.rs -ui/issues/issue-69683.rs -ui/issues/issue-7012.rs -ui/issues/issue-70381.rs -ui/issues/issue-7044.rs -ui/issues/issue-7061.rs -ui/issues/issue-70673.rs -ui/issues/issue-70724-add_type_neq_err_label-unwrap.rs -ui/issues/issue-70746.rs -ui/issues/issue-7092.rs -ui/issues/issue-71406.rs -ui/issues/issue-7178.rs -ui/issues/issue-72002.rs -ui/issues/issue-72076.rs -ui/issues/issue-72278.rs -ui/issues/issue-7246.rs -ui/issues/issue-7268.rs -ui/issues/issue-72839-error-overflow.rs -ui/issues/issue-72933-match-stack-overflow.rs -ui/issues/issue-73112.rs -ui/issues/issue-73229.rs -ui/issues/issue-7344.rs -ui/issues/issue-7364.rs -ui/issues/issue-74082.rs -ui/issues/issue-74564-if-expr-stack-overflow.rs -ui/issues/issue-7519-match-unit-in-arg.rs -ui/issues/issue-75283.rs -ui/issues/issue-7563.rs -ui/issues/issue-75704.rs -ui/issues/issue-7575.rs -ui/issues/issue-76042.rs -ui/issues/issue-76077-inaccesible-private-fields/issue-76077-1.rs -ui/issues/issue-76077-inaccesible-private-fields/issue-76077.rs -ui/issues/issue-76191.rs -ui/issues/issue-7660.rs -ui/issues/issue-7663.rs -ui/issues/issue-7673-cast-generically-implemented-trait.rs -ui/issues/issue-77218/issue-77218-2.rs -ui/issues/issue-77218/issue-77218.rs -ui/issues/issue-7784.rs -ui/issues/issue-77919.rs -ui/issues/issue-78192.rs -ui/issues/issue-78622.rs -ui/issues/issue-7867.rs -ui/issues/issue-78957.rs -ui/issues/issue-7899.rs -ui/issues/issue-7911.rs -ui/issues/issue-7970a.rs -ui/issues/issue-8044.rs -ui/issues/issue-80607.rs -ui/issues/issue-81584.rs -ui/issues/issue-8171-default-method-self-inherit-builtin-trait.rs -ui/issues/issue-81918.rs -ui/issues/issue-8248.rs -ui/issues/issue-8249.rs -ui/issues/issue-8259.rs -ui/issues/issue-83048.rs -ui/issues/issue-8391.rs -ui/issues/issue-8398.rs -ui/issues/issue-8401.rs -ui/issues/issue-8498.rs -ui/issues/issue-8506.rs -ui/issues/issue-8521.rs -ui/issues/issue-85461.rs -ui/issues/issue-8578.rs -ui/issues/issue-86756.rs -ui/issues/issue-87199.rs -ui/issues/issue-8727.rs -ui/issues/issue-87490.rs -ui/issues/issue-8761.rs -ui/issues/issue-8767.rs -ui/issues/issue-87707.rs -ui/issues/issue-8783.rs -ui/issues/issue-88150.rs -ui/issues/issue-8860.rs -ui/issues/issue-8898.rs -ui/issues/issue-9047.rs -ui/issues/issue-9110.rs -ui/issues/issue-9123.rs -ui/issues/issue-9129.rs -ui/issues/issue-91489.rs -ui/issues/issue-9155.rs -ui/issues/issue-9188.rs -ui/issues/issue-9243.rs -ui/issues/issue-9249.rs -ui/issues/issue-9259.rs -ui/issues/issue-92741.rs -ui/issues/issue-9446.rs -ui/issues/issue-9725.rs -ui/issues/issue-9737.rs -ui/issues/issue-9814.rs -ui/issues/issue-98299.rs -ui/issues/issue-9837.rs -ui/issues/issue-9906.rs -ui/issues/issue-9918.rs -ui/issues/issue-9942.rs -ui/issues/issue-9951.rs -ui/issues/issue-9968.rs -ui/issues/issue-99838.rs ui/iterators/issue-28098.rs ui/iterators/issue-58952-filter-type-length.rs ui/lang-items/issue-83471.rs diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index ade4055b5bd..4ea9d051ddb 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -13,8 +13,9 @@ use termcolor::WriteColor; macro_rules! 
static_regex { ($re:literal) => {{ - static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new(); - RE.get_or_init(|| ::regex::Regex::new($re).unwrap()) + static RE: ::std::sync::LazyLock<::regex::Regex> = + ::std::sync::LazyLock::new(|| ::regex::Regex::new($re).unwrap()); + &*RE }}; } @@ -124,48 +125,69 @@ pub fn git_diff<S: AsRef<OsStr>>(base_commit: &str, extra_arg: S) -> Option<Stri Some(String::from_utf8_lossy(&output.stdout).into()) } -/// Returns true if any modified file matches the predicate, if we are in CI, or if unable to list modified files. -pub fn files_modified(ci_info: &CiInfo, pred: impl Fn(&str) -> bool) -> bool { +/// Similar to `files_modified`, but only involves a single call to `git`. +/// +/// Removes all elements from `items` that do not cause any match when `pred` is called with the list of modified files. +/// +/// If in CI, no elements will be removed. +pub fn files_modified_batch_filter<T>( + ci_info: &CiInfo, + items: &mut Vec<T>, + pred: impl Fn(&T, &str) -> bool, +) { if CiEnv::is_ci() { // assume everything is modified on CI because we really don't want false positives there. - return true; + return; } let Some(base_commit) = &ci_info.base_commit else { eprintln!("No base commit, assuming all files are modified"); - return true; + return; }; - match crate::git_diff(&base_commit, "--name-status") { + match crate::git_diff(base_commit, "--name-status") { Some(output) => { - let modified_files = output.lines().filter_map(|ln| { - let (status, name) = ln - .trim_end() - .split_once('\t') - .expect("bad format from `git diff --name-status`"); - if status == "M" { Some(name) } else { None } - }); - for modified_file in modified_files { - if pred(modified_file) { - return true; + let modified_files: Vec<_> = output + .lines() + .filter_map(|ln| { + let (status, name) = ln + .trim_end() + .split_once('\t') + .expect("bad format from `git diff --name-status`"); + if status == "M" { Some(name) } else { None } + }) + .collect(); + items.retain(|item| { + for modified_file in &modified_files { + if pred(item, modified_file) { + // at least one predicate matches, keep this item. + return true; + } } - } - false + // no predicates matched, remove this item. + false + }); } None => { eprintln!("warning: failed to run `git diff` to check for changes"); eprintln!("warning: assuming all files are modified"); - true } } } +/// Returns true if any modified file matches the predicate, if we are in CI, or if unable to list modified files.
+pub fn files_modified(ci_info: &CiInfo, pred: impl Fn(&str) -> bool) -> bool { + let mut v = vec![()]; + files_modified_batch_filter(ci_info, &mut v, |_, p| pred(p)); + !v.is_empty() +} + pub mod alphabetical; pub mod bins; pub mod debug_artifacts; pub mod deps; pub mod edition; pub mod error_codes; -pub mod ext_tool_checks; pub mod extdeps; +pub mod extra_checks; pub mod features; pub mod filenames; pub mod fluent_alphabetical; @@ -178,7 +200,6 @@ pub mod mir_opt_tests; pub mod pal; pub mod rustdoc_css_themes; pub mod rustdoc_gui_tests; -pub mod rustdoc_js; pub mod rustdoc_json; pub mod rustdoc_templates; pub mod style; diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index 0f1116a632e..cd2567ddb64 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -29,6 +29,7 @@ fn main() { let concurrency: NonZeroUsize = FromStr::from_str(&env::args().nth(4).expect("need concurrency")) .expect("concurrency must be a number"); + let npm: PathBuf = env::args_os().nth(5).expect("need name/path of npm command").into(); let root_manifest = root_path.join("Cargo.toml"); let src_path = root_path.join("src"); @@ -112,7 +113,6 @@ fn main() { check!(rustdoc_gui_tests, &tests_path); check!(rustdoc_css_themes, &librustdoc_path); check!(rustdoc_templates, &librustdoc_path); - check!(rustdoc_js, &librustdoc_path, &tools_path, &src_path); check!(rustdoc_json, &src_path, &ci_info); check!(known_bug, &crashes_path); check!(unknown_revision, &tests_path); @@ -128,9 +128,9 @@ fn main() { check!(pal, &library_path); // Checks that need to be done for both the compiler and std libraries. - check!(unit_tests, &src_path); - check!(unit_tests, &compiler_path); - check!(unit_tests, &library_path); + check!(unit_tests, &src_path, false); + check!(unit_tests, &compiler_path, false); + check!(unit_tests, &library_path, true); if bins::check_filesystem_support(&[&root_path], &output_directory) { check!(bins, &root_path); @@ -177,10 +177,13 @@ fn main() { check!(unstable_book, &src_path, collected); check!( - ext_tool_checks, + extra_checks, &root_path, &output_directory, &ci_info, + &librustdoc_path, + &tools_path, + &npm, bless, extra_checks, pos_args diff --git a/src/tools/tidy/src/mir_opt_tests.rs b/src/tools/tidy/src/mir_opt_tests.rs index 1efe71b1687..6119eb58383 100644 --- a/src/tools/tidy/src/mir_opt_tests.rs +++ b/src/tools/tidy/src/mir_opt_tests.rs @@ -55,22 +55,21 @@ fn check_dash_files(path: &Path, bless: bool, bad: &mut bool) { .filter(|e| e.file_type().is_file()) { let path = file.path(); - if path.extension() == Some("rs".as_ref()) { - if let Some(name) = path.file_name().and_then(|s| s.to_str()) { - if name.contains('-') { - if !bless { - tidy_error!( - bad, - "mir-opt test files should not have dashes in them: {}", - path.display() - ); - } else { - let new_name = name.replace('-', "_"); - let mut new_path = path.to_owned(); - new_path.set_file_name(new_name); - let _ = std::fs::rename(path, new_path); - } - } + if path.extension() == Some("rs".as_ref()) + && let Some(name) = path.file_name().and_then(|s| s.to_str()) + && name.contains('-') + { + if !bless { + tidy_error!( + bad, + "mir-opt test files should not have dashes in them: {}", + path.display() + ); + } else { + let new_name = name.replace('-', "_"); + let mut new_path = path.to_owned(); + new_path.set_file_name(new_name); + let _ = std::fs::rename(path, new_path); } } } diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs index b7d4a331891..5b8b44429bb 100644 --- a/src/tools/tidy/src/pal.rs +++ 
b/src/tools/tidy/src/pal.rs @@ -37,6 +37,7 @@ use crate::walk::{filter_dirs, walk}; // Paths that may contain platform-specific code. const EXCEPTION_PATHS: &[&str] = &[ "library/compiler-builtins", + "library/std_detect", "library/windows_targets", "library/panic_abort", "library/panic_unwind", diff --git a/src/tools/tidy/src/rustdoc_js.rs b/src/tools/tidy/src/rustdoc_js.rs deleted file mode 100644 index 5737fcbafc0..00000000000 --- a/src/tools/tidy/src/rustdoc_js.rs +++ /dev/null @@ -1,101 +0,0 @@ -//! Tidy check to ensure that rustdoc templates didn't forget a `{# #}` to strip extra whitespace -//! characters. - -use std::ffi::OsStr; -use std::path::{Path, PathBuf}; -use std::process::Command; - -use ignore::DirEntry; - -use crate::walk::walk_no_read; - -fn run_eslint(args: &[PathBuf], config_folder: PathBuf, bad: &mut bool) { - let mut child = match Command::new("npx") - .arg("eslint") - .arg("-c") - .arg(config_folder.join(".eslintrc.js")) - .args(args) - .spawn() - { - Ok(child) => child, - Err(error) => { - *bad = true; - eprintln!("failed to run eslint: {error:?}"); - return; - } - }; - match child.wait() { - Ok(exit_status) => { - if exit_status.success() { - return; - } - eprintln!("eslint command failed"); - } - Err(error) => eprintln!("eslint command failed: {error:?}"), - } - *bad = true; -} - -fn get_eslint_version_inner(global: bool) -> Option<String> { - let mut command = Command::new("npm"); - command.arg("list").arg("--parseable").arg("--long").arg("--depth=0"); - if global { - command.arg("--global"); - } - let output = command.output().ok()?; - let lines = String::from_utf8_lossy(&output.stdout); - lines.lines().find_map(|l| l.split(':').nth(1)?.strip_prefix("eslint@")).map(|v| v.to_owned()) -} - -fn get_eslint_version() -> Option<String> { - get_eslint_version_inner(false).or_else(|| get_eslint_version_inner(true)) -} - -pub fn check(librustdoc_path: &Path, tools_path: &Path, src_path: &Path, bad: &mut bool) { - let eslint_version_path = src_path.join("ci/docker/host-x86_64/tidy/eslint.version"); - let eslint_version = match std::fs::read_to_string(&eslint_version_path) { - Ok(version) => version.trim().to_string(), - Err(error) => { - *bad = true; - eprintln!("failed to read `{}`: {error:?}", eslint_version_path.display()); - return; - } - }; - // Having the correct `eslint` version installed via `npm` isn't strictly necessary, since we're invoking it via `npx`, - // but this check allows the vast majority that is not working on the rustdoc frontend to avoid the penalty of running - // `eslint` in tidy. See also: https://github.com/rust-lang/rust/pull/142851 - match get_eslint_version() { - Some(version) => { - if version != eslint_version { - *bad = true; - eprintln!( - "⚠️ Installed version of eslint (`{version}`) is different than the \ - one used in the CI (`{eslint_version}`)", - ); - eprintln!( - "You can install this version using `npm update eslint` or by using \ - `npm install eslint@{eslint_version}`", - ); - return; - } - } - None => { - eprintln!("`eslint` doesn't seem to be installed. 
Skipping tidy check for JS files."); - eprintln!("You can install it using `npm install eslint@{eslint_version}`"); - return; - } - } - let mut files_to_check = Vec::new(); - walk_no_read( - &[&librustdoc_path.join("html/static/js")], - |path, is_dir| is_dir || path.extension().is_none_or(|ext| ext != OsStr::new("js")), - &mut |path: &DirEntry| { - files_to_check.push(path.path().into()); - }, - ); - println!("Running eslint on rustdoc JS files"); - run_eslint(&files_to_check, librustdoc_path.join("html/static"), bad); - - run_eslint(&[tools_path.join("rustdoc-js/tester.js")], tools_path.join("rustdoc-js"), bad); - run_eslint(&[tools_path.join("rustdoc-gui/tester.js")], tools_path.join("rustdoc-gui"), bad); -} diff --git a/src/tools/tidy/src/rustdoc_templates.rs b/src/tools/tidy/src/rustdoc_templates.rs index dca3e8d9d25..597290a6a9a 100644 --- a/src/tools/tidy/src/rustdoc_templates.rs +++ b/src/tools/tidy/src/rustdoc_templates.rs @@ -26,7 +26,7 @@ pub fn check(librustdoc_path: &Path, bad: &mut bool) { None // Then we check if this a comment tag. } else if *tag != "{#" { - return Some(false); + Some(false) // And finally we check if the comment is empty (ie, only there to strip // extra whitespace characters). } else if let Some(start_pos) = line.rfind(tag) { diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs index 8dde4618ce5..fca097c091b 100644 --- a/src/tools/tidy/src/style.rs +++ b/src/tools/tidy/src/style.rs @@ -417,10 +417,10 @@ pub fn check(path: &Path, bad: &mut bool) { return; } // Shell completions are automatically generated - if let Some(p) = file.parent() { - if p.ends_with(Path::new("src/etc/completions")) { - return; - } + if let Some(p) = file.parent() + && p.ends_with(Path::new("src/etc/completions")) + { + return; } let [ mut skip_cr, @@ -519,8 +519,11 @@ pub fn check(path: &Path, bad: &mut bool) { .any(|directive| matches!(directive, Directive::Ignore(_))); let has_alphabetical_directive = line.contains("tidy-alphabetical-start") || line.contains("tidy-alphabetical-end"); - let has_recognized_directive = - has_recognized_ignore_directive || has_alphabetical_directive; + let has_other_tidy_ignore_directive = + line.contains("ignore-tidy-target-specific-tests"); + let has_recognized_directive = has_recognized_ignore_directive + || has_alphabetical_directive + || has_other_tidy_ignore_directive; if contains_potential_directive && (!has_recognized_directive) { err("Unrecognized tidy directive") } @@ -604,25 +607,25 @@ pub fn check(path: &Path, bad: &mut bool) { backtick_count += comment_text.chars().filter(|ch| *ch == '`').count(); } comment_block = Some((start_line, backtick_count)); - } else if let Some((start_line, backtick_count)) = comment_block.take() { - if backtick_count % 2 == 1 { - let mut err = |msg: &str| { - tidy_error!(bad, "{}:{start_line}: {msg}", file.display()); - }; - let block_len = (i + 1) - start_line; - if block_len == 1 { - suppressible_tidy_err!( - err, - skip_odd_backticks, - "comment with odd number of backticks" - ); - } else { - suppressible_tidy_err!( - err, - skip_odd_backticks, - "{block_len}-line comment block with odd number of backticks" - ); - } + } else if let Some((start_line, backtick_count)) = comment_block.take() + && backtick_count % 2 == 1 + { + let mut err = |msg: &str| { + tidy_error!(bad, "{}:{start_line}: {msg}", file.display()); + }; + let block_len = (i + 1) - start_line; + if block_len == 1 { + suppressible_tidy_err!( + err, + skip_odd_backticks, + "comment with odd number of backticks" + ); + } else { + 
suppressible_tidy_err!( + err, + skip_odd_backticks, + "{block_len}-line comment block with odd number of backticks" + ); } } } diff --git a/src/tools/tidy/src/target_policy.rs b/src/tools/tidy/src/target_policy.rs index 776221d3062..550932dbfdc 100644 --- a/src/tools/tidy/src/target_policy.rs +++ b/src/tools/tidy/src/target_policy.rs @@ -8,7 +8,7 @@ use std::path::Path; use crate::walk::{filter_not_rust, walk}; const TARGET_DEFINITIONS_PATH: &str = "compiler/rustc_target/src/spec/targets/"; -const ASSEMBLY_TEST_PATH: &str = "tests/assembly/targets/"; +const ASSEMBLY_LLVM_TEST_PATH: &str = "tests/assembly-llvm/targets/"; const REVISION_LINE_START: &str = "//@ revisions: "; const EXCEPTIONS: &[&str] = &[ // FIXME: disabled since it fails on CI saying the csky component is missing @@ -43,7 +43,7 @@ pub fn check(root_path: &Path, bad: &mut bool) { let _ = targets_to_find.insert(target_name); } - walk(&root_path.join(ASSEMBLY_TEST_PATH), |_, _| false, &mut |_, contents| { + walk(&root_path.join(ASSEMBLY_LLVM_TEST_PATH), |_, _| false, &mut |_, contents| { for line in contents.lines() { let Some(_) = line.find(REVISION_LINE_START) else { continue; @@ -55,7 +55,7 @@ pub fn check(root_path: &Path, bad: &mut bool) { for target in targets_to_find { if !EXCEPTIONS.contains(&target.as_str()) { - tidy_error!(bad, "{ASSEMBLY_TEST_PATH}: missing assembly test for {target}") + tidy_error!(bad, "{ASSEMBLY_LLVM_TEST_PATH}: missing assembly test for {target}") } } } diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs index 1a6fd3eaf2d..b2d5f259eb2 100644 --- a/src/tools/tidy/src/target_specific_tests.rs +++ b/src/tools/tidy/src/target_specific_tests.rs @@ -12,12 +12,16 @@ const COMPILE_FLAGS_HEADER: &str = "compile-flags:"; #[derive(Default, Debug)] struct RevisionInfo<'a> { - target_arch: Option<&'a str>, + target_arch: Option<Option<&'a str>>, llvm_components: Option<Vec<&'a str>>, } pub fn check(tests_path: &Path, bad: &mut bool) { crate::walk::walk(tests_path, |path, _is_dir| filter_not_rust(path), &mut |entry, content| { + if content.contains("// ignore-tidy-target-specific-tests") { + return; + } + let file = entry.path().display(); let mut header_map = BTreeMap::new(); iter_header(content, &mut |HeaderLine { revision, directive, .. 
}| { @@ -30,17 +34,18 @@ pub fn check(tests_path: &Path, bad: &mut bool) { comp_vec.push(component); } } - } else if let Some(compile_flags) = directive.strip_prefix(COMPILE_FLAGS_HEADER) { - if let Some((_, v)) = compile_flags.split_once("--target") { - let v = v.trim_start_matches([' ', '=']); - let v = if v == "{{target}}" { Some((v, v)) } else { v.split_once("-") }; - if let Some((arch, _)) = v { - let info = header_map.entry(revision).or_insert(RevisionInfo::default()); - info.target_arch.replace(arch); - } else { - eprintln!("{file}: seems to have a malformed --target value"); - *bad = true; - } + } else if let Some(compile_flags) = directive.strip_prefix(COMPILE_FLAGS_HEADER) + && let Some((_, v)) = compile_flags.split_once("--target") + { + let v = v.trim_start_matches([' ', '=']); + let info = header_map.entry(revision).or_insert(RevisionInfo::default()); + if v.starts_with("{{") { + info.target_arch.replace(None); + } else if let Some((arch, _)) = v.split_once("-") { + info.target_arch.replace(Some(arch)); + } else { + eprintln!("{file}: seems to have a malformed --target value"); + *bad = true; } } }); @@ -54,9 +59,11 @@ pub fn check(tests_path: &Path, bad: &mut bool) { let rev = rev.unwrap_or("[unspecified]"); match (target_arch, llvm_components) { (None, None) => {} - (Some(_), None) => { + (Some(target_arch), None) => { + let llvm_component = + target_arch.map_or_else(|| "<arch>".to_string(), arch_to_llvm_component); eprintln!( - "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER}` as it has `--target` set" + "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER} {llvm_component}` as it has `--target` set" ); *bad = true; } @@ -66,11 +73,45 @@ pub fn check(tests_path: &Path, bad: &mut bool) { ); *bad = true; } - (Some(_), Some(_)) => { - // FIXME: check specified components against the target architectures we - // gathered. + (Some(target_arch), Some(llvm_components)) => { + if let Some(target_arch) = target_arch { + let llvm_component = arch_to_llvm_component(target_arch); + if !llvm_components.contains(&llvm_component.as_str()) { + eprintln!( + "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER} {llvm_component}` as it has `--target` set" + ); + *bad = true; + } + } } } } }); } + +fn arch_to_llvm_component(arch: &str) -> String { + // NOTE: This is an *approximate* mapping of Rust's `--target` architecture to LLVM component + // names. It is not intended to be an authoritative source, but rather a best-effort that's good + // enough for the purpose of this tidy check. 
+ match arch { + "amdgcn" => "amdgpu".into(), + "aarch64_be" | "arm64_32" | "arm64e" | "arm64ec" => "aarch64".into(), + "i386" | "i586" | "i686" | "x86" | "x86_64" | "x86_64h" => "x86".into(), + "loongarch32" | "loongarch64" => "loongarch".into(), + "nvptx64" => "nvptx".into(), + "s390x" => "systemz".into(), + "sparc64" | "sparcv9" => "sparc".into(), + "wasm32" | "wasm32v1" | "wasm64" => "webassembly".into(), + _ if arch.starts_with("armeb") + || arch.starts_with("armv") + || arch.starts_with("thumbv") => + { + "arm".into() + } + _ if arch.starts_with("bpfe") => "bpf".into(), + _ if arch.starts_with("mips") => "mips".into(), + _ if arch.starts_with("powerpc") => "powerpc".into(), + _ if arch.starts_with("riscv") => "riscv".into(), + _ => arch.to_ascii_lowercase(), + } +} diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 7e295731c56..4d195b3952e 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -1,24 +1,12 @@ //! Tidy check to ensure below in UI test directories: -//! - the number of entries in each directory must be less than `ENTRY_LIMIT` //! - there are no stray `.stderr` files -use std::collections::{BTreeSet, HashMap}; +use std::collections::BTreeSet; use std::ffi::OsStr; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -use ignore::Walk; - -// FIXME: GitHub's UI truncates file lists that exceed 1000 entries, so these -// should all be 1000 or lower. Limits significantly smaller than 1000 are also -// desirable, because large numbers of files are unwieldy in general. See issue -// #73494. -const ENTRY_LIMIT: u32 = 901; -// FIXME: The following limits should be reduced eventually. - -const ISSUES_ENTRY_LIMIT: u32 = 1616; - const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[ "rs", // test source files "stderr", // expected stderr file, corresponds to a rs file @@ -54,42 +42,6 @@ const EXTENSION_EXCEPTION_PATHS: &[&str] = &[ "tests/ui/std/windows-bat-args3.bat", // tests escaping arguments through batch files ]; -fn check_entries(tests_path: &Path, bad: &mut bool) { - let mut directories: HashMap<PathBuf, u32> = HashMap::new(); - - for entry in Walk::new(tests_path.join("ui")).flatten() { - let parent = entry.path().parent().unwrap().to_path_buf(); - *directories.entry(parent).or_default() += 1; - } - - let (mut max, mut max_issues) = (0, 0); - for (dir_path, count) in directories { - let is_issues_dir = tests_path.join("ui/issues") == dir_path; - let (limit, maxcnt) = if is_issues_dir { - (ISSUES_ENTRY_LIMIT, &mut max_issues) - } else { - (ENTRY_LIMIT, &mut max) - }; - *maxcnt = (*maxcnt).max(count); - if count > limit { - tidy_error!( - bad, - "following path contains more than {} entries, \ - you should move the test to some relevant subdirectory (current: {}): {}", - limit, - count, - dir_path.display() - ); - } - } - if ISSUES_ENTRY_LIMIT > max_issues { - tidy_error!( - bad, - "`ISSUES_ENTRY_LIMIT` is too high (is {ISSUES_ENTRY_LIMIT}, should be {max_issues})" - ); - } -} - pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let issues_txt_header = r#"============================================================ ⚠️⚠️⚠️NOTHING SHOULD EVER BE ADDED TO THIS LIST⚠️⚠️⚠️ @@ -97,7 +49,6 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { "#; let path = &root_path.join("tests"); - check_entries(path, bad); // the list of files in ui tests that are allowed to start with `issue-XXXX` // BTreeSet because we would like a stable ordering so --bless works @@ -161,31 +112,32 @@ pub fn 
check(root_path: &Path, bless: bool, bad: &mut bool) { tidy_error!(bad, "Stray file with UI testing output: {:?}", file_path); } - if let Ok(metadata) = fs::metadata(file_path) { - if metadata.len() == 0 { - tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path); - } + if let Ok(metadata) = fs::metadata(file_path) + && metadata.len() == 0 + { + tidy_error!(bad, "Empty file with UI testing output: {:?}", file_path); } } - if ext == "rs" { - if let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname) + if ext == "rs" + && let Some(test_name) = static_regex!(r"^issues?[-_]?(\d{3,})").captures(testname) + { + // these paths are always relative to the passed `path` and always UTF8 + let stripped_path = file_path + .strip_prefix(path) + .unwrap() + .to_str() + .unwrap() + .replace(std::path::MAIN_SEPARATOR_STR, "/"); + + if !remaining_issue_names.remove(stripped_path.as_str()) + && !stripped_path.starts_with("ui/issues/") { - // these paths are always relative to the passed `path` and always UTF8 - let stripped_path = file_path - .strip_prefix(path) - .unwrap() - .to_str() - .unwrap() - .replace(std::path::MAIN_SEPARATOR_STR, "/"); - - if !remaining_issue_names.remove(stripped_path.as_str()) { - tidy_error!( - bad, - "file `tests/{stripped_path}` must begin with a descriptive name, consider `{{reason}}-issue-{issue_n}.rs`", - issue_n = &test_name[1], - ); - } + tidy_error!( + bad, + "file `tests/{stripped_path}` must begin with a descriptive name, consider `{{reason}}-issue-{issue_n}.rs`", + issue_n = &test_name[1], + ); } } } diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs index 90ef36d5882..3d14a467319 100644 --- a/src/tools/tidy/src/unit_tests.rs +++ b/src/tools/tidy/src/unit_tests.rs @@ -1,44 +1,60 @@ //! Tidy check to ensure `#[test]` and `#[bench]` are not used directly inside -//! `core` or `alloc`. +//! of the standard library. //! //! `core` and `alloc` cannot be tested directly due to duplicating lang items. //! All tests and benchmarks must be written externally in //! `{coretests,alloctests}/{tests,benches}`. //! -//! Outside of `core` and `alloc`, tests and benchmarks should be outlined into -//! separate files named `tests.rs` or `benches.rs`, or directories named +//! Outside of the standard library, tests and benchmarks should be outlined +//! into separate files named `tests.rs` or `benches.rs`, or directories named //! `tests` or `benches` unconfigured during normal build. 
use std::path::Path; use crate::walk::{filter_dirs, walk}; -pub fn check(root_path: &Path, bad: &mut bool) { - let core = root_path.join("core"); - let core_copy = core.clone(); - let is_core = move |path: &Path| path.starts_with(&core); - let alloc = root_path.join("alloc"); - let alloc_copy = alloc.clone(); - let is_alloc = move |path: &Path| path.starts_with(&alloc); - +pub fn check(root_path: &Path, stdlib: bool, bad: &mut bool) { let skip = move |path: &Path, is_dir| { let file_name = path.file_name().unwrap_or_default(); + + // Skip excluded directories and non-rust files if is_dir { - filter_dirs(path) - || path.ends_with("src/doc") - || (file_name == "tests" || file_name == "benches") - && !is_core(path) - && !is_alloc(path) + if filter_dirs(path) || path.ends_with("src/doc") { + return true; + } } else { let extension = path.extension().unwrap_or_default(); - extension != "rs" - || (file_name == "tests.rs" || file_name == "benches.rs") - && !is_core(path) - && !is_alloc(path) - // Tests which use non-public internals and, as such, need to - // have the types in the same crate as the tests themselves. See - // the comment in alloctests/lib.rs. - || path.ends_with("library/alloc/src/collections/btree/borrow/tests.rs") + if extension != "rs" { + return true; + } + } + + // Tests in a separate package are always allowed + if is_dir && file_name != "tests" && file_name.as_encoded_bytes().ends_with(b"tests") { + return true; + } + + if !stdlib { + // Outside of the standard library tests may also be in separate files in the same crate + if is_dir { + if file_name == "tests" || file_name == "benches" { + return true; + } + } else { + if file_name == "tests.rs" || file_name == "benches.rs" { + return true; + } + } + } + + if is_dir { + // FIXME remove those exceptions once no longer necessary + file_name == "std_detect" || file_name == "std" || file_name == "test" + } else { + // Tests which use non-public internals and, as such, need to + // have the types in the same crate as the tests themselves. See + // the comment in alloctests/lib.rs. 
+ path.ends_with("library/alloc/src/collections/btree/borrow/tests.rs") || path.ends_with("library/alloc/src/collections/btree/map/tests.rs") || path.ends_with("library/alloc/src/collections/btree/node/tests.rs") || path.ends_with("library/alloc/src/collections/btree/set/tests.rs") @@ -50,21 +66,29 @@ pub fn check(root_path: &Path, bad: &mut bool) { walk(root_path, skip, &mut |entry, contents| { let path = entry.path(); - let is_core = path.starts_with(&core_copy); - let is_alloc = path.starts_with(&alloc_copy); + let package = path + .strip_prefix(root_path) + .unwrap() + .components() + .next() + .unwrap() + .as_os_str() + .to_str() + .unwrap(); for (i, line) in contents.lines().enumerate() { let line = line.trim(); let is_test = || line.contains("#[test]") && !line.contains("`#[test]"); let is_bench = || line.contains("#[bench]") && !line.contains("`#[bench]"); if !line.starts_with("//") && (is_test() || is_bench()) { - let explanation = if is_core { - "`core` unit tests and benchmarks must be placed into `coretests`" - } else if is_alloc { - "`alloc` unit tests and benchmarks must be placed into `alloctests`" + let explanation = if stdlib { + format!( + "`{package}` unit tests and benchmarks must be placed into `{package}tests`" + ) } else { "unit tests and benchmarks must be placed into \ separate files or directories named \ `tests.rs`, `benches.rs`, `tests` or `benches`" + .to_owned() }; let name = if is_test() { "test" } else { "bench" }; tidy_error!( diff --git a/src/tools/tidy/src/x_version.rs b/src/tools/tidy/src/x_version.rs index 6a5e9eca813..9f7f43c4000 100644 --- a/src/tools/tidy/src/x_version.rs +++ b/src/tools/tidy/src/x_version.rs @@ -25,12 +25,12 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) { if let Some(version) = iter.next() { // Check this is the rust-lang/rust x tool installation since it should be // installed at a path containing `src/tools/x`. - if let Some(path) = iter.next() { - if path.contains("src/tools/x") { - let version = version.strip_prefix("v").unwrap(); - installed = Some(Version::parse(version).unwrap()); - break; - } + if let Some(path) = iter.next() + && path.contains("src/tools/x") + { + let version = version.strip_prefix("v").unwrap(); + installed = Some(Version::parse(version).unwrap()); + break; }; } } else { diff --git a/src/tools/unicode-table-generator/Cargo.toml b/src/tools/unicode-table-generator/Cargo.toml index f8a500922d0..3ca6e9e316f 100644 --- a/src/tools/unicode-table-generator/Cargo.toml +++ b/src/tools/unicode-table-generator/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "unicode-table-generator" version = "0.1.0" -edition = "2021" +edition = "2024" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/src/tools/unicode-table-generator/src/cascading_map.rs b/src/tools/unicode-table-generator/src/cascading_map.rs index 1eb35e819c0..78a7bba3208 100644 --- a/src/tools/unicode-table-generator/src/cascading_map.rs +++ b/src/tools/unicode-table-generator/src/cascading_map.rs @@ -21,7 +21,7 @@ impl RawEmitter { let points = ranges .iter() - .flat_map(|r| (r.start..r.end).into_iter().collect::<Vec<u32>>()) + .flat_map(|r| (r.start..r.end).collect::<Vec<u32>>()) .collect::<Vec<u32>>(); println!("there are {} points", points.len()); @@ -32,21 +32,20 @@ impl RawEmitter { // assert that there is no whitespace over the 0x3000 range. 
             assert!(point <= 0x3000, "the highest unicode whitespace value has changed");
             let high_bytes = point as usize >> 8;
-            let codepoints = codepoints_by_high_bytes.entry(high_bytes).or_insert_with(Vec::new);
+            let codepoints = codepoints_by_high_bytes.entry(high_bytes).or_default();
             codepoints.push(point);
         }
 
         let mut bit_for_high_byte = 1u8;
         let mut arms = Vec::<String>::new();
-        let mut high_bytes: Vec<usize> =
-            codepoints_by_high_bytes.keys().map(|k| k.clone()).collect();
+        let mut high_bytes: Vec<usize> = codepoints_by_high_bytes.keys().copied().collect();
         high_bytes.sort();
 
         for high_byte in high_bytes {
             let codepoints = codepoints_by_high_bytes.get_mut(&high_byte).unwrap();
             if codepoints.len() == 1 {
                 let ch = codepoints.pop().unwrap();
-                arms.push(format!("{} => c as u32 == {:#04x}", high_byte, ch));
+                arms.push(format!("{high_byte} => c as u32 == {ch:#04x}"));
                 continue;
             }
             // more than 1 codepoint in this arm
@@ -54,8 +53,7 @@ impl RawEmitter {
                 map[(*codepoint & 0xff) as usize] |= bit_for_high_byte;
             }
             arms.push(format!(
-                "{} => WHITESPACE_MAP[c as usize & 0xff] & {} != 0",
-                high_byte, bit_for_high_byte
+                "{high_byte} => WHITESPACE_MAP[c as usize & 0xff] & {bit_for_high_byte} != 0"
             ));
             bit_for_high_byte <<= 1;
         }
@@ -68,7 +66,7 @@ impl RawEmitter {
         writeln!(&mut self.file, "pub const fn lookup(c: char) -> bool {{").unwrap();
         writeln!(&mut self.file, "    match c as u32 >> 8 {{").unwrap();
         for arm in arms {
-            writeln!(&mut self.file, "        {},", arm).unwrap();
+            writeln!(&mut self.file, "        {arm},").unwrap();
         }
         writeln!(&mut self.file, "        _ => false,").unwrap();
         writeln!(&mut self.file, "    }}").unwrap();
diff --git a/src/tools/unicode-table-generator/src/case_mapping.rs b/src/tools/unicode-table-generator/src/case_mapping.rs
index 00241b7ee0e..9c6454492e7 100644
--- a/src/tools/unicode-table-generator/src/case_mapping.rs
+++ b/src/tools/unicode-table-generator/src/case_mapping.rs
@@ -9,7 +9,7 @@ const INDEX_MASK: u32 = 1 << 22;
 
 pub(crate) fn generate_case_mapping(data: &UnicodeData) -> String {
     let mut file = String::new();
-    write!(file, "const INDEX_MASK: u32 = 0x{:x};", INDEX_MASK).unwrap();
+    write!(file, "const INDEX_MASK: u32 = 0x{INDEX_MASK:x};").unwrap();
     file.push_str("\n\n");
     file.push_str(HEADER.trim_start());
     file.push('\n');
diff --git a/src/tools/unicode-table-generator/src/main.rs b/src/tools/unicode-table-generator/src/main.rs
index 415db2c4dbc..6cdb82a87bd 100644
--- a/src/tools/unicode-table-generator/src/main.rs
+++ b/src/tools/unicode-table-generator/src/main.rs
@@ -160,15 +160,15 @@ fn load_data() -> UnicodeData {
                 .push(Codepoints::Single(row.codepoint));
         }
 
-        if let Some(mapped) = row.simple_lowercase_mapping {
-            if mapped != row.codepoint {
-                to_lower.insert(row.codepoint.value(), (mapped.value(), 0, 0));
-            }
+        if let Some(mapped) = row.simple_lowercase_mapping
+            && mapped != row.codepoint
+        {
+            to_lower.insert(row.codepoint.value(), (mapped.value(), 0, 0));
         }
-        if let Some(mapped) = row.simple_uppercase_mapping {
-            if mapped != row.codepoint {
-                to_upper.insert(row.codepoint.value(), (mapped.value(), 0, 0));
-            }
+        if let Some(mapped) = row.simple_uppercase_mapping
+            && mapped != row.codepoint
+        {
+            to_upper.insert(row.codepoint.value(), (mapped.value(), 0, 0));
         }
     }
 
@@ -196,12 +196,12 @@ fn load_data() -> UnicodeData {
             .flat_map(|codepoints| match codepoints {
                 Codepoints::Single(c) => c
                     .scalar()
-                    .map(|ch| (ch as u32..ch as u32 + 1))
+                    .map(|ch| ch as u32..ch as u32 + 1)
                     .into_iter()
                     .collect::<Vec<_>>(),
                 Codepoints::Range(c) => c
                     .into_iter()
-                    .flat_map(|c| c.scalar().map(|ch| (ch as u32..ch as u32 + 1)))
+                    .flat_map(|c| c.scalar().map(|ch| ch as u32..ch as u32 + 1))
                     .collect::<Vec<_>>(),
             })
             .collect::<Vec<Range<u32>>>(),
@@ -236,7 +236,7 @@ fn main() {
     let ranges_by_property = &unicode_data.ranges;
 
     if let Some(path) = test_path {
-        std::fs::write(&path, generate_tests(&write_location, &ranges_by_property)).unwrap();
+        std::fs::write(&path, generate_tests(&write_location, ranges_by_property)).unwrap();
     }
 
     let mut total_bytes = 0;
@@ -246,9 +246,9 @@ fn main() {
 
         let mut emitter = RawEmitter::new();
         if property == &"White_Space" {
-            emit_whitespace(&mut emitter, &ranges);
+            emit_whitespace(&mut emitter, ranges);
         } else {
-            emit_codepoints(&mut emitter, &ranges);
+            emit_codepoints(&mut emitter, ranges);
         }
 
         modules.push((property.to_lowercase().to_string(), emitter.file));
@@ -288,7 +288,7 @@ fn main() {
         for line in contents.lines() {
            if !line.trim().is_empty() {
                 table_file.push_str("    ");
-                table_file.push_str(&line);
+                table_file.push_str(line);
             }
             table_file.push('\n');
         }
@@ -312,7 +312,7 @@ fn version() -> String {
     let start = readme.find(prefix).unwrap() + prefix.len();
     let end = readme.find(" of the Unicode Standard.").unwrap();
     let version =
-        readme[start..end].split('.').map(|v| v.parse::<u32>().expect(&v)).collect::<Vec<_>>();
+        readme[start..end].split('.').map(|v| v.parse::<u32>().expect(v)).collect::<Vec<_>>();
     let [major, minor, micro] = [version[0], version[1], version[2]];
 
     out.push_str(&format!("({major}, {minor}, {micro});\n"));
@@ -320,7 +320,7 @@
 }
 
 fn fmt_list<V: std::fmt::Debug>(values: impl IntoIterator<Item = V>) -> String {
-    let pieces = values.into_iter().map(|b| format!("{:?}, ", b)).collect::<Vec<_>>();
+    let pieces = values.into_iter().map(|b| format!("{b:?}, ")).collect::<Vec<_>>();
     let mut out = String::new();
     let mut line = String::from("\n    ");
     for piece in pieces {
@@ -348,7 +348,7 @@ fn generate_tests(data_path: &str, ranges: &[(&str, Vec<Range<u32>>)]) -> String
     s.push_str("\nfn main() {\n");
 
     for (property, ranges) in ranges {
-        s.push_str(&format!(r#"    println!("Testing {}");"#, property));
+        s.push_str(&format!(r#"    println!("Testing {property}");"#));
         s.push('\n');
         s.push_str(&format!("    {}_true();\n", property.to_lowercase()));
         s.push_str(&format!("    {}_false();\n", property.to_lowercase()));
@@ -373,7 +373,7 @@ fn generate_tests(data_path: &str, ranges: &[(&str, Vec<Range<u32>>)]) -> String
         s.push_str("    }\n\n");
     }
 
-    s.push_str("}");
+    s.push('}');
 
     s
 }
@@ -388,7 +388,7 @@ fn generate_asserts(s: &mut String, property: &str, points: &[u32], truthy: bool
                 range.start,
             ));
         } else {
-            s.push_str(&format!("    for chn in {:?}u32 {{\n", range));
+            s.push_str(&format!("    for chn in {range:?}u32 {{\n"));
             s.push_str(&format!(
                 "        assert!({}unicode_data::{}::lookup(std::char::from_u32(chn).unwrap()), \"{{:?}}\", chn);\n",
                 if truthy { "" } else { "!" },
@@ -439,7 +439,7 @@ fn merge_ranges(ranges: &mut Vec<Range<u32>>) {
     let mut last_end = None;
     for range in ranges {
         if let Some(last) = last_end {
-            assert!(range.start > last, "{:?}", range);
+            assert!(range.start > last, "{range:?}");
         }
         last_end = Some(range.end);
     }
diff --git a/src/tools/unicode-table-generator/src/raw_emitter.rs b/src/tools/unicode-table-generator/src/raw_emitter.rs
index ee94d3c93a6..e9e0efc4594 100644
--- a/src/tools/unicode-table-generator/src/raw_emitter.rs
+++ b/src/tools/unicode-table-generator/src/raw_emitter.rs
@@ -156,10 +156,10 @@ pub fn emit_codepoints(emitter: &mut RawEmitter, ranges: &[Range<u32>]) {
     emitter.blank_line();
 
     let mut bitset = emitter.clone();
-    let bitset_ok = bitset.emit_bitset(&ranges).is_ok();
+    let bitset_ok = bitset.emit_bitset(ranges).is_ok();
 
     let mut skiplist = emitter.clone();
-    skiplist.emit_skiplist(&ranges);
+    skiplist.emit_skiplist(ranges);
 
     if bitset_ok && bitset.bytes_used <= skiplist.bytes_used {
         *emitter = bitset;
@@ -174,7 +174,7 @@ pub fn emit_whitespace(emitter: &mut RawEmitter, ranges: &[Range<u32>]) {
     emitter.blank_line();
 
     let mut cascading = emitter.clone();
-    cascading.emit_cascading_map(&ranges);
+    cascading.emit_cascading_map(ranges);
     *emitter = cascading;
     emitter.desc = String::from("cascading");
 }
@@ -272,7 +272,7 @@ impl Canonicalized {
         // for canonical when possible.
         while let Some((&to, _)) = mappings
             .iter()
-            .find(|(&to, _)| to == 0)
+            .find(|&(&to, _)| to == 0)
             .or_else(|| mappings.iter().max_by_key(|m| m.1.len()))
         {
            // Get the mapping with the most entries. Currently, no mapping can
@@ -311,10 +311,9 @@ impl Canonicalized {
                    }
                }
            }
-            assert!(
-                unique_mapping
-                    .insert(to, UniqueMapping::Canonical(canonical_words.len()))
-                    .is_none()
+            assert_eq!(
+                unique_mapping.insert(to, UniqueMapping::Canonical(canonical_words.len())),
+                None
             );
             canonical_words.push(to);
 
@@ -340,14 +339,10 @@ impl Canonicalized {
         // We'll probably always have some slack though so this loop will still
         // be needed.
         for &w in unique_words {
-            if !unique_mapping.contains_key(&w) {
-                assert!(
-                    unique_mapping
-                        .insert(w, UniqueMapping::Canonical(canonical_words.len()))
-                        .is_none()
-                );
+            unique_mapping.entry(w).or_insert_with(|| {
                 canonical_words.push(w);
-            }
+                UniqueMapping::Canonical(canonical_words.len())
+            });
         }
         assert_eq!(canonicalized_words.len() + canonical_words.len(), unique_words.len());
         assert_eq!(unique_mapping.len(), unique_words.len());
