Diffstat (limited to 'src')
492 files changed, 11578 insertions, 7297 deletions
diff --git a/src/bootstrap/defaults/bootstrap.tools.toml b/src/bootstrap/defaults/bootstrap.tools.toml index 57c2706f60a..5abe636bd96 100644 --- a/src/bootstrap/defaults/bootstrap.tools.toml +++ b/src/bootstrap/defaults/bootstrap.tools.toml @@ -14,6 +14,8 @@ test-stage = 2 doc-stage = 2 # Contributors working on tools will probably expect compiler docs to be generated, so they can figure out how to use the API. compiler-docs = true +# Contributors working on tools are the most likely to change non-rust programs. +tidy-extra-checks = "auto:js,auto:py,auto:cpp,auto:spellcheck" [llvm] # Will download LLVM from CI if available on your platform. diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index cfe090b22dc..b4232409ba8 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -556,3 +556,9 @@ tool_check_step!(Compiletest { allow_features: COMPILETEST_ALLOW_FEATURES, default: false, }); + +tool_check_step!(Linkchecker { + path: "src/tools/linkchecker", + mode: |_builder| Mode::ToolBootstrap, + default: false +}); diff --git a/src/bootstrap/src/core/build_steps/clippy.rs b/src/bootstrap/src/core/build_steps/clippy.rs index a0371eb7155..b119f2dc3ce 100644 --- a/src/bootstrap/src/core/build_steps/clippy.rs +++ b/src/bootstrap/src/core/build_steps/clippy.rs @@ -19,6 +19,7 @@ const IGNORED_RULES_FOR_STD_AND_RUSTC: &[&str] = &[ "too_many_arguments", "needless_lifetimes", // people want to keep the lifetimes "wrong_self_convention", + "approx_constant", // libcore is what defines those ]; fn lint_args(builder: &Builder<'_>, config: &LintConfig, ignored_rules: &[&str]) -> Vec<String> { diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index c7e7b0160b1..4abfe1843eb 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -597,11 +597,6 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car let mut features = String::new(); - if stage != 0 && builder.config.default_codegen_backend(target).as_deref() == Some("cranelift") - { - features += "compiler-builtins-no-f16-f128 "; - } - if builder.no_std(target) == Some(true) { features += " compiler-builtins-mem"; if !target.starts_with("bpf") { diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index 39e4fb2ac01..c8a54ad250c 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -1068,6 +1068,8 @@ impl Step for PlainSourceTarball { "bootstrap.example.toml", "configure", "license-metadata.json", + "package-lock.json", + "package.json", "x", "x.ps1", "x.py", diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index d346062761c..951ca73fcc4 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -8,6 +8,9 @@ use std::ffi::{OsStr, OsString}; use std::path::{Path, PathBuf}; use std::{env, fs, iter}; +#[cfg(feature = "tracing")] +use tracing::instrument; + use crate::core::build_steps::compile::{Std, run_cargo}; use crate::core::build_steps::doc::DocumentationFormat; use crate::core::build_steps::gcc::{Gcc, add_cg_gcc_cargo_flags}; @@ -30,7 +33,7 @@ use crate::utils::helpers::{ linker_flags, t, target_supports_cranelift_backend, up_to_date, }; use crate::utils::render_tests::{add_flags_and_try_run_tests, 
try_run_tests}; -use crate::{CLang, DocTests, GitRepo, Mode, PathSet, envify}; +use crate::{CLang, DocTests, GitRepo, Mode, PathSet, debug, envify}; const ADB_TEST_DIR: &str = "/data/local/tmp/work"; @@ -713,9 +716,23 @@ impl Step for CompiletestTest { } /// Runs `cargo test` for compiletest. + #[cfg_attr( + feature = "tracing", + instrument(level = "debug", name = "CompiletestTest::run", skip_all) + )] fn run(self, builder: &Builder<'_>) { let host = self.host; + + if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 { + eprintln!("\ +ERROR: `--stage 0` runs compiletest self-tests against the stage0 (precompiled) compiler, not the in-tree compiler, and will almost always cause tests to fail +NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`." + ); + crate::exit!(1); + } + let compiler = builder.compiler(builder.top_stage, host); + debug!(?compiler); // We need `ToolStd` for the locally-built sysroot because // compiletest uses unstable features of the `test` crate. @@ -723,8 +740,8 @@ impl Step for CompiletestTest { let mut cargo = tool::prepare_tool_cargo( builder, compiler, - // compiletest uses libtest internals; make it use the in-tree std to make sure it never breaks - // when std sources change. + // compiletest uses libtest internals; make it use the in-tree std to make sure it never + // breaks when std sources change. Mode::ToolStd, host, Kind::Test, @@ -1113,6 +1130,12 @@ impl Step for Tidy { 8 * std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 }); cmd.arg(jobs.to_string()); + // pass the path to the npm command used for installing js deps. + if let Some(npm) = &builder.config.npm { + cmd.arg(npm); + } else { + cmd.arg("npm"); + } if builder.is_verbose() { cmd.arg("--verbose"); } @@ -1606,12 +1629,11 @@ impl Step for Compiletest { return; } - if builder.top_stage == 0 && env::var("COMPILETEST_FORCE_STAGE0").is_err() { + if builder.top_stage == 0 && !builder.config.compiletest_allow_stage0 { eprintln!("\ ERROR: `--stage 0` runs compiletest on the stage0 (precompiled) compiler, not your local changes, and will almost always cause tests to fail -HELP: to test the compiler, use `--stage 1` instead -HELP: to test the standard library, use `--stage 0 library/std` instead -NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`." +HELP: to test the compiler or standard library, omit the stage or explicitly use `--stage 1` instead +NOTE: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `--set build.compiletest-allow-stage0=true`." ); crate::exit!(1); } @@ -3126,7 +3148,11 @@ impl Step for Bootstrap { } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { - run.path("src/bootstrap") + // Bootstrap tests might not be perfectly self-contained and can depend on the external + // environment, submodules that are checked out, etc. + // Therefore we only run them by default on CI. 
+ let runs_on_ci = run.builder.config.is_running_on_ci; + run.path("src/bootstrap").default_condition(runs_on_ci) } fn make_run(run: RunConfig<'_>) { diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs index 923c3a9a935..020622d1c12 100644 --- a/src/bootstrap/src/core/builder/mod.rs +++ b/src/bootstrap/src/core/builder/mod.rs @@ -1033,6 +1033,7 @@ impl<'a> Builder<'a> { check::Compiletest, check::FeaturesStatusDump, check::CoverageDump, + check::Linkchecker, // This has special staging logic, it may run on stage 1 while others run on stage 0. // It takes quite some time to build stage 1, so put this at the end. // diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 6e04f115424..78abdd7f9b8 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -45,7 +45,9 @@ use crate::core::config::{ DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt, RustcLto, SplitDebuginfo, StringOrBool, set, threads_from_config, }; -use crate::core::download::is_download_ci_available; +use crate::core::download::{ + DownloadContext, download_beta_toolchain, is_download_ci_available, maybe_download_rustfmt, +}; use crate::utils::channel; use crate::utils::exec::{ExecutionContext, command}; use crate::utils::helpers::{exe, get_host_target}; @@ -296,8 +298,16 @@ pub struct Config { /// Command for visual diff display, e.g. `diff-tool --color=always`. pub compiletest_diff_tool: Option<String>, + /// Whether to allow running both `compiletest` self-tests and `compiletest`-managed test suites + /// against the stage 0 (rustc, std). + /// + /// This is only intended to be used when the stage 0 compiler is actually built from in-tree + /// sources. + pub compiletest_allow_stage0: bool, + /// Whether to use the precompiled stage0 libtest with compiletest. 
pub compiletest_use_stage0_libtest: bool, + /// Default value for `--extra-checks` pub tidy_extra_checks: Option<String>, pub is_running_on_ci: bool, @@ -747,6 +757,7 @@ impl Config { optimized_compiler_builtins, jobs, compiletest_diff_tool, + compiletest_allow_stage0, compiletest_use_stage0_libtest, tidy_extra_checks, ccache, @@ -795,13 +806,19 @@ impl Config { ); } + config.patch_binaries_for_nix = patch_binaries_for_nix; + config.bootstrap_cache_path = bootstrap_cache_path; + config.llvm_assertions = + toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); + config.initial_rustc = if let Some(rustc) = rustc { if !flags_skip_stage0_validation { config.check_stage0_version(&rustc, "rustc"); } rustc } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config .out .join(config.host_target) @@ -827,7 +844,8 @@ impl Config { } cargo } else { - config.download_beta_toolchain(); + let dwn_ctx = DownloadContext::from(&config); + download_beta_toolchain(dwn_ctx); config.initial_sysroot.join("bin").join(exe("cargo", config.host_target)) }; @@ -863,7 +881,6 @@ impl Config { config.reuse = reuse.map(PathBuf::from); config.submodules = submodules; config.android_ndk = android_ndk; - config.bootstrap_cache_path = bootstrap_cache_path; set(&mut config.low_priority, low_priority); set(&mut config.compiler_docs, compiler_docs); set(&mut config.library_docs_private_items, library_docs_private_items); @@ -882,7 +899,6 @@ impl Config { set(&mut config.local_rebuild, local_rebuild); set(&mut config.print_step_timings, print_step_timings); set(&mut config.print_step_rusage, print_step_rusage); - config.patch_binaries_for_nix = patch_binaries_for_nix; config.verbose = cmp::max(config.verbose, flags_verbose as usize); @@ -891,9 +907,6 @@ impl Config { config.apply_install_config(toml.install); - config.llvm_assertions = - toml.llvm.as_ref().is_some_and(|llvm| llvm.assertions.unwrap_or(false)); - let file_content = t!(fs::read_to_string(config.src.join("src/ci/channel"))); let ci_channel = file_content.trim_end(); @@ -994,8 +1007,12 @@ impl Config { config.apply_dist_config(toml.dist); - config.initial_rustfmt = - if let Some(r) = rustfmt { Some(r) } else { config.maybe_download_rustfmt() }; + config.initial_rustfmt = if let Some(r) = rustfmt { + Some(r) + } else { + let dwn_ctx = DownloadContext::from(&config); + maybe_download_rustfmt(dwn_ctx) + }; if matches!(config.lld_mode, LldMode::SelfContained) && !config.lld_enabled @@ -1012,8 +1029,12 @@ impl Config { config.optimized_compiler_builtins = optimized_compiler_builtins.unwrap_or(config.channel != "dev"); + config.compiletest_diff_tool = compiletest_diff_tool; + + config.compiletest_allow_stage0 = compiletest_allow_stage0.unwrap_or(false); config.compiletest_use_stage0_libtest = compiletest_use_stage0_libtest.unwrap_or(true); + config.tidy_extra_checks = tidy_extra_checks; let download_rustc = config.download_rustc_commit.is_some(); diff --git a/src/bootstrap/src/core/config/toml/build.rs b/src/bootstrap/src/core/config/toml/build.rs index 4d29691f38b..728367b3972 100644 --- a/src/bootstrap/src/core/config/toml/build.rs +++ b/src/bootstrap/src/core/config/toml/build.rs @@ -68,6 +68,7 @@ define_config! 
{ optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins", jobs: Option<u32> = "jobs", compiletest_diff_tool: Option<String> = "compiletest-diff-tool", + compiletest_allow_stage0: Option<bool> = "compiletest-allow-stage0", compiletest_use_stage0_libtest: Option<bool> = "compiletest-use-stage0-libtest", tidy_extra_checks: Option<String> = "tidy-extra-checks", ccache: Option<StringOrBool> = "ccache", diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index d7c6d8dbcc3..7ec6c62a07d 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -7,9 +7,9 @@ use std::sync::OnceLock; use xz2::bufread::XzDecoder; -use crate::core::config::BUILDER_CONFIG_FILENAME; +use crate::core::config::{BUILDER_CONFIG_FILENAME, TargetSelection}; use crate::utils::build_stamp::BuildStamp; -use crate::utils::exec::command; +use crate::utils::exec::{ExecutionContext, command}; use crate::utils::helpers::{exe, hex_encode, move_file}; use crate::{Config, t}; @@ -24,17 +24,6 @@ fn extract_curl_version(out: String) -> semver::Version { .unwrap_or(semver::Version::new(1, 0, 0)) } -fn curl_version(config: &Config) -> semver::Version { - let mut curl = command("curl"); - curl.arg("-V"); - let curl = curl.run_capture_stdout(config); - if curl.is_failure() { - return semver::Version::new(1, 0, 0); - } - let output = curl.stdout(); - extract_curl_version(output) -} - /// Generic helpers that are useful anywhere in bootstrap. impl Config { pub fn is_verbose(&self) -> bool { @@ -49,10 +38,7 @@ impl Config { } pub(crate) fn remove(&self, f: &Path) { - if self.dry_run() { - return; - } - fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); + remove(&self.exec_ctx, f); } /// Create a temporary directory in `out` and return its path. @@ -68,49 +54,7 @@ impl Config { /// Whether or not `fix_bin_or_dylib` needs to be run; can only be true /// on NixOS fn should_fix_bins_and_dylibs(&self) -> bool { - let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { - let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(self); - if uname.is_failure() { - return false; - } - let output = uname.stdout(); - if !output.starts_with("Linux") { - return false; - } - // If the user has asked binaries to be patched for Nix, then - // don't check for NixOS or `/lib`. - // NOTE: this intentionally comes after the Linux check: - // - patchelf only works with ELF files, so no need to run it on Mac or Windows - // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. - if let Some(explicit_value) = self.patch_binaries_for_nix { - return explicit_value; - } - - // Use `/etc/os-release` instead of `/etc/NIXOS`. - // The latter one does not exist on NixOS when using tmpfs as root. 
- let is_nixos = match File::open("/etc/os-release") { - Err(e) if e.kind() == ErrorKind::NotFound => false, - Err(e) => panic!("failed to access /etc/os-release: {e}"), - Ok(os_release) => BufReader::new(os_release).lines().any(|l| { - let l = l.expect("reading /etc/os-release"); - matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") - }), - }; - if !is_nixos { - let in_nix_shell = env::var("IN_NIX_SHELL"); - if let Ok(in_nix_shell) = in_nix_shell { - eprintln!( - "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ - you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" - ); - } - } - is_nixos - }); - if val { - eprintln!("INFO: You seem to be using Nix."); - } - val + should_fix_bins_and_dylibs(self.patch_binaries_for_nix, &self.exec_ctx) } /// Modifies the interpreter section of 'fname' to fix the dynamic linker, @@ -121,259 +65,22 @@ impl Config { /// /// Please see <https://nixos.org/patchelf.html> for more information fn fix_bin_or_dylib(&self, fname: &Path) { - assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); - println!("attempting to patch {}", fname.display()); - - // Only build `.nix-deps` once. - static NIX_DEPS_DIR: OnceLock<PathBuf> = OnceLock::new(); - let mut nix_build_succeeded = true; - let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { - // Run `nix-build` to "build" each dependency (which will likely reuse - // the existing `/nix/store` copy, or at most download a pre-built copy). - // - // Importantly, we create a gc-root called `.nix-deps` in the `build/` - // directory, but still reference the actual `/nix/store` path in the rpath - // as it makes it significantly more robust against changes to the location of - // the `.nix-deps` location. - // - // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). - // zlib: Needed as a system dependency of `libLLVM-*.so`. - // patchelf: Needed for patching ELF binaries (see doc comment above). - let nix_deps_dir = self.out.join(".nix-deps"); - const NIX_EXPR: &str = " - with (import <nixpkgs> {}); - symlinkJoin { - name = \"rust-stage0-dependencies\"; - paths = [ - zlib - patchelf - stdenv.cc.bintools - ]; - } - "; - nix_build_succeeded = command("nix-build") - .allow_failure() - .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) - .run_capture_stdout(self) - .is_success(); - nix_deps_dir - }); - if !nix_build_succeeded { - return; - } - - let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); - patchelf.args(&[ - OsString::from("--add-rpath"), - OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), - ]); - if !path_is_dylib(fname) { - // Finally, set the correct .interp for binaries - let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); - let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); - patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); - } - patchelf.arg(fname); - let _ = patchelf.allow_failure().run_capture_stdout(self); + fix_bin_or_dylib(&self.out, fname, &self.exec_ctx); } fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { - self.verbose(|| println!("download {url}")); - // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. - let tempfile = self.tempdir().join(dest_path.file_name().unwrap()); - // While bootstrap itself only supports http and https downloads, downstream forks might - // need to download components from other protocols. 
The match allows them adding more - // protocols without worrying about merge conflicts if we change the HTTP implementation. - match url.split_once("://").map(|(proto, _)| proto) { - Some("http") | Some("https") => { - self.download_http_with_retries(&tempfile, url, help_on_error) - } - Some(other) => panic!("unsupported protocol {other} in {url}"), - None => panic!("no protocol in {url}"), - } - t!( - move_file(&tempfile, dest_path), - format!("failed to rename {tempfile:?} to {dest_path:?}") - ); - } - - fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error: &str) { - println!("downloading {url}"); - // Try curl. If that fails and we are on windows, fallback to PowerShell. - // options should be kept in sync with - // src/bootstrap/src/core/download.rs - // for consistency - let mut curl = command("curl").allow_failure(); - curl.args([ - // follow redirect - "--location", - // timeout if speed is < 10 bytes/sec for > 30 seconds - "--speed-time", - "30", - "--speed-limit", - "10", - // timeout if cannot connect within 30 seconds - "--connect-timeout", - "30", - // output file - "--output", - tempfile.to_str().unwrap(), - // if there is an error, don't restart the download, - // instead continue where it left off. - "--continue-at", - "-", - // retry up to 3 times. note that this means a maximum of 4 - // attempts will be made, since the first attempt isn't a *re*try. - "--retry", - "3", - // show errors, even if --silent is specified - "--show-error", - // set timestamp of downloaded file to that of the server - "--remote-time", - // fail on non-ok http status - "--fail", - ]); - // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. - if self.is_running_on_ci { - curl.arg("--silent"); - } else { - curl.arg("--progress-bar"); - } - // --retry-all-errors was added in 7.71.0, don't use it if curl is old. 
- if curl_version(self) >= semver::Version::new(7, 71, 0) { - curl.arg("--retry-all-errors"); - } - curl.arg(url); - if !curl.run(self) { - if self.host_target.contains("windows-msvc") { - eprintln!("Fallback to PowerShell"); - for _ in 0..3 { - let powershell = command("PowerShell.exe").allow_failure().args([ - "/nologo", - "-Command", - "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", - &format!( - "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", - url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), - ), - ]).run_capture_stdout(self); - - if powershell.is_failure() { - return; - } - - eprintln!("\nspurious failure, trying again"); - } - } - if !help_on_error.is_empty() { - eprintln!("{help_on_error}"); - } - crate::exit!(1); - } + let dwn_ctx: DownloadContext<'_> = self.into(); + download_file(dwn_ctx, url, dest_path, help_on_error); } fn unpack(&self, tarball: &Path, dst: &Path, pattern: &str) { - eprintln!("extracting {} to {}", tarball.display(), dst.display()); - if !dst.exists() { - t!(fs::create_dir_all(dst)); - } - - // `tarball` ends with `.tar.xz`; strip that suffix - // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` - let uncompressed_filename = - Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); - let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); - - // decompress the file - let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); - let decompressor = XzDecoder::new(BufReader::new(data)); - - let mut tar = tar::Archive::new(decompressor); - - let is_ci_rustc = dst.ends_with("ci-rustc"); - let is_ci_llvm = dst.ends_with("ci-llvm"); - - // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding - // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. - // Cache the entries when we extract it so we only have to read it once. - let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; - - for member in t!(tar.entries()) { - let mut member = t!(member); - let original_path = t!(member.path()).into_owned(); - // skip the top-level directory - if original_path == directory_prefix { - continue; - } - let mut short_path = t!(original_path.strip_prefix(directory_prefix)); - let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); - - if !(short_path.starts_with(pattern) - || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) - { - continue; - } - short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); - let dst_path = dst.join(short_path); - self.verbose(|| { - println!("extracting {} to {}", original_path.display(), dst.display()) - }); - if !t!(member.unpack_in(dst)) { - panic!("path traversal attack ??"); - } - if let Some(record) = &mut recorded_entries { - t!(writeln!(record, "{}", short_path.to_str().unwrap())); - } - let src_path = dst.join(original_path); - if src_path.is_dir() && dst_path.exists() { - continue; - } - t!(move_file(src_path, dst_path)); - } - let dst_dir = dst.join(directory_prefix); - if dst_dir.exists() { - t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); - } + unpack(&self.exec_ctx, tarball, dst, pattern); } /// Returns whether the SHA256 checksum of `path` matches `expected`. 
+ #[cfg(test)] pub(crate) fn verify(&self, path: &Path, expected: &str) -> bool { - use sha2::Digest; - - self.verbose(|| println!("verifying {}", path.display())); - - if self.dry_run() { - return false; - } - - let mut hasher = sha2::Sha256::new(); - - let file = t!(File::open(path)); - let mut reader = BufReader::new(file); - - loop { - let buffer = t!(reader.fill_buf()); - let l = buffer.len(); - // break if EOF - if l == 0 { - break; - } - hasher.update(buffer); - reader.consume(l); - } - - let checksum = hex_encode(hasher.finalize().as_slice()); - let verified = checksum == expected; - - if !verified { - println!( - "invalid checksum: \n\ - found: {checksum}\n\ - expected: {expected}", - ); - } - - verified + verify(&self.exec_ctx, path, expected) } } @@ -388,6 +95,7 @@ fn recorded_entries(dst: &Path, pattern: &str) -> Option<BufWriter<File>> { Some(BufWriter::new(t!(File::create(dst.join(name))))) } +#[derive(Clone)] enum DownloadSource { CI, Dist, @@ -420,63 +128,6 @@ impl Config { cargo_clippy } - #[cfg(test)] - pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> { - Some(PathBuf::new()) - } - - /// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't - /// reuse target directories or artifacts - #[cfg(not(test))] - pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> { - use build_helper::stage0_parser::VersionMetadata; - - if self.dry_run() { - return Some(PathBuf::new()); - } - - let VersionMetadata { date, version } = self.stage0_metadata.rustfmt.as_ref()?; - let channel = format!("{version}-{date}"); - - let host = self.host_target; - let bin_root = self.out.join(host).join("rustfmt"); - let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); - let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel); - if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() { - return Some(rustfmt_path); - } - - self.download_component( - DownloadSource::Dist, - format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), - "rustfmt-preview", - date, - "rustfmt", - ); - self.download_component( - DownloadSource::Dist, - format!("rustc-{version}-{build}.tar.xz", build = host.triple), - "rustc", - date, - "rustfmt", - ); - - if self.should_fix_bins_and_dylibs() { - self.fix_bin_or_dylib(&bin_root.join("bin").join("rustfmt")); - self.fix_bin_or_dylib(&bin_root.join("bin").join("cargo-fmt")); - let lib_dir = bin_root.join("lib"); - for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { - let lib = t!(lib); - if path_is_dylib(&lib.path()) { - self.fix_bin_or_dylib(&lib.path()); - } - } - } - - t!(rustfmt_stamp.write()); - Some(rustfmt_path) - } - pub(crate) fn ci_rust_std_contents(&self) -> Vec<String> { self.ci_component_contents(".rust-std-contents") } @@ -514,30 +165,6 @@ impl Config { ); } - #[cfg(test)] - pub(crate) fn download_beta_toolchain(&self) {} - - #[cfg(not(test))] - pub(crate) fn download_beta_toolchain(&self) { - self.verbose(|| println!("downloading stage0 beta artifacts")); - - let date = &self.stage0_metadata.compiler.date; - let version = &self.stage0_metadata.compiler.version; - let extra_components = ["cargo"]; - - let download_beta_component = |config: &Config, filename, prefix: &_, date: &_| { - config.download_component(DownloadSource::Dist, filename, prefix, date, "stage0") - }; - - self.download_toolchain( - version, - "stage0", - date, - &extra_components, - download_beta_component, - ); - } - fn download_toolchain( &self, version: &str, @@ 
-607,91 +234,8 @@ impl Config { key: &str, destination: &str, ) { - if self.dry_run() { - return; - } - - let cache_dst = - self.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| self.out.join("cache")); - - let cache_dir = cache_dst.join(key); - if !cache_dir.exists() { - t!(fs::create_dir_all(&cache_dir)); - } - - let bin_root = self.out.join(self.host_target).join(destination); - let tarball = cache_dir.join(&filename); - let (base_url, url, should_verify) = match mode { - DownloadSource::CI => { - let dist_server = if self.llvm_assertions { - self.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() - } else { - self.stage0_metadata.config.artifacts_server.clone() - }; - let url = format!( - "{}/{filename}", - key.strip_suffix(&format!("-{}", self.llvm_assertions)).unwrap() - ); - (dist_server, url, false) - } - DownloadSource::Dist => { - let dist_server = env::var("RUSTUP_DIST_SERVER") - .unwrap_or(self.stage0_metadata.config.dist_server.to_string()); - // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 - (dist_server, format!("dist/{key}/{filename}"), true) - } - }; - - // For the stage0 compiler, put special effort into ensuring the checksums are valid. - let checksum = if should_verify { - let error = format!( - "src/stage0 doesn't contain a checksum for {url}. \ - Pre-built artifacts might not be available for this \ - target at this time, see https://doc.rust-lang.org/nightly\ - /rustc/platform-support.html for more information." - ); - let sha256 = self.stage0_metadata.checksums_sha256.get(&url).expect(&error); - if tarball.exists() { - if self.verify(&tarball, sha256) { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - self.verbose(|| { - println!( - "ignoring cached file {} due to failed verification", - tarball.display() - ) - }); - self.remove(&tarball); - } - } - Some(sha256) - } else if tarball.exists() { - self.unpack(&tarball, &bin_root, prefix); - return; - } else { - None - }; - - let mut help_on_error = ""; - if destination == "ci-rustc" { - help_on_error = "ERROR: failed to download pre-built rustc from CI - -NOTE: old builds get deleted after a certain time -HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: - -[rust] -download-rustc = false -"; - } - self.download_file(&format!("{base_url}/{url}"), &tarball, help_on_error); - if let Some(sha256) = checksum - && !self.verify(&tarball, sha256) - { - panic!("failed to verify {}", tarball.display()); - } - - self.unpack(&tarball, &bin_root, prefix); + let dwn_ctx: DownloadContext<'_> = self.into(); + download_component(dwn_ctx, mode, filename, prefix, key, destination); } #[cfg(test)] @@ -852,6 +396,39 @@ download-rustc = false } } +/// Only should be used for pre config initialization downloads. 
+pub(crate) struct DownloadContext<'a> { + host_target: TargetSelection, + out: &'a Path, + patch_binaries_for_nix: Option<bool>, + exec_ctx: &'a ExecutionContext, + stage0_metadata: &'a build_helper::stage0_parser::Stage0, + llvm_assertions: bool, + bootstrap_cache_path: &'a Option<PathBuf>, + is_running_on_ci: bool, +} + +impl<'a> AsRef<DownloadContext<'a>> for DownloadContext<'a> { + fn as_ref(&self) -> &DownloadContext<'a> { + self + } +} + +impl<'a> From<&'a Config> for DownloadContext<'a> { + fn from(value: &'a Config) -> Self { + DownloadContext { + host_target: value.host_target, + out: &value.out, + patch_binaries_for_nix: value.patch_binaries_for_nix, + exec_ctx: &value.exec_ctx, + stage0_metadata: &value.stage0_metadata, + llvm_assertions: value.llvm_assertions, + bootstrap_cache_path: &value.bootstrap_cache_path, + is_running_on_ci: value.is_running_on_ci, + } + } +} + fn path_is_dylib(path: &Path) -> bool { // The .so is not necessarily the extension, it might be libLLVM.so.18.1 path.to_str().is_some_and(|path| path.contains(".so")) @@ -875,6 +452,7 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo "powerpc-unknown-linux-gnu", "powerpc64-unknown-linux-gnu", "powerpc64le-unknown-linux-gnu", + "powerpc64le-unknown-linux-musl", "riscv64gc-unknown-linux-gnu", "s390x-unknown-linux-gnu", "x86_64-apple-darwin", @@ -896,3 +474,596 @@ pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: boo SUPPORTED_PLATFORMS.contains(&target_triple) } } + +#[cfg(test)] +pub(crate) fn maybe_download_rustfmt<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, +) -> Option<PathBuf> { + Some(PathBuf::new()) +} + +/// NOTE: rustfmt is a completely different toolchain than the bootstrap compiler, so it can't +/// reuse target directories or artifacts +#[cfg(not(test))] +pub(crate) fn maybe_download_rustfmt<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, +) -> Option<PathBuf> { + use build_helper::stage0_parser::VersionMetadata; + + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.exec_ctx.dry_run() { + return Some(PathBuf::new()); + } + + let VersionMetadata { date, version } = dwn_ctx.stage0_metadata.rustfmt.as_ref()?; + let channel = format!("{version}-{date}"); + + let host = dwn_ctx.host_target; + let bin_root = dwn_ctx.out.join(host).join("rustfmt"); + let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host)); + let rustfmt_stamp = BuildStamp::new(&bin_root).with_prefix("rustfmt").add_stamp(channel); + if rustfmt_path.exists() && rustfmt_stamp.is_up_to_date() { + return Some(rustfmt_path); + } + + download_component( + dwn_ctx, + DownloadSource::Dist, + format!("rustfmt-{version}-{build}.tar.xz", build = host.triple), + "rustfmt-preview", + date, + "rustfmt", + ); + + download_component( + dwn_ctx, + DownloadSource::Dist, + format!("rustc-{version}-{build}.tar.xz", build = host.triple), + "rustc", + date, + "rustfmt", + ); + + if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) { + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustfmt"), dwn_ctx.exec_ctx); + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("cargo-fmt"), dwn_ctx.exec_ctx); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if path_is_dylib(&lib.path()) { + fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx); + } + } + } + + t!(rustfmt_stamp.write()); + Some(rustfmt_path) +} + +#[cfg(test)] +pub(crate) fn 
download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) {} + +#[cfg(not(test))] +pub(crate) fn download_beta_toolchain<'a>(dwn_ctx: impl AsRef<DownloadContext<'a>>) { + let dwn_ctx = dwn_ctx.as_ref(); + dwn_ctx.exec_ctx.verbose(|| { + println!("downloading stage0 beta artifacts"); + }); + + let date = dwn_ctx.stage0_metadata.compiler.date.clone(); + let version = dwn_ctx.stage0_metadata.compiler.version.clone(); + let extra_components = ["cargo"]; + let sysroot = "stage0"; + download_toolchain( + dwn_ctx, + &version, + sysroot, + &date, + &extra_components, + "stage0", + DownloadSource::Dist, + ); +} + +fn download_toolchain<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + version: &str, + sysroot: &str, + stamp_key: &str, + extra_components: &[&str], + destination: &str, + mode: DownloadSource, +) { + let dwn_ctx = dwn_ctx.as_ref(); + let host = dwn_ctx.host_target.triple; + let bin_root = dwn_ctx.out.join(host).join(sysroot); + let rustc_stamp = BuildStamp::new(&bin_root).with_prefix("rustc").add_stamp(stamp_key); + + if !bin_root.join("bin").join(exe("rustc", dwn_ctx.host_target)).exists() + || !rustc_stamp.is_up_to_date() + { + if bin_root.exists() { + t!(fs::remove_dir_all(&bin_root)); + } + let filename = format!("rust-std-{version}-{host}.tar.xz"); + let pattern = format!("rust-std-{host}"); + download_component(dwn_ctx, mode.clone(), filename, &pattern, stamp_key, destination); + let filename = format!("rustc-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, "rustc", stamp_key, destination); + + for component in extra_components { + let filename = format!("{component}-{version}-{host}.tar.xz"); + download_component(dwn_ctx, mode.clone(), filename, component, stamp_key, destination); + } + + if should_fix_bins_and_dylibs(dwn_ctx.patch_binaries_for_nix, dwn_ctx.exec_ctx) { + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib(dwn_ctx.out, &bin_root.join("bin").join("rustdoc"), dwn_ctx.exec_ctx); + fix_bin_or_dylib( + dwn_ctx.out, + &bin_root.join("libexec").join("rust-analyzer-proc-macro-srv"), + dwn_ctx.exec_ctx, + ); + let lib_dir = bin_root.join("lib"); + for lib in t!(fs::read_dir(&lib_dir), lib_dir.display().to_string()) { + let lib = t!(lib); + if path_is_dylib(&lib.path()) { + fix_bin_or_dylib(dwn_ctx.out, &lib.path(), dwn_ctx.exec_ctx); + } + } + } + + t!(rustc_stamp.write()); + } +} + +pub(crate) fn remove(exec_ctx: &ExecutionContext, f: &Path) { + if exec_ctx.dry_run() { + return; + } + fs::remove_file(f).unwrap_or_else(|_| panic!("failed to remove {f:?}")); +} + +fn fix_bin_or_dylib(out: &Path, fname: &Path, exec_ctx: &ExecutionContext) { + assert_eq!(SHOULD_FIX_BINS_AND_DYLIBS.get(), Some(&true)); + println!("attempting to patch {}", fname.display()); + + // Only build `.nix-deps` once. + static NIX_DEPS_DIR: OnceLock<PathBuf> = OnceLock::new(); + let mut nix_build_succeeded = true; + let nix_deps_dir = NIX_DEPS_DIR.get_or_init(|| { + // Run `nix-build` to "build" each dependency (which will likely reuse + // the existing `/nix/store` copy, or at most download a pre-built copy). + // + // Importantly, we create a gc-root called `.nix-deps` in the `build/` + // directory, but still reference the actual `/nix/store` path in the rpath + // as it makes it significantly more robust against changes to the location of + // the `.nix-deps` location. + // + // bintools: Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`). 
+ // zlib: Needed as a system dependency of `libLLVM-*.so`. + // patchelf: Needed for patching ELF binaries (see doc comment above). + let nix_deps_dir = out.join(".nix-deps"); + const NIX_EXPR: &str = " + with (import <nixpkgs> {}); + symlinkJoin { + name = \"rust-stage0-dependencies\"; + paths = [ + zlib + patchelf + stdenv.cc.bintools + ]; + } + "; + nix_build_succeeded = command("nix-build") + .allow_failure() + .args([Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir]) + .run_capture_stdout(exec_ctx) + .is_success(); + nix_deps_dir + }); + if !nix_build_succeeded { + return; + } + + let mut patchelf = command(nix_deps_dir.join("bin/patchelf")); + patchelf.args(&[ + OsString::from("--add-rpath"), + OsString::from(t!(fs::canonicalize(nix_deps_dir)).join("lib")), + ]); + if !path_is_dylib(fname) { + // Finally, set the correct .interp for binaries + let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker"); + let dynamic_linker = t!(fs::read_to_string(dynamic_linker_path)); + patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]); + } + patchelf.arg(fname); + let _ = patchelf.allow_failure().run_capture_stdout(exec_ctx); +} + +fn should_fix_bins_and_dylibs( + patch_binaries_for_nix: Option<bool>, + exec_ctx: &ExecutionContext, +) -> bool { + let val = *SHOULD_FIX_BINS_AND_DYLIBS.get_or_init(|| { + let uname = command("uname").allow_failure().arg("-s").run_capture_stdout(exec_ctx); + if uname.is_failure() { + return false; + } + let output = uname.stdout(); + if !output.starts_with("Linux") { + return false; + } + // If the user has asked binaries to be patched for Nix, then + // don't check for NixOS or `/lib`. + // NOTE: this intentionally comes after the Linux check: + // - patchelf only works with ELF files, so no need to run it on Mac or Windows + // - On other Unix systems, there is no stable syscall interface, so Nix doesn't manage the global libc. + if let Some(explicit_value) = patch_binaries_for_nix { + return explicit_value; + } + + // Use `/etc/os-release` instead of `/etc/NIXOS`. + // The latter one does not exist on NixOS when using tmpfs as root. 
+ let is_nixos = match File::open("/etc/os-release") { + Err(e) if e.kind() == ErrorKind::NotFound => false, + Err(e) => panic!("failed to access /etc/os-release: {e}"), + Ok(os_release) => BufReader::new(os_release).lines().any(|l| { + let l = l.expect("reading /etc/os-release"); + matches!(l.trim(), "ID=nixos" | "ID='nixos'" | "ID=\"nixos\"") + }), + }; + if !is_nixos { + let in_nix_shell = env::var("IN_NIX_SHELL"); + if let Ok(in_nix_shell) = in_nix_shell { + eprintln!( + "The IN_NIX_SHELL environment variable is `{in_nix_shell}`; \ + you may need to set `patch-binaries-for-nix=true` in bootstrap.toml" + ); + } + } + is_nixos + }); + if val { + eprintln!("INFO: You seem to be using Nix."); + } + val +} + +fn download_component<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + mode: DownloadSource, + filename: String, + prefix: &str, + key: &str, + destination: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + if dwn_ctx.exec_ctx.dry_run() { + return; + } + + let cache_dst = + dwn_ctx.bootstrap_cache_path.as_ref().cloned().unwrap_or_else(|| dwn_ctx.out.join("cache")); + + let cache_dir = cache_dst.join(key); + if !cache_dir.exists() { + t!(fs::create_dir_all(&cache_dir)); + } + + let bin_root = dwn_ctx.out.join(dwn_ctx.host_target).join(destination); + let tarball = cache_dir.join(&filename); + let (base_url, url, should_verify) = match mode { + DownloadSource::CI => { + let dist_server = if dwn_ctx.llvm_assertions { + dwn_ctx.stage0_metadata.config.artifacts_with_llvm_assertions_server.clone() + } else { + dwn_ctx.stage0_metadata.config.artifacts_server.clone() + }; + let url = format!( + "{}/{filename}", + key.strip_suffix(&format!("-{}", dwn_ctx.llvm_assertions)).unwrap() + ); + (dist_server, url, false) + } + DownloadSource::Dist => { + let dist_server = env::var("RUSTUP_DIST_SERVER") + .unwrap_or(dwn_ctx.stage0_metadata.config.dist_server.to_string()); + // NOTE: make `dist` part of the URL because that's how it's stored in src/stage0 + (dist_server, format!("dist/{key}/{filename}"), true) + } + }; + + // For the stage0 compiler, put special effort into ensuring the checksums are valid. + let checksum = if should_verify { + let error = format!( + "src/stage0 doesn't contain a checksum for {url}. \ + Pre-built artifacts might not be available for this \ + target at this time, see https://doc.rust-lang.org/nightly\ + /rustc/platform-support.html for more information." 
+ ); + let sha256 = dwn_ctx.stage0_metadata.checksums_sha256.get(&url).expect(&error); + if tarball.exists() { + if verify(dwn_ctx.exec_ctx, &tarball, sha256) { + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); + return; + } else { + dwn_ctx.exec_ctx.verbose(|| { + println!( + "ignoring cached file {} due to failed verification", + tarball.display() + ) + }); + remove(dwn_ctx.exec_ctx, &tarball); + } + } + Some(sha256) + } else if tarball.exists() { + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); + return; + } else { + None + }; + + let mut help_on_error = ""; + if destination == "ci-rustc" { + help_on_error = "ERROR: failed to download pre-built rustc from CI + +NOTE: old builds get deleted after a certain time +HELP: if trying to compile an old commit of rustc, disable `download-rustc` in bootstrap.toml: + +[rust] +download-rustc = false +"; + } + download_file(dwn_ctx, &format!("{base_url}/{url}"), &tarball, help_on_error); + if let Some(sha256) = checksum + && !verify(dwn_ctx.exec_ctx, &tarball, sha256) + { + panic!("failed to verify {}", tarball.display()); + } + + unpack(dwn_ctx.exec_ctx, &tarball, &bin_root, prefix); +} + +pub(crate) fn verify(exec_ctx: &ExecutionContext, path: &Path, expected: &str) -> bool { + use sha2::Digest; + + exec_ctx.verbose(|| { + println!("verifying {}", path.display()); + }); + + if exec_ctx.dry_run() { + return false; + } + + let mut hasher = sha2::Sha256::new(); + + let file = t!(File::open(path)); + let mut reader = BufReader::new(file); + + loop { + let buffer = t!(reader.fill_buf()); + let l = buffer.len(); + // break if EOF + if l == 0 { + break; + } + hasher.update(buffer); + reader.consume(l); + } + + let checksum = hex_encode(hasher.finalize().as_slice()); + let verified = checksum == expected; + + if !verified { + println!( + "invalid checksum: \n\ + found: {checksum}\n\ + expected: {expected}", + ); + } + + verified +} + +fn unpack(exec_ctx: &ExecutionContext, tarball: &Path, dst: &Path, pattern: &str) { + eprintln!("extracting {} to {}", tarball.display(), dst.display()); + if !dst.exists() { + t!(fs::create_dir_all(dst)); + } + + // `tarball` ends with `.tar.xz`; strip that suffix + // example: `rust-dev-nightly-x86_64-unknown-linux-gnu` + let uncompressed_filename = + Path::new(tarball.file_name().expect("missing tarball filename")).file_stem().unwrap(); + let directory_prefix = Path::new(Path::new(uncompressed_filename).file_stem().unwrap()); + + // decompress the file + let data = t!(File::open(tarball), format!("file {} not found", tarball.display())); + let decompressor = XzDecoder::new(BufReader::new(data)); + + let mut tar = tar::Archive::new(decompressor); + + let is_ci_rustc = dst.ends_with("ci-rustc"); + let is_ci_llvm = dst.ends_with("ci-llvm"); + + // `compile::Sysroot` needs to know the contents of the `rustc-dev` tarball to avoid adding + // it to the sysroot unless it was explicitly requested. But parsing the 100 MB tarball is slow. + // Cache the entries when we extract it so we only have to read it once. 
+ let mut recorded_entries = if is_ci_rustc { recorded_entries(dst, pattern) } else { None }; + + for member in t!(tar.entries()) { + let mut member = t!(member); + let original_path = t!(member.path()).into_owned(); + // skip the top-level directory + if original_path == directory_prefix { + continue; + } + let mut short_path = t!(original_path.strip_prefix(directory_prefix)); + let is_builder_config = short_path.to_str() == Some(BUILDER_CONFIG_FILENAME); + + if !(short_path.starts_with(pattern) || ((is_ci_rustc || is_ci_llvm) && is_builder_config)) + { + continue; + } + short_path = short_path.strip_prefix(pattern).unwrap_or(short_path); + let dst_path = dst.join(short_path); + + exec_ctx.verbose(|| { + println!("extracting {} to {}", original_path.display(), dst.display()); + }); + + if !t!(member.unpack_in(dst)) { + panic!("path traversal attack ??"); + } + if let Some(record) = &mut recorded_entries { + t!(writeln!(record, "{}", short_path.to_str().unwrap())); + } + let src_path = dst.join(original_path); + if src_path.is_dir() && dst_path.exists() { + continue; + } + t!(move_file(src_path, dst_path)); + } + let dst_dir = dst.join(directory_prefix); + if dst_dir.exists() { + t!(fs::remove_dir_all(&dst_dir), format!("failed to remove {}", dst_dir.display())); + } +} + +fn download_file<'a>( + dwn_ctx: impl AsRef<DownloadContext<'a>>, + url: &str, + dest_path: &Path, + help_on_error: &str, +) { + let dwn_ctx = dwn_ctx.as_ref(); + + dwn_ctx.exec_ctx.verbose(|| { + println!("download {url}"); + }); + // Use a temporary file in case we crash while downloading, to avoid a corrupt download in cache/. + let tempfile = tempdir(dwn_ctx.out).join(dest_path.file_name().unwrap()); + // While bootstrap itself only supports http and https downloads, downstream forks might + // need to download components from other protocols. The match allows them adding more + // protocols without worrying about merge conflicts if we change the HTTP implementation. + match url.split_once("://").map(|(proto, _)| proto) { + Some("http") | Some("https") => download_http_with_retries( + dwn_ctx.host_target, + dwn_ctx.is_running_on_ci, + dwn_ctx.exec_ctx, + &tempfile, + url, + help_on_error, + ), + Some(other) => panic!("unsupported protocol {other} in {url}"), + None => panic!("no protocol in {url}"), + } + t!(move_file(&tempfile, dest_path), format!("failed to rename {tempfile:?} to {dest_path:?}")); +} + +/// Create a temporary directory in `out` and return its path. +/// +/// NOTE: this temporary directory is shared between all steps; +/// if you need an empty directory, create a new subdirectory inside it. +pub(crate) fn tempdir(out: &Path) -> PathBuf { + let tmp = out.join("tmp"); + t!(fs::create_dir_all(&tmp)); + tmp +} + +fn download_http_with_retries( + host_target: TargetSelection, + is_running_on_ci: bool, + exec_ctx: &ExecutionContext, + tempfile: &Path, + url: &str, + help_on_error: &str, +) { + println!("downloading {url}"); + // Try curl. If that fails and we are on windows, fallback to PowerShell. 
+ // options should be kept in sync with + // src/bootstrap/src/core/download.rs + // for consistency + let mut curl = command("curl").allow_failure(); + curl.args([ + // follow redirect + "--location", + // timeout if speed is < 10 bytes/sec for > 30 seconds + "--speed-time", + "30", + "--speed-limit", + "10", + // timeout if cannot connect within 30 seconds + "--connect-timeout", + "30", + // output file + "--output", + tempfile.to_str().unwrap(), + // if there is an error, don't restart the download, + // instead continue where it left off. + "--continue-at", + "-", + // retry up to 3 times. note that this means a maximum of 4 + // attempts will be made, since the first attempt isn't a *re*try. + "--retry", + "3", + // show errors, even if --silent is specified + "--show-error", + // set timestamp of downloaded file to that of the server + "--remote-time", + // fail on non-ok http status + "--fail", + ]); + // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. + if is_running_on_ci { + curl.arg("--silent"); + } else { + curl.arg("--progress-bar"); + } + // --retry-all-errors was added in 7.71.0, don't use it if curl is old. + if curl_version(exec_ctx) >= semver::Version::new(7, 71, 0) { + curl.arg("--retry-all-errors"); + } + curl.arg(url); + if !curl.run(exec_ctx) { + if host_target.contains("windows-msvc") { + eprintln!("Fallback to PowerShell"); + for _ in 0..3 { + let powershell = command("PowerShell.exe").allow_failure().args([ + "/nologo", + "-Command", + "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", + &format!( + "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')", + url, tempfile.to_str().expect("invalid UTF-8 not supported with powershell downloads"), + ), + ]).run_capture_stdout(exec_ctx); + + if powershell.is_failure() { + return; + } + + eprintln!("\nspurious failure, trying again"); + } + } + if !help_on_error.is_empty() { + eprintln!("{help_on_error}"); + } + crate::exit!(1); + } +} + +fn curl_version(exec_ctx: &ExecutionContext) -> semver::Version { + let mut curl = command("curl"); + curl.arg("-V"); + let curl = curl.run_capture_stdout(exec_ctx); + if curl.is_failure() { + return semver::Version::new(1, 0, 0); + } + let output = curl.stdout(); + extract_curl_version(output) +} diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs index b39d464493e..15e04f59129 100644 --- a/src/bootstrap/src/core/sanity.rs +++ b/src/bootstrap/src/core/sanity.rs @@ -338,12 +338,6 @@ than building it. // Make sure musl-root is valid. if target.contains("musl") && !target.contains("unikraft") { - // If this is a native target (host is also musl) and no musl-root is given, - // fall back to the system toolchain in /usr before giving up - if build.musl_root(*target).is_none() && build.config.is_host_target(*target) { - let target = build.config.target_config.entry(*target).or_default(); - target.musl_root = Some("/usr".into()); - } match build.musl_libdir(*target) { Some(libdir) => { if fs::metadata(libdir.join("libc.a")).is_err() { diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs index 63aab4d116a..51a84ad5272 100644 --- a/src/bootstrap/src/lib.rs +++ b/src/bootstrap/src/lib.rs @@ -1329,23 +1329,33 @@ impl Build { } } - /// Returns the "musl root" for this `target`, if defined + /// Returns the "musl root" for this `target`, if defined. 
+ /// + /// If this is a native target (host is also musl) and no musl-root is given, + /// it falls back to the system toolchain in /usr. fn musl_root(&self, target: TargetSelection) -> Option<&Path> { - self.config + let configured_root = self + .config .target_config .get(&target) .and_then(|t| t.musl_root.as_ref()) .or(self.config.musl_root.as_ref()) - .map(|p| &**p) + .map(|p| &**p); + + if self.config.is_host_target(target) && configured_root.is_none() { + Some(Path::new("/usr")) + } else { + configured_root + } } /// Returns the "musl libdir" for this `target`. fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> { - let t = self.config.target_config.get(&target)?; - if let libdir @ Some(_) = &t.musl_libdir { - return libdir.clone(); - } - self.musl_root(target).map(|root| root.join("lib")) + self.config + .target_config + .get(&target) + .and_then(|t| t.musl_libdir.clone()) + .or_else(|| self.musl_root(target).map(|root| root.join("lib"))) } /// Returns the `lib` directory for the WASI target specified, if diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index f802640a42d..d3331b81587 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -439,7 +439,7 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ ChangeInfo { change_id: 143255, severity: ChangeSeverity::Warning, - summary: "`llvm.lld` is no longer enabled by default for the dist profile.", + summary: "`rust.lld` is no longer enabled by default for the dist profile.", }, ChangeInfo { change_id: 143251, @@ -486,4 +486,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "Removed `rust.description` and `llvm.ccache` as it was deprecated in #137723 and #136941 long time ago.", }, + ChangeInfo { + change_id: 144675, + severity: ChangeSeverity::Warning, + summary: "Added `build.compiletest-allow-stage0` flag instead of `COMPILETEST_FORCE_STAGE0` env var, and reject running `compiletest` self tests against stage 0 rustc unless explicitly allowed.", + }, ]; diff --git a/src/bootstrap/src/utils/proc_macro_deps.rs b/src/bootstrap/src/utils/proc_macro_deps.rs index 21c7fc89d7d..777c8601aa1 100644 --- a/src/bootstrap/src/utils/proc_macro_deps.rs +++ b/src/bootstrap/src/utils/proc_macro_deps.rs @@ -3,6 +3,7 @@ /// See <https://github.com/rust-lang/rust/issues/134863> pub static CRATES: &[&str] = &[ // tidy-alphabetical-start + "allocator-api2", "annotate-snippets", "anstyle", "askama_parser", @@ -16,13 +17,17 @@ pub static CRATES: &[&str] = &[ "darling_core", "derive_builder_core", "digest", + "equivalent", "fluent-bundle", "fluent-langneg", "fluent-syntax", "fnv", + "foldhash", "generic-array", + "hashbrown", "heck", "ident_case", + "indexmap", "intl-memoizer", "intl_pluralrules", "libc", diff --git a/src/bootstrap/src/utils/shared_helpers.rs b/src/bootstrap/src/utils/shared_helpers.rs index 9c6b4a7615d..9428e221f41 100644 --- a/src/bootstrap/src/utils/shared_helpers.rs +++ b/src/bootstrap/src/utils/shared_helpers.rs @@ -1,14 +1,18 @@ //! This module serves two purposes: -//! 1. It is part of the `utils` module and used in other parts of bootstrap. -//! 2. It is embedded inside bootstrap shims to avoid a dependency on the bootstrap library. -//! Therefore, this module should never use any other bootstrap module. This reduces binary -//! size and improves compilation time by minimizing linking time. +//! +//! 1. 
It is part of the `utils` module and used in other parts of bootstrap. +//! 2. It is embedded inside bootstrap shims to avoid a dependency on the bootstrap library. +//! Therefore, this module should never use any other bootstrap module. This reduces binary size +//! and improves compilation time by minimizing linking time. + +// # Note on tests +// +// If we were to declare a tests submodule here, the shim binaries that include this module via +// `#[path]` would fail to find it, which breaks `./x check bootstrap`. So instead the unit tests +// for this module are in `super::tests::shared_helpers_tests`. #![allow(dead_code)] -#[cfg(test)] -mod tests; - use std::env; use std::ffi::OsString; use std::fs::OpenOptions; @@ -16,10 +20,6 @@ use std::io::Write; use std::process::Command; use std::str::FromStr; -// If we were to declare a tests submodule here, the shim binaries that include this -// module via `#[path]` would fail to find it, which breaks `./x check bootstrap`. -// So instead the unit tests for this module are in `super::tests::shared_helpers_tests`. - /// Returns the environment variable which the dynamic library lookup path /// resides in for this platform. pub fn dylib_path_var() -> &'static str { diff --git a/src/bootstrap/src/utils/tests/mod.rs b/src/bootstrap/src/utils/tests/mod.rs index ec87e71e0b6..983680b0385 100644 --- a/src/bootstrap/src/utils/tests/mod.rs +++ b/src/bootstrap/src/utils/tests/mod.rs @@ -12,6 +12,10 @@ use crate::{Build, Config, Flags, t}; pub mod git; +// Note: tests for `shared_helpers` is separate here, as otherwise shim binaries that include the +// `shared_helpers` via `#[path]` would fail to find it, breaking `./x check bootstrap`. +mod shared_helpers_tests; + /// Holds temporary state of a bootstrap test. /// Right now it is only used to redirect the build directory of the bootstrap /// invocation, in the future it would be great if we could actually execute diff --git a/src/bootstrap/src/utils/shared_helpers/tests.rs b/src/bootstrap/src/utils/tests/shared_helpers_tests.rs index 559e9f70abd..c486e65007e 100644 --- a/src/bootstrap/src/utils/shared_helpers/tests.rs +++ b/src/bootstrap/src/utils/tests/shared_helpers_tests.rs @@ -1,3 +1,10 @@ +//! The `shared_helpers` module can't have its own tests submodule, because that would cause +//! problems for the shim binaries that include it via `#[path]`, so instead those unit tests live +//! here. +//! +//! To prevent tidy from complaining about this file not being named `tests.rs`, it lives inside a +//! submodule directory named `tests`. + use crate::utils::shared_helpers::parse_value_from_args; #[test] diff --git a/src/ci/citool/src/jobs.rs b/src/ci/citool/src/jobs.rs index 410274227e4..47516cbc1f4 100644 --- a/src/ci/citool/src/jobs.rs +++ b/src/ci/citool/src/jobs.rs @@ -1,9 +1,9 @@ #[cfg(test)] mod tests; -use std::collections::BTreeMap; +use std::collections::{BTreeMap, HashSet}; -use anyhow::Context as _; +use anyhow::{Context as _, anyhow}; use serde_yaml::Value; use crate::GitHubContext; @@ -85,6 +85,10 @@ impl JobDatabase { .cloned() .collect() } + + fn find_auto_job_by_name(&self, job_name: &str) -> Option<&Job> { + self.auto_jobs.iter().find(|job| job.name == job_name) + } } pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> { @@ -97,14 +101,118 @@ pub fn load_job_db(db: &str) -> anyhow::Result<JobDatabase> { db.apply_merge().context("failed to apply merge keys") }; - // Apply merge twice to handle nested merges + // Apply merge twice to handle nested merges up to depth 2. 
apply_merge(&mut db)?;
     apply_merge(&mut db)?;

-    let db: JobDatabase = serde_yaml::from_value(db).context("failed to parse job database")?;
+    let mut db: JobDatabase = serde_yaml::from_value(db).context("failed to parse job database")?;
+
+    register_pr_jobs_as_auto_jobs(&mut db)?;
+
+    validate_job_database(&db)?;
+
     Ok(db)
 }

+/// Maintain invariant that PR CI jobs must be a subset of Auto CI jobs modulo carve-outs.
+///
+/// When PR jobs are auto-registered as Auto jobs, they will have `continue_on_error` overridden to
+/// be `false` to avoid wasting Auto CI resources.
+///
+/// When a job is already both a PR job and an Auto job, we will post-validate their "equivalence
+/// modulo certain carve-outs" in [`validate_job_database`].
+///
+/// This invariant is important to make sure that it's not easily possible (without modifying
+/// `citool`) to have PRs with red PR-only CI jobs merged into `master`, causing all subsequent PR
+/// CI runs to be red until the cause is fixed.
+fn register_pr_jobs_as_auto_jobs(db: &mut JobDatabase) -> anyhow::Result<()> {
+    for pr_job in &db.pr_jobs {
+        // It's acceptable to "override" a PR job in an Auto job; for instance, `x86_64-gnu-tools` will
+        // receive an additional `DEPLOY_TOOLSTATES_JSON: toolstates-linux.json` env when under the Auto
+        // environment versus the PR environment.
+        if db.find_auto_job_by_name(&pr_job.name).is_some() {
+            continue;
+        }
+
+        let auto_registered_job = Job { continue_on_error: Some(false), ..pr_job.clone() };
+        db.auto_jobs.push(auto_registered_job);
+    }
+
+    Ok(())
+}
+
+fn validate_job_database(db: &JobDatabase) -> anyhow::Result<()> {
+    fn ensure_no_duplicate_job_names(section: &str, jobs: &Vec<Job>) -> anyhow::Result<()> {
+        let mut job_names = HashSet::new();
+        for job in jobs {
+            let job_name = job.name.as_str();
+            if !job_names.insert(job_name) {
+                return Err(anyhow::anyhow!(
+                    "duplicate job name `{job_name}` in section `{section}`"
+                ));
+            }
+        }
+        Ok(())
+    }
+
+    ensure_no_duplicate_job_names("pr", &db.pr_jobs)?;
+    ensure_no_duplicate_job_names("auto", &db.auto_jobs)?;
+    ensure_no_duplicate_job_names("try", &db.try_jobs)?;
+    ensure_no_duplicate_job_names("optional", &db.optional_jobs)?;
+
+    fn equivalent_modulo_carve_out(pr_job: &Job, auto_job: &Job) -> anyhow::Result<()> {
+        let Job {
+            name,
+            os,
+            only_on_channel,
+            free_disk,
+            doc_url,
+            codebuild,
+
+            // Carve-out configs allowed to be different.
+            env: _,
+            continue_on_error: _,
+        } = pr_job;
+
+        if *name == auto_job.name
+            && *os == auto_job.os
+            && *only_on_channel == auto_job.only_on_channel
+            && *free_disk == auto_job.free_disk
+            && *doc_url == auto_job.doc_url
+            && *codebuild == auto_job.codebuild
+        {
+            Ok(())
+        } else {
+            Err(anyhow!(
+                "PR job `{}` differs from corresponding Auto job `{}` in configuration other than `continue_on_error` and `env`",
+                pr_job.name,
+                auto_job.name
+            ))
+        }
+    }
+
+    for pr_job in &db.pr_jobs {
+        // At this point, any PR job must also be an Auto job, auto-registered or overridden.
+        let auto_job = db
+            .find_auto_job_by_name(&pr_job.name)
+            .expect("PR job must either be auto-registered as Auto job or overridden");
+
+        equivalent_modulo_carve_out(pr_job, auto_job)?;
+    }
+
+    // Auto CI jobs must all "fail-fast" to avoid wasting Auto CI resources. For instance, `tidy`. 
+ for auto_job in &db.auto_jobs { + if auto_job.continue_on_error == Some(true) { + return Err(anyhow!( + "Auto job `{}` cannot have `continue_on_error: true`", + auto_job.name + )); + } + } + + Ok(()) +} + /// Representation of a job outputted to a GitHub Actions workflow. #[derive(serde::Serialize, Debug)] struct GithubActionsJob { diff --git a/src/ci/citool/src/jobs/tests.rs b/src/ci/citool/src/jobs/tests.rs index 63ac508b632..f1f6274e1ed 100644 --- a/src/ci/citool/src/jobs/tests.rs +++ b/src/ci/citool/src/jobs/tests.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeMap; use std::path::Path; use super::Job; @@ -146,3 +147,222 @@ fn validate_jobs() { panic!("Job validation failed:\n{error_messages}"); } } + +#[test] +fn pr_job_implies_auto_job() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: pr-ci-a + os: ubuntu + env: {} +try: +auto: +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["pr-ci-a"]) +} + +#[test] +fn implied_auto_job_keeps_env_and_fails_fast() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + env: + DEPLOY_TOOLSTATES_JSON: toolstates-linux.json + continue_on_error: true + os: ubuntu +try: +auto: +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "DEPLOY_TOOLSTATES_JSON".to_string(), + serde_yaml::Value::String("toolstates-linux.json".to_string()) + )]) + ); +} + +#[test] +#[should_panic = "duplicate"] +fn duplicate_job_name() { + let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + + +pr: + - name: pr-ci-a + os: ubuntu + env: {} + - name: pr-ci-a + os: ubuntu + env: {} +try: +auto: +optional: +"#, + ) + .unwrap(); +} + +#[test] +fn auto_job_can_override_pr_job_spec() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + os: ubuntu + env: {} +try: +auto: + - name: tidy + env: + DEPLOY_TOOLSTATES_JSON: toolstates-linux.json + continue_on_error: false + os: ubuntu +optional: +"#, + ) + .unwrap(); + + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "DEPLOY_TOOLSTATES_JSON".to_string(), + serde_yaml::Value::String("toolstates-linux.json".to_string()) + )]) + ); +} + +#[test] +fn compatible_divergence_pr_auto_job() { + let db = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + continue_on_error: true + env: + ENV_ALLOWED_TO_DIFFER: "hello world" + os: ubuntu +try: +auto: + - name: tidy + continue_on_error: false + env: + ENV_ALLOWED_TO_DIFFER: "goodbye world" + os: ubuntu +optional: +"#, + ) + .unwrap(); + + // `continue_on_error` and `env` are carve-outs *allowed* to diverge between PR and Auto job of + // the same name. Should load successfully. 
+ + assert_eq!(db.auto_jobs.iter().map(|j| j.name.as_str()).collect::<Vec<_>>(), vec!["tidy"]); + assert_eq!(db.auto_jobs[0].continue_on_error, Some(false)); + assert_eq!( + db.auto_jobs[0].env, + BTreeMap::from([( + "ENV_ALLOWED_TO_DIFFER".to_string(), + serde_yaml::Value::String("goodbye world".to_string()) + )]) + ); +} + +#[test] +#[should_panic = "differs"] +fn incompatible_divergence_pr_auto_job() { + // `os` is not one of the carve-out options allowed to diverge. This should fail. + let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: + - name: tidy + continue_on_error: true + env: + ENV_ALLOWED_TO_DIFFER: "hello world" + os: ubuntu +try: +auto: + - name: tidy + continue_on_error: false + env: + ENV_ALLOWED_TO_DIFFER: "goodbye world" + os: windows +optional: +"#, + ) + .unwrap(); +} + +#[test] +#[should_panic = "cannot have `continue_on_error: true`"] +fn auto_job_continue_on_error() { + // Auto CI jobs must fail-fast. + let _ = load_job_db( + r#" +envs: + pr: + try: + auto: + optional: + +pr: +try: +auto: + - name: tidy + continue_on_error: true + os: windows + env: {} +optional: +"#, + ) + .unwrap(); +} diff --git a/src/ci/citool/tests/jobs.rs b/src/ci/citool/tests/jobs.rs index dbaf13d4f42..24e0b85cab2 100644 --- a/src/ci/citool/tests/jobs.rs +++ b/src/ci/citool/tests/jobs.rs @@ -6,7 +6,7 @@ const TEST_JOBS_YML_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/tes fn auto_jobs() { let stdout = get_matrix("push", "commit", "refs/heads/auto"); insta::assert_snapshot!(stdout, @r#" - jobs=[{"name":"aarch64-gnu","full_name":"auto - aarch64-gnu","os":"ubuntu-22.04-arm","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"x86_64-gnu-llvm-18-1","full_name":"auto - x86_64-gnu-llvm-18-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","DOCKER_SCRIPT":"stage_2_test_set1.sh","IMAGE":"x86_64-gnu-llvm-18","READ_ONLY_SRC":"0","RUST_BACKTRACE":1,"TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"aarch64-apple","full_name":"auto - aarch64-apple","os":"macos-14","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","MACOSX_DEPLOYMENT_TARGET":11.0,"MACOSX_STD_DEPLOYMENT_TARGET":11.0,"NO_DEBUG_ASSERTIONS":1,"NO_LLVM_ASSERTIONS":1,"NO_OVERFLOW_CHECKS":1,"RUSTC_RETRY_LINKER_ON_SEGFAULT":1,"RUST_CONFIGURE_ARGS":"--enable-sanitizers --enable-profiler --set rust.jemalloc","SCRIPT":"./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin","SELECT_XCODE":"/Applications/Xcode_15.4.app","TOOLSTATE_PUBLISH":1,"USE_XCODE_CLANG":1}},{"name":"dist-i686-msvc","full_name":"auto - dist-i686-msvc","os":"windows-2022","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","CODEGEN_BACKENDS":"llvm,cranelift","DEPLOY_BUCKET":"rust-lang-ci2","DIST_REQUIRE_ALL_TOOLS":1,"RUST_CONFIGURE_ARGS":"--build=i686-pc-windows-msvc --host=i686-pc-windows-msvc --target=i686-pc-windows-msvc,i586-pc-windows-msvc --enable-full-tools --enable-profiler","SCRIPT":"python x.py dist bootstrap --include-default-paths","TOOLSTATE_PUBLISH":1}}] + jobs=[{"name":"aarch64-gnu","full_name":"auto - 
aarch64-gnu","os":"ubuntu-22.04-arm","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"x86_64-gnu-llvm-18-1","full_name":"auto - x86_64-gnu-llvm-18-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","DOCKER_SCRIPT":"stage_2_test_set1.sh","IMAGE":"x86_64-gnu-llvm-18","READ_ONLY_SRC":"0","RUST_BACKTRACE":1,"TOOLSTATE_PUBLISH":1},"free_disk":true},{"name":"aarch64-apple","full_name":"auto - aarch64-apple","os":"macos-14","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","MACOSX_DEPLOYMENT_TARGET":11.0,"MACOSX_STD_DEPLOYMENT_TARGET":11.0,"NO_DEBUG_ASSERTIONS":1,"NO_LLVM_ASSERTIONS":1,"NO_OVERFLOW_CHECKS":1,"RUSTC_RETRY_LINKER_ON_SEGFAULT":1,"RUST_CONFIGURE_ARGS":"--enable-sanitizers --enable-profiler --set rust.jemalloc","SCRIPT":"./x.py --stage 2 test --host=aarch64-apple-darwin --target=aarch64-apple-darwin","SELECT_XCODE":"/Applications/Xcode_15.4.app","TOOLSTATE_PUBLISH":1,"USE_XCODE_CLANG":1}},{"name":"dist-i686-msvc","full_name":"auto - dist-i686-msvc","os":"windows-2022","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","CODEGEN_BACKENDS":"llvm,cranelift","DEPLOY_BUCKET":"rust-lang-ci2","DIST_REQUIRE_ALL_TOOLS":1,"RUST_CONFIGURE_ARGS":"--build=i686-pc-windows-msvc --host=i686-pc-windows-msvc --target=i686-pc-windows-msvc,i586-pc-windows-msvc --enable-full-tools --enable-profiler","SCRIPT":"python x.py dist bootstrap --include-default-paths","TOOLSTATE_PUBLISH":1}},{"name":"pr-check-1","full_name":"auto - pr-check-1","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true},{"name":"pr-check-2","full_name":"auto - pr-check-2","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true},{"name":"tidy","full_name":"auto - tidy","os":"ubuntu-24.04","env":{"ARTIFACTS_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZN24CBO55","AWS_REGION":"us-west-1","CACHES_AWS_ACCESS_KEY_ID":"AKIA46X5W6CZI5DHEBFL","DEPLOY_BUCKET":"rust-lang-ci2","TOOLSTATE_PUBLISH":1},"continue_on_error":false,"free_disk":true,"doc_url":"https://foo.bar"}] run_type=auto "#); } diff --git a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile index e1d83d36087..0855ea222a3 100644 --- a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile @@ -81,9 +81,9 @@ RUN /tmp/build-fuchsia-toolchain.sh COPY host-x86_64/dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/ RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh -RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \ +RUN curl -L 
https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \ tar -xz -ENV WASI_SDK_PATH=/tmp/wasi-sdk-25.0-x86_64-linux +ENV WASI_SDK_PATH=/tmp/wasi-sdk-27.0-x86_64-linux COPY scripts/freebsd-toolchain.sh /tmp/ RUN /tmp/freebsd-toolchain.sh i686 diff --git a/src/ci/docker/host-x86_64/pr-check-1/Dockerfile b/src/ci/docker/host-x86_64/pr-check-1/Dockerfile index d3c3cd1b63e..04ac0f33daf 100644 --- a/src/ci/docker/host-x86_64/pr-check-1/Dockerfile +++ b/src/ci/docker/host-x86_64/pr-check-1/Dockerfile @@ -40,9 +40,9 @@ COPY host-x86_64/pr-check-1/validate-toolstate.sh /scripts/ # We disable optimized compiler built-ins because that requires a C toolchain for the target. # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs. ENV SCRIPT \ + python3 ../x.py check bootstrap && \ /scripts/check-default-config-profiles.sh && \ python3 ../x.py build src/tools/build-manifest && \ - python3 ../x.py test --stage 0 src/tools/compiletest && \ python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \ python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ python3 ../x.py check --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ diff --git a/src/ci/docker/host-x86_64/pr-check-2/Dockerfile b/src/ci/docker/host-x86_64/pr-check-2/Dockerfile index ce18a181d31..f82e19bcbb4 100644 --- a/src/ci/docker/host-x86_64/pr-check-2/Dockerfile +++ b/src/ci/docker/host-x86_64/pr-check-2/Dockerfile @@ -30,6 +30,7 @@ ENV SCRIPT \ python3 ../x.py check && \ python3 ../x.py clippy ci && \ python3 ../x.py test --stage 1 core alloc std test proc_macro && \ + python3 ../x.py test --stage 1 src/tools/compiletest && \ python3 ../x.py doc --stage 0 bootstrap && \ # Build both public and internal documentation. RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 0 compiler && \ @@ -37,6 +38,6 @@ ENV SCRIPT \ mkdir -p /checkout/obj/staging/doc && \ cp -r build/x86_64-unknown-linux-gnu/doc /checkout/obj/staging && \ RUSTDOCFLAGS=\"--document-private-items --document-hidden-items\" python3 ../x.py doc --stage 1 library/test && \ - # The BOOTSTRAP_TRACING flag is added to verify whether the + # The BOOTSTRAP_TRACING flag is added to verify whether the # bootstrap process compiles successfully with this flag enabled. 
BOOTSTRAP_TRACING=1 python3 ../x.py --help diff --git a/src/ci/docker/host-x86_64/test-various/Dockerfile b/src/ci/docker/host-x86_64/test-various/Dockerfile index 4d09bea69c0..82a820c859d 100644 --- a/src/ci/docker/host-x86_64/test-various/Dockerfile +++ b/src/ci/docker/host-x86_64/test-various/Dockerfile @@ -40,9 +40,9 @@ WORKDIR / COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh -RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-25/wasi-sdk-25.0-x86_64-linux.tar.gz | \ +RUN curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-27/wasi-sdk-27.0-x86_64-linux.tar.gz | \ tar -xz -ENV WASI_SDK_PATH=/wasi-sdk-25.0-x86_64-linux +ENV WASI_SDK_PATH=/wasi-sdk-27.0-x86_64-linux ENV RUST_CONFIGURE_ARGS \ --musl-root-x86_64=/usr/local/x86_64-linux-musl \ @@ -79,7 +79,6 @@ ENV MUSL_TARGETS=x86_64-unknown-linux-musl \ CXX_x86_64_unknown_linux_musl=x86_64-linux-musl-g++ ENV MUSL_SCRIPT python3 /checkout/x.py --stage 2 test --host='' --target $MUSL_TARGETS -COPY host-x86_64/test-various/uefi_qemu_test /uefi_qemu_test ENV UEFI_TARGETS=aarch64-unknown-uefi,i686-unknown-uefi,x86_64-unknown-uefi \ CC_aarch64_unknown_uefi=clang-11 \ CXX_aarch64_unknown_uefi=clang++-11 \ @@ -88,6 +87,8 @@ ENV UEFI_TARGETS=aarch64-unknown-uefi,i686-unknown-uefi,x86_64-unknown-uefi \ CC_x86_64_unknown_uefi=clang-11 \ CXX_x86_64_unknown_uefi=clang++-11 ENV UEFI_SCRIPT python3 /checkout/x.py --stage 2 build --host='' --target $UEFI_TARGETS && \ - python3 -u /uefi_qemu_test/run.py + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target aarch64-unknown-uefi && \ + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target i686-unknown-uefi && \ + python3 /checkout/x.py --stage 2 test tests/run-make/uefi-qemu/rmake.rs --target x86_64-unknown-uefi ENV SCRIPT $WASM_SCRIPT && $NVPTX_SCRIPT && $MUSL_SCRIPT && $UEFI_SCRIPT diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock deleted file mode 100644 index 8b6a664ad93..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.lock +++ /dev/null @@ -1,16 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "r-efi" -version = "5.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" - -[[package]] -name = "uefi_qemu_test" -version = "0.0.0" -dependencies = [ - "r-efi", -] diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml deleted file mode 100644 index 1a8d0d94368..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "uefi_qemu_test" -version = "0.0.0" -edition = "2021" - -[workspace] -resolver = "2" - -[dependencies] -r-efi = "5.2.0" diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py deleted file mode 100755 index 4f877389fbc..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/run.py +++ /dev/null @@ -1,140 +0,0 @@ -#!/usr/bin/env python3 - -import os -import shutil -import subprocess -import sys -import tempfile - -from pathlib import Path - -TARGET_AARCH64 = "aarch64-unknown-uefi" -TARGET_I686 = "i686-unknown-uefi" -TARGET_X86_64 = "x86_64-unknown-uefi" - - -def run(*cmd, capture=False, check=True, env=None, timeout=None): - """Print and run a command, optionally capturing the output.""" - cmd = [str(p) for p in cmd] - print(" ".join(cmd)) - return subprocess.run( - cmd, capture_output=capture, check=check, env=env, text=True, timeout=timeout - ) - - -def build_and_run(tmp_dir, target): - if target == TARGET_AARCH64: - boot_file_name = "bootaa64.efi" - ovmf_dir = Path("/usr/share/AAVMF") - ovmf_code = "AAVMF_CODE.fd" - ovmf_vars = "AAVMF_VARS.fd" - qemu = "qemu-system-aarch64" - machine = "virt" - cpu = "cortex-a72" - elif target == TARGET_I686: - boot_file_name = "bootia32.efi" - ovmf_dir = Path("/usr/share/OVMF") - ovmf_code = "OVMF32_CODE_4M.secboot.fd" - ovmf_vars = "OVMF32_VARS_4M.fd" - # The i686 target intentionally uses 64-bit qemu; the important - # difference is that the OVMF code provides a 32-bit environment. - qemu = "qemu-system-x86_64" - machine = "q35" - cpu = "qemu64" - elif target == TARGET_X86_64: - boot_file_name = "bootx64.efi" - ovmf_dir = Path("/usr/share/OVMF") - ovmf_code = "OVMF_CODE.fd" - ovmf_vars = "OVMF_VARS.fd" - qemu = "qemu-system-x86_64" - machine = "q35" - cpu = "qemu64" - else: - raise KeyError("invalid target") - - host_artifacts = Path("/checkout/obj/build/x86_64-unknown-linux-gnu") - stage0 = host_artifacts / "stage0/bin" - stage2 = host_artifacts / "stage2/bin" - - env = dict(os.environ) - env["PATH"] = "{}:{}:{}".format(stage2, stage0, env["PATH"]) - - # Copy the test create into `tmp_dir`. - test_crate = Path(tmp_dir) / "uefi_qemu_test" - shutil.copytree("/uefi_qemu_test", test_crate) - - # Build the UEFI executable. - run( - "cargo", - "build", - "--manifest-path", - test_crate / "Cargo.toml", - "--target", - target, - env=env, - ) - - # Create a mock EFI System Partition in a subdirectory. - esp = test_crate / "esp" - boot = esp / "efi/boot" - os.makedirs(boot, exist_ok=True) - - # Copy the executable into the ESP. - src_exe_path = test_crate / "target" / target / "debug/uefi_qemu_test.efi" - shutil.copy(src_exe_path, boot / boot_file_name) - print(src_exe_path, boot / boot_file_name) - - # Select the appropriate EDK2 build. - ovmf_code = ovmf_dir / ovmf_code - ovmf_vars = ovmf_dir / ovmf_vars - - # Make a writable copy of the vars file. 
aarch64 doesn't boot - # correctly with read-only vars. - ovmf_rw_vars = Path(tmp_dir) / "vars.fd" - shutil.copy(ovmf_vars, ovmf_rw_vars) - - # Run the executable in QEMU and capture the output. - output = run( - qemu, - "-machine", - machine, - "-cpu", - cpu, - "-display", - "none", - "-serial", - "stdio", - "-drive", - f"if=pflash,format=raw,readonly=on,file={ovmf_code}", - "-drive", - f"if=pflash,format=raw,readonly=off,file={ovmf_rw_vars}", - "-drive", - f"format=raw,file=fat:rw:{esp}", - capture=True, - check=True, - # Set a timeout to kill the VM in case something goes wrong. - timeout=60, - ).stdout - - if "Hello World!" in output: - print("VM produced expected output") - else: - print("unexpected VM output:") - print("---start---") - print(output) - print("---end---") - sys.exit(1) - - -def main(): - targets = [TARGET_AARCH64, TARGET_I686, TARGET_X86_64] - - for target in targets: - # Create a temporary directory so that we have a writeable - # workspace. - with tempfile.TemporaryDirectory() as tmp_dir: - build_and_run(tmp_dir, target) - - -if __name__ == "__main__": - main() diff --git a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs b/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs deleted file mode 100644 index 89e4393cb5c..00000000000 --- a/src/ci/docker/host-x86_64/test-various/uefi_qemu_test/src/main.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Code is adapted from this hello world example: -// https://doc.rust-lang.org/nightly/rustc/platform-support/unknown-uefi.html - -#![no_main] -#![no_std] - -use core::{panic, ptr}; - -use r_efi::efi::{Char16, Handle, RESET_SHUTDOWN, Status, SystemTable}; - -#[panic_handler] -fn panic_handler(_info: &panic::PanicInfo) -> ! { - loop {} -} - -#[export_name = "efi_main"] -pub extern "C" fn main(_h: Handle, st: *mut SystemTable) -> Status { - let s = [ - 0x0048u16, 0x0065u16, 0x006cu16, 0x006cu16, 0x006fu16, // "Hello" - 0x0020u16, // " " - 0x0057u16, 0x006fu16, 0x0072u16, 0x006cu16, 0x0064u16, // "World" - 0x0021u16, // "!" - 0x000au16, // "\n" - 0x0000u16, // NUL - ]; - - // Print "Hello World!". - let r = unsafe { ((*(*st).con_out).output_string)((*st).con_out, s.as_ptr() as *mut Char16) }; - if r.is_error() { - return r; - } - - // Shut down. - unsafe { - ((*((*st).runtime_services)).reset_system)( - RESET_SHUTDOWN, - Status::SUCCESS, - 0, - ptr::null_mut(), - ); - } - - // This should never be reached because `reset_system` should never - // return, so fail with an error if we get here. - Status::UNSUPPORTED -} diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index e16f00547c1..48c570bfa11 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -31,20 +31,11 @@ runners: <<: *base-job - &job-windows - os: windows-2022 - <<: *base-job - - # NOTE: windows-2025 has less disk space available than windows-2022, - # because the D drive is missing. - - &job-windows-25 os: windows-2025 + free_disk: true <<: *base-job - &job-windows-8c - os: windows-2022-8core-32gb - <<: *base-job - - - &job-windows-25-8c os: windows-2025-8core-32gb <<: *base-job @@ -124,9 +115,16 @@ jobs: <<: *job-linux-36c-codebuild -# Jobs that run on each push to a pull request (PR) -# These jobs automatically inherit envs.pr, to avoid repeating -# it in each job definition. +# Jobs that run on each push to a pull request (PR). +# +# These jobs automatically inherit envs.pr, to avoid repeating it in each job +# definition. 
+#
+# PR CI jobs will be automatically registered as Auto CI jobs or overridden. When
+# automatically registered, the PR CI job configuration will be copied as an
+# Auto CI job but with `continue_on_error` overridden to `false` (to fail-fast).
+# When overridden, `citool` will check for equivalence between the PR and Auto CI job
+# of the same name modulo `continue_on_error` and `env`.
 pr:
   - name: pr-check-1
     <<: *job-linux-4c
@@ -177,9 +175,15 @@ optional:
       IMAGE: pr-check-1
     <<: *job-linux-4c

-# Main CI jobs that have to be green to merge a commit into master
-# These jobs automatically inherit envs.auto, to avoid repeating
-# it in each job definition.
+# Main CI jobs that have to be green to merge a commit into master.
+#
+# These jobs automatically inherit envs.auto, to avoid repeating it in each job
+# definition.
+#
+# Auto jobs may not specify `continue_on_error: true`, and thus will fail-fast.
+#
+# Unless explicitly overridden, PR CI jobs will be automatically registered as
+# Auto CI jobs.
 auto:
   #############################
   #  Linux/Docker builders    #
@@ -655,7 +659,7 @@ auto:
       SCRIPT: python x.py build --set rust.debug=true opt-dist && PGO_HOST=x86_64-pc-windows-msvc ./build/x86_64-pc-windows-msvc/stage0-tools-bin/opt-dist windows-ci -- python x.py dist bootstrap --include-default-paths
       DIST_REQUIRE_ALL_TOOLS: 1
       CODEGEN_BACKENDS: llvm,cranelift
-    <<: *job-windows-25-8c
+    <<: *job-windows-8c

   - name: dist-i686-msvc
     env:
diff --git a/src/ci/scripts/free-disk-space-linux.sh b/src/ci/scripts/free-disk-space-linux.sh
new file mode 100755
index 00000000000..32649fe0d9b
--- /dev/null
+++ b/src/ci/scripts/free-disk-space-linux.sh
@@ -0,0 +1,265 @@
+#!/bin/bash
+set -euo pipefail
+
+# Free disk space on Linux GitHub action runners
+# Script inspired by https://github.com/jlumbroso/free-disk-space
+
+isX86() {
+    local arch
+    arch=$(uname -m)
+    if [ "$arch" = "x86_64" ]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# Check if we're on a GitHub hosted runner.
+# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted".
+isGitHubRunner() {
+    # `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string".
+    if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then
+        return 0
+    else
+        return 1
+    fi
+}
+
+# print a line of the specified character
+printSeparationLine() {
+    for ((i = 0; i < 80; i++)); do
+        printf "%s" "$1"
+    done
+    printf "\n"
+}
+
+# compute available space
+# REF: https://unix.stackexchange.com/a/42049/60849
+# REF: https://stackoverflow.com/a/450821/408734
+getAvailableSpace() {
+    df -a | awk 'NR > 1 {avail+=$4} END {print avail}'
+}
+
+# make Kb human readable (assume the input is Kb)
+# REF: https://unix.stackexchange.com/a/44087/60849
+formatByteCount() {
+    numfmt --to=iec-i --suffix=B --padding=7 "${1}000"
+}
+
+# macro to output saved space
+printSavedSpace() {
+    # Disk space before the operation
+    local before=${1}
+    local title=${2:-}
+
+    local after
+    after=$(getAvailableSpace)
+    local saved=$((after - before))
+
+    if [ "$saved" -lt 0 ]; then
+        echo "::warning::Saved space is negative: $saved. Using '0' as saved space." 
+ saved=0 + fi + + echo "" + printSeparationLine "*" + if [ -n "${title}" ]; then + echo "=> ${title}: Saved $(formatByteCount "$saved")" + else + echo "=> Saved $(formatByteCount "$saved")" + fi + printSeparationLine "*" + echo "" +} + +# macro to print output of df with caption +printDF() { + local caption=${1} + + printSeparationLine "=" + echo "${caption}" + echo "" + echo "$ df -h" + echo "" + df -h + printSeparationLine "=" +} + +removeUnusedFilesAndDirs() { + local to_remove=( + "/usr/share/java" + ) + + if isGitHubRunner; then + to_remove+=( + "/usr/local/aws-sam-cli" + "/usr/local/doc/cmake" + "/usr/local/julia"* + "/usr/local/lib/android" + "/usr/local/share/chromedriver-"* + "/usr/local/share/chromium" + "/usr/local/share/cmake-"* + "/usr/local/share/edge_driver" + "/usr/local/share/emacs" + "/usr/local/share/gecko_driver" + "/usr/local/share/icons" + "/usr/local/share/powershell" + "/usr/local/share/vcpkg" + "/usr/local/share/vim" + "/usr/share/apache-maven-"* + "/usr/share/gradle-"* + "/usr/share/kotlinc" + "/usr/share/miniconda" + "/usr/share/php" + "/usr/share/ri" + "/usr/share/swift" + + # binaries + "/usr/local/bin/azcopy" + "/usr/local/bin/bicep" + "/usr/local/bin/ccmake" + "/usr/local/bin/cmake-"* + "/usr/local/bin/cmake" + "/usr/local/bin/cpack" + "/usr/local/bin/ctest" + "/usr/local/bin/helm" + "/usr/local/bin/kind" + "/usr/local/bin/kustomize" + "/usr/local/bin/minikube" + "/usr/local/bin/packer" + "/usr/local/bin/phpunit" + "/usr/local/bin/pulumi-"* + "/usr/local/bin/pulumi" + "/usr/local/bin/stack" + + # Haskell runtime + "/usr/local/.ghcup" + + # Azure + "/opt/az" + "/usr/share/az_"* + ) + + if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then + # Environment variable set by GitHub Actions + to_remove+=( + "${AGENT_TOOLSDIRECTORY}" + ) + else + echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal." + fi + else + # Remove folders and files present in AWS CodeBuild + to_remove+=( + # binaries + "/usr/local/bin/ecs-cli" + "/usr/local/bin/eksctl" + "/usr/local/bin/kubectl" + + "${HOME}/.gradle" + "${HOME}/.dotnet" + "${HOME}/.goenv" + "${HOME}/.phpenv" + + ) + fi + + for element in "${to_remove[@]}"; do + if [ ! -e "$element" ]; then + # The file or directory doesn't exist. + # Maybe it was removed in a newer version of the runner or it's not present in a + # specific architecture (e.g. ARM). + echo "::warning::Directory or file $element does not exist, skipping." + fi + done + + # Remove all files and directories at once to save time. + sudo rm -rf "${to_remove[@]}" +} + +execAndMeasureSpaceChange() { + local operation=${1} # Function to execute + local title=${2} + + local before + before=$(getAvailableSpace) + $operation + + printSavedSpace "$before" "$title" +} + +# Remove large packages +# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh +cleanPackages() { + local packages=( + '^aspnetcore-.*' + '^dotnet-.*' + '^llvm-.*' + '^mongodb-.*' + 'firefox' + 'libgl1-mesa-dri' + 'mono-devel' + 'php.*' + ) + + if isGitHubRunner; then + packages+=( + azure-cli + ) + + if isX86; then + packages+=( + 'google-chrome-stable' + 'google-cloud-cli' + 'google-cloud-sdk' + 'powershell' + ) + fi + else + packages+=( + 'google-chrome-stable' + ) + fi + + sudo apt-get -qq remove -y --fix-missing "${packages[@]}" + + sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed" + sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed" +} + +# Remove Docker images. 
+# Ubuntu 22 runners have docker images already installed. +# They aren't present in ubuntu 24 runners. +cleanDocker() { + echo "=> Removing the following docker images:" + sudo docker image ls + echo "=> Removing docker images..." + sudo docker image prune --all --force || true +} + +# Remove Swap storage +cleanSwap() { + sudo swapoff -a || true + sudo rm -rf /mnt/swapfile || true + free -h +} + +# Display initial disk space stats + +AVAILABLE_INITIAL=$(getAvailableSpace) + +printDF "BEFORE CLEAN-UP:" +echo "" +execAndMeasureSpaceChange cleanPackages "Unused packages" +execAndMeasureSpaceChange cleanDocker "Docker images" +execAndMeasureSpaceChange cleanSwap "Swap storage" +execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories" + +# Output saved space statistic +echo "" +printDF "AFTER CLEAN-UP:" + +echo "" +echo "" + +printSavedSpace "$AVAILABLE_INITIAL" "Total saved" diff --git a/src/ci/scripts/free-disk-space-windows.ps1 b/src/ci/scripts/free-disk-space-windows.ps1 new file mode 100644 index 00000000000..8a4677bd2ab --- /dev/null +++ b/src/ci/scripts/free-disk-space-windows.ps1 @@ -0,0 +1,35 @@ +# Free disk space on Windows GitHub action runners. + +$ErrorActionPreference = 'Stop' + +Get-Volume | Out-String | Write-Output + +$available = $(Get-Volume C).SizeRemaining + +$dirs = 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Tools\Llvm', +'C:\rtools45', 'C:\ghcup', 'C:\Program Files (x86)\Android', +'C:\Program Files\Google\Chrome', 'C:\Program Files (x86)\Microsoft\Edge', +'C:\Program Files\Mozilla Firefox', 'C:\Program Files\MySQL', 'C:\Julia', +'C:\Program Files\MongoDB', 'C:\Program Files\Azure Cosmos DB Emulator', +'C:\Program Files\PostgreSQL', 'C:\Program Files\Unity Hub', +'C:\Strawberry', 'C:\hostedtoolcache\windows\Java_Temurin-Hotspot_jdk' + +foreach ($dir in $dirs) { + Start-ThreadJob -InputObject $dir { + Remove-Item -Recurse -Force -LiteralPath $input + } | Out-Null +} + +foreach ($job in Get-Job) { + Wait-Job $job | Out-Null + if ($job.Error) { + Write-Output "::warning file=$PSCommandPath::$($job.Error)" + } + Remove-Job $job +} + +Get-Volume | Out-String | Write-Output + +$saved = ($(Get-Volume C).SizeRemaining - $available) / 1gb +$savedRounded = [math]::Round($saved, 3) +Write-Output "total space saved: $savedRounded GB" diff --git a/src/ci/scripts/free-disk-space.sh b/src/ci/scripts/free-disk-space.sh index 173f64858b3..062ad801cd8 100755 --- a/src/ci/scripts/free-disk-space.sh +++ b/src/ci/scripts/free-disk-space.sh @@ -1,266 +1,10 @@ #!/bin/bash set -euo pipefail -# Free disk space on Linux GitHub action runners -# Script inspired by https://github.com/jlumbroso/free-disk-space +script_dir=$(dirname "$0") -isX86() { - local arch - arch=$(uname -m) - if [ "$arch" = "x86_64" ]; then - return 0 - else - return 1 - fi -} - -# Check if we're on a GitHub hosted runner. -# In aws codebuild, the variable RUNNER_ENVIRONMENT is "self-hosted". -isGitHubRunner() { - # `:-` means "use the value of RUNNER_ENVIRONMENT if it exists, otherwise use an empty string". 
- if [[ "${RUNNER_ENVIRONMENT:-}" == "github-hosted" ]]; then - return 0 - else - return 1 - fi -} - -# print a line of the specified character -printSeparationLine() { - for ((i = 0; i < 80; i++)); do - printf "%s" "$1" - done - printf "\n" -} - -# compute available space -# REF: https://unix.stackexchange.com/a/42049/60849 -# REF: https://stackoverflow.com/a/450821/408734 -getAvailableSpace() { - df -a | awk 'NR > 1 {avail+=$4} END {print avail}' -} - -# make Kb human readable (assume the input is Kb) -# REF: https://unix.stackexchange.com/a/44087/60849 -formatByteCount() { - numfmt --to=iec-i --suffix=B --padding=7 "${1}000" -} - -# macro to output saved space -printSavedSpace() { - # Disk space before the operation - local before=${1} - local title=${2:-} - - local after - after=$(getAvailableSpace) - local saved=$((after - before)) - - if [ "$saved" -lt 0 ]; then - echo "::warning::Saved space is negative: $saved. Using '0' as saved space." - saved=0 - fi - - echo "" - printSeparationLine "*" - if [ -n "${title}" ]; then - echo "=> ${title}: Saved $(formatByteCount "$saved")" - else - echo "=> Saved $(formatByteCount "$saved")" - fi - printSeparationLine "*" - echo "" -} - -# macro to print output of df with caption -printDF() { - local caption=${1} - - printSeparationLine "=" - echo "${caption}" - echo "" - echo "$ df -h" - echo "" - df -h - printSeparationLine "=" -} - -removeUnusedFilesAndDirs() { - local to_remove=( - "/usr/share/java" - ) - - if isGitHubRunner; then - to_remove+=( - "/usr/local/aws-sam-cli" - "/usr/local/doc/cmake" - "/usr/local/julia"* - "/usr/local/lib/android" - "/usr/local/share/chromedriver-"* - "/usr/local/share/chromium" - "/usr/local/share/cmake-"* - "/usr/local/share/edge_driver" - "/usr/local/share/emacs" - "/usr/local/share/gecko_driver" - "/usr/local/share/icons" - "/usr/local/share/powershell" - "/usr/local/share/vcpkg" - "/usr/local/share/vim" - "/usr/share/apache-maven-"* - "/usr/share/gradle-"* - "/usr/share/kotlinc" - "/usr/share/miniconda" - "/usr/share/php" - "/usr/share/ri" - "/usr/share/swift" - - # binaries - "/usr/local/bin/azcopy" - "/usr/local/bin/bicep" - "/usr/local/bin/ccmake" - "/usr/local/bin/cmake-"* - "/usr/local/bin/cmake" - "/usr/local/bin/cpack" - "/usr/local/bin/ctest" - "/usr/local/bin/helm" - "/usr/local/bin/kind" - "/usr/local/bin/kustomize" - "/usr/local/bin/minikube" - "/usr/local/bin/packer" - "/usr/local/bin/phpunit" - "/usr/local/bin/pulumi-"* - "/usr/local/bin/pulumi" - "/usr/local/bin/stack" - - # Haskell runtime - "/usr/local/.ghcup" - - # Azure - "/opt/az" - "/usr/share/az_"* - ) - - if [ -n "${AGENT_TOOLSDIRECTORY:-}" ]; then - # Environment variable set by GitHub Actions - to_remove+=( - "${AGENT_TOOLSDIRECTORY}" - ) - else - echo "::warning::AGENT_TOOLSDIRECTORY is not set. Skipping removal." - fi - else - # Remove folders and files present in AWS CodeBuild - to_remove+=( - # binaries - "/usr/local/bin/ecs-cli" - "/usr/local/bin/eksctl" - "/usr/local/bin/kubectl" - - "${HOME}/.gradle" - "${HOME}/.dotnet" - "${HOME}/.goenv" - "${HOME}/.phpenv" - - ) - fi - - for element in "${to_remove[@]}"; do - if [ ! -e "$element" ]; then - # The file or directory doesn't exist. - # Maybe it was removed in a newer version of the runner or it's not present in a - # specific architecture (e.g. ARM). - echo "::warning::Directory or file $element does not exist, skipping." - fi - done - - # Remove all files and directories at once to save time. 
- sudo rm -rf "${to_remove[@]}" -} - -execAndMeasureSpaceChange() { - local operation=${1} # Function to execute - local title=${2} - - local before - before=$(getAvailableSpace) - $operation - - printSavedSpace "$before" "$title" -} - -# Remove large packages -# REF: https://github.com/apache/flink/blob/master/tools/azure-pipelines/free_disk_space.sh -cleanPackages() { - local packages=( - '^aspnetcore-.*' - '^dotnet-.*' - '^llvm-.*' - '^mongodb-.*' - 'firefox' - 'libgl1-mesa-dri' - 'mono-devel' - 'php.*' - ) - - if isGitHubRunner; then - packages+=( - azure-cli - ) - - if isX86; then - packages+=( - 'google-chrome-stable' - 'google-cloud-cli' - 'google-cloud-sdk' - 'powershell' - ) - fi - else - packages+=( - 'google-chrome-stable' - ) - fi - - sudo apt-get -qq remove -y --fix-missing "${packages[@]}" - - sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed" - sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed" -} - -# Remove Docker images. -# Ubuntu 22 runners have docker images already installed. -# They aren't present in ubuntu 24 runners. -cleanDocker() { - echo "=> Removing the following docker images:" - sudo docker image ls - echo "=> Removing docker images..." - sudo docker image prune --all --force || true -} - -# Remove Swap storage -cleanSwap() { - sudo swapoff -a || true - sudo rm -rf /mnt/swapfile || true - free -h -} - -# Display initial disk space stats - -AVAILABLE_INITIAL=$(getAvailableSpace) - -printDF "BEFORE CLEAN-UP:" -echo "" - -execAndMeasureSpaceChange cleanPackages "Unused packages" -execAndMeasureSpaceChange cleanDocker "Docker images" -execAndMeasureSpaceChange cleanSwap "Swap storage" -execAndMeasureSpaceChange removeUnusedFilesAndDirs "Unused files and directories" - -# Output saved space statistic -echo "" -printDF "AFTER CLEAN-UP:" - -echo "" -echo "" - -printSavedSpace "$AVAILABLE_INITIAL" "Total saved" +if [[ "${RUNNER_OS:-}" == "Windows" ]]; then + pwsh $script_dir/free-disk-space-windows.ps1 +else + $script_dir/free-disk-space-linux.sh +fi diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index f6b7efe51a1..b631041b6bf 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -460259d14de0274b97b8801e08cb2fe5f16fdac5 +2b5e239c6b86cde974b0ef0f8e23754fb08ff3c5 diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md index 651e2925ad5..e3c0d50fcc7 100644 --- a/src/doc/rustc-dev-guide/src/SUMMARY.md +++ b/src/doc/rustc-dev-guide/src/SUMMARY.md @@ -53,7 +53,8 @@ - [Walkthrough: a typical contribution](./walkthrough.md) - [Implementing new language features](./implementing_new_features.md) - [Stability attributes](./stability.md) -- [Stabilizing Features](./stabilization_guide.md) +- [Stabilizing language features](./stabilization_guide.md) + - [Stabilization report template](./stabilization_report_template.md) - [Feature Gates](./feature-gates.md) - [Coding conventions](./conventions.md) - [Procedures for breaking changes](./bug-fix-procedure.md) diff --git a/src/doc/rustc-dev-guide/src/external-repos.md b/src/doc/rustc-dev-guide/src/external-repos.md index ecc65b26ab7..5fb7eeee8e3 100644 --- a/src/doc/rustc-dev-guide/src/external-repos.md +++ b/src/doc/rustc-dev-guide/src/external-repos.md @@ -40,27 +40,24 @@ implement a new tool feature or test, that should happen in one collective rustc * `portable-simd` ([sync 
script](https://github.com/rust-lang/portable-simd/blob/master/subtree-sync.sh)) * `rustfmt` * `rustc_codegen_cranelift` ([sync script](https://github.com/rust-lang/rustc_codegen_cranelift/blob/113af154d459e41b3dc2c5d7d878e3d3a8f33c69/scripts/rustup.sh#L7)) -* Using the [josh] tool - * `miri` ([sync guide](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo)) - * `rust-analyzer` ([sync script](https://github.com/rust-lang/rust-analyzer/blob/2e13684be123eca7181aa48e043e185d8044a84a/xtask/src/release.rs#L147)) - * `rustc-dev-guide` ([josh sync](#synchronizing-a-josh-subtree)) - * `compiler-builtins` ([josh sync](#synchronizing-a-josh-subtree)) - * `stdarch` ([josh sync](#synchronizing-a-josh-subtree)) +* Using the [josh](#synchronizing-a-josh-subtree) tool + * `miri` + * `rust-analyzer` + * `rustc-dev-guide` + * `compiler-builtins` + * `stdarch` ### Josh subtrees -The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh; you can check out the `miri` or `rust-analyzer` scripts linked above for inspiration. We provide a helper [`rustc-josh-sync`][josh-sync] tool to help with the synchronization, described [below](#synchronizing-a-josh-subtree). +The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh. We provide a helper [`rustc-josh-sync`][josh-sync] tool to help with the synchronization, described [below](#synchronizing-a-josh-subtree). ### Synchronizing a Josh subtree We use a dedicated tool called [`rustc-josh-sync`][josh-sync] for performing Josh subtree updates. -Currently, we are migrating Josh repositories to it. So far, it is used in: +The commands below can be used for all our Josh subtrees, although note that `miri` +requires you to perform some [additional steps](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo) during pulls. -- compiler-builtins -- rustc-dev-guide -- stdarch - -To install the tool: +You can install the tool using the following command: ``` cargo install --locked --git https://github.com/rust-lang/josh-sync ``` @@ -80,6 +77,9 @@ switch to its repository checkout directory in your terminal). #### Performing push +> NOTE: +> Before you proceed, look at some guidance related to Git [on josh-sync README]. + 1) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account ``` rustc-josh-sync push <branch-name> <gh-username> @@ -173,3 +173,4 @@ the week leading up to the beta cut. [Toolstate chapter]: https://forge.rust-lang.org/infra/toolstate.html [josh]: https://josh-project.github.io/josh/intro.html [josh-sync]: https://github.com/rust-lang/josh-sync +[on josh-sync README]: https://github.com/rust-lang/josh-sync#git-peculiarities diff --git a/src/doc/rustc-dev-guide/src/implementing_new_features.md b/src/doc/rustc-dev-guide/src/implementing_new_features.md index 5d0e875cbc1..76cf2386c82 100644 --- a/src/doc/rustc-dev-guide/src/implementing_new_features.md +++ b/src/doc/rustc-dev-guide/src/implementing_new_features.md @@ -2,145 +2,91 @@ <!-- toc --> -When you want to implement a new significant feature in the compiler, -you need to go through this process to make sure everything goes -smoothly. 
+When you want to implement a new significant feature in the compiler, you need to go through this process to make sure everything goes smoothly.

-**NOTE: this section is for *language* features, not *library* features,
-which use [a different process].**
+**NOTE: This section is for *language* features, not *library* features, which use [a different process].**

-See also [the Rust Language Design Team's procedures][lang-propose] for
-proposing changes to the language.
+See also [the Rust Language Design Team's procedures][lang-propose] for proposing changes to the language.

 [a different process]: ./stability.md
 [lang-propose]: https://lang-team.rust-lang.org/how_to/propose.html

 ## The @rfcbot FCP process

-When the change is small and uncontroversial, then it can be done
-with just writing a PR and getting an r+ from someone who knows that
-part of the code. However, if the change is potentially controversial,
-it would be a bad idea to push it without consensus from the rest
-of the team (both in the "distributed system" sense to make sure
-you don't break anything you don't know about, and in the social
-sense to avoid PR fights).
-
-If such a change seems to be too small to require a full formal RFC process
-(e.g., a small standard library addition, a big refactoring of the code, a
-"technically-breaking" change, or a "big bugfix" that basically amounts to a
-small feature) but is still too controversial or big to get by with a single r+,
-you can propose a final comment period (FCP). Or, if you're not on the relevant
-team (and thus don't have @rfcbot permissions), ask someone who is to start one;
-unless they have a concern themselves, they should.
-
-Again, the FCP process is only needed if you need consensus – if you
-don't think anyone would have a problem with your change, it's OK to
-get by with only an r+. For example, it is OK to add or modify
-unstable command-line flags or attributes without an FCP for
-compiler development or standard library use, as long as you don't
-expect them to be in wide use in the nightly ecosystem.
-Some teams have lighter weight processes that they use in scenarios
-like this; for example, the compiler team recommends
-filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to
-garner support and feedback without requiring full consensus.
+When the change is small, uncontroversial, non-breaking, and does not affect the stable language in any user-observable ways or add any new unstable features, then it can be done with just writing a PR and getting an r+ from someone who knows that part of the code. However, if not, more must be done. Even for compiler-internal work, it would be a bad idea to push a controversial change without consensus from the rest of the team (both in the "distributed system" sense to make sure you don't break anything you don't know about, and in the social sense to avoid PR fights).
+
+For changes that need the consensus of a team, we use the process of proposing a final comment period (FCP). If you're not on the relevant team (and thus don't have @rfcbot permissions), ask someone who is to start one; unless they have a concern themselves, they should.
+
+The FCP process is only needed if you need consensus – if no processes require consensus for your change and you don't think anyone would have a problem with it, it's OK to rely on only an r+. 
For example, it is OK to add or modify unstable command-line flags or attributes in the reserved compiler-internal `rustc_` namespace without an FCP for compiler development or standard library use, as long as you don't expect them to be in wide use in the nightly ecosystem. Some teams have lighter weight processes that they use in scenarios like this; for example, the compiler team recommends filing a Major Change Proposal ([MCP][mcp]) as a lightweight way to garner support and feedback without requiring full consensus. [mcp]: https://forge.rust-lang.org/compiler/proposals-and-stabilization.html#how-do-i-submit-an-mcp -You don't need to have the implementation fully ready for r+ to propose an FCP, -but it is generally a good idea to have at least a proof -of concept so that people can see what you are talking about. +You don't need to have the implementation fully ready for r+ to propose an FCP, but it is generally a good idea to have at least a proof of concept so that people can see what you are talking about. -When an FCP is proposed, it requires all members of the team to sign off the -FCP. After they all do so, there's a 10-day-long "final comment period" (hence -the name) where everybody can comment, and if no concerns are raised, the -PR/issue gets FCP approval. +When an FCP is proposed, it requires all members of the team to sign off on the FCP. After they all do so, there's a 10-day-long "final comment period" (hence the name) where everybody can comment, and if no concerns are raised, the PR/issue gets FCP approval. ## The logistics of writing features -There are a few "logistic" hoops you might need to go through in -order to implement a feature in a working way. +There are a few "logistical" hoops you might need to go through in order to implement a feature in a working way. ### Warning Cycles -In some cases, a feature or bugfix might break some existing programs -in some edge cases. In that case, you might want to do a crater run -to assess the impact and possibly add a future-compatibility lint, -similar to those used for -[edition-gated lints](diagnostics.md#edition-gated-lints). +In some cases, a feature or bugfix might break some existing programs in some edge cases. In that case, you'll want to do a crater run to assess the impact and possibly add a future-compatibility lint, similar to those used for [edition-gated lints](diagnostics.md#edition-gated-lints). ### Stability -We [value the stability of Rust]. Code that works and runs on stable -should (mostly) not break. Because of that, we don't want to release -a feature to the world with only team consensus and code review - -we want to gain real-world experience on using that feature on nightly, -and we might want to change the feature based on that experience. - -To allow for that, we must make sure users don't accidentally depend -on that new feature - otherwise, especially if experimentation takes -time or is delayed and the feature takes the trains to stable, -it would end up de facto stable and we'll not be able to make changes -in it without breaking people's code. - -The way we do that is that we make sure all new features are feature -gated - they can't be used without enabling a feature gate -(`#[feature(foo)]`), which can't be done in a stable/beta compiler. -See the [stability in code] section for the technical details. - -Eventually, after we gain enough experience using the feature, -make the necessary changes, and are satisfied, we expose it to -the world using the stabilization process described [here]. 
-Until then, the feature is not set in stone: every part of the
-feature can be changed, or the feature might be completely
-rewritten or removed. Features are not supposed to gain tenure
-by being unstable and unchanged for a year.
+We [value the stability of Rust]. Code that works and runs on stable should (mostly) not break. Because of that, we don't want to release a feature to the world with only team consensus and code review - we want to gain real-world experience on using that feature on nightly, and we might want to change the feature based on that experience.
+
+To allow for that, we must make sure users don't accidentally depend on that new feature - otherwise, especially if experimentation takes time or is delayed and the feature takes the trains to stable, it would end up de facto stable and we'll not be able to make changes in it without breaking people's code.
+
+The way we do that is that we make sure all new features are feature gated - they can't be used without enabling a feature gate (`#[feature(foo)]`), which can't be done in a stable/beta compiler. See the [stability in code] section for the technical details.
+
+Eventually, after we gain enough experience using the feature, make the necessary changes, and are satisfied, we expose it to the world using the stabilization process described [here]. Until then, the feature is not set in stone: every part of the feature can be changed, or the feature might be completely rewritten or removed. Features do not gain tenure by being unstable and unchanged for long periods of time.

 ### Tracking Issues

-To keep track of the status of an unstable feature, the
-experience we get while using it on nightly, and of the
-concerns that block its stabilization, every feature-gate
-needs a tracking issue. General discussions about the feature should be done on the tracking issue.
+To keep track of the status of an unstable feature, the experience we get while using it on
+nightly, and of the concerns that block its stabilization, every feature-gate needs a tracking
+issue.

 When creating issues and PRs related to the feature, reference this tracking issue, and when there are updates about the feature's progress, post those to the tracking issue.

-For features that have an RFC, you should use the RFC's
-tracking issue for the feature.
+For features that are part of an accepted RFC or approved lang experiment, use the tracking issue for that.

-For other features, you'll have to make a tracking issue
-for that feature. The issue title should be "Tracking issue
-for YOUR FEATURE". Use the ["Tracking Issue" issue template][template].
+For other features, create a tracking issue for that feature. The issue title should be "Tracking issue for YOUR FEATURE". Use the ["Tracking Issue" issue template][template].

 [template]: https://github.com/rust-lang/rust/issues/new?template=tracking_issue.md

+### Lang experiments
+
+To land in the compiler, features that have user-visible effects on the language (even unstable ones) must either be part of an accepted RFC or an approved [lang experiment].
+
+To propose a new lang experiment, open an issue in `rust-lang/rust` that describes the motivation and the intended solution. If it's accepted, this issue will become the tracking issue for the experiment, so use the tracking issue [template] while also including these other details. Nominate the issue for the lang team and CC `@rust-lang/lang` and `@rust-lang/lang-advisors`. When the experiment is approved, the tracking issue will be marked as `B-experimental`. 
+
+Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature.
+
+[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html
+
 ## Stability in code

-The below steps needs to be followed in order to implement
-a new unstable feature:
+The below steps need to be followed in order to implement a new unstable feature:

-1. Open a [tracking issue] -
-   if you have an RFC, you can use the tracking issue for the RFC.
+1. Open or identify the [tracking issue]. For features that are part of an accepted RFC or approved lang experiment, use the tracking issue for that.

-   The tracking issue should be labeled with at least `C-tracking-issue`.
-   For a language feature, a label `F-feature_name` should be added as well.
+   Label the tracking issue with `C-tracking-issue` and the relevant `F-feature_name` label (adding that label if needed).

-1. Pick a name for the feature gate (for RFCs, use the name
-   in the RFC).
+1. Pick a name for the feature gate (for RFCs, use the name in the RFC).

 1. Add the feature name to `rustc_span/src/symbol.rs` in the `Symbols {...}` block. Note that this block must be in alphabetical order.

-1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable
-   `declare_features` block.
+1. Add a feature gate declaration to `rustc_feature/src/unstable.rs` in the unstable `declare_features` block.

   ```rust ignore
   /// description of feature
   (unstable, $feature_name, "CURRENT_RUSTC_VERSION", Some($tracking_issue_number))
   ```

-   If you haven't yet
-   opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely
-   to be accepted), you can temporarily use `None` - but make sure to update it before the PR is
-   merged!
+   If you haven't yet opened a tracking issue (e.g. because you want initial feedback on whether the feature is likely to be accepted), you can temporarily use `None` - but make sure to update it before the PR is merged!

   For example:

@@ -149,9 +95,7 @@ a new unstable feature:
   (unstable, non_ascii_idents, "CURRENT_RUSTC_VERSION", Some(55467), None),
   ```

-   Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features`
-   lint]
-   by setting their type to `incomplete`:
+   Features can be marked as incomplete, and trigger the warn-by-default [`incomplete_features` lint] by setting their type to `incomplete`:

   [`incomplete_features` lint]: https://doc.rust-lang.org/rustc/lints/listing/warn-by-default.html#incomplete-features

@@ -160,42 +104,27 @@ a new unstable feature:
   (incomplete, deref_patterns, "CURRENT_RUSTC_VERSION", Some(87121), None),
   ```

-   To avoid [semantic merge conflicts], please use `CURRENT_RUSTC_VERSION` instead of `1.70` or
-   another explicit version number.
+   Feature flags related to a lang experiment must be marked as `incomplete` until an RFC is accepted for the feature.
+
+   To avoid [semantic merge conflicts], use `CURRENT_RUSTC_VERSION` instead of `1.70` or another explicit version number.

   [semantic merge conflicts]: https://bors.tech/essay/2017/02/02/pitch/

-1. Prevent usage of the new feature unless the feature gate is set.
-   You can check it in most places in the compiler using the
-   expression `tcx.features().$feature_name()`
+1. Prevent usage of the new feature unless the feature gate is set. You can check it in most places in the compiler using the expression `tcx.features().$feature_name()`. 
+ + If the feature gate is not set, you should either maintain the pre-feature behavior or raise an error, depending on what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. For an example of adding an error, see [#81015]. - If the feature gate is not set, you should either maintain - the pre-feature behavior or raise an error, depending on - what makes sense. Errors should generally use [`rustc_session::parse::feature_err`]. - For an example of adding an error, see [#81015]. + For features introducing new syntax, pre-expansion gating should be used instead. During parsing, when the new syntax is parsed, the symbol must be inserted to the current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - For features introducing new syntax, pre-expansion gating should be used instead. - During parsing, when the new syntax is parsed, the symbol must be inserted to the - current crate's [`GatedSpans`] via `self.sess.gated_span.gate(sym::my_feature, span)`. - - After being inserted to the gated spans, the span must be checked in the - [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies - features. Exactly how it is gated depends on the exact type of feature, but most - likely will use the `gate_all!()` macro. + After being inserted to the gated spans, the span must be checked in the [`rustc_ast_passes::feature_gate::check_crate`] function, which actually denies features. Exactly how it is gated depends on the exact type of feature, but most likely will use the `gate_all!()` macro. -1. Add a test to ensure the feature cannot be used without - a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. - You can generate the corresponding `.stderr` file by running `./x test -tests/ui/feature-gates/ --bless`. +1. Add a test to ensure the feature cannot be used without a feature gate, by creating `tests/ui/feature-gates/feature-gate-$feature_name.rs`. You can generate the corresponding `.stderr` file by running `./x test tests/ui/feature-gates/ --bless`. -1. Add a section to the unstable book, in - `src/doc/unstable-book/src/language-features/$feature_name.md`. +1. Add a section to the unstable book, in `src/doc/unstable-book/src/language-features/$feature_name.md`. -1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. - PRs without tests will not be accepted! +1. Write a lot of tests for the new feature, preferably in `tests/ui/$feature_name/`. PRs without tests will not be accepted! -1. Get your PR reviewed and land it. You have now successfully - implemented a feature in Rust! +1. Get your PR reviewed and land it. You have now successfully implemented a feature in Rust! [`GatedSpans`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_session/parse/struct.GatedSpans.html [#81015]: https://github.com/rust-lang/rust/pull/81015 @@ -206,3 +135,42 @@ tests/ui/feature-gates/ --bless`. [here]: ./stabilization_guide.md [tracking issue]: #tracking-issues [add-feature-gate]: ./feature-gates.md#adding-a-feature-gate + +## Call for testing + +Once the implementation is complete, the feature will be available to nightly users but not yet part of stable Rust. This is a good time to write a blog post on [the main Rust blog][rust-blog] and issue a "call for testing". + +Some earlier such blog posts include: + +1. [The push for GATs stabilization](https://blog.rust-lang.org/2021/08/03/GATs-stabilization-push/) +2. 
[Changes to `impl Trait` in Rust 2024](https://blog.rust-lang.org/2024/09/05/impl-trait-capture-rules.html) +3. [Async Closures MVP: Call for Testing!](https://blog.rust-lang.org/inside-rust/2024/08/09/async-closures-call-for-testing/) + +Alternatively, [*This Week in Rust*][twir] has a [section][twir-cft] for this. One example of this having been used is: + +- [Call for testing on boolean literals as cfg predicates](https://github.com/rust-lang/rust/issues/131204#issuecomment-2569314526) + +Which option to choose might depend on how significant the language change is, though note that the [*This Week in Rust*][twir] section might be less visible than a dedicated post on the main Rust blog. + +## Polishing + +Giving users a polished experience means more than just implementing the feature in rustc. We need to think about all of the tools and resources that we ship. This work includes: + +- Documenting the language feature in the [Rust Reference][reference]. +- Extending [`rustfmt`] to format any new syntax (if applicable). +- Extending [`rust-analyzer`] (if applicable). The extent of this work can depend on the nature of the language feature, as some features don't need to be blocked on *full* support. + - When a language feature degrades the user experience simply by existing before support is implemented in [`rust-analyzer`], that may lead the lang team to raise a blocking concern. + - Examples of such might include new syntax that [`rust-analyzer`] can't parse or type inference changes it doesn't understand when those lead to bogus diagnostics. + +## Stabilization + +The final step in the feature lifecycle is [stabilization][stab], which is when the feature becomes available to all Rust users. At this point, backward incompatible changes are generally no longer permitted (see the lang team's [defined semver policies](https://rust-lang.github.io/rfcs/1122-language-semver.html) for details). To learn more about stabilization, see the [stabilization guide][stab]. + + +[stab]: ./stabilization_guide.md +[rust-blog]: https://github.com/rust-lang/blog.rust-lang.org/ +[twir]: https://github.com/rust-lang/this-week-in-rust +[twir-cft]: https://this-week-in-rust.org/blog/2025/01/22/this-week-in-rust-583/#calls-for-testing +[`rustfmt`]: https://github.com/rust-lang/rustfmt +[`rust-analyzer`]: https://github.com/rust-lang/rust-analyzer +[reference]: https://github.com/rust-lang/reference diff --git a/src/doc/rustc-dev-guide/src/stabilization_guide.md b/src/doc/rustc-dev-guide/src/stabilization_guide.md index f875c68745f..f155272e5a2 100644 --- a/src/doc/rustc-dev-guide/src/stabilization_guide.md +++ b/src/doc/rustc-dev-guide/src/stabilization_guide.md @@ -1,120 +1,66 @@ # Request for stabilization -**NOTE**: this page is about stabilizing *language* features. -For stabilizing *library* features, see [Stabilizing a library feature]. +**NOTE**: This page is about stabilizing *language* features. For stabilizing *library* features, see [Stabilizing a library feature]. [Stabilizing a library feature]: ./stability.md#stabilizing-a-library-feature -Once an unstable feature has been well-tested with no outstanding -concern, anyone may push for its stabilization. It involves the -following steps: +Once an unstable feature has been well-tested with no outstanding concerns, anyone may push for its stabilization, though involving the people who have worked on it is prudent. 
Follow these steps: <!-- toc --> -## Documentation PRs +## Write an RFC, if needed -<a id="updating-documentation"></a> +If the feature was part of a [lang experiment], the lang team generally will want to first accept an RFC before stabilization. -If any documentation for this feature exists, it should be -in the [`Unstable Book`], located at [`src/doc/unstable-book`]. -If it exists, the page for the feature gate should be removed. +[lang experiment]: https://lang-team.rust-lang.org/how_to/experiment.html + +## Documentation PRs -If there was documentation there, integrating it into the -existing documentation is needed. +<a id="updating-documentation"></a> -If there wasn't documentation there, it needs to be added. +The feature might be documented in the [`Unstable Book`], located at [`src/doc/unstable-book`]. Remove the page for the feature gate if it exists. Integrate any useful parts of that documentation in other places. -Places that may need updated documentation: +Places that may need updated documentation include: -- [The Reference]: This must be updated, in full detail. -- [The Book]: This may or may not need updating, depends. - If you're not sure, please open an issue on this repository - and it can be discussed. -- standard library documentation: As needed. Language features - often don't need this, but if it's a feature that changes - how good examples are written, such as when `?` was added - to the language, updating examples is important. -- [Rust by Example]: As needed. +- [The Reference]: This must be updated, in full detail, and a member of the lang-docs team must review and approve the PR before the stabilization can be merged. +- [The Book]: This is updated as needed. If you're not sure, please open an issue on this repository and it can be discussed. +- Standard library documentation: This is updated as needed. Language features often don't need this, but if it's a feature that changes how idiomatic examples are written, such as when `?` was added to the language, updating these in the library documentation is important. Review also the keyword documentation and ABI documentation in the standard library, as these sometimes needs updates for language changes. +- [Rust by Example]: This is updated as needed. -Prepare PRs to update documentation involving this new feature -for repositories mentioned above. Maintainers of these repositories -will keep these PRs open until the whole stabilization process -has completed. Meanwhile, we can proceed to the next step. +Prepare PRs to update documentation involving this new feature for the repositories mentioned above. Maintainers of these repositories will keep these PRs open until the whole stabilization process has completed. Meanwhile, we can proceed to the next step. ## Write a stabilization report -Find the tracking issue of the feature, and create a short -stabilization report. Essentially this would be a brief summary -of the feature plus some links to test cases showing it works -as expected, along with a list of edge cases that came up -and were considered. This is a minimal "due diligence" that -we do before stabilizing. - -The report should contain: +Author a stabilization report using the [template found in this repository][srt]. -- A summary, showing examples (e.g. code snippets) what is - enabled by this feature. -- Links to test cases in our test suite regarding this feature - and describe the feature's behavior on encountering edge cases. 
-- Links to the documentations (the PRs we have made in the - previous steps). -- Any other relevant information. -- The resolutions of any unresolved questions if the stabilization - is for an RFC. +The stabilization reports summarizes: -Examples of stabilization reports can be found in -[rust-lang/rust#44494][report1] and [rust-lang/rust#28237][report2] (these links -will bring you directly to the comment containing the stabilization report). +- The main design decisions and deviations since the RFC was accepted, including both decisions that were FCP'd or otherwise accepted by the language team as well as those being presented to the lang team for the first time. + - Often, the final stabilized language feature has significant design deviations from the original RFC. That's OK, but these deviations must be highlighted and explained carefully. +- The work that has been done since the RFC was accepted, acknowledging the main contributors that helped drive the language feature forward. -[report1]: https://github.com/rust-lang/rust/issues/44494#issuecomment-360191474 -[report2]: https://github.com/rust-lang/rust/issues/28237#issuecomment-363374130 +The [*Stabilization Template*][srt] includes a series of questions that aim to surface connections between this feature and lang's subteams (e.g. types, opsem, lang-docs, etc.) and to identify items that are commonly overlooked. -## FCP +[srt]: ./stabilization_report_template.md -If any member of the team responsible for tracking this -feature agrees with stabilizing this feature, they will -start the FCP (final-comment-period) process by commenting - -```text -@rfcbot fcp merge -``` - -The rest of the team members will review the proposal. If the final -decision is to stabilize, we proceed to do the actual code modification. +The stabilization report is typically posted as the main comment on the stabilization PR (see the next section). ## Stabilization PR -*This is for stabilizing language features. If you are stabilizing a library -feature, see [the stabilization chapter of the std dev guide][std-guide-stabilization] instead.* - -Once we have decided to stabilize a feature, we need to have -a PR that actually makes that stabilization happen. These kinds -of PRs are a great way to get involved in Rust, as they take -you on a little tour through the source code. +Every feature is different, and some may require steps beyond what this guide discusses. -Here is a general guide to how to stabilize a feature -- -every feature is different, of course, so some features may -require steps beyond what this guide talks about. - -Note: Before we stabilize any feature, it's the rule that it -should appear in the documentation. +Before the stabilization will be considered by the lang team, there must be a complete PR to the Reference describing the feature, and before the stabilization PR will be merged, this PR must have been reviewed and approved by the lang-docs team. ### Updating the feature-gate listing -There is a central listing of unstable feature-gates in -[`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` -macro. There should be an entry for the feature you are aiming -to stabilize, something like (this example is taken from -[rust-lang/rust#32409]: +There is a central listing of unstable feature-gates in [`compiler/rustc_feature/src/unstable.rs`]. Search for the `declare_features!` macro. 
There should be an entry for the feature you are aiming to stabilize, something like (this example is taken from [rust-lang/rust#32409]: ```rust,ignore // pub(restricted) visibilities (RFC 1422) (unstable, pub_restricted, "CURRENT_RUSTC_VERSION", Some(32409)), ``` -The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. -Entries in the `declare_features!` call are sorted, so find the correct place. -When it is done, it should look like: +The above line should be moved to [`compiler/rustc_feature/src/accepted.rs`]. Entries in the `declare_features!` call are sorted, so find the correct place. When it is done, it should look like: ```rust,ignore // pub(restricted) visibilities (RFC 1422) @@ -122,54 +68,31 @@ When it is done, it should look like: // note that we changed this ``` -(Even though you will encounter version numbers in the file of past changes, -you should not put the rustc version you expect your stabilization to happen in, -but instead `CURRENT_RUSTC_VERSION`) +(Even though you will encounter version numbers in the file of past changes, you should not put the rustc version you expect your stabilization to happen in, but instead use `CURRENT_RUSTC_VERSION`.) ### Removing existing uses of the feature-gate -Next search for the feature string (in this case, `pub_restricted`) -in the codebase to find where it appears. Change uses of -`#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders -under `library/` and `compiler/` but not the toplevel `tests/` one) to be -`#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate -only for stage0, which is built using the current beta (this is -needed because the feature is still unstable in the current beta). +Next, search for the feature string (in this case, `pub_restricted`) in the codebase to find where it appears. Change uses of `#![feature(XXX)]` from the `std` and any rustc crates (this includes test folders under `library/` and `compiler/` but not the toplevel `tests/` one) to be `#![cfg_attr(bootstrap, feature(XXX))]`. This includes the feature-gate only for stage0, which is built using the current beta (this is needed because the feature is still unstable in the current beta). -Also, remove those strings from any tests (e.g. under `tests/`). If there are tests -specifically targeting the feature-gate (i.e., testing that the -feature-gate is required to use the feature, but nothing else), -simply remove the test. +Also, remove those strings from any tests (e.g. under `tests/`). If there are tests specifically targeting the feature-gate (i.e., testing that the feature-gate is required to use the feature, but nothing else), simply remove the test. ### Do not require the feature-gate to use the feature -Most importantly, remove the code which flags an error if the -feature-gate is not present (since the feature is now considered -stable). If the feature can be detected because it employs some -new syntax, then a common place for that code to be is in the -same `compiler/rustc_ast_passes/src/feature_gate.rs`. -For example, you might see code like this: +Most importantly, remove the code which flags an error if the feature-gate is not present (since the feature is now considered stable). If the feature can be detected because it employs some new syntax, then a common place for that code to be is in `compiler/rustc_ast_passes/src/feature_gate.rs`. 
For example, you might see code like this: ```rust,ignore -gate_feature_post!(&self, pub_restricted, span, - "`pub(restricted)` syntax is experimental"); +gate_all!(pub_restricted, "`pub(restricted)` syntax is experimental"); ``` -This `gate_feature_post!` macro prints an error if the -`pub_restricted` feature is not enabled. It is not needed -now that `#[pub_restricted]` is stable. +This `gate_feature_post!` macro prints an error if the `pub_restricted` feature is not enabled. It is not needed now that `#[pub_restricted]` is stable. For more subtle features, you may find code like this: ```rust,ignore -if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } +if self.tcx.features().async_fn_in_dyn_trait() { /* XXX */ } ``` -This `pub_restricted` field (obviously named after the feature) -would ordinarily be false if the feature flag is not present -and true if it is. So transform the code to assume that the field -is true. In this case, that would mean removing the `if` and -leaving just the `/* XXX */`. +This `pub_restricted` field (named after the feature) would ordinarily be false if the feature flag is not present and true if it is. So transform the code to assume that the field is true. In this case, that would mean removing the `if` and leaving just the `/* XXX */`. ```rust,ignore if self.tcx.sess.features.borrow().pub_restricted { /* XXX */ } @@ -194,3 +117,40 @@ if something { /* XXX */ } [Rust by Example]: https://github.com/rust-lang/rust-by-example [`Unstable Book`]: https://doc.rust-lang.org/unstable-book/index.html [`src/doc/unstable-book`]: https://github.com/rust-lang/rust/tree/master/src/doc/unstable-book + +## Team nominations + +When opening the stabilization PR, CC the lang team and its advisors (`@rust-lang/lang @rust-lang/lang-advisors`) and any other teams to whom the feature is relevant, e.g.: + +- `@rust-lang/types`, for type system interactions. +- `@rust-lang/opsem`, for interactions with unsafe code. +- `@rust-lang/compiler`, for implementation robustness. +- `@rust-lang/libs-api`, for changes to the standard library API or its guarantees. +- `@rust-lang/lang-docs`, for questions about how this should be documented in the Reference. + +After the stabilization PR is opened with the stabilization report, wait a bit for any immediate comments. When such comments "simmer down" and you feel the PR is ready for consideration by the lang team, [nominate the PR](https://lang-team.rust-lang.org/how_to/nominate.html) to get it on the agenda for consideration in an upcoming lang meeting. + +If you are not a `rust-lang` organization member, you can ask your assigned reviewer to CC the relevant teams on your behalf. + +## Propose FCP on the PR + +After the lang team and other relevant teams review the stabilization, and after you have answered any questions they may have had, a member of one of the teams may propose to accept the stabilization by commenting: + +```text +@rfcbot fcp merge +``` + +Once enough team members have reviewed, the PR will move into a "final comment period" (FCP). If no new concerns are raised, this period will complete and the PR can be merged after implementation review in the usual way. + +## Reviewing and merging stabilizations + +On a stabilization, before giving it the `r+`, ensure that the PR: + +- Matches what the team proposed for stabilization and what is documented in the Reference PR. +- Includes any changes the team decided to request along the way in order to resolve or avoid concerns. 
+- Is otherwise exactly what is described in the stabilization report and in any relevant RFCs or prior lang FCPs. +- Does not expose on stable behaviors other than those specified, accepted for stabilization, and documented in the Reference. +- Has sufficient tests to convincingly demonstrate these things. +- Is accompanied by a PR to the Reference than has been reviewed and approved by a member of lang-docs. + +In particular, when reviewing the PR, keep an eye out for any user-visible details that the lang team failed to consider and specify. If you find one, describe it and nominate the PR for the lang team. diff --git a/src/doc/rustc-dev-guide/src/stabilization_report_template.md b/src/doc/rustc-dev-guide/src/stabilization_report_template.md new file mode 100644 index 00000000000..793f7d7e45c --- /dev/null +++ b/src/doc/rustc-dev-guide/src/stabilization_report_template.md @@ -0,0 +1,277 @@ +# Stabilization report template + +## What is this? + +This is a template for [stabilization reports](./stabilization_guide.md) of **language features**. The questions aim to solicit the details most often needed. These details help reviewers to identify potential problems upfront. Not all parts of the template will apply to every stabilization. If a question doesn't apply, explain briefly why. + +Copy everything after the separator and edit it as Markdown. Replace each *TODO* with your answer. + +--- + +# Stabilization report + +## Summary + +> Remind us what this feature is and what value it provides. Tell the story of what led up to this stabilization. +> +> E.g., see: +> +> - [Stabilize AFIT/RPITIT](https://web.archive.org/web/20250329190642/https://github.com/rust-lang/rust/pull/115822) +> - [Stabilize RTN](https://web.archive.org/web/20250321214601/https://github.com/rust-lang/rust/pull/138424) +> - [Stabilize ATPIT](https://web.archive.org/web/20250124214256/https://github.com/rust-lang/rust/pull/120700) +> - [Stabilize opaque type precise capturing](https://web.archive.org/web/20250312173538/https://github.com/rust-lang/rust/pull/127672) + +*TODO* + +Tracking: + +- *TODO* (Link to tracking issue.) + +Reference PRs: + +- *TODO* (Link to Reference PRs.) + +cc @rust-lang/lang @rust-lang/lang-advisors + +### What is stabilized + +> Describe each behavior being stabilized and give a short example of code that will now be accepted. + +```rust +todo!() +``` + +### What isn't stabilized + +> Describe any parts of the feature not being stabilized. Talk about what we might want to do later and what doors are being left open for that. If what we're not stabilizing might lead to surprises for users, talk about that in particular. + +## Design + +### Reference + +> What updates are needed to the Reference? Link to each PR. If the Reference is missing content needed for describing this feature, discuss that. + +- *TODO* + +### RFC history + +> What RFCs have been accepted for this feature? + +- *TODO* + +### Answers to unresolved questions + +> What questions were left unresolved by the RFC? How have they been answered? Link to any relevant lang decisions. + +*TODO* + +### Post-RFC changes + +> What other user-visible changes have occurred since the RFC was accepted? Describe both changes that the lang team accepted (and link to those decisions) as well as changes that are being presented to the team for the first time in this stabilization report. + +*TODO* + +### Key points + +> What decisions have been most difficult and what behaviors to be stabilized have proved most contentious? 
Summarize the major arguments on all sides and link to earlier documents and discussions. + +*TODO* + +### Nightly extensions + +> Are there extensions to this feature that remain unstable? How do we know that we are not accidentally committing to those? + +*TODO* + +### Doors closed + +> What doors does this stabilization close for later changes to the language? E.g., does this stabilization make any other RFCs, lang experiments, or known in-flight proposals more difficult or impossible to do later? + +## Feedback + +### Call for testing + +> Has a "call for testing" been done? If so, what feedback was received? + +*TODO* + +### Nightly use + +> Do any known nightly users use this feature? Counting instances of `#![feature(FEATURE_NAME)]` on GitHub with grep might be informative. + +*TODO* + +## Implementation + +### Major parts + +> Summarize the major parts of the implementation and provide links into the code and to relevant PRs. +> +> See, e.g., this breakdown of the major parts of async closures: +> +> - <https://rustc-dev-guide.rust-lang.org/coroutine-closures.html> + +*TODO* + +### Coverage + +> Summarize the test coverage of this feature. +> +> Consider what the "edges" of this feature are. We're particularly interested in seeing tests that assure us about exactly what nearby things we're not stabilizing. Tests should of course comprehensively demonstrate that the feature works. Think too about demonstrating the diagnostics seen when common mistakes are made and the feature is used incorrectly. +> +> Within each test, include a comment at the top describing the purpose of the test and what set of invariants it intends to demonstrate. This is a great help to our review. +> +> Describe any known or intentional gaps in test coverage. +> +> Contextualize and link to test folders and individual tests. + +*TODO* + +### Outstanding bugs + +> What outstanding bugs involve this feature? List them. Should any block the stabilization? Discuss why or why not. + +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Outstanding FIXMEs + +> What FIXMEs are still in the code for that feature and why is it OK to leave them there? + +*TODO* + +### Tool changes + +> What changes must be made to our other tools to support this feature. Has this work been done? Link to any relevant PRs and issues. + +- [ ] rustfmt + - *TODO* +- [ ] rust-analyzer + - *TODO* +- [ ] rustdoc (both JSON and HTML) + - *TODO* +- [ ] cargo + - *TODO* +- [ ] clippy + - *TODO* +- [ ] rustup + - *TODO* +- [ ] docs.rs + - *TODO* + +*TODO* + +### Breaking changes + +> If this stabilization represents a known breaking change, link to the crater report, the analysis of the crater report, and to all PRs we've made to ecosystem projects affected by this breakage. Discuss any limitations of what we're able to know about or to fix. + +*TODO* + +Crater report: + +- *TODO* + +Crater analysis: + +- *TODO* + +PRs to affected crates: + +- *TODO* +- *TODO* +- *TODO* + +## Type system, opsem + +### Compile-time checks + +> What compilation-time checks are done that are needed to prevent undefined behavior? +> +> Link to tests demonstrating that these checks are being done. + +*TODO* + +- *TODO* +- *TODO* +- *TODO* + +### Type system rules + +> What type system rules are enforced for this feature and what is the purpose of each? + +*TODO* + +### Sound by default? + +> Does the feature's implementation need specific checks to prevent UB, or is it sound by default and need specific opt-in to perform the dangerous/unsafe operations? 
If it is not sound by default, what is the rationale? + +*TODO* + +### Breaks the AM? + +> Can users use this feature to introduce undefined behavior, or use this feature to break the abstraction of Rust and expose the underlying assembly-level implementation? Describe this if so. + +*TODO* + +## Common interactions + +### Temporaries + +> Does this feature introduce new expressions that can produce temporaries? What are the scopes of those temporaries? + +*TODO* + +### Drop order + +> Does this feature raise questions about the order in which we should drop values? Talk about the decisions made here and how they're consistent with our earlier decisions. + +*TODO* + +### Pre-expansion / post-expansion + +> Does this feature raise questions about what should be accepted pre-expansion (e.g. in code covered by `#[cfg(false)]`) versus what should be accepted post-expansion? What decisions were made about this? + +*TODO* + +### Edition hygiene + +> If this feature is gated on an edition, how do we decide, in the context of the edition hygiene of tokens, whether to accept or reject code. E.g., what token do we use to decide? + +*TODO* + +### SemVer implications + +> Does this feature create any new ways in which library authors must take care to prevent breaking downstreams when making minor-version releases? Describe these. Are these new hazards "major" or "minor" according to [RFC 1105](https://rust-lang.github.io/rfcs/1105-api-evolution.html)? + +*TODO* + +### Exposing other features + +> Are there any other unstable features whose behavior may be exposed by this feature in any way? What features present the highest risk of that? + +*TODO* + +## History + +> List issues and PRs that are important for understanding how we got here. + +- *TODO* +- *TODO* +- *TODO* + +## Acknowledgments + +> Summarize contributors to the feature by name for recognition and so that those people are notified about the stabilization. Does anyone who worked on this *not* think it should be stabilized right now? We'd like to hear about that if so. + +*TODO* + +## Open items + +> List any known items that have not yet been completed and that should be before this is stabilized. + +- [ ] *TODO* +- [ ] *TODO* +- [ ] *TODO* diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 5c3ae359ba0..89e4d3e9b58 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -293,8 +293,6 @@ See [Pretty-printer](compiletest.md#pretty-printer-tests). - `no-auto-check-cfg` — disable auto check-cfg (only for `--check-cfg` tests) - [`revisions`](compiletest.md#revisions) — compile multiple times -- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) - - suppress tidy checks for mentioning unknown revision names -[`forbid-output`](compiletest.md#incremental-tests) — incremental cfail rejects output pattern - [`should-ice`](compiletest.md#incremental-tests) — incremental cfail should @@ -315,6 +313,17 @@ test suites that use those tools: - `llvm-cov-flags` adds extra flags when running LLVM's `llvm-cov` tool. - Used by [coverage tests](compiletest.md#coverage-tests) in `coverage-run` mode. +### Tidy specific directives + +The following directives control how the [tidy script](../conventions.md#formatting) +verifies tests. 
+ +- `ignore-tidy-target-specific-tests` disables checking that the appropriate + LLVM component is required (via a `needs-llvm-components` directive) when a + test is compiled for a specific target (via the `--target` flag in a + `compile-flag` directive). +- [`unused-revision-names`](compiletest.md#ignoring-unused-revision-names) - + suppress tidy checks for mentioning unknown revision names. ## Substitutions diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md index 9bfc60e08a6..782f78d7614 100644 --- a/src/doc/rustc-dev-guide/src/tests/ui.md +++ b/src/doc/rustc-dev-guide/src/tests/ui.md @@ -309,7 +309,9 @@ fn main((ؼ Use `//~?` to match an error without line information. `//~?` is precise and will not match errors if their line information is available. -It should be preferred to using `error-pattern`, which is imprecise and non-exhaustive. +It should be preferred over `//@ error-pattern` +for tests wishing to match against compiler diagnostics, +due to `//@ error-pattern` being imprecise and non-exhaustive. ```rust,ignore //@ compile-flags: --print yyyy @@ -319,8 +321,8 @@ It should be preferred to using `error-pattern`, which is imprecise and non-exha ### `error-pattern` -The `error-pattern` [directive](directives.md) can be used for runtime messages, which don't -have a specific span, or in exceptional cases, for compile time messages. +The `error-pattern` [directive](directives.md) can be used for runtime messages which don't +have a specific span, or, in exceptional cases, for compile time messages. Let's think about this test: @@ -347,8 +349,6 @@ fn main() { } ``` -Use of `error-pattern` is not recommended in general. - For strict testing of compile time output, try to use the line annotations `//~` as much as possible, including `//~?` annotations for diagnostics without spans. @@ -359,7 +359,8 @@ Some of the compiler messages can stay uncovered by annotations in this mode. For checking runtime output, `//@ check-run-results` may be preferable. -Only use `error-pattern` if none of the above works. +Only use `error-pattern` if none of the above works, such as when finding a +specific string pattern in a runtime panic output. Line annotations `//~` and `error-pattern` are compatible and can be used in the same test. diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md index 27910ad0ab7..7bd2970eee7 100644 --- a/src/doc/rustdoc/src/unstable-features.md +++ b/src/doc/rustdoc/src/unstable-features.md @@ -395,6 +395,12 @@ flags to control that behavior. When the `--extern-html-root-url` flag is given one of your dependencies, rustdoc use that URL for those docs. Keep in mind that if those docs exist in the output directory, those local docs will still override this flag. +The names in this flag are first matched against the names given in the `--extern name=` flags, +which allows selecting between multiple crates with the same name (e.g. multiple versions of +the same crate). For transitive dependencies that haven't been loaded via an `--extern` flag, matching +falls backs to using crate names only, without ability to distinguish between multiple crates with +the same name. 
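For illustration only, here is a hypothetical rustdoc invocation exercising that matching; the crate name, file path, and URL are made up. The name on the left of `--extern-html-root-url` is matched against the name given to `--extern` first, and matching by the crate's own name is only a fallback for dependencies that were not passed via `--extern`.

```text
rustdoc src/lib.rs -Z unstable-options \
    --extern mydep=target/debug/deps/libmydep.rmeta \
    --extern-html-root-url mydep=https://docs.example.net/mydep/
```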
+ ## `-Z force-unstable-if-unmarked` Using this flag looks like this: diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 1265a39d27b..14295ce0a31 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -36,6 +36,7 @@ use std::mem; use rustc_ast::token::{Token, TokenKind}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; +use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, IndexEntry}; use rustc_errors::codes::*; use rustc_errors::{FatalError, struct_span_code_err}; @@ -987,28 +988,17 @@ fn clean_proc_macro<'tcx>( kind: MacroKind, cx: &mut DocContext<'tcx>, ) -> ItemKind { - let attrs = cx.tcx.hir_attrs(item.hir_id()); - if kind == MacroKind::Derive - && let Some(derive_name) = - hir_attr_lists(attrs, sym::proc_macro_derive).find_map(|mi| mi.ident()) - { - *name = derive_name.name; + if kind != MacroKind::Derive { + return ProcMacroItem(ProcMacro { kind, helpers: vec![] }); } + let attrs = cx.tcx.hir_attrs(item.hir_id()); + let Some((trait_name, helper_attrs)) = find_attr!(attrs, AttributeKind::ProcMacroDerive { trait_name, helper_attrs, ..} => (*trait_name, helper_attrs)) + else { + return ProcMacroItem(ProcMacro { kind, helpers: vec![] }); + }; + *name = trait_name; + let helpers = helper_attrs.iter().copied().collect(); - let mut helpers = Vec::new(); - for mi in hir_attr_lists(attrs, sym::proc_macro_derive) { - if !mi.has_name(sym::attributes) { - continue; - } - - if let Some(list) = mi.meta_item_list() { - for inner_mi in list { - if let Some(ident) = inner_mi.ident() { - helpers.push(ident.name); - } - } - } - } ProcMacroItem(ProcMacro { kind, helpers }) } @@ -1021,17 +1011,16 @@ fn clean_fn_or_proc_macro<'tcx>( cx: &mut DocContext<'tcx>, ) -> ItemKind { let attrs = cx.tcx.hir_attrs(item.hir_id()); - let macro_kind = attrs.iter().find_map(|a| { - if a.has_name(sym::proc_macro) { - Some(MacroKind::Bang) - } else if a.has_name(sym::proc_macro_derive) { - Some(MacroKind::Derive) - } else if a.has_name(sym::proc_macro_attribute) { - Some(MacroKind::Attr) - } else { - None - } - }); + let macro_kind = if find_attr!(attrs, AttributeKind::ProcMacro(..)) { + Some(MacroKind::Bang) + } else if find_attr!(attrs, AttributeKind::ProcMacroDerive { .. }) { + Some(MacroKind::Derive) + } else if find_attr!(attrs, AttributeKind::ProcMacroAttribute(..)) { + Some(MacroKind::Attr) + } else { + None + }; + match macro_kind { Some(kind) => clean_proc_macro(item, name, kind, cx), None => { diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 986390dbaa0..fed4296fa22 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -173,6 +173,9 @@ pub(crate) struct Options { /// Arguments to be used when compiling doctests. pub(crate) doctest_build_args: Vec<String>, + + /// Target modifiers. + pub(crate) target_modifiers: BTreeMap<OptionsTargetModifiers, String>, } impl fmt::Debug for Options { @@ -377,7 +380,7 @@ impl Options { early_dcx: &mut EarlyDiagCtxt, matches: &getopts::Matches, args: Vec<String>, - ) -> Option<(InputMode, Options, RenderOptions)> { + ) -> Option<(InputMode, Options, RenderOptions, Vec<PathBuf>)> { // Check for unstable options. 
nightly_options::check_nightly_options(early_dcx, matches, &opts()); @@ -640,10 +643,13 @@ impl Options { let extension_css = matches.opt_str("e").map(|s| PathBuf::from(&s)); - if let Some(ref p) = extension_css - && !p.is_file() - { - dcx.fatal("option --extend-css argument must be a file"); + let mut loaded_paths = Vec::new(); + + if let Some(ref p) = extension_css { + loaded_paths.push(p.clone()); + if !p.is_file() { + dcx.fatal("option --extend-css argument must be a file"); + } } let mut themes = Vec::new(); @@ -687,6 +693,7 @@ impl Options { )) .emit(); } + loaded_paths.push(theme_file.clone()); themes.push(StylePath { path: theme_file }); } } @@ -705,6 +712,7 @@ impl Options { &mut id_map, edition, &None, + &mut loaded_paths, ) else { dcx.fatal("`ExternalHtml::load` failed"); }; @@ -796,7 +804,8 @@ impl Options { let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx); let with_examples = matches.opt_strs("with-examples"); - let call_locations = crate::scrape_examples::load_call_locations(with_examples, dcx); + let call_locations = + crate::scrape_examples::load_call_locations(with_examples, dcx, &mut loaded_paths); let doctest_build_args = matches.opt_strs("doctest-build-arg"); let unstable_features = @@ -846,6 +855,7 @@ impl Options { unstable_features, expanded_args: args, doctest_build_args, + target_modifiers, }; let render_options = RenderOptions { output, @@ -881,7 +891,7 @@ impl Options { parts_out_dir, disable_minification, }; - Some((input, options, render_options)) + Some((input, options, render_options, loaded_paths)) } } diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index bd57bb21e63..e89733b2f6d 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -214,6 +214,7 @@ pub(crate) fn create_config( scrape_examples_options, expanded_args, remap_path_prefix, + target_modifiers, .. }: RustdocOptions, render_options: &RenderOptions, @@ -277,6 +278,7 @@ pub(crate) fn create_config( } else { OutputTypes::new(&[]) }, + target_modifiers, ..Options::default() }; diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index 38ba6b4503d..0bef091468f 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -11,7 +11,8 @@ use std::path::{Path, PathBuf}; use std::process::{self, Command, Stdio}; use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::{Arc, Mutex}; -use std::{panic, str}; +use std::time::{Duration, Instant}; +use std::{fmt, panic, str}; pub(crate) use make::{BuildDocTestBuilder, DocTestBuilder}; pub(crate) use markdown::test as test_markdown; @@ -36,6 +37,50 @@ use crate::config::{Options as RustdocOptions, OutputFormat}; use crate::html::markdown::{ErrorCodes, Ignore, LangString, MdRelLine}; use crate::lint::init_lints; +/// Type used to display times (compilation and total) information for merged doctests. +struct MergedDoctestTimes { + total_time: Instant, + /// Total time spent compiling all merged doctests. + compilation_time: Duration, + /// This field is used to keep track of how many merged doctests we (tried to) compile. 
+ added_compilation_times: usize, +} + +impl MergedDoctestTimes { + fn new() -> Self { + Self { + total_time: Instant::now(), + compilation_time: Duration::default(), + added_compilation_times: 0, + } + } + + fn add_compilation_time(&mut self, duration: Duration) { + self.compilation_time += duration; + self.added_compilation_times += 1; + } + + fn display_times(&self) { + // If no merged doctest was compiled, then there is nothing to display since the numbers + // displayed by `libtest` for standalone tests are already accurate (they include both + // compilation and runtime). + if self.added_compilation_times > 0 { + println!("{self}"); + } + } +} + +impl fmt::Display for MergedDoctestTimes { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "all doctests ran in {:.2}s; merged doctests compilation took {:.2}s", + self.total_time.elapsed().as_secs_f64(), + self.compilation_time.as_secs_f64(), + ) + } +} + /// Options that apply to all doctests in a crate or Markdown file (for `rustdoc foo.md`). #[derive(Clone)] pub(crate) struct GlobalTestOptions { @@ -295,6 +340,7 @@ pub(crate) fn run_tests( let mut nb_errors = 0; let mut ran_edition_tests = 0; + let mut times = MergedDoctestTimes::new(); let target_str = rustdoc_options.target.to_string(); for (MergeableTestKey { edition, global_crate_attrs_hash }, mut doctests) in mergeable_tests { @@ -314,13 +360,15 @@ pub(crate) fn run_tests( for (doctest, scraped_test) in &doctests { tests_runner.add_test(doctest, scraped_test, &target_str); } - if let Ok(success) = tests_runner.run_merged_tests( + let (duration, ret) = tests_runner.run_merged_tests( rustdoc_test_options, edition, &opts, &test_args, rustdoc_options, - ) { + ); + times.add_compilation_time(duration); + if let Ok(success) = ret { ran_edition_tests += 1; if !success { nb_errors += 1; @@ -354,11 +402,13 @@ pub(crate) fn run_tests( test::test_main_with_exit_callback(&test_args, standalone_tests, None, || { // We ensure temp dir destructor is called. std::mem::drop(temp_dir.take()); + times.display_times(); }); } if nb_errors != 0 { // We ensure temp dir destructor is called. std::mem::drop(temp_dir); + times.display_times(); // libtest::ERROR_EXIT_CODE is not public but it's the same value. std::process::exit(101); } @@ -444,7 +494,7 @@ fn add_exe_suffix(input: String, target: &TargetTuple) -> String { let exe_suffix = match target { TargetTuple::TargetTuple(_) => Target::expect_builtin(target).options.exe_suffix, TargetTuple::TargetJson { contents, .. } => { - Target::from_json(contents.parse().unwrap()).unwrap().0.options.exe_suffix + Target::from_json(contents).unwrap().0.options.exe_suffix } }; input + &exe_suffix @@ -496,16 +546,19 @@ impl RunnableDocTest { /// /// This is the function that calculates the compiler command line, invokes the compiler, then /// invokes the test or tests in a separate executable (if applicable). +/// +/// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test. fn run_test( doctest: RunnableDocTest, rustdoc_options: &RustdocOptions, supports_color: bool, report_unused_externs: impl Fn(UnusedExterns), -) -> Result<(), TestFailure> { +) -> (Duration, Result<(), TestFailure>) { let langstr = &doctest.langstr; // Make sure we emit well-formed executable names for our target. 
let rust_out = add_exe_suffix("rust_out".to_owned(), &rustdoc_options.target); let output_file = doctest.test_opts.outdir.path().join(rust_out); + let instant = Instant::now(); // Common arguments used for compiling the doctest runner. // On merged doctests, the compiler is invoked twice: once for the test code itself, @@ -589,7 +642,7 @@ fn run_test( if std::fs::write(&input_file, &doctest.full_test_code).is_err() { // If we cannot write this file for any reason, we leave. All combined tests will be // tested as standalone tests. - return Err(TestFailure::CompileError); + return (Duration::default(), Err(TestFailure::CompileError)); } if !rustdoc_options.nocapture { // If `nocapture` is disabled, then we don't display rustc's output when compiling @@ -660,7 +713,7 @@ fn run_test( if std::fs::write(&runner_input_file, merged_test_code).is_err() { // If we cannot write this file for any reason, we leave. All combined tests will be // tested as standalone tests. - return Err(TestFailure::CompileError); + return (instant.elapsed(), Err(TestFailure::CompileError)); } if !rustdoc_options.nocapture { // If `nocapture` is disabled, then we don't display rustc's output when compiling @@ -713,7 +766,7 @@ fn run_test( let _bomb = Bomb(&out); match (output.status.success(), langstr.compile_fail) { (true, true) => { - return Err(TestFailure::UnexpectedCompilePass); + return (instant.elapsed(), Err(TestFailure::UnexpectedCompilePass)); } (true, false) => {} (false, true) => { @@ -729,17 +782,18 @@ fn run_test( .collect(); if !missing_codes.is_empty() { - return Err(TestFailure::MissingErrorCodes(missing_codes)); + return (instant.elapsed(), Err(TestFailure::MissingErrorCodes(missing_codes))); } } } (false, false) => { - return Err(TestFailure::CompileError); + return (instant.elapsed(), Err(TestFailure::CompileError)); } } + let duration = instant.elapsed(); if doctest.no_run { - return Ok(()); + return (duration, Ok(())); } // Run the code! @@ -771,17 +825,17 @@ fn run_test( cmd.output() }; match result { - Err(e) => return Err(TestFailure::ExecutionError(e)), + Err(e) => return (duration, Err(TestFailure::ExecutionError(e))), Ok(out) => { if langstr.should_panic && out.status.success() { - return Err(TestFailure::UnexpectedRunPass); + return (duration, Err(TestFailure::UnexpectedRunPass)); } else if !langstr.should_panic && !out.status.success() { - return Err(TestFailure::ExecutionFailure(out)); + return (duration, Err(TestFailure::ExecutionFailure(out))); } } } - Ok(()) + (duration, Ok(())) } /// Converts a path intended to use as a command to absolute if it is @@ -1071,7 +1125,7 @@ fn doctest_run_fn( no_run: scraped_test.no_run(&rustdoc_options), merged_test_code: None, }; - let res = + let (_, res) = run_test(runnable_test, &rustdoc_options, doctest.supports_color, report_unused_externs); if let Err(err) = res { diff --git a/src/librustdoc/doctest/runner.rs b/src/librustdoc/doctest/runner.rs index f0914474c79..fcfa424968e 100644 --- a/src/librustdoc/doctest/runner.rs +++ b/src/librustdoc/doctest/runner.rs @@ -1,4 +1,5 @@ use std::fmt::Write; +use std::time::Duration; use rustc_data_structures::fx::FxIndexSet; use rustc_span::edition::Edition; @@ -67,6 +68,10 @@ impl DocTestRunner { self.nb_tests += 1; } + /// Returns a tuple containing the `Duration` of the compilation and the `Result` of the test. + /// + /// If compilation failed, it will return `Err`, otherwise it will return `Ok` containing if + /// the test ran successfully. 
pub(crate) fn run_merged_tests( &mut self, test_options: IndividualTestOptions, @@ -74,7 +79,7 @@ impl DocTestRunner { opts: &GlobalTestOptions, test_args: &[String], rustdoc_options: &RustdocOptions, - ) -> Result<bool, ()> { + ) -> (Duration, Result<bool, ()>) { let mut code = "\ #![allow(unused_extern_crates)] #![allow(internal_features)] @@ -204,9 +209,9 @@ std::process::Termination::report(test::test_main(test_args, tests, None)) no_run: false, merged_test_code: Some(code), }; - let ret = + let (duration, ret) = run_test(runnable_test, rustdoc_options, self.supports_color, |_: UnusedExterns| {}); - if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) } + (duration, if let Err(TestFailure::CompileError) = ret { Err(()) } else { Ok(ret.is_ok()) }) } } diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index ea2aa963edd..42ade5b9004 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -1,4 +1,4 @@ -use std::path::Path; +use std::path::{Path, PathBuf}; use std::{fs, str}; use rustc_errors::DiagCtxtHandle; @@ -32,12 +32,13 @@ impl ExternalHtml { id_map: &mut IdMap, edition: Edition, playground: &Option<Playground>, + loaded_paths: &mut Vec<PathBuf>, ) -> Option<ExternalHtml> { let codes = ErrorCodes::from(nightly_build); - let ih = load_external_files(in_header, dcx)?; + let ih = load_external_files(in_header, dcx, loaded_paths)?; let bc = { - let mut bc = load_external_files(before_content, dcx)?; - let m_bc = load_external_files(md_before_content, dcx)?; + let mut bc = load_external_files(before_content, dcx, loaded_paths)?; + let m_bc = load_external_files(md_before_content, dcx, loaded_paths)?; Markdown { content: &m_bc, links: &[], @@ -52,8 +53,8 @@ impl ExternalHtml { bc }; let ac = { - let mut ac = load_external_files(after_content, dcx)?; - let m_ac = load_external_files(md_after_content, dcx)?; + let mut ac = load_external_files(after_content, dcx, loaded_paths)?; + let m_ac = load_external_files(md_after_content, dcx, loaded_paths)?; Markdown { content: &m_ac, links: &[], @@ -79,8 +80,10 @@ pub(crate) enum LoadStringError { pub(crate) fn load_string<P: AsRef<Path>>( file_path: P, dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, ) -> Result<String, LoadStringError> { let file_path = file_path.as_ref(); + loaded_paths.push(file_path.to_owned()); let contents = match fs::read(file_path) { Ok(bytes) => bytes, Err(e) => { @@ -101,10 +104,14 @@ pub(crate) fn load_string<P: AsRef<Path>>( } } -fn load_external_files(names: &[String], dcx: DiagCtxtHandle<'_>) -> Option<String> { +fn load_external_files( + names: &[String], + dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, +) -> Option<String> { let mut out = String::new(); for name in names { - let Ok(s) = load_string(name, dcx) else { return None }; + let Ok(s) = load_string(name, dcx, loaded_paths) else { return None }; out.push_str(&s); out.push('\n'); } diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index 5191120ebdb..e28cc3a542e 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -4,6 +4,7 @@ use rustc_ast::join_path_syms; use rustc_attr_data_structures::StabilityLevel; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet}; +use rustc_metadata::creader::CStore; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::Symbol; use tracing::debug; @@ -158,18 +159,33 @@ impl Cache { 
assert!(cx.external_traits.is_empty()); cx.cache.traits = mem::take(&mut krate.external_traits); + let render_options = &cx.render_options; + let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence; + let dst = &render_options.output; + + // Make `--extern-html-root-url` support the same names as `--extern` whenever possible + let cstore = CStore::from_tcx(tcx); + for (name, extern_url) in &render_options.extern_html_root_urls { + if let Some(crate_num) = cstore.resolved_extern_crate(Symbol::intern(name)) { + let e = ExternalCrate { crate_num }; + let location = e.location(Some(extern_url), extern_url_takes_precedence, dst, tcx); + cx.cache.extern_locations.insert(e.crate_num, location); + } + } + // Cache where all our extern crates are located - // FIXME: this part is specific to HTML so it'd be nice to remove it from the common code + // This is also used in the JSON output. for &crate_num in tcx.crates(()) { let e = ExternalCrate { crate_num }; let name = e.name(tcx); - let render_options = &cx.render_options; - let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u); - let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence; - let dst = &render_options.output; - let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx); - cx.cache.extern_locations.insert(e.crate_num, location); + cx.cache.extern_locations.entry(e.crate_num).or_insert_with(|| { + // falls back to matching by crates' own names, because + // transitive dependencies and injected crates may be loaded without `--extern` + let extern_url = + render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u); + e.location(extern_url, extern_url_takes_precedence, dst, tcx) + }); cx.cache.external_paths.insert(e.def_id(), (vec![name], ItemType::Module)); } diff --git a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs index 3c9be29ccc3..e2f86b8a854 100644 --- a/src/librustdoc/html/render/search_index.rs +++ b/src/librustdoc/html/render/search_index.rs @@ -100,9 +100,22 @@ pub(crate) fn build_index( let crate_doc = short_markdown_summary(&krate.module.doc_value(), &krate.module.link_names(cache)); + #[derive(Eq, Ord, PartialEq, PartialOrd)] + struct SerSymbolAsStr(Symbol); + + impl Serialize for SerSymbolAsStr { + fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> + where + S: Serializer, + { + self.0.as_str().serialize(serializer) + } + } + + type AliasMap = BTreeMap<SerSymbolAsStr, Vec<usize>>; // Aliases added through `#[doc(alias = "...")]`. Since a few items can have the same alias, // we need the alias element to have an array of items. - let mut aliases: BTreeMap<String, Vec<usize>> = BTreeMap::new(); + let mut aliases: AliasMap = BTreeMap::new(); // Sort search index items. This improves the compressibility of the search index. cache.search_index.sort_unstable_by(|k1, k2| { @@ -116,7 +129,7 @@ pub(crate) fn build_index( // Set up alias indexes. for (i, item) in cache.search_index.iter().enumerate() { for alias in &item.aliases[..] { - aliases.entry(alias.to_string()).or_default().push(i); + aliases.entry(SerSymbolAsStr(*alias)).or_default().push(i); } } @@ -474,7 +487,7 @@ pub(crate) fn build_index( // The String is alias name and the vec is the list of the elements with this alias. // // To be noted: the `usize` elements are indexes to `items`. 
- aliases: &'a BTreeMap<String, Vec<usize>>, + aliases: &'a AliasMap, // Used when a type has more than one impl with an associated item with the same name. associated_item_disambiguators: &'a Vec<(usize, String)>, // A list of shard lengths encoded as vlqhex. See the comment in write_vlqhex_to_string diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js index 3c4af0dc612..8e3d07b3a1c 100644 --- a/src/librustdoc/html/static/js/main.js +++ b/src/librustdoc/html/static/js/main.js @@ -568,7 +568,11 @@ function preLoadCss(cssUrl) { break; case "-": ev.preventDefault(); - collapseAllDocs(); + collapseAllDocs(false); + break; + case "_": + ev.preventDefault(); + collapseAllDocs(true); break; case "?": @@ -1038,11 +1042,14 @@ function preLoadCss(cssUrl) { innerToggle.children[0].innerText = "Summary"; } - function collapseAllDocs() { + /** + * @param {boolean} collapseImpls - also collapse impl blocks if set to true + */ + function collapseAllDocs(collapseImpls) { const innerToggle = document.getElementById(toggleAllDocsId); addClass(innerToggle, "will-expand"); onEachLazy(document.getElementsByClassName("toggle"), e => { - if (e.parentNode.id !== "implementations-list" || + if ((collapseImpls || e.parentNode.id !== "implementations-list") || (!hasClass(e, "implementors-toggle") && !hasClass(e, "type-contents-toggle")) ) { @@ -1053,7 +1060,10 @@ function preLoadCss(cssUrl) { innerToggle.children[0].innerText = "Show all"; } - function toggleAllDocs() { + /** + * @param {MouseEvent=} ev + */ + function toggleAllDocs(ev) { const innerToggle = document.getElementById(toggleAllDocsId); if (!innerToggle) { return; @@ -1061,7 +1071,7 @@ function preLoadCss(cssUrl) { if (hasClass(innerToggle, "will-expand")) { expandAllDocs(); } else { - collapseAllDocs(); + collapseAllDocs(ev !== undefined && ev.shiftKey); } } @@ -1519,6 +1529,10 @@ function preLoadCss(cssUrl) { ["⏎", "Go to active search result"], ["+", "Expand all sections"], ["-", "Collapse all sections"], + // for the sake of brevity, we don't say "inherint impl blocks", + // although that would be more correct, + // since trait impl blocks are collapsed by - + ["_", "Collapse all sections, including impl blocks"], ].map(x => "<dt>" + x[0].split(" ") .map((y, index) => ((index & 1) === 0 ? "<kbd>" + y + "</kbd>" : " " + y + " ")) diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js index 76113726894..ca13b891638 100644 --- a/src/librustdoc/html/static/js/storage.js +++ b/src/librustdoc/html/static/js/storage.js @@ -418,7 +418,9 @@ class RustdocToolbarElement extends HTMLElement { <div id="help-button" tabindex="-1"> <a href="${rootPath}help.html"><span class="label">Help</span></a> </div> - <button id="toggle-all-docs"><span class="label">Summary</span></button>`; + <button id="toggle-all-docs" +title="Collapse sections (shift-click to also collapse impl blocks)"><span +class="label">Summary</span></button>`; } } window.customElements.define("rustdoc-toolbar", RustdocToolbarElement); diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index a3cdc4f687f..7431ff8e1ad 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -799,7 +799,7 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) { // Note that we discard any distinction between different non-zero exit // codes from `from_matches` here. 
- let (input, options, render_options) = + let (input, options, render_options, loaded_paths) = match config::Options::from_matches(early_dcx, &matches, args) { Some(opts) => opts, None => return, @@ -870,6 +870,12 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) { interface::run_compiler(config, |compiler| { let sess = &compiler.sess; + // Register the loaded external files in the source map so they show up in depinfo. + // We can't load them via the source map because it gets created after we process the options. + for external_path in &loaded_paths { + let _ = sess.source_map().load_file(external_path); + } + if sess.opts.describe_lints { rustc_driver::describe_lints(sess, registered_lints); return; diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs index fceacb69fb5..4d29c74e1eb 100644 --- a/src/librustdoc/scrape_examples.rs +++ b/src/librustdoc/scrape_examples.rs @@ -333,9 +333,11 @@ pub(crate) fn run( pub(crate) fn load_call_locations( with_examples: Vec<String>, dcx: DiagCtxtHandle<'_>, + loaded_paths: &mut Vec<PathBuf>, ) -> AllCallLocations { let mut all_calls: AllCallLocations = FxIndexMap::default(); for path in with_examples { + loaded_paths.push(path.clone().into()); let bytes = match fs::read(&path) { Ok(bytes) => bytes, Err(e) => dcx.fatal(format!("failed to load examples: {e}")), diff --git a/src/tools/cargo b/src/tools/cargo -Subproject 6833aa715d724437dc1247d0166afe314ab6854 +Subproject 9b296973b425ffb159e12cf3cd56580fd5c8538 diff --git a/src/tools/clippy/.github/workflows/feature_freeze.yml b/src/tools/clippy/.github/workflows/feature_freeze.yml index 7ad58af77d4..ec59be3e7f6 100644 --- a/src/tools/clippy/.github/workflows/feature_freeze.yml +++ b/src/tools/clippy/.github/workflows/feature_freeze.yml @@ -20,16 +20,26 @@ jobs: # of the pull request, as malicious code would be able to access the private # GitHub token. 
steps: - - name: Check PR Changes - id: pr-changes - run: echo "::set-output name=changes::${{ toJson(github.event.pull_request.changed_files) }}" - - - name: Create Comment - if: steps.pr-changes.outputs.changes != '[]' - run: | - # Use GitHub API to create a comment on the PR - PR_NUMBER=${{ github.event.pull_request.number }} - COMMENT="**Seems that you are trying to add a new lint!**\nWe are currently in a [feature freeze](https://doc.rust-lang.org/nightly/clippy/development/feature_freeze.html), so we are delaying all lint-adding PRs to September 18 and focusing on bugfixes.\nThanks a lot for your contribution, and sorry for the inconvenience.\nWith ❤ from the Clippy team\n\n@rustbot note Feature-freeze\n@rustbot blocked\n@rustbot label +A-lint\n" - GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} - COMMENT_URL="https://api.github.com/repos/${{ github.repository }}/issues/${PR_NUMBER}/comments" - curl -s -H "Authorization: token ${GITHUB_TOKEN}" -X POST $COMMENT_URL -d "{\"body\":\"$COMMENT\"}" + - name: Add freeze warning comment + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} + PR_NUMBER: ${{ github.event.pull_request.number }} + run: | + COMMENT=$(echo "**Seems that you are trying to add a new lint!**\n\ + \n\ + We are currently in a [feature freeze](https://doc.rust-lang.org/nightly/clippy/development/feature_freeze.html), so we are delaying all lint-adding PRs to September 18 and [focusing on bugfixes](https://github.com/rust-lang/rust-clippy/issues/15086).\n\ + \n\ + Thanks a lot for your contribution, and sorry for the inconvenience.\n\ + \n\ + With ❤ from the Clippy team.\n\ + \n\ + @rustbot note Feature-freeze\n\ + @rustbot blocked\n\ + @rustbot label +A-lint" + ) + curl -s -H "Authorization: Bearer $GITHUB_TOKEN" \ + -H "Content-Type: application/vnd.github.raw+json" \ + -X POST \ + --data "{\"body\":\"${COMMENT}\"}" \ + "https://api.github.com/repos/${GITHUB_REPOSITORY}/issues/${PR_NUMBER}/comments" diff --git a/src/tools/clippy/.gitignore b/src/tools/clippy/.gitignore index a7c25b29021..36a4cdc1c35 100644 --- a/src/tools/clippy/.gitignore +++ b/src/tools/clippy/.gitignore @@ -19,8 +19,10 @@ out # Generated by Cargo *Cargo.lock +!/clippy_test_deps/Cargo.lock /target /clippy_lints/target +/clippy_lints_internal/target /clippy_utils/target /clippy_dev/target /lintcheck/target diff --git a/src/tools/clippy/clippy_dev/src/fmt.rs b/src/tools/clippy/clippy_dev/src/fmt.rs index bd9e57c9f6d..2b2138d3108 100644 --- a/src/tools/clippy/clippy_dev/src/fmt.rs +++ b/src/tools/clippy/clippy_dev/src/fmt.rs @@ -3,7 +3,7 @@ use crate::utils::{ walk_dir_no_dot_or_target, }; use itertools::Itertools; -use rustc_lexer::{TokenKind, tokenize}; +use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize}; use std::fmt::Write; use std::fs; use std::io::{self, Read}; @@ -92,7 +92,7 @@ fn fmt_conf(check: bool) -> Result<(), Error> { let mut fields = Vec::new(); let mut state = State::Start; - for (i, t) in tokenize(conf) + for (i, t) in tokenize(conf, FrontmatterAllowed::No) .map(|x| { let start = pos; pos += x.len; diff --git a/src/tools/clippy/clippy_lints/src/approx_const.rs b/src/tools/clippy/clippy_lints/src/approx_const.rs index 5ed4c82634a..184fbbf7796 100644 --- a/src/tools/clippy/clippy_lints/src/approx_const.rs +++ b/src/tools/clippy/clippy_lints/src/approx_const.rs @@ -92,9 +92,11 @@ impl LateLintPass<'_> for ApproxConstant { impl ApproxConstant { fn check_known_consts(&self, cx: &LateContext<'_>, span: Span, s: symbol::Symbol, module: &str) { let s 
= s.as_str(); - if s.parse::<f64>().is_ok() { + if let Ok(maybe_constant) = s.parse::<f64>() { for &(constant, name, min_digits, msrv) in &KNOWN_CONSTS { - if is_approx_const(constant, s, min_digits) && msrv.is_none_or(|msrv| self.msrv.meets(cx, msrv)) { + if is_approx_const(constant, s, maybe_constant, min_digits) + && msrv.is_none_or(|msrv| self.msrv.meets(cx, msrv)) + { span_lint_and_help( cx, APPROX_CONSTANT, @@ -112,18 +114,35 @@ impl ApproxConstant { impl_lint_pass!(ApproxConstant => [APPROX_CONSTANT]); +fn count_digits_after_dot(input: &str) -> usize { + input + .char_indices() + .find(|(_, ch)| *ch == '.') + .map_or(0, |(i, _)| input.len() - i - 1) +} + /// Returns `false` if the number of significant figures in `value` are /// less than `min_digits`; otherwise, returns true if `value` is equal -/// to `constant`, rounded to the number of digits present in `value`. +/// to `constant`, rounded to the number of significant digits present in `value`. #[must_use] -fn is_approx_const(constant: f64, value: &str, min_digits: usize) -> bool { +fn is_approx_const(constant: f64, value: &str, f_value: f64, min_digits: usize) -> bool { if value.len() <= min_digits { + // The value is not precise enough false - } else if constant.to_string().starts_with(value) { - // The value is a truncated constant + } else if f_value.to_string().len() > min_digits && constant.to_string().starts_with(&f_value.to_string()) { + // The value represents the same value true } else { - let round_const = format!("{constant:.*}", value.len() - 2); + // The value is a truncated constant + + // Print constant with numeric formatting (`0`), with the length of `value` as minimum width + // (`value_len$`), and with the same precision as `value` (`.value_prec$`). + // See https://doc.rust-lang.org/std/fmt/index.html. + let round_const = format!( + "{constant:0value_len$.value_prec$}", + value_len = value.len(), + value_prec = count_digits_after_dot(value) + ); value == round_const } } diff --git a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs index a9d3015ce5c..7b4cf033674 100644 --- a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs +++ b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs @@ -8,11 +8,11 @@ use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::is_cfg_test; use rustc_attr_data_structures::AttributeKind; use rustc_hir::{ - Attribute, FieldDef, HirId, IsAuto, ImplItemId, Item, ItemKind, Mod, OwnerId, QPath, TraitItemId, TyKind, - Variant, VariantData, + Attribute, FieldDef, HirId, ImplItemId, IsAuto, Item, ItemKind, Mod, OwnerId, QPath, TraitItemId, TyKind, Variant, + VariantData, }; -use rustc_middle::ty::AssocKind; use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_middle::ty::AssocKind; use rustc_session::impl_lint_pass; use rustc_span::Ident; @@ -469,13 +469,14 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { /// This is implemented here because `rustc_hir` is not a dependency of /// `clippy_config`. fn convert_assoc_item_kind(cx: &LateContext<'_>, owner_id: OwnerId) -> SourceItemOrderingTraitAssocItemKind { - let kind = cx.tcx.associated_item(owner_id.def_id).kind; - #[allow(clippy::enum_glob_use)] // Very local glob use for legibility. 
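The reworked `is_approx_const` above decides whether a literal is a truncated known constant by re-formatting the constant with the literal's own width and decimal precision and comparing the two strings. A small worked example of that formatting step, using only the standard library (the helper is the one introduced in the hunk above):

```rust
/// Number of digits after the decimal point, as in the hunk above.
fn count_digits_after_dot(input: &str) -> usize {
    input
        .char_indices()
        .find(|(_, ch)| *ch == '.')
        .map_or(0, |(i, _)| input.len() - i - 1)
}

fn main() {
    let constant = std::f64::consts::PI;
    // A literal as it might appear in user code.
    let value = "3.1416";
    // Zero-padded to the literal's length, rounded to the literal's precision.
    let round_const = format!(
        "{constant:0value_len$.value_prec$}",
        value_len = value.len(),
        value_prec = count_digits_after_dot(value)
    );
    // PI rounded to four decimals is "3.1416", so this literal would be flagged.
    assert_eq!(round_const, value);
    println!("{round_const} matches {value}");
}
```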
use SourceItemOrderingTraitAssocItemKind::*; + + let kind = cx.tcx.associated_item(owner_id.def_id).kind; + match kind { - AssocKind::Const{..} => Const, - AssocKind::Type {..}=> Type, + AssocKind::Const { .. } => Const, + AssocKind::Type { .. } => Type, AssocKind::Fn { .. } => Fn, } } diff --git a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs index 9e09fb5bb43..085029a744b 100644 --- a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs +++ b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs @@ -73,7 +73,7 @@ impl<'tcx> LateLintPass<'tcx> for ArcWithNonSendSync { diag.note(format!( "`Arc<{arg_ty}>` is not `Send` and `Sync` as `{arg_ty}` is {reason}" )); - diag.help("if the `Arc` will not used be across threads replace it with an `Rc`"); + diag.help("if the `Arc` will not be used across threads replace it with an `Rc`"); diag.help(format!( "otherwise make `{arg_ty}` `Send` and `Sync` or consider a wrapper type such as `Mutex`" )); diff --git a/src/tools/clippy/clippy_lints/src/assigning_clones.rs b/src/tools/clippy/clippy_lints/src/assigning_clones.rs index 8b8b42bbf72..52287be34c7 100644 --- a/src/tools/clippy/clippy_lints/src/assigning_clones.rs +++ b/src/tools/clippy/clippy_lints/src/assigning_clones.rs @@ -98,7 +98,7 @@ impl<'tcx> LateLintPass<'tcx> for AssigningClones { // That is overly conservative - the lint should fire even if there was no initializer, // but the variable has been initialized before `lhs` was evaluated. && path_to_local(lhs).is_none_or(|lhs| local_is_initialized(cx, lhs)) - && let Some(resolved_impl) = cx.tcx.impl_of_method(resolved_fn.def_id()) + && let Some(resolved_impl) = cx.tcx.impl_of_assoc(resolved_fn.def_id()) // Derived forms don't implement `clone_from`/`clone_into`. // See https://github.com/rust-lang/rust/pull/98445#issuecomment-1190681305 && !cx.tcx.is_builtin_derived(resolved_impl) diff --git a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs index 4059f9603c3..b9b5cedb5aa 100644 --- a/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs +++ b/src/tools/clippy/clippy_lints/src/attrs/useless_attribute.rs @@ -36,6 +36,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, item: &Item, attrs: &[Attribute]) { | sym::unused_braces | sym::unused_import_braces | sym::unused_imports + | sym::redundant_imports ) { return; diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs index e4dafde0f9d..a1543cabd2f 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_ptr_alignment.rs @@ -63,7 +63,7 @@ fn is_used_as_unaligned(cx: &LateContext<'_>, e: &Expr<'_>) -> bool { ExprKind::MethodCall(name, self_arg, ..) 
if self_arg.hir_id == e.hir_id => { if matches!(name.ident.name, sym::read_unaligned | sym::write_unaligned) && let Some(def_id) = cx.typeck_results().type_dependent_def_id(parent.hir_id) - && let Some(def_id) = cx.tcx.impl_of_method(def_id) + && let Some(def_id) = cx.tcx.impl_of_assoc(def_id) && cx.tcx.type_of(def_id).instantiate_identity().is_raw_ptr() { true diff --git a/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs index 769cc120c95..73347e7141e 100644 --- a/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/confusing_method_to_numeric_cast.rs @@ -37,7 +37,7 @@ fn get_const_name_and_ty_name( } else { return None; } - } else if let Some(impl_id) = cx.tcx.impl_of_method(method_def_id) + } else if let Some(impl_id) = cx.tcx.impl_of_assoc(method_def_id) && let Some(ty_name) = get_primitive_ty_name(cx.tcx.type_of(impl_id).instantiate_identity()) && matches!( method_name, @@ -59,9 +59,8 @@ fn get_const_name_and_ty_name( pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) { // We allow casts from any function type to any function type. - match cast_to.kind() { - ty::FnDef(..) | ty::FnPtr(..) => return, - _ => { /* continue to checks */ }, + if cast_to.is_fn() { + return; } if let ty::FnDef(def_id, generics) = cast_from.kind() diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs index 55e27a05f3c..c5d9643f56a 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast.rs @@ -3,7 +3,7 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::{FN_TO_NUMERIC_CAST, utils}; @@ -13,23 +13,20 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, return; }; - match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(..) 
=> { - let mut applicability = Applicability::MaybeIncorrect; + if cast_from.is_fn() { + let mut applicability = Applicability::MaybeIncorrect; - if to_nbits >= cx.tcx.data_layout.pointer_size().bits() && !cast_to.is_usize() { - let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); - span_lint_and_sugg( - cx, - FN_TO_NUMERIC_CAST, - expr.span, - format!("casting function pointer `{from_snippet}` to `{cast_to}`"), - "try", - format!("{from_snippet} as usize"), - applicability, - ); - } - }, - _ => {}, + if to_nbits >= cx.tcx.data_layout.pointer_size().bits() && !cast_to.is_usize() { + let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); + span_lint_and_sugg( + cx, + FN_TO_NUMERIC_CAST, + expr.span, + format!("casting function pointer `{from_snippet}` to `{cast_to}`"), + "try", + format!("{from_snippet} as usize"), + applicability, + ); + } } } diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs index b22e8f4ee89..43ee91af6e5 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_any.rs @@ -3,18 +3,17 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::FN_TO_NUMERIC_CAST_ANY; pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) { // We allow casts from any function type to any function type. - match cast_to.kind() { - ty::FnDef(..) | ty::FnPtr(..) => return, - _ => { /* continue to checks */ }, + if cast_to.is_fn() { + return; } - if let ty::FnDef(..) | ty::FnPtr(..) = cast_from.kind() { + if cast_from.is_fn() { let mut applicability = Applicability::MaybeIncorrect; let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability); diff --git a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs index 4da79205e20..9a2e44e07d4 100644 --- a/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs +++ b/src/tools/clippy/clippy_lints/src/casts/fn_to_numeric_cast_with_truncation.rs @@ -3,7 +3,7 @@ use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::{self, Ty}; +use rustc_middle::ty::Ty; use super::{FN_TO_NUMERIC_CAST_WITH_TRUNCATION, utils}; @@ -12,23 +12,20 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, let Some(to_nbits) = utils::int_ty_to_nbits(cx.tcx, cast_to) else { return; }; - match cast_from.kind() { - ty::FnDef(..) | ty::FnPtr(..) 
=> { - let mut applicability = Applicability::MaybeIncorrect; - let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); + if cast_from.is_fn() { + let mut applicability = Applicability::MaybeIncorrect; + let from_snippet = snippet_with_applicability(cx, cast_expr.span, "x", &mut applicability); - if to_nbits < cx.tcx.data_layout.pointer_size().bits() { - span_lint_and_sugg( - cx, - FN_TO_NUMERIC_CAST_WITH_TRUNCATION, - expr.span, - format!("casting function pointer `{from_snippet}` to `{cast_to}`, which truncates the value"), - "try", - format!("{from_snippet} as usize"), - applicability, - ); - } - }, - _ => {}, + if to_nbits < cx.tcx.data_layout.pointer_size().bits() { + span_lint_and_sugg( + cx, + FN_TO_NUMERIC_CAST_WITH_TRUNCATION, + expr.span, + format!("casting function pointer `{from_snippet}` to `{cast_to}`, which truncates the value"), + "try", + format!("{from_snippet} as usize"), + applicability, + ); + } } } diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs index 6f944914b8f..ee0f3fa81c6 100644 --- a/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/casts/ptr_as_ptr.rs @@ -4,10 +4,9 @@ use clippy_utils::source::snippet_with_applicability; use clippy_utils::sugg::Sugg; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Mutability, QPath, TyKind}; -use rustc_hir_pretty::qpath_to_string; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::sym; +use rustc_span::{Span, sym}; use super::PTR_AS_PTR; @@ -74,7 +73,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Msrv) { let (help, final_suggestion) = if let Some(method) = omit_cast.corresponding_item() { // don't force absolute path - let method = qpath_to_string(&cx.tcx, method); + let method = snippet_with_applicability(cx, qpath_span_without_turbofish(method), "..", &mut app); ("try call directly", format!("{method}{turbofish}()")) } else { let cast_expr_sugg = Sugg::hir_with_applicability(cx, cast_expr, "_", &mut app); @@ -96,3 +95,14 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, msrv: Msrv) { ); } } + +fn qpath_span_without_turbofish(qpath: &QPath<'_>) -> Span { + if let QPath::Resolved(_, path) = qpath + && let [.., last_ident] = path.segments + && last_ident.args.is_some() + { + return qpath.span().shrink_to_lo().to(last_ident.ident.span); + } + + qpath.span() +} diff --git a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs index d5d937d9133..518535e8c8b 100644 --- a/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs +++ b/src/tools/clippy/clippy_lints/src/cognitive_complexity.rs @@ -110,7 +110,6 @@ impl CognitiveComplexity { FnKind::ItemFn(ident, _, _) | FnKind::Method(ident, _) => ident.span, FnKind::Closure => { let header_span = body_span.with_hi(decl.output.span().lo()); - #[expect(clippy::range_plus_one)] if let Some(range) = header_span.map_range(cx, |_, src, range| { let mut idxs = src.get(range.clone())?.match_indices('|'); Some(range.start + idxs.next()?.0..range.start + idxs.next()?.0 + 1) diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs index 27918698cd6..4bd34527d21 100644 --- a/src/tools/clippy/clippy_lints/src/copies.rs +++ b/src/tools/clippy/clippy_lints/src/copies.rs @@ -1,5 +1,6 @@ use clippy_config::Conf; use clippy_utils::diagnostics::{span_lint, span_lint_and_note, 
span_lint_and_then}; +use clippy_utils::higher::has_let_expr; use clippy_utils::source::{IntoSpan, SpanRangeExt, first_line_of_span, indent_of, reindent_multiline, snippet}; use clippy_utils::ty::{InteriorMut, needs_ordered_drop}; use clippy_utils::visitors::for_each_expr_without_closures; @@ -11,7 +12,7 @@ use clippy_utils::{ use core::iter; use core::ops::ControlFlow; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, LetStmt, Node, Stmt, StmtKind, intravisit}; +use rustc_hir::{Block, Expr, ExprKind, HirId, HirIdSet, LetStmt, Node, Stmt, StmtKind, intravisit}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::TyCtxt; use rustc_session::impl_lint_pass; @@ -189,24 +190,13 @@ impl<'tcx> LateLintPass<'tcx> for CopyAndPaste<'tcx> { } } -/// Checks if the given expression is a let chain. -fn contains_let(e: &Expr<'_>) -> bool { - match e.kind { - ExprKind::Let(..) => true, - ExprKind::Binary(op, lhs, rhs) if op.node == BinOpKind::And => { - matches!(lhs.kind, ExprKind::Let(..)) || contains_let(rhs) - }, - _ => false, - } -} - fn lint_if_same_then_else(cx: &LateContext<'_>, conds: &[&Expr<'_>], blocks: &[&Block<'_>]) -> bool { let mut eq = SpanlessEq::new(cx); blocks .array_windows::<2>() .enumerate() .fold(true, |all_eq, (i, &[lhs, rhs])| { - if eq.eq_block(lhs, rhs) && !contains_let(conds[i]) && conds.get(i + 1).is_none_or(|e| !contains_let(e)) { + if eq.eq_block(lhs, rhs) && !has_let_expr(conds[i]) && conds.get(i + 1).is_none_or(|e| !has_let_expr(e)) { span_lint_and_note( cx, IF_SAME_THEN_ELSE, diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index 5099df3fa02..995a1209595 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -364,7 +364,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> { // * `&self` methods on `&T` can have auto-borrow, but `&self` methods on `T` will take // priority. if let Some(fn_id) = typeck.type_dependent_def_id(hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && let arg_ty = cx.tcx.erase_regions(adjusted_ty) && let ty::Ref(_, sub_ty, _) = *arg_ty.kind() && let args = diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index 062f7cef3a7..49dd1bb09c6 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -432,6 +432,11 @@ impl<'tcx> Visitor<'tcx> for UnsafeVisitor<'_, 'tcx> { fn visit_expr(&mut self, expr: &'tcx Expr<'_>) -> Self::Result { if let ExprKind::Block(block, _) = expr.kind && block.rules == BlockCheckMode::UnsafeBlock(UnsafeSource::UserProvided) + && block + .span + .source_callee() + .and_then(|expr| expr.macro_def_id) + .is_none_or(|did| !self.cx.tcx.is_diagnostic_item(sym::pin_macro, did)) { return ControlFlow::Break(()); } diff --git a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs index d55aeae98ed..23e7c7251cf 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_macros.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_macros.rs @@ -72,11 +72,11 @@ pub struct DisallowedMacros { // When a macro is disallowed in an early pass, it's stored // and emitted during the late pass. This happens for attributes. 
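That early-to-late hand-off is what the `AttrStorage` field (renamed to `early_macro_cache` below) provides: the early pass can only record what it saw, and the late pass later drains the cache and emits the diagnostics. A rough sketch of the pattern with a shared `OnceCell`, using illustrative names rather than Clippy's real types:

```rust
use std::cell::OnceCell;
use std::rc::Rc;

/// Illustrative stand-in for Clippy's `AttrStorage`: a cell shared between passes.
#[derive(Clone, Default)]
struct SharedAttrCache(Rc<OnceCell<Vec<String>>>);

fn early_pass(cache: &SharedAttrCache) {
    // The early pass sees the attribute macros but cannot emit these lints yet,
    // so it only records them. `set` succeeds at most once.
    let _ = cache.0.set(vec!["some::disallowed_attr".to_owned()]);
}

fn late_pass(cache: &SharedAttrCache) {
    // Once the late pass runs, the stored entries are reported.
    if let Some(recorded) = cache.0.get() {
        for name in recorded {
            println!("would lint disallowed macro: {name}");
        }
    }
}

fn main() {
    let cache = SharedAttrCache::default();
    let early_handle = cache.clone(); // both handles point at the same cell
    early_pass(&early_handle);
    late_pass(&cache);
}
```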
- earlies: AttrStorage, + early_macro_cache: AttrStorage, } impl DisallowedMacros { - pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf, earlies: AttrStorage) -> Self { + pub fn new(tcx: TyCtxt<'_>, conf: &'static Conf, early_macro_cache: AttrStorage) -> Self { let (disallowed, _) = create_disallowed_map( tcx, &conf.disallowed_macros, @@ -89,7 +89,7 @@ impl DisallowedMacros { disallowed, seen: FxHashSet::default(), derive_src: None, - earlies, + early_macro_cache, } } @@ -130,7 +130,7 @@ impl_lint_pass!(DisallowedMacros => [DISALLOWED_MACROS]); impl LateLintPass<'_> for DisallowedMacros { fn check_crate(&mut self, cx: &LateContext<'_>) { // once we check a crate in the late pass we can emit the early pass lints - if let Some(attr_spans) = self.earlies.clone().0.get() { + if let Some(attr_spans) = self.early_macro_cache.clone().0.get() { for span in attr_spans { self.check(cx, *span, None); } diff --git a/src/tools/clippy/clippy_lints/src/doc/mod.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs index 22b781b8929..ea0da0d2467 100644 --- a/src/tools/clippy/clippy_lints/src/doc/mod.rs +++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs @@ -1232,7 +1232,6 @@ fn check_doc<'a, Events: Iterator<Item = (pulldown_cmark::Event<'a>, Range<usize headers } -#[expect(clippy::range_plus_one)] // inclusive ranges aren't the same type fn looks_like_refdef(doc: &str, range: Range<usize>) -> Option<Range<usize>> { if range.end < range.start { return None; diff --git a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs index 4414aebbf9a..f2757407ba5 100644 --- a/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs +++ b/src/tools/clippy/clippy_lints/src/empty_with_brackets.rs @@ -92,8 +92,10 @@ impl_lint_pass!(EmptyWithBrackets => [EMPTY_STRUCTS_WITH_BRACKETS, EMPTY_ENUM_VA impl LateLintPass<'_> for EmptyWithBrackets { fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) { + // FIXME: handle `struct $name {}` if let ItemKind::Struct(ident, _, var_data) = &item.kind && !item.span.from_expansion() + && !ident.span.from_expansion() && has_brackets(var_data) && let span_after_ident = item.span.with_lo(ident.span.hi()) && has_no_fields(cx, var_data, span_after_ident) @@ -116,10 +118,12 @@ impl LateLintPass<'_> for EmptyWithBrackets { } fn check_variant(&mut self, cx: &LateContext<'_>, variant: &Variant<'_>) { - // the span of the parentheses/braces - let span_after_ident = variant.span.with_lo(variant.ident.span.hi()); - - if has_no_fields(cx, &variant.data, span_after_ident) { + // FIXME: handle `$name {}` + if !variant.span.from_expansion() + && !variant.ident.span.from_expansion() + && let span_after_ident = variant.span.with_lo(variant.ident.span.hi()) + && has_no_fields(cx, &variant.data, span_after_ident) + { match variant.data { VariantData::Struct { .. 
} => { // Empty struct variants can be linted immediately diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs index db2fea1aae9..fc224fa5f92 100644 --- a/src/tools/clippy/clippy_lints/src/escape.rs +++ b/src/tools/clippy/clippy_lints/src/escape.rs @@ -1,8 +1,8 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_hir; use rustc_abi::ExternAbi; -use rustc_hir::{Body, FnDecl, HirId, HirIdSet, Node, Pat, PatKind, intravisit}; use rustc_hir::def::DefKind; +use rustc_hir::{Body, FnDecl, HirId, HirIdSet, Node, Pat, PatKind, intravisit}; use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir::FakeReadCause; @@ -87,16 +87,14 @@ impl<'tcx> LateLintPass<'tcx> for BoxedLocal { let mut trait_self_ty = None; match cx.tcx.def_kind(parent_id) { // If the method is an impl for a trait, don't warn. - DefKind::Impl { of_trait: true } => { - return - } + DefKind::Impl { of_trait: true } => return, // find `self` ty for this trait if relevant DefKind::Trait => { trait_self_ty = Some(TraitRef::identity(cx.tcx, parent_id.to_def_id()).self_ty()); - } + }, - _ => {} + _ => {}, } let mut v = EscapeDelegate { diff --git a/src/tools/clippy/clippy_lints/src/eta_reduction.rs b/src/tools/clippy/clippy_lints/src/eta_reduction.rs index 0288747d6f3..9b627678bd3 100644 --- a/src/tools/clippy/clippy_lints/src/eta_reduction.rs +++ b/src/tools/clippy/clippy_lints/src/eta_reduction.rs @@ -29,12 +29,6 @@ declare_clippy_lint! { /// Needlessly creating a closure adds code for no benefit /// and gives the optimizer more work. /// - /// ### Known problems - /// If creating the closure inside the closure has a side- - /// effect then moving the closure creation out will change when that side- - /// effect runs. - /// See [#1439](https://github.com/rust-lang/rust-clippy/issues/1439) for more details. 
- /// /// ### Example /// ```rust,ignore /// xs.map(|x| foo(x)) diff --git a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs index 552cd721f4e..fdfcbb540bc 100644 --- a/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs +++ b/src/tools/clippy/clippy_lints/src/fallible_impl_from.rs @@ -64,8 +64,8 @@ impl<'tcx> LateLintPass<'tcx> for FallibleImplFrom { } fn lint_impl_body(cx: &LateContext<'_>, item_def_id: hir::OwnerId, impl_span: Span) { - use rustc_hir::intravisit::{self, Visitor}; use rustc_hir::Expr; + use rustc_hir::intravisit::{self, Visitor}; struct FindPanicUnwrap<'a, 'tcx> { lcx: &'a LateContext<'tcx>, @@ -96,10 +96,12 @@ fn lint_impl_body(cx: &LateContext<'_>, item_def_id: hir::OwnerId, impl_span: Sp } } - for impl_item in cx.tcx.associated_items(item_def_id) + for impl_item in cx + .tcx + .associated_items(item_def_id) .filter_by_name_unhygienic_and_kind(sym::from, ty::AssocTag::Fn) { - let impl_item_def_id= impl_item.def_id.expect_local(); + let impl_item_def_id = impl_item.def_id.expect_local(); // check the body for `begin_panic` or `unwrap` let body = cx.tcx.hir_body_owned_by(impl_item_def_id); diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs index 16c58ecb455..a251f15ba3d 100644 --- a/src/tools/clippy/clippy_lints/src/format_args.rs +++ b/src/tools/clippy/clippy_lints/src/format_args.rs @@ -17,7 +17,7 @@ use rustc_ast::{ FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions, FormatPlaceholder, FormatTrait, }; -use rustc_attr_data_structures::RustcVersion; +use rustc_attr_data_structures::{AttributeKind, RustcVersion, find_attr}; use rustc_data_structures::fx::FxHashMap; use rustc_errors::Applicability; use rustc_errors::SuggestionStyle::{CompletelyHidden, ShowCode}; @@ -30,7 +30,6 @@ use rustc_span::edition::Edition::Edition2021; use rustc_span::{Span, Symbol, sym}; use rustc_trait_selection::infer::TyCtxtInferExt; use rustc_trait_selection::traits::{Obligation, ObligationCause, Selection, SelectionContext}; -use rustc_attr_data_structures::{AttributeKind, find_attr}; declare_clippy_lint! { /// ### What it does @@ -129,6 +128,7 @@ declare_clippy_lint! { /// # let width = 1; /// # let prec = 2; /// format!("{}", var); + /// format!("{:?}", var); /// format!("{v:?}", v = var); /// format!("{0} {0}", var); /// format!("{0:1$}", var, width); @@ -141,6 +141,7 @@ declare_clippy_lint! { /// # let prec = 2; /// format!("{var}"); /// format!("{var:?}"); + /// format!("{var:?}"); /// format!("{var} {var}"); /// format!("{var:width$}"); /// format!("{var:.prec$}"); @@ -164,7 +165,7 @@ declare_clippy_lint! { /// nothing will be suggested, e.g. `println!("{0}={1}", var, 1+2)`. #[clippy::version = "1.66.0"] pub UNINLINED_FORMAT_ARGS, - style, + pedantic, "using non-inlined variables in `format!` calls" } @@ -657,7 +658,10 @@ impl<'tcx> FormatArgsExpr<'_, 'tcx> { }; let selection = SelectionContext::new(&infcx).select(&obligation); let derived = if let Ok(Some(Selection::UserDefined(data))) = selection { - find_attr!(tcx.get_all_attrs(data.impl_def_id), AttributeKind::AutomaticallyDerived(..)) + find_attr!( + tcx.get_all_attrs(data.impl_def_id), + AttributeKind::AutomaticallyDerived(..) 
+ ) } else { false }; diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs index 85b40ba7419..1da6952eb64 100644 --- a/src/tools/clippy/clippy_lints/src/from_over_into.rs +++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs @@ -9,7 +9,7 @@ use clippy_utils::source::SpanRangeExt; use rustc_errors::Applicability; use rustc_hir::intravisit::{Visitor, walk_path}; use rustc_hir::{ - FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItemKind, ImplItemId, Item, ItemKind, PatKind, Path, + FnRetTy, GenericArg, GenericArgs, HirId, Impl, ImplItemId, ImplItemKind, Item, ItemKind, PatKind, Path, PathSegment, Ty, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs index d959981a83c..b8d0cec5aeb 100644 --- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs +++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs @@ -10,7 +10,7 @@ use rustc_span::{Span, sym}; use clippy_utils::attrs::is_proc_macro; use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then}; -use clippy_utils::source::SpanRangeExt; +use clippy_utils::source::snippet_indent; use clippy_utils::ty::is_must_use_ty; use clippy_utils::visitors::for_each_expr_without_closures; use clippy_utils::{return_ty, trait_ref_of_method}; @@ -28,6 +28,7 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> if let hir::ItemKind::Fn { ref sig, body: ref body_id, + ident, .. } = item.kind { @@ -51,8 +52,8 @@ pub(super) fn check_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_> sig.decl, cx.tcx.hir_body(*body_id), item.span, + ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this function could have a `#[must_use]` attribute", ); } @@ -84,8 +85,8 @@ pub(super) fn check_impl_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Imp sig.decl, cx.tcx.hir_body(*body_id), item.span, + item.ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); } @@ -120,8 +121,8 @@ pub(super) fn check_trait_item<'tcx>(cx: &LateContext<'tcx>, item: &'tcx hir::Tr sig.decl, body, item.span, + item.ident.span, item.owner_id, - item.span.with_hi(sig.decl.output.span().hi()), "this method could have a `#[must_use]` attribute", ); } @@ -198,8 +199,8 @@ fn check_must_use_candidate<'tcx>( decl: &'tcx hir::FnDecl<'_>, body: &'tcx hir::Body<'_>, item_span: Span, + ident_span: Span, item_id: hir::OwnerId, - fn_span: Span, msg: &'static str, ) { if has_mutable_arg(cx, body) @@ -208,18 +209,18 @@ fn check_must_use_candidate<'tcx>( || returns_unit(decl) || !cx.effective_visibilities.is_exported(item_id.def_id) || is_must_use_ty(cx, return_ty(cx, item_id)) + || item_span.from_expansion() { return; } - span_lint_and_then(cx, MUST_USE_CANDIDATE, fn_span, msg, |diag| { - if let Some(snippet) = fn_span.get_source_text(cx) { - diag.span_suggestion( - fn_span, - "add the attribute", - format!("#[must_use] {snippet}"), - Applicability::MachineApplicable, - ); - } + span_lint_and_then(cx, MUST_USE_CANDIDATE, ident_span, msg, |diag| { + let indent = snippet_indent(cx, item_span).unwrap_or_default(); + diag.span_suggestion( + item_span.shrink_to_lo(), + "add the attribute", + format!("#[must_use] \n{indent}"), + Applicability::MachineApplicable, + ); }); } diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs 
b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs index 9e94280fc07..7158f9419c1 100644 --- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs +++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs @@ -5,7 +5,8 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_with_context; use clippy_utils::sugg::Sugg; use clippy_utils::{ - contains_return, higher, is_else_clause, is_in_const_context, is_res_lang_ctor, path_res, peel_blocks, + contains_return, expr_adjustment_requires_coercion, higher, is_else_clause, is_in_const_context, is_res_lang_ctor, + path_res, peel_blocks, }; use rustc_errors::Applicability; use rustc_hir::LangItem::{OptionNone, OptionSome}; @@ -92,6 +93,10 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone { expr.span, format!("this could be simplified with `bool::{method_name}`"), |diag| { + if expr_adjustment_requires_coercion(cx, then_arg) { + return; + } + let mut app = Applicability::MachineApplicable; let cond_snip = Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "[condition]", &mut app) .maybe_paren() diff --git a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs index c743501da25..c634c12e187 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_saturating_sub.rs @@ -339,7 +339,7 @@ fn check_with_condition<'tcx>( ExprKind::Path(QPath::TypeRelative(_, name)) => { if name.ident.name == sym::MIN && let Some(const_id) = cx.typeck_results().type_dependent_def_id(cond_num_val.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(const_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(const_id) && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl && cx.tcx.type_of(impl_id).instantiate_identity().is_integral() { @@ -350,7 +350,7 @@ fn check_with_condition<'tcx>( if let ExprKind::Path(QPath::TypeRelative(_, name)) = func.kind && name.ident.name == sym::min_value && let Some(func_id) = cx.typeck_results().type_dependent_def_id(func.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(func_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(func_id) && let None = cx.tcx.impl_trait_ref(impl_id) // An inherent impl && cx.tcx.type_of(impl_id).instantiate_identity().is_integral() { diff --git a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs index 5d0bd3e8ca3..116d63c3bb1 100644 --- a/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs +++ b/src/tools/clippy/clippy_lints/src/incompatible_msrv.rs @@ -1,10 +1,11 @@ use clippy_config::Conf; -use clippy_utils::diagnostics::span_lint; -use clippy_utils::is_in_test; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::msrvs::Msrv; +use clippy_utils::{is_in_const_context, is_in_test}; use rustc_attr_data_structures::{RustcVersion, StabilityLevel, StableSince}; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::{Expr, ExprKind, HirId, QPath}; +use rustc_hir::def::DefKind; +use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, HirId, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::TyCtxt; use rustc_session::impl_lint_pass; @@ -33,15 +34,54 @@ declare_clippy_lint! { /// /// To fix this problem, either increase your MSRV or use another item /// available in your current MSRV. 
+ /// + /// You can also locally change the MSRV that should be checked by Clippy, + /// for example if a feature in your crate (e.g., `modern_compiler`) should + /// allow you to use an item: + /// + /// ```no_run + /// //! This crate has a MSRV of 1.3.0, but we also have an optional feature + /// //! `sleep_well` which requires at least Rust 1.4.0. + /// + /// // When the `sleep_well` feature is set, do not warn for functions available + /// // in Rust 1.4.0 and below. + /// #![cfg_attr(feature = "sleep_well", clippy::msrv = "1.4.0")] + /// + /// use std::time::Duration; + /// + /// #[cfg(feature = "sleep_well")] + /// fn sleep_for_some_time() { + /// std::thread::sleep(Duration::new(1, 0)); // Will not trigger the lint + /// } + /// ``` + /// + /// You can also increase the MSRV in tests, by using: + /// + /// ```no_run + /// // Use a much higher MSRV for tests while keeping the main one low + /// #![cfg_attr(test, clippy::msrv = "1.85.0")] + /// + /// #[test] + /// fn my_test() { + /// // The tests can use items introduced in Rust 1.85.0 and lower + /// // without triggering the `incompatible_msrv` lint. + /// } + /// ``` #[clippy::version = "1.78.0"] pub INCOMPATIBLE_MSRV, suspicious, "ensures that all items used in the crate are available for the current MSRV" } +#[derive(Clone, Copy)] +enum Availability { + FeatureEnabled, + Since(RustcVersion), +} + pub struct IncompatibleMsrv { msrv: Msrv, - is_above_msrv: FxHashMap<DefId, RustcVersion>, + availability_cache: FxHashMap<(DefId, bool), Availability>, check_in_tests: bool, } @@ -51,38 +91,50 @@ impl IncompatibleMsrv { pub fn new(conf: &'static Conf) -> Self { Self { msrv: conf.msrv, - is_above_msrv: FxHashMap::default(), + availability_cache: FxHashMap::default(), check_in_tests: conf.check_incompatible_msrv_in_tests, } } - fn get_def_id_version(&mut self, tcx: TyCtxt<'_>, def_id: DefId) -> RustcVersion { - if let Some(version) = self.is_above_msrv.get(&def_id) { - return *version; + /// Returns the availability of `def_id`, whether it is enabled through a feature or + /// available since a given version (the default being Rust 1.0.0). `needs_const` requires + /// the `const`-stability to be looked up instead of the stability in non-`const` contexts. + fn get_def_id_availability(&mut self, tcx: TyCtxt<'_>, def_id: DefId, needs_const: bool) -> Availability { + if let Some(availability) = self.availability_cache.get(&(def_id, needs_const)) { + return *availability; } - let version = if let Some(version) = tcx - .lookup_stability(def_id) - .and_then(|stability| match stability.level { - StabilityLevel::Stable { - since: StableSince::Version(version), - .. 
- } => Some(version), - _ => None, - }) { - version + let (feature, stability_level) = if needs_const { + tcx.lookup_const_stability(def_id) + .map(|stability| (stability.feature, stability.level)) + .unzip() + } else { + tcx.lookup_stability(def_id) + .map(|stability| (stability.feature, stability.level)) + .unzip() + }; + let version = if feature.is_some_and(|feature| tcx.features().enabled(feature)) { + Availability::FeatureEnabled + } else if let Some(StableSince::Version(version)) = + stability_level.as_ref().and_then(StabilityLevel::stable_since) + { + Availability::Since(version) + } else if needs_const { + // Fallback to regular stability + self.get_def_id_availability(tcx, def_id, false) } else if let Some(parent_def_id) = tcx.opt_parent(def_id) { - self.get_def_id_version(tcx, parent_def_id) + self.get_def_id_availability(tcx, parent_def_id, needs_const) } else { - RustcVersion { + Availability::Since(RustcVersion { major: 1, minor: 0, patch: 0, - } + }) }; - self.is_above_msrv.insert(def_id, version); + self.availability_cache.insert((def_id, needs_const), version); version } + /// Emit lint if `def_id`, associated with `node` and `span`, is below the current MSRV. fn emit_lint_if_under_msrv(&mut self, cx: &LateContext<'_>, def_id: DefId, node: HirId, span: Span) { if def_id.is_local() { // We don't check local items since their MSRV is supposed to always be valid. @@ -108,18 +160,28 @@ impl IncompatibleMsrv { return; } + let needs_const = cx.enclosing_body.is_some() + && is_in_const_context(cx) + && matches!(cx.tcx.def_kind(def_id), DefKind::AssocFn | DefKind::Fn); + if (self.check_in_tests || !is_in_test(cx.tcx, node)) && let Some(current) = self.msrv.current(cx) - && let version = self.get_def_id_version(cx.tcx, def_id) + && let Availability::Since(version) = self.get_def_id_availability(cx.tcx, def_id, needs_const) && version > current { - span_lint( + span_lint_and_then( cx, INCOMPATIBLE_MSRV, span, format!( - "current MSRV (Minimum Supported Rust Version) is `{current}` but this item is stable since `{version}`" + "current MSRV (Minimum Supported Rust Version) is `{current}` but this item is stable{} since `{version}`", + if needs_const { " in a `const` context" } else { "" }, ), + |diag| { + if is_under_cfg_attribute(cx, node) { + diag.note_once("you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute"); + } + }, ); } } @@ -133,17 +195,38 @@ impl<'tcx> LateLintPass<'tcx> for IncompatibleMsrv { self.emit_lint_if_under_msrv(cx, method_did, expr.hir_id, span); } }, - ExprKind::Call(call, _) => { - // Desugaring into function calls by the compiler will use `QPath::LangItem` variants. Those should - // not be linted as they will not be generated in older compilers if the function is not available, - // and the compiler is allowed to call unstable functions. - if let ExprKind::Path(qpath @ (QPath::Resolved(..) | QPath::TypeRelative(..))) = call.kind - && let Some(path_def_id) = cx.qpath_res(&qpath, call.hir_id).opt_def_id() - { - self.emit_lint_if_under_msrv(cx, path_def_id, expr.hir_id, call.span); + // Desugaring into function calls by the compiler will use `QPath::LangItem` variants. Those should + // not be linted as they will not be generated in older compilers if the function is not available, + // and the compiler is allowed to call unstable functions. + ExprKind::Path(qpath @ (QPath::Resolved(..) 
| QPath::TypeRelative(..))) => { + if let Some(path_def_id) = cx.qpath_res(&qpath, expr.hir_id).opt_def_id() { + self.emit_lint_if_under_msrv(cx, path_def_id, expr.hir_id, expr.span); } }, _ => {}, } } + + fn check_ty(&mut self, cx: &LateContext<'tcx>, hir_ty: &'tcx hir::Ty<'tcx, AmbigArg>) { + if let hir::TyKind::Path(qpath @ (QPath::Resolved(..) | QPath::TypeRelative(..))) = hir_ty.kind + && let Some(ty_def_id) = cx.qpath_res(&qpath, hir_ty.hir_id).opt_def_id() + // `CStr` and `CString` have been moved around but have been available since Rust 1.0.0 + && !matches!(cx.tcx.get_diagnostic_name(ty_def_id), Some(sym::cstr_type | sym::cstring_type)) + { + self.emit_lint_if_under_msrv(cx, ty_def_id, hir_ty.hir_id, hir_ty.span); + } + } +} + +/// Heuristic checking if the node `hir_id` is under a `#[cfg()]` or `#[cfg_attr()]` +/// attribute. +fn is_under_cfg_attribute(cx: &LateContext<'_>, hir_id: HirId) -> bool { + cx.tcx.hir_parent_id_iter(hir_id).any(|id| { + cx.tcx.hir_attrs(id).iter().any(|attr| { + matches!( + attr.ident().map(|ident| ident.name), + Some(sym::cfg_trace | sym::cfg_attr_trace) + ) + }) + }) } diff --git a/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs index 7a751514b64..a159f615718 100644 --- a/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs +++ b/src/tools/clippy/clippy_lints/src/ineffective_open_options.rs @@ -1,13 +1,12 @@ -use crate::methods::method_call; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::{peel_blocks, sym}; +use clippy_utils::source::SpanRangeExt; +use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::{peel_blocks, peel_hir_expr_while, sym}; use rustc_ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty; use rustc_session::declare_lint_pass; -use rustc_span::{BytePos, Span}; declare_clippy_lint! { /// ### What it does @@ -43,53 +42,58 @@ declare_clippy_lint! { declare_lint_pass!(IneffectiveOpenOptions => [INEFFECTIVE_OPEN_OPTIONS]); -fn index_if_arg_is_boolean(args: &[Expr<'_>], call_span: Span) -> Option<Span> { - if let [arg] = args - && let ExprKind::Lit(lit) = peel_blocks(arg).kind - && lit.node == LitKind::Bool(true) - { - // The `.` is not included in the span so we cheat a little bit to include it as well. 
- Some(call_span.with_lo(call_span.lo() - BytePos(1))) - } else { - None - } -} - impl<'tcx> LateLintPass<'tcx> for IneffectiveOpenOptions { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some((sym::open, mut receiver, [_arg], _, _)) = method_call(expr) else { - return; - }; - let receiver_ty = cx.typeck_results().expr_ty(receiver); - match receiver_ty.peel_refs().kind() { - ty::Adt(adt, _) if cx.tcx.is_diagnostic_item(sym::FsOpenOptions, adt.did()) => {}, - _ => return, - } - - let mut append = None; - let mut write = None; + if let ExprKind::MethodCall(name, recv, [_], _) = expr.kind + && name.ident.name == sym::open + && !expr.span.from_expansion() + && is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(recv).peel_refs(), sym::FsOpenOptions) + { + let mut append = false; + let mut write = None; + peel_hir_expr_while(recv, |e| { + if let ExprKind::MethodCall(name, recv, args, call_span) = e.kind + && !e.span.from_expansion() + { + if let [arg] = args + && let ExprKind::Lit(lit) = peel_blocks(arg).kind + && matches!(lit.node, LitKind::Bool(true)) + && !arg.span.from_expansion() + && !lit.span.from_expansion() + { + match name.ident.name { + sym::append => append = true, + sym::write + if let Some(range) = call_span.map_range(cx, |_, text, range| { + if text.get(..range.start)?.ends_with('.') { + Some(range.start - 1..range.end) + } else { + None + } + }) => + { + write = Some(call_span.with_lo(range.start)); + }, + _ => {}, + } + } + Some(recv) + } else { + None + } + }); - while let Some((name, recv, args, _, span)) = method_call(receiver) { - if name == sym::append { - append = index_if_arg_is_boolean(args, span); - } else if name == sym::write { - write = index_if_arg_is_boolean(args, span); + if append && let Some(write_span) = write { + span_lint_and_sugg( + cx, + INEFFECTIVE_OPEN_OPTIONS, + write_span, + "unnecessary use of `.write(true)` because there is `.append(true)`", + "remove `.write(true)`", + String::new(), + Applicability::MachineApplicable, + ); } - receiver = recv; - } - - if let Some(write_span) = write - && append.is_some() - { - span_lint_and_sugg( - cx, - INEFFECTIVE_OPEN_OPTIONS, - write_span, - "unnecessary use of `.write(true)` because there is `.append(true)`", - "remove `.write(true)`", - String::new(), - Applicability::MachineApplicable, - ); } } } diff --git a/src/tools/clippy/clippy_lints/src/infallible_try_from.rs b/src/tools/clippy/clippy_lints/src/infallible_try_from.rs index e79fcec6e6a..f7cdf05359a 100644 --- a/src/tools/clippy/clippy_lints/src/infallible_try_from.rs +++ b/src/tools/clippy/clippy_lints/src/infallible_try_from.rs @@ -52,13 +52,17 @@ impl<'tcx> LateLintPass<'tcx> for InfallibleTryFrom { if !cx.tcx.is_diagnostic_item(sym::TryFrom, trait_def_id) { return; } - for ii in cx.tcx.associated_items(item.owner_id.def_id) + for ii in cx + .tcx + .associated_items(item.owner_id.def_id) .filter_by_name_unhygienic_and_kind(sym::Error, AssocTag::Type) { let ii_ty = cx.tcx.type_of(ii.def_id).instantiate_identity(); if !ii_ty.is_inhabited_from(cx.tcx, ii.def_id, cx.typing_env()) { let mut span = MultiSpan::from_span(cx.tcx.def_span(item.owner_id.to_def_id())); - let ii_ty_span = cx.tcx.hir_node_by_def_id(ii.def_id.expect_local()) + let ii_ty_span = cx + .tcx + .hir_node_by_def_id(ii.def_id.expect_local()) .expect_impl_item() .expect_type() .span; diff --git a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs index 9c91cf68085..95e16aae40f 100644 --- 
a/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs +++ b/src/tools/clippy/clippy_lints/src/item_name_repetitions.rs @@ -8,6 +8,7 @@ use rustc_data_structures::fx::FxHashSet; use rustc_hir::{EnumDef, FieldDef, Item, ItemKind, OwnerId, QPath, TyKind, Variant, VariantData}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; +use rustc_span::MacroKind; use rustc_span::symbol::Symbol; declare_clippy_lint! { @@ -502,7 +503,8 @@ impl LateLintPass<'_> for ItemNameRepetitions { ); } - if both_are_public && item_camel.len() > mod_camel.len() { + let is_macro_rule = matches!(item.kind, ItemKind::Macro(_, _, MacroKind::Bang)); + if both_are_public && item_camel.len() > mod_camel.len() && !is_macro_rule { let matching = count_match_start(mod_camel, &item_camel); let rmatching = count_match_end(mod_camel, &item_camel); let nchars = mod_camel.chars().count(); diff --git a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs index 03038f0ab49..b89f91f7255 100644 --- a/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs +++ b/src/tools/clippy/clippy_lints/src/iter_without_into_iter.rs @@ -139,11 +139,17 @@ impl LateLintPass<'_> for IterWithoutIntoIter { // We can't check inherent impls for slices, but we know that they have an `iter(_mut)` method ty.peel_refs().is_slice() || get_adt_inherent_method(cx, ty, expected_method_name).is_some() }) - && let Some(iter_assoc_span) = cx.tcx.associated_items(item.owner_id) + && let Some(iter_assoc_span) = cx + .tcx + .associated_items(item.owner_id) .filter_by_name_unhygienic_and_kind(sym::IntoIter, ty::AssocTag::Type) .next() .map(|assoc_item| { - cx.tcx.hir_node_by_def_id(assoc_item.def_id.expect_local()).expect_impl_item().expect_type().span + cx.tcx + .hir_node_by_def_id(assoc_item.def_id.expect_local()) + .expect_impl_item() + .expect_type() + .span }) && is_ty_exported(cx, ty) { diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs index e85d779b488..c2b73943106 100644 --- a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs +++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs @@ -1,5 +1,6 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_no_std_crate; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::{AdtVariantInfo, approx_ty_size, is_copy}; use rustc_errors::Applicability; @@ -83,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant { let mut difference = variants_size[0].size - variants_size[1].size; if difference > self.maximum_size_difference_allowed { - let help_text = "consider boxing the large fields to reduce the total size of the enum"; + let help_text = "consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum"; span_lint_and_then( cx, LARGE_ENUM_VARIANT, @@ -117,7 +118,7 @@ impl<'tcx> LateLintPass<'tcx> for LargeEnumVariant { ident.span, "boxing a variant would require the type no longer be `Copy`", ); - } else { + } else if !is_no_std_crate(cx) { let sugg: Vec<(Span, String)> = variants_size[0] .fields_size .iter() diff --git a/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs b/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs index b3c63f022d3..42c636505c0 100644 --- a/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs +++ 
b/src/tools/clippy/clippy_lints/src/legacy_numeric_constants.rs @@ -1,7 +1,8 @@ use clippy_config::Conf; -use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_from_proc_macro; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::{get_parent_expr, is_from_proc_macro}; +use clippy_utils::source::SpanRangeExt; use hir::def_id::DefId; use rustc_errors::Applicability; use rustc_hir as hir; @@ -102,39 +103,45 @@ impl<'tcx> LateLintPass<'tcx> for LegacyNumericConstants { } fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx rustc_hir::Expr<'tcx>) { - let ExprKind::Path(qpath) = &expr.kind else { - return; - }; - // `std::<integer>::<CONST>` check - let (span, sugg, msg) = if let QPath::Resolved(None, path) = qpath + let (sugg, msg) = if let ExprKind::Path(qpath) = &expr.kind + && let QPath::Resolved(None, path) = qpath && let Some(def_id) = path.res.opt_def_id() && is_numeric_const(cx, def_id) - && let def_path = cx.get_def_path(def_id) - && let [.., mod_name, name] = &*def_path + && let [.., mod_name, name] = &*cx.get_def_path(def_id) // Skip linting if this usage looks identical to the associated constant, // since this would only require removing a `use` import (which is already linted). && !is_numeric_const_path_canonical(path, [*mod_name, *name]) { ( - expr.span, - format!("{mod_name}::{name}"), + vec![(expr.span, format!("{mod_name}::{name}"))], "usage of a legacy numeric constant", ) // `<integer>::xxx_value` check - } else if let QPath::TypeRelative(_, last_segment) = qpath - && let Some(def_id) = cx.qpath_res(qpath, expr.hir_id).opt_def_id() - && let Some(par_expr) = get_parent_expr(cx, expr) - && let ExprKind::Call(_, []) = par_expr.kind + } else if let ExprKind::Call(func, []) = &expr.kind + && let ExprKind::Path(qpath) = &func.kind + && let QPath::TypeRelative(ty, last_segment) = qpath + && let Some(def_id) = cx.qpath_res(qpath, func.hir_id).opt_def_id() && is_integer_method(cx, def_id) { - let name = last_segment.ident.name.as_str(); - - ( - last_segment.ident.span.with_hi(par_expr.span.hi()), - name[..=2].to_ascii_uppercase(), - "usage of a legacy numeric method", - ) + let mut sugg = vec![ + // Replace the function name up to the end by the constant name + ( + last_segment.ident.span.to(expr.span.shrink_to_hi()), + last_segment.ident.name.as_str()[..=2].to_ascii_uppercase(), + ), + ]; + let before_span = expr.span.shrink_to_lo().until(ty.span); + if !before_span.is_empty() { + // Remove everything before the type name + sugg.push((before_span, String::new())); + } + // Use `::` between the type name and the constant + let between_span = ty.span.shrink_to_hi().until(last_segment.ident.span); + if !between_span.check_source_text(cx, |s| s == "::") { + sugg.push((between_span, String::from("::"))); + } + (sugg, "usage of a legacy numeric method") } else { return; }; @@ -143,9 +150,8 @@ impl<'tcx> LateLintPass<'tcx> for LegacyNumericConstants { && self.msrv.meets(cx, msrvs::NUMERIC_ASSOCIATED_CONSTANTS) && !is_from_proc_macro(cx, expr) { - span_lint_hir_and_then(cx, LEGACY_NUMERIC_CONSTANTS, expr.hir_id, span, msg, |diag| { - diag.span_suggestion_verbose( - span, + span_lint_and_then(cx, LEGACY_NUMERIC_CONSTANTS, expr.span, msg, |diag| { + diag.multipart_suggestion_verbose( "use the associated constant instead", sugg, Applicability::MaybeIncorrect, diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs index 
1bf03480c82..6beddc1be14 100644 --- a/src/tools/clippy/clippy_lints/src/len_zero.rs +++ b/src/tools/clippy/clippy_lints/src/len_zero.rs @@ -10,9 +10,9 @@ use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::{ - BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, HirId, ImplItem, ImplItemKind, - ImplicitSelfKind, Item, ItemKind, Mutability, Node, OpaqueTyOrigin, PatExprKind, PatKind, PathSegment, PrimTy, - QPath, TraitItemId, TyKind, + BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, HirId, ImplItem, ImplItemKind, ImplicitSelfKind, + Item, ItemKind, Mutability, Node, OpaqueTyOrigin, PatExprKind, PatKind, PathSegment, PrimTy, QPath, TraitItemId, + TyKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, FnSig, Ty}; @@ -266,11 +266,14 @@ fn span_without_enclosing_paren(cx: &LateContext<'_>, span: Span) -> Span { } fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, ident: Ident, trait_items: &[TraitItemId]) { - fn is_named_self(cx: &LateContext<'_>, item: &TraitItemId, name: Symbol) -> bool { + fn is_named_self(cx: &LateContext<'_>, item: TraitItemId, name: Symbol) -> bool { cx.tcx.item_name(item.owner_id) == name && matches!( cx.tcx.fn_arg_idents(item.owner_id), - [Some(Ident { name: kw::SelfLower, .. })], + [Some(Ident { + name: kw::SelfLower, + .. + })], ) } @@ -284,7 +287,7 @@ fn check_trait_items(cx: &LateContext<'_>, visited_trait: &Item<'_>, ident: Iden } if cx.effective_visibilities.is_exported(visited_trait.owner_id.def_id) - && trait_items.iter().any(|i| is_named_self(cx, i, sym::len)) + && trait_items.iter().any(|&i| is_named_self(cx, i, sym::len)) { let mut current_and_super_traits = DefIdSet::default(); fill_trait_set(visited_trait.owner_id.to_def_id(), &mut current_and_super_traits, cx); diff --git a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs index 7837b18bcd3..7bb684d65bb 100644 --- a/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/needless_range_loop.rs @@ -39,7 +39,9 @@ pub(super) fn check<'tcx>( var: canonical_id, indexed_mut: FxHashSet::default(), indexed_indirectly: FxHashMap::default(), + unnamed_indexed_indirectly: false, indexed_directly: FxIndexMap::default(), + unnamed_indexed_directly: false, referenced: FxHashSet::default(), nonindex: false, prefer_mutable: false, @@ -47,7 +49,11 @@ pub(super) fn check<'tcx>( walk_expr(&mut visitor, body); // linting condition: we only indexed one variable, and indexed it directly - if visitor.indexed_indirectly.is_empty() && visitor.indexed_directly.len() == 1 { + if visitor.indexed_indirectly.is_empty() + && !visitor.unnamed_indexed_indirectly + && !visitor.unnamed_indexed_directly + && visitor.indexed_directly.len() == 1 + { let (indexed, (indexed_extent, indexed_ty)) = visitor .indexed_directly .into_iter() @@ -217,6 +223,7 @@ fn is_end_eq_array_len<'tcx>( false } +#[expect(clippy::struct_excessive_bools)] struct VarVisitor<'a, 'tcx> { /// context reference cx: &'a LateContext<'tcx>, @@ -226,9 +233,13 @@ struct VarVisitor<'a, 'tcx> { indexed_mut: FxHashSet<Symbol>, /// indirectly indexed variables (`v[(i + 4) % N]`), the extend is `None` for global indexed_indirectly: FxHashMap<Symbol, Option<region::Scope>>, + /// indirectly indexed literals, like `[1, 2, 3][(i + 4) % N]` + unnamed_indexed_indirectly: bool, /// subset of `indexed` of vars that are indexed 
directly: `v[i]` /// this will not contain cases like `v[calc_index(i)]` or `v[(i + 4) % N]` indexed_directly: FxIndexMap<Symbol, (Option<region::Scope>, Ty<'tcx>)>, + /// directly indexed literals, like `[1, 2, 3][i]` + unnamed_indexed_directly: bool, /// Any names that are used outside an index operation. /// Used to detect things like `&mut vec` used together with `vec[i]` referenced: FxHashSet<Symbol>, @@ -242,6 +253,7 @@ struct VarVisitor<'a, 'tcx> { impl<'tcx> VarVisitor<'_, 'tcx> { fn check(&mut self, idx: &'tcx Expr<'_>, seqexpr: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) -> bool { + let index_used_directly = matches!(idx.kind, ExprKind::Path(_)); if let ExprKind::Path(ref seqpath) = seqexpr.kind // the indexed container is referenced by a name && let QPath::Resolved(None, seqvar) = *seqpath @@ -251,7 +263,6 @@ impl<'tcx> VarVisitor<'_, 'tcx> { if self.prefer_mutable { self.indexed_mut.insert(seqvar.segments[0].ident.name); } - let index_used_directly = matches!(idx.kind, ExprKind::Path(_)); let res = self.cx.qpath_res(seqpath, seqexpr.hir_id); match res { Res::Local(hir_id) => { @@ -286,6 +297,13 @@ impl<'tcx> VarVisitor<'_, 'tcx> { }, _ => (), } + } else if let ExprKind::Repeat(..) | ExprKind::Array(..) = seqexpr.kind { + if index_used_directly { + self.unnamed_indexed_directly = true; + } else { + self.unnamed_indexed_indirectly = true; + } + return false; } true } @@ -299,7 +317,7 @@ impl<'tcx> Visitor<'tcx> for VarVisitor<'_, 'tcx> { .cx .typeck_results() .type_dependent_def_id(expr.hir_id) - .and_then(|def_id| self.cx.tcx.trait_of_item(def_id)) + .and_then(|def_id| self.cx.tcx.trait_of_assoc(def_id)) && ((meth.ident.name == sym::index && self.cx.tcx.lang_items().index_trait() == Some(trait_id)) || (meth.ident.name == sym::index_mut && self.cx.tcx.lang_items().index_mut_trait() == Some(trait_id))) && !self.check(args_1, args_0, expr) diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs index 69c84bc7038..8a253ae5810 100644 --- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs @@ -6,9 +6,11 @@ use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::source::snippet; use clippy_utils::visitors::{Descend, for_each_expr_without_closures}; use rustc_errors::Applicability; -use rustc_hir::{Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind, StructTailExpr}; +use rustc_hir::{ + Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Node, Pat, Stmt, StmtKind, StructTailExpr, +}; use rustc_lint::LateContext; -use rustc_span::{Span, sym}; +use rustc_span::{BytePos, Span, sym}; use std::iter::once; use std::ops::ControlFlow; @@ -20,7 +22,7 @@ pub(super) fn check<'tcx>( for_loop: Option<&ForLoop<'_>>, ) { match never_loop_block(cx, block, &mut Vec::new(), loop_id) { - NeverLoopResult::Diverging => { + NeverLoopResult::Diverging { ref break_spans } => { span_lint_and_then(cx, NEVER_LOOP, span, "this loop never actually loops", |diag| { if let Some(ForLoop { arg: iterator, @@ -38,10 +40,15 @@ pub(super) fn check<'tcx>( Applicability::Unspecified }; - diag.span_suggestion_verbose( + let mut suggestions = vec![( for_span.with_hi(iterator.span.hi()), - "if you need the first element of the iterator, try writing", for_to_if_let_sugg(cx, iterator, pat), + )]; // Make sure to clean up the diverging sites when we remove a loop. 
+ suggestions.extend(break_spans.iter().map(|span| (*span, String::new()))); + diag.multipart_suggestion_verbose( + "if you need the first element of the iterator, try writing", + suggestions, app, ); } @@ -70,22 +77,22 @@ fn contains_any_break_or_continue(block: &Block<'_>) -> bool { /// The first two bits of information are in this enum, and the last part is in the /// `local_labels` variable, which contains a list of `(block_id, reachable)` pairs ordered by /// scope. -#[derive(Copy, Clone)] +#[derive(Clone)] enum NeverLoopResult { /// A continue may occur for the main loop. MayContinueMainLoop, /// We have not encountered any main loop continue, /// but we are diverging (subsequent control flow is not reachable) - Diverging, + Diverging { break_spans: Vec<Span> }, /// We have not encountered any main loop continue, /// and subsequent control flow is (possibly) reachable Normal, } #[must_use] -fn absorb_break(arg: NeverLoopResult) -> NeverLoopResult { +fn absorb_break(arg: &NeverLoopResult) -> NeverLoopResult { match arg { - NeverLoopResult::Diverging | NeverLoopResult::Normal => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. } | NeverLoopResult::Normal => NeverLoopResult::Normal, NeverLoopResult::MayContinueMainLoop => NeverLoopResult::MayContinueMainLoop, } } @@ -94,7 +101,7 @@ fn absorb_break(arg: NeverLoopResult) -> NeverLoopResult { #[must_use] fn combine_seq(first: NeverLoopResult, second: impl FnOnce() -> NeverLoopResult) -> NeverLoopResult { match first { - NeverLoopResult::Diverging | NeverLoopResult::MayContinueMainLoop => first, + NeverLoopResult::Diverging { .. } | NeverLoopResult::MayContinueMainLoop => first, NeverLoopResult::Normal => second(), } } @@ -103,7 +110,7 @@ fn combine_seq(first: NeverLoopResult, second: impl FnOnce() -> NeverLoopResult) #[must_use] fn combine_seq_many(iter: impl IntoIterator<Item = NeverLoopResult>) -> NeverLoopResult { for e in iter { - if let NeverLoopResult::Diverging | NeverLoopResult::MayContinueMainLoop = e { + if let NeverLoopResult::Diverging { .. } | NeverLoopResult::MayContinueMainLoop = e { return e; } } @@ -118,7 +125,19 @@ fn combine_branches(b1: NeverLoopResult, b2: NeverLoopResult) -> NeverLoopResult NeverLoopResult::MayContinueMainLoop }, (NeverLoopResult::Normal, _) | (_, NeverLoopResult::Normal) => NeverLoopResult::Normal, - (NeverLoopResult::Diverging, NeverLoopResult::Diverging) => NeverLoopResult::Diverging, + ( + NeverLoopResult::Diverging { + break_spans: mut break_spans1, + }, + NeverLoopResult::Diverging { + break_spans: mut break_spans2, + }, + ) => { + break_spans1.append(&mut break_spans2); + NeverLoopResult::Diverging { + break_spans: break_spans1, + } + }, } } @@ -136,7 +155,7 @@ fn never_loop_block<'tcx>( combine_seq_many(iter.map(|(e, els)| { let e = never_loop_expr(cx, e, local_labels, main_loop_id); // els is an else block in a let...else binding - els.map_or(e, |els| { + els.map_or(e.clone(), |els| { combine_seq(e, || match never_loop_block(cx, els, local_labels, main_loop_id) { // Returning MayContinueMainLoop here means that // we will not evaluate the rest of the body @@ -144,7 +163,7 @@ fn never_loop_block<'tcx>( // An else block always diverges, so the Normal case should not happen, // but the analysis is approximate so it might return Normal anyway. // Returning Normal here says that nothing more happens on the main path - NeverLoopResult::Diverging | NeverLoopResult::Normal => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. 
} | NeverLoopResult::Normal => NeverLoopResult::Normal, }) }) })) @@ -159,6 +178,45 @@ fn stmt_to_expr<'tcx>(stmt: &Stmt<'tcx>) -> Option<(&'tcx Expr<'tcx>, Option<&'t } } +fn stmt_source_span(stmt: &Stmt<'_>) -> Span { + let call_span = stmt.span.source_callsite(); + // if it is a macro call, the span will be missing the trailing semicolon + if stmt.span == call_span { + return call_span; + } + + // An expression without a trailing semi-colon (must have unit type). + if let StmtKind::Expr(..) = stmt.kind { + return call_span; + } + + call_span.with_hi(call_span.hi() + BytePos(1)) +} + +/// Returns a Vec of all the individual spans after the highlighted expression in a block +fn all_spans_after_expr(cx: &LateContext<'_>, expr: &Expr<'_>) -> Vec<Span> { + if let Node::Stmt(stmt) = cx.tcx.parent_hir_node(expr.hir_id) { + if let Node::Block(block) = cx.tcx.parent_hir_node(stmt.hir_id) { + return block + .stmts + .iter() + .skip_while(|inner| inner.hir_id != stmt.hir_id) + .map(stmt_source_span) + .chain(if let Some(e) = block.expr { vec![e.span] } else { vec![] }) + .collect(); + } + + return vec![stmt.span]; + } + + vec![] +} + +fn is_label_for_block(cx: &LateContext<'_>, dest: &Destination) -> bool { + dest.target_id + .is_ok_and(|hir_id| matches!(cx.tcx.hir_node(hir_id), Node::Block(_))) +} + #[allow(clippy::too_many_lines)] fn never_loop_expr<'tcx>( cx: &LateContext<'tcx>, @@ -197,7 +255,7 @@ fn never_loop_expr<'tcx>( ExprKind::Loop(b, _, _, _) => { // We don't attempt to track reachability after a loop, // just assume there may have been a break somewhere - absorb_break(never_loop_block(cx, b, local_labels, main_loop_id)) + absorb_break(&never_loop_block(cx, b, local_labels, main_loop_id)) }, ExprKind::If(e, e2, e3) => { let e1 = never_loop_expr(cx, e, local_labels, main_loop_id); @@ -212,9 +270,10 @@ fn never_loop_expr<'tcx>( ExprKind::Match(e, arms, _) => { let e = never_loop_expr(cx, e, local_labels, main_loop_id); combine_seq(e, || { - arms.iter().fold(NeverLoopResult::Diverging, |a, b| { - combine_branches(a, never_loop_expr(cx, b.body, local_labels, main_loop_id)) - }) + arms.iter() + .fold(NeverLoopResult::Diverging { break_spans: vec![] }, |a, b| { + combine_branches(a, never_loop_expr(cx, b.body, local_labels, main_loop_id)) + }) }) }, ExprKind::Block(b, _) => { @@ -224,7 +283,7 @@ fn never_loop_expr<'tcx>( let ret = never_loop_block(cx, b, local_labels, main_loop_id); let jumped_to = b.targeted_by_break && local_labels.pop().unwrap().1; match ret { - NeverLoopResult::Diverging if jumped_to => NeverLoopResult::Normal, + NeverLoopResult::Diverging { .. } if jumped_to => NeverLoopResult::Normal, _ => ret, } }, @@ -235,25 +294,39 @@ fn never_loop_expr<'tcx>( if id == main_loop_id { NeverLoopResult::MayContinueMainLoop } else { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { + break_spans: all_spans_after_expr(cx, expr), + } } }, - ExprKind::Break(_, e) | ExprKind::Ret(e) => { + ExprKind::Ret(e) => { let first = e.as_ref().map_or(NeverLoopResult::Normal, |e| { never_loop_expr(cx, e, local_labels, main_loop_id) }); combine_seq(first, || { // checks if break targets a block instead of a loop - if let ExprKind::Break(Destination { target_id: Ok(t), .. 
}, _) = expr.kind - && let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) - { - *reachable = true; + mark_block_as_reachable(expr, local_labels); + NeverLoopResult::Diverging { break_spans: vec![] } + }) + }, + ExprKind::Break(dest, e) => { + let first = e.as_ref().map_or(NeverLoopResult::Normal, |e| { + never_loop_expr(cx, e, local_labels, main_loop_id) + }); + combine_seq(first, || { + // checks if break targets a block instead of a loop + mark_block_as_reachable(expr, local_labels); + NeverLoopResult::Diverging { + break_spans: if is_label_for_block(cx, &dest) { + vec![] + } else { + all_spans_after_expr(cx, expr) + }, } - NeverLoopResult::Diverging }) }, ExprKind::Become(e) => combine_seq(never_loop_expr(cx, e, local_labels, main_loop_id), || { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { break_spans: vec![] } }), ExprKind::InlineAsm(asm) => combine_seq_many(asm.operands.iter().map(|(o, _)| match o { InlineAsmOperand::In { expr, .. } | InlineAsmOperand::InOut { expr, .. } => { @@ -283,12 +356,12 @@ fn never_loop_expr<'tcx>( }; let result = combine_seq(result, || { if cx.typeck_results().expr_ty(expr).is_never() { - NeverLoopResult::Diverging + NeverLoopResult::Diverging { break_spans: vec![] } } else { NeverLoopResult::Normal } }); - if let NeverLoopResult::Diverging = result + if let NeverLoopResult::Diverging { .. } = result && let Some(macro_call) = root_macro_call_first_node(cx, expr) && let Some(sym::todo_macro) = cx.tcx.get_diagnostic_name(macro_call.def_id) { @@ -316,3 +389,11 @@ fn for_to_if_let_sugg(cx: &LateContext<'_>, iterator: &Expr<'_>, pat: &Pat<'_>) format!("if let Some({pat_snippet}) = {iter_snippet}.next()") } + +fn mark_block_as_reachable(expr: &Expr<'_>, local_labels: &mut [(HirId, bool)]) { + if let ExprKind::Break(Destination { target_id: Ok(t), .. }, _) = expr.kind + && let Some((_, reachable)) = local_labels.iter_mut().find(|(label, _)| *label == t) + { + *reachable = true; + } +} diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs index c1a26c5a9c7..3aa449f6411 100644 --- a/src/tools/clippy/clippy_lints/src/macro_use.rs +++ b/src/tools/clippy/clippy_lints/src/macro_use.rs @@ -1,13 +1,14 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::source::snippet; use hir::def::{DefKind, Res}; +use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::{self as hir, AmbigArg}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::impl_lint_pass; +use rustc_span::Span; use rustc_span::edition::Edition; -use rustc_span::{Span, sym}; use std::collections::BTreeMap; declare_clippy_lint! 
{ @@ -99,15 +100,14 @@ impl LateLintPass<'_> for MacroUseImports { && let hir::ItemKind::Use(path, _kind) = &item.kind && let hir_id = item.hir_id() && let attrs = cx.tcx.hir_attrs(hir_id) - && let Some(mac_attr) = attrs.iter().find(|attr| attr.has_name(sym::macro_use)) + && let Some(mac_attr_span) = find_attr!(attrs, AttributeKind::MacroUse {span, ..} => *span) && let Some(Res::Def(DefKind::Mod, id)) = path.res.type_ns && !id.is_local() { for kid in cx.tcx.module_children(id) { if let Res::Def(DefKind::Macro(_mac_type), mac_id) = kid.res { - let span = mac_attr.span(); let def_path = cx.tcx.def_path_str(mac_id); - self.imports.push((def_path, span, hir_id)); + self.imports.push((def_path, mac_attr_span, hir_id)); } } } else if item.span.from_expansion() { diff --git a/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs index bac4b3d32f2..288f27db8ca 100644 --- a/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs +++ b/src/tools/clippy/clippy_lints/src/manual_abs_diff.rs @@ -5,7 +5,7 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::HasSession as _; use clippy_utils::sugg::Sugg; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{eq_expr_value, peel_blocks, span_contains_comment}; +use clippy_utils::{eq_expr_value, peel_blocks, peel_middle_ty_refs, span_contains_comment}; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -62,7 +62,7 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { && let ExprKind::Binary(op, rhs, lhs) = if_expr.cond.kind && let (BinOpKind::Gt | BinOpKind::Ge, mut a, mut b) | (BinOpKind::Lt | BinOpKind::Le, mut b, mut a) = (op.node, rhs, lhs) - && let Some(ty) = self.are_ty_eligible(cx, a, b) + && let Some((ty, b_n_refs)) = self.are_ty_eligible(cx, a, b) && is_sub_expr(cx, if_expr.then, a, b, ty) && is_sub_expr(cx, r#else, b, a, ty) { @@ -86,8 +86,9 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { } }; let sugg = format!( - "{}.abs_diff({})", + "{}.abs_diff({}{})", Sugg::hir(cx, a, "..").maybe_paren(), + "*".repeat(b_n_refs), Sugg::hir(cx, b, "..") ); diag.span_suggestion(expr.span, "replace with `abs_diff`", sugg, applicability); @@ -100,13 +101,15 @@ impl<'tcx> LateLintPass<'tcx> for ManualAbsDiff { impl ManualAbsDiff { /// Returns a type if `a` and `b` are both of it, and this lint can be applied to that /// type (currently, any primitive int, or a `Duration`) - fn are_ty_eligible<'tcx>(&self, cx: &LateContext<'tcx>, a: &Expr<'_>, b: &Expr<'_>) -> Option<Ty<'tcx>> { + fn are_ty_eligible<'tcx>(&self, cx: &LateContext<'tcx>, a: &Expr<'_>, b: &Expr<'_>) -> Option<(Ty<'tcx>, usize)> { let is_int = |ty: Ty<'_>| matches!(ty.kind(), ty::Uint(_) | ty::Int(_)) && self.msrv.meets(cx, msrvs::ABS_DIFF); let is_duration = |ty| is_type_diagnostic_item(cx, ty, sym::Duration) && self.msrv.meets(cx, msrvs::DURATION_ABS_DIFF); let a_ty = cx.typeck_results().expr_ty(a).peel_refs(); - (a_ty == cx.typeck_results().expr_ty(b).peel_refs() && (is_int(a_ty) || is_duration(a_ty))).then_some(a_ty) + let (b_ty, b_n_refs) = peel_middle_ty_refs(cx.typeck_results().expr_ty(b)); + + (a_ty == b_ty && (is_int(a_ty) || is_duration(a_ty))).then_some((a_ty, b_n_refs)) } } diff --git a/src/tools/clippy/clippy_lints/src/manual_assert.rs b/src/tools/clippy/clippy_lints/src/manual_assert.rs index 8378e15c581..ea6b01a053a 100644 --- a/src/tools/clippy/clippy_lints/src/manual_assert.rs +++ b/src/tools/clippy/clippy_lints/src/manual_assert.rs 
@@ -60,7 +60,8 @@ impl<'tcx> LateLintPass<'tcx> for ManualAssert { ExprKind::Unary(UnOp::Not, e) => (e, ""), _ => (cond, "!"), }; - let cond_sugg = sugg::Sugg::hir_with_applicability(cx, cond, "..", &mut applicability).maybe_paren(); + let cond_sugg = + sugg::Sugg::hir_with_context(cx, cond, expr.span.ctxt(), "..", &mut applicability).maybe_paren(); let semicolon = if is_parent_stmt(cx, expr.hir_id) { ";" } else { "" }; let sugg = format!("assert!({not}{cond_sugg}, {format_args_snip}){semicolon}"); // we show to the user the suggestion without the comments, but when applying the fix, include the diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs index 08c0caa4266..7e530e98ac4 100644 --- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs @@ -152,21 +152,26 @@ fn report_single_pattern( }) if lit.node.is_str() || lit.node.is_bytestr() => pat_ref_count + 1, _ => pat_ref_count, }; - // References are only implicitly added to the pattern, so no overflow here. - // e.g. will work: match &Some(_) { Some(_) => () } - // will not: match Some(_) { &Some(_) => () } - let ref_count_diff = ty_ref_count - pat_ref_count; - // Try to remove address of expressions first. - let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff); - let ref_count_diff = ref_count_diff - removed; + // References are implicitly removed when `deref_patterns` are used. + // They are implicitly added when match ergonomics are used. + let (ex, ref_or_deref_adjust) = if ty_ref_count > pat_ref_count { + let ref_count_diff = ty_ref_count - pat_ref_count; + + // Try to remove address of expressions first. + let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff); + + (ex, String::from(if ref_count_diff == removed { "" } else { "&" })) + } else { + (ex, "*".repeat(pat_ref_count - ty_ref_count)) + }; let msg = "you seem to be trying to use `match` for an equality check. Consider using `if`"; let sugg = format!( "if {} == {}{} {}{els_str}", snippet_with_context(cx, ex.span, ctxt, "..", &mut app).0, // PartialEq for different reference counts may not exist. 
- "&".repeat(ref_count_diff), + ref_or_deref_adjust, snippet_with_applicability(cx, arm.pat.span, "..", &mut app), expr_block(cx, arm.body, ctxt, "..", Some(expr.span), &mut app), ); diff --git a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs index a9f6a41c235..b8cc5ddd845 100644 --- a/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs +++ b/src/tools/clippy/clippy_lints/src/methods/bytes_count_to_len.rs @@ -14,7 +14,7 @@ pub(super) fn check<'tcx>( bytes_recv: &'tcx hir::Expr<'_>, ) { if let Some(bytes_id) = cx.typeck_results().type_dependent_def_id(count_recv.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(bytes_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(bytes_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_str() && let ty = cx.typeck_results().expr_ty(bytes_recv).peel_refs() && (ty.is_str() || is_type_lang_item(cx, ty, hir::LangItem::String)) diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs index 292fa08b598..6f9702f6c6c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs @@ -30,7 +30,7 @@ pub(super) fn check<'tcx>( } if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_str() && let ExprKind::Lit(Spanned { node: LitKind::Str(ext_literal, ..), diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs index 2ecf3eb8979..0a456d1057a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs +++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs @@ -28,7 +28,7 @@ pub(super) fn check( if cx .typeck_results() .type_dependent_def_id(expr.hir_id) - .and_then(|id| cx.tcx.trait_of_item(id)) + .and_then(|id| cx.tcx.trait_of_assoc(id)) .zip(cx.tcx.lang_items().clone_trait()) .is_none_or(|(x, y)| x != y) { diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs index 82e5a6d5a41..6e5da5bda8c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs @@ -2,13 +2,15 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::macros::{FormatArgsStorage, format_args_inputs_span, root_macro_call_first_node}; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::{is_type_diagnostic_item, is_type_lang_item}; +use clippy_utils::visitors::for_each_expr; +use clippy_utils::{contains_return, is_inside_always_const_context, peel_blocks}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_middle::ty; use rustc_span::symbol::sym; use rustc_span::{Span, Symbol}; use std::borrow::Cow; +use std::ops::ControlFlow; use super::EXPECT_FUN_CALL; @@ -23,10 +25,10 @@ pub(super) fn check<'tcx>( receiver: &'tcx hir::Expr<'tcx>, args: &'tcx [hir::Expr<'tcx>], ) { - // Strip `&`, `as_ref()` and `as_str()` off `arg` until we're left with either a `String` or + // Strip `{}`, `&`, `as_ref()` and `as_str()` off 
`arg` until we're left with either a `String` or // `&str` fn get_arg_root<'a>(cx: &LateContext<'_>, arg: &'a hir::Expr<'a>) -> &'a hir::Expr<'a> { - let mut arg_root = arg; + let mut arg_root = peel_blocks(arg); loop { arg_root = match &arg_root.kind { hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => expr, @@ -47,124 +49,68 @@ pub(super) fn check<'tcx>( arg_root } - // Only `&'static str` or `String` can be used directly in the `panic!`. Other types should be - // converted to string. - fn requires_to_string(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { - let arg_ty = cx.typeck_results().expr_ty(arg); - if is_type_lang_item(cx, arg_ty, hir::LangItem::String) { - return false; - } - if let ty::Ref(_, ty, ..) = arg_ty.kind() - && ty.is_str() - && can_be_static_str(cx, arg) - { - return false; - } - true + fn contains_call<'a>(cx: &LateContext<'a>, arg: &'a hir::Expr<'a>) -> bool { + for_each_expr(cx, arg, |expr| { + if matches!(expr.kind, hir::ExprKind::MethodCall { .. } | hir::ExprKind::Call { .. }) + && !is_inside_always_const_context(cx.tcx, expr.hir_id) + { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }) + .is_some() } - // Check if an expression could have type `&'static str`, knowing that it - // has type `&str` for some lifetime. - fn can_be_static_str(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { - match arg.kind { - hir::ExprKind::Lit(_) => true, - hir::ExprKind::Call(fun, _) => { - if let hir::ExprKind::Path(ref p) = fun.kind { - match cx.qpath_res(p, fun.hir_id) { - hir::def::Res::Def(hir::def::DefKind::Fn | hir::def::DefKind::AssocFn, def_id) => matches!( - cx.tcx.fn_sig(def_id).instantiate_identity().output().skip_binder().kind(), - ty::Ref(re, ..) if re.is_static(), - ), - _ => false, - } - } else { - false - } - }, - hir::ExprKind::MethodCall(..) => { - cx.typeck_results() - .type_dependent_def_id(arg.hir_id) - .is_some_and(|method_id| { - matches!( - cx.tcx.fn_sig(method_id).instantiate_identity().output().skip_binder().kind(), - ty::Ref(re, ..) if re.is_static() - ) - }) - }, - hir::ExprKind::Path(ref p) => matches!( - cx.qpath_res(p, arg.hir_id), - hir::def::Res::Def(hir::def::DefKind::Const | hir::def::DefKind::Static { .. }, _) - ), - _ => false, - } - } + if name == sym::expect + && let [arg] = args + && let arg_root = get_arg_root(cx, arg) + && contains_call(cx, arg_root) + && !contains_return(arg_root) + { + let receiver_type = cx.typeck_results().expr_ty_adjusted(receiver); + let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym::Option) { + "||" + } else if is_type_diagnostic_item(cx, receiver_type, sym::Result) { + "|_|" + } else { + return; + }; - fn is_call(node: &hir::ExprKind<'_>) -> bool { - match node { - hir::ExprKind::AddrOf(hir::BorrowKind::Ref, _, expr) => { - is_call(&expr.kind) - }, - hir::ExprKind::Call(..) - | hir::ExprKind::MethodCall(..) - // These variants are debatable or require further examination - | hir::ExprKind::If(..) - | hir::ExprKind::Match(..) - | hir::ExprKind::Block{ .. 
} => true, - _ => false, - } - } + let span_replace_word = method_span.with_hi(expr.span.hi()); - if args.len() != 1 || name != sym::expect || !is_call(&args[0].kind) { - return; - } + let mut applicability = Applicability::MachineApplicable; - let receiver_type = cx.typeck_results().expr_ty_adjusted(receiver); - let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym::Option) { - "||" - } else if is_type_diagnostic_item(cx, receiver_type, sym::Result) { - "|_|" - } else { - return; - }; - - let arg_root = get_arg_root(cx, &args[0]); - - let span_replace_word = method_span.with_hi(expr.span.hi()); - - let mut applicability = Applicability::MachineApplicable; - - // Special handling for `format!` as arg_root - if let Some(macro_call) = root_macro_call_first_node(cx, arg_root) { - if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) - && let Some(format_args) = format_args_storage.get(cx, arg_root, macro_call.expn) - { - let span = format_args_inputs_span(format_args); - let sugg = snippet_with_applicability(cx, span, "..", &mut applicability); - span_lint_and_sugg( - cx, - EXPECT_FUN_CALL, - span_replace_word, - format!("function call inside of `{name}`"), - "try", - format!("unwrap_or_else({closure_args} panic!({sugg}))"), - applicability, - ); + // Special handling for `format!` as arg_root + if let Some(macro_call) = root_macro_call_first_node(cx, arg_root) { + if cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) + && let Some(format_args) = format_args_storage.get(cx, arg_root, macro_call.expn) + { + let span = format_args_inputs_span(format_args); + let sugg = snippet_with_applicability(cx, span, "..", &mut applicability); + span_lint_and_sugg( + cx, + EXPECT_FUN_CALL, + span_replace_word, + format!("function call inside of `{name}`"), + "try", + format!("unwrap_or_else({closure_args} panic!({sugg}))"), + applicability, + ); + } + return; } - return; - } - let mut arg_root_snippet: Cow<'_, _> = snippet_with_applicability(cx, arg_root.span, "..", &mut applicability); - if requires_to_string(cx, arg_root) { - arg_root_snippet.to_mut().push_str(".to_string()"); - } + let arg_root_snippet: Cow<'_, _> = snippet_with_applicability(cx, arg_root.span, "..", &mut applicability); - span_lint_and_sugg( - cx, - EXPECT_FUN_CALL, - span_replace_word, - format!("function call inside of `{name}`"), - "try", - format!("unwrap_or_else({closure_args} {{ panic!(\"{{}}\", {arg_root_snippet}) }})"), - applicability, - ); + span_lint_and_sugg( + cx, + EXPECT_FUN_CALL, + span_replace_word, + format!("function call inside of `{name}`"), + "try", + format!("unwrap_or_else({closure_args} panic!(\"{{}}\", {arg_root_snippet}))"), + applicability, + ); + } } diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs index 965993808f6..94944bd9445 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_bool_then.rs @@ -1,6 +1,6 @@ use super::FILTER_MAP_BOOL_THEN; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::SpanRangeExt; +use clippy_utils::source::{SpanRangeExt, snippet_with_context}; use clippy_utils::ty::is_copy; use clippy_utils::{ CaptureKind, can_move_expr_to_closure, contains_return, is_from_proc_macro, is_trait_method, peel_blocks, @@ -45,9 +45,11 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: & .filter(|adj| matches!(adj.kind, 
Adjust::Deref(_))) .count() && let Some(param_snippet) = param.span.get_source_text(cx) - && let Some(filter) = recv.span.get_source_text(cx) - && let Some(map) = then_body.span.get_source_text(cx) { + let mut applicability = Applicability::MachineApplicable; + let (filter, _) = snippet_with_context(cx, recv.span, expr.span.ctxt(), "..", &mut applicability); + let (map, _) = snippet_with_context(cx, then_body.span, expr.span.ctxt(), "..", &mut applicability); + span_lint_and_then( cx, FILTER_MAP_BOOL_THEN, @@ -62,7 +64,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, arg: & "filter(|&{param_snippet}| {derefs}{filter}).map(|{param_snippet}| {map})", derefs = "*".repeat(needed_derefs) ), - Applicability::MachineApplicable, + applicability, ); } else { diag.help("consider using `filter` then `map` instead"); diff --git a/src/tools/clippy/clippy_lints/src/methods/get_first.rs b/src/tools/clippy/clippy_lints/src/methods/get_first.rs index f4465e654c2..2e1d71ce284 100644 --- a/src/tools/clippy/clippy_lints/src/methods/get_first.rs +++ b/src/tools/clippy/clippy_lints/src/methods/get_first.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( arg: &'tcx hir::Expr<'_>, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && let identity = cx.tcx.type_of(impl_id).instantiate_identity() && let hir::ExprKind::Lit(Spanned { node: LitKind::Int(Pu128(0), _), diff --git a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs index 9724463f0c0..efa8cee58df 100644 --- a/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs +++ b/src/tools/clippy/clippy_lints/src/methods/implicit_clone.rs @@ -50,7 +50,7 @@ pub fn is_clone_like(cx: &LateContext<'_>, method_name: Symbol, method_def_id: h sym::to_path_buf => is_diag_item_method(cx, method_def_id, sym::Path), sym::to_vec => cx .tcx - .impl_of_method(method_def_id) + .impl_of_assoc(method_def_id) .filter(|&impl_did| { cx.tcx.type_of(impl_did).instantiate_identity().is_slice() && cx.tcx.impl_trait_ref(impl_did).is_none() }) diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs index f5fe4316eb0..f851ebe91f3 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs @@ -44,9 +44,9 @@ pub(super) fn check<'tcx>( let typeck = cx.typeck_results(); if let Some(iter_id) = cx.tcx.get_diagnostic_item(sym::Iterator) && let Some(method_id) = typeck.type_dependent_def_id(expr.hir_id) - && cx.tcx.trait_of_item(method_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(method_id) == Some(iter_id) && let Some(method_id) = typeck.type_dependent_def_id(cloned_call.hir_id) - && cx.tcx.trait_of_item(method_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(method_id) == Some(iter_id) && let cloned_recv_ty = typeck.expr_ty_adjusted(cloned_recv) && let Some(iter_assoc_ty) = cx.get_associated_type(cloned_recv_ty, iter_id, sym::Item) && matches!(*iter_assoc_ty.kind(), ty::Ref(_, ty, _) if !is_copy(cx, ty)) diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs b/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs index 21f2ce8b7c9..bc96815944d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs +++ 
b/src/tools/clippy/clippy_lints/src/methods/manual_inspect.rs @@ -100,7 +100,6 @@ pub(crate) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, arg: &Expr<'_>, name: match x { UseKind::Return(s) => edits.push((s.with_leading_whitespace(cx).with_ctxt(s.ctxt()), String::new())), UseKind::Borrowed(s) => { - #[expect(clippy::range_plus_one)] let range = s.map_range(cx, |_, src, range| { let src = src.get(range.clone())?; let trimmed = src.trim_start_matches([' ', '\t', '\n', '\r', '(']); diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs index c286c5faaed..077957fa44d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_ok_or.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( map_expr: &'tcx Expr<'_>, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Option) && let ExprKind::Call(err_path, [err_arg]) = or_expr.kind && is_res_lang_ctor(cx, path_res(cx, err_path), ResultErr) diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs index 8167e4f9605..a811dd1cee1 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs @@ -59,7 +59,7 @@ pub(super) fn check( && is_type_lang_item(cx, cx.typeck_results().expr_ty(collect_expr), LangItem::String) && let Some(take_id) = cx.typeck_results().type_dependent_def_id(take_expr.hir_id) && let Some(iter_trait_id) = cx.tcx.get_diagnostic_item(sym::Iterator) - && cx.tcx.trait_of_item(take_id) == Some(iter_trait_id) + && cx.tcx.trait_of_assoc(take_id) == Some(iter_trait_id) && let Some(repeat_kind) = parse_repeat_arg(cx, repeat_arg) && let ctxt = collect_expr.span.ctxt() && ctxt == take_expr.span.ctxt() diff --git a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs index 333a33f7527..748be9bfcc6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_clone.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_clone.rs @@ -23,7 +23,7 @@ fn should_run_lint(cx: &LateContext<'_>, e: &hir::Expr<'_>, method_id: DefId) -> return true; } // We check if it's an `Option` or a `Result`. 
- if let Some(id) = cx.tcx.impl_of_method(method_id) { + if let Some(id) = cx.tcx.impl_of_assoc(method_id) { let identity = cx.tcx.type_of(id).instantiate_identity(); if !is_type_diagnostic_item(cx, identity, sym::Option) && !is_type_diagnostic_item(cx, identity, sym::Result) { return false; @@ -69,7 +69,7 @@ pub(super) fn check(cx: &LateContext<'_>, e: &hir::Expr<'_>, recv: &hir::Expr<'_ hir::ExprKind::MethodCall(method, obj, [], _) => { if ident_eq(name, obj) && method.ident.name == sym::clone && let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && cx.tcx.lang_items().clone_trait() == Some(trait_id) // no autoderefs && !cx.typeck_results().expr_adjustments(obj).iter() diff --git a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs index 5d0d4dae35f..41beda9c5cb 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_err_ignore.rs @@ -8,7 +8,7 @@ use super::MAP_ERR_IGNORE; pub(super) fn check(cx: &LateContext<'_>, e: &Expr<'_>, arg: &Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Result) && let ExprKind::Closure(&Closure { capture_clause: CaptureBy::Ref, diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index f2dabdd3438..bcd54557331 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -3859,6 +3859,7 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does /// Checks for usage of `option.map(f).unwrap_or_default()` and `result.map(f).unwrap_or_default()` where f is a function or closure that returns the `bool` type. + /// Also checks for equality comparisons like `option.map(f) == Some(true)` and `result.map(f) == Ok(true)`. /// /// ### Why is this bad? /// Readability. These can be written more concisely as `option.is_some_and(f)` and `result.is_ok_and(f)`. @@ -3869,6 +3870,11 @@ declare_clippy_lint! { /// # let result: Result<usize, ()> = Ok(1); /// option.map(|a| a > 10).unwrap_or_default(); /// result.map(|a| a > 10).unwrap_or_default(); + /// + /// option.map(|a| a > 10) == Some(true); + /// result.map(|a| a > 10) == Ok(true); + /// option.map(|a| a > 10) != Some(true); + /// result.map(|a| a > 10) != Ok(true); /// ``` /// Use instead: /// ```no_run @@ -3876,11 +3882,16 @@ declare_clippy_lint! { /// # let result: Result<usize, ()> = Ok(1); /// option.is_some_and(|a| a > 10); /// result.is_ok_and(|a| a > 10); + /// + /// option.is_some_and(|a| a > 10); + /// result.is_ok_and(|a| a > 10); + /// option.is_none_or(|a| a > 10); + /// !result.is_ok_and(|a| a > 10); /// ``` #[clippy::version = "1.77.0"] pub MANUAL_IS_VARIANT_AND, pedantic, - "using `.map(f).unwrap_or_default()`, which is more succinctly expressed as `is_some_and(f)` or `is_ok_and(f)`" + "using `.map(f).unwrap_or_default()` or `.map(f) == Some/Ok(true)`, which are more succinctly expressed as `is_some_and(f)` or `is_ok_and(f)`" } declare_clippy_lint! 
{ @@ -5275,10 +5286,6 @@ impl Methods { } map_identity::check(cx, expr, recv, m_arg, name, span); manual_inspect::check(cx, expr, m_arg, name, span, self.msrv); - crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); - }, - (sym::map_break | sym::map_continue, [m_arg]) => { - crate::useless_conversion::check_function_application(cx, expr, recv, m_arg); }, (sym::map_or, [def, map]) => { option_map_or_none::check(cx, expr, recv, def, map); @@ -5546,7 +5553,7 @@ impl Methods { // Handle method calls whose receiver and arguments may come from expansion if let ExprKind::MethodCall(path, recv, args, _call_span) = expr.kind { match (path.ident.name, args) { - (sym::expect, [_]) if !matches!(method_call(recv), Some((sym::ok | sym::err, _, [], _, _))) => { + (sym::expect, [_]) => { unwrap_expect_used::check( cx, expr, diff --git a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs index 320523aceb6..4235af882b0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs @@ -13,7 +13,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'tcx>, recv: &' && let (_, ref_depth, Mutability::Mut) = peel_mid_ty_refs_is_mutable(cx.typeck_results().expr_ty(recv)) && ref_depth >= 1 && let Some(method_id) = cx.typeck_results().type_dependent_def_id(ex.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Mutex) { span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_lints/src/methods/open_options.rs b/src/tools/clippy/clippy_lints/src/methods/open_options.rs index 9b5f138295c..37a8e25bef9 100644 --- a/src/tools/clippy/clippy_lints/src/methods/open_options.rs +++ b/src/tools/clippy/clippy_lints/src/methods/open_options.rs @@ -18,7 +18,7 @@ fn is_open_options(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_open_options(cx, cx.tcx.type_of(impl_id).instantiate_identity()) { let mut options = Vec::new(); @@ -111,7 +111,7 @@ fn get_open_options( // This might be a user defined extension trait with a method like `truncate_write` // which would be a false positive if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(argument.hir_id) - && cx.tcx.trait_of_item(method_def_id).is_some() + && cx.tcx.trait_of_assoc(method_def_id).is_some() { return false; } diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs index 6ce7dd3d4d0..04f0e3c0479 100644 --- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs @@ -242,15 +242,23 @@ pub(super) fn check<'tcx>( let inner_arg = peel_blocks(arg); for_each_expr(cx, inner_arg, |ex| { let is_top_most_expr = ex.hir_id == inner_arg.hir_id; - if let hir::ExprKind::Call(fun, fun_args) = ex.kind { - let fun_span = if fun_args.is_empty() && is_top_most_expr { - Some(fun.span) - } else { - None - }; - if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, fun_span) { - return 
ControlFlow::Break(()); - } + match ex.kind { + hir::ExprKind::Call(fun, fun_args) => { + let fun_span = if fun_args.is_empty() && is_top_most_expr { + Some(fun.span) + } else { + None + }; + if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, fun_span) { + return ControlFlow::Break(()); + } + }, + hir::ExprKind::MethodCall(..) => { + if check_or_fn_call(cx, name, method_span, receiver, arg, Some(lambda), expr.span, None) { + return ControlFlow::Break(()); + } + }, + _ => {}, } ControlFlow::Continue(()) }); diff --git a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs index 38d9c5f1677..32752ef7435 100644 --- a/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs +++ b/src/tools/clippy/clippy_lints/src/methods/path_buf_push_overwrite.rs @@ -11,7 +11,7 @@ use super::PATH_BUF_PUSH_OVERWRITE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, arg: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::PathBuf) && let ExprKind::Lit(lit) = arg.kind && let LitKind::Str(ref path_lit, _) = lit.node diff --git a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs index aef14435d8a..17d1a6abde0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs +++ b/src/tools/clippy/clippy_lints/src/methods/stable_sort_primitive.rs @@ -9,7 +9,7 @@ use super::STABLE_SORT_PRIMITIVE; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>, recv: &'tcx Expr<'_>) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(e.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_slice() && let Some(slice_type) = is_slice_of_primitives(cx, recv) { diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs index 6f78d6c6128..51dd4ac313a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs @@ -286,7 +286,7 @@ fn parse_iter_usage<'tcx>( let iter_id = cx.tcx.get_diagnostic_item(sym::Iterator)?; match (name.ident.name, args) { - (sym::next, []) if cx.tcx.trait_of_item(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span), + (sym::next, []) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => (IterUsageKind::Nth(0), e.span), (sym::next_tuple, []) => { return if paths::ITERTOOLS_NEXT_TUPLE.matches(cx, did) && let ty::Adt(adt_def, subs) = cx.typeck_results().expr_ty(e).kind() @@ -303,7 +303,7 @@ fn parse_iter_usage<'tcx>( None }; }, - (sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_item(did) == Some(iter_id) => { + (sym::nth | sym::skip, [idx_expr]) if cx.tcx.trait_of_assoc(did) == Some(iter_id) => { if let Some(Constant::Int(idx)) = ConstEvalCtxt::new(cx).eval(idx_expr) { let span = if name.ident.as_str() == "nth" { e.span @@ -312,7 +312,7 @@ fn parse_iter_usage<'tcx>( && next_name.ident.name == sym::next && next_expr.span.ctxt() == ctxt && let Some(next_id) = cx.typeck_results().type_dependent_def_id(next_expr.hir_id) - && 
cx.tcx.trait_of_item(next_id) == Some(iter_id) + && cx.tcx.trait_of_assoc(next_id) == Some(iter_id) { next_expr.span } else { diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs index f8b6d4349fb..9876681ddbb 100644 --- a/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_splitn.rs @@ -10,7 +10,7 @@ use super::SUSPICIOUS_SPLITN; pub(super) fn check(cx: &LateContext<'_>, method_name: Symbol, expr: &Expr<'_>, self_arg: &Expr<'_>, count: u128) { if count <= 1 && let Some(call_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(call_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(call_id) && cx.tcx.impl_trait_ref(impl_id).is_none() && let self_ty = cx.tcx.type_of(impl_id).instantiate_identity() && (self_ty.is_slice() || self_ty.is_str()) diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs index ce81282ddfe..0ec2d8b4fc3 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fallible_conversions.rs @@ -165,7 +165,7 @@ pub(super) fn check_method(cx: &LateContext<'_>, expr: &Expr<'_>) { pub(super) fn check_function(cx: &LateContext<'_>, expr: &Expr<'_>, callee: &Expr<'_>) { if let ExprKind::Path(ref qpath) = callee.kind && let Some(item_def_id) = cx.qpath_res(qpath, callee.hir_id).opt_def_id() - && let Some(trait_def_id) = cx.tcx.trait_of_item(item_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(item_def_id) { let qpath_spans = match qpath { QPath::Resolved(_, path) => { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs index dbff08bc51c..1de9f6ab497 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_sort_by.rs @@ -114,7 +114,7 @@ fn mirrored_exprs(a_expr: &Expr<'_>, a_ident: &Ident, b_expr: &Expr<'_>, b_ident fn detect_lint(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) -> Option<LintTrigger> { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && cx.tcx.type_of(impl_id).instantiate_identity().is_slice() && let ExprKind::Closure(&Closure { body, .. 
}) = arg.kind && let closure_body = cx.tcx.hir_body(body) diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs index 769526d131b..54f45263275 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -694,7 +694,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx sym::to_string => cx.tcx.is_diagnostic_item(sym::to_string_method, method_def_id), sym::to_vec => cx .tcx - .impl_of_method(method_def_id) + .impl_of_assoc(method_def_id) .filter(|&impl_did| cx.tcx.type_of(impl_did).instantiate_identity().is_slice()) .is_some(), _ => false, @@ -734,7 +734,7 @@ fn check_if_applicable_to_argument<'tcx>(cx: &LateContext<'tcx>, arg: &Expr<'tcx fn check_borrow_predicate<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'tcx>) { if let ExprKind::MethodCall(_, caller, &[arg], _) = expr.kind && let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && cx.tcx.trait_of_item(method_def_id).is_none() + && cx.tcx.trait_of_assoc(method_def_id).is_none() && let Some(borrow_id) = cx.tcx.get_diagnostic_item(sym::Borrow) && cx.tcx.predicates_of(method_def_id).predicates.iter().any(|(pred, _)| { if let ClauseKind::Trait(trait_pred) = pred.kind().skip_binder() diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs index d30c12e0c48..38fad239f67 100644 --- a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs +++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs @@ -79,7 +79,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: Symbo applicability, ); } - } else if let Some(impl_id) = cx.tcx.impl_of_method(def_id) + } else if let Some(impl_id) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_id).instantiate_identity().ty_adt_def() && matches!(cx.tcx.get_diagnostic_name(adt.did()), Some(sym::Option | sym::Result)) { @@ -131,7 +131,7 @@ fn is_calling_clone(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool { hir::ExprKind::MethodCall(method, obj, [], _) => { if method.ident.name == sym::clone && let Some(fn_id) = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) // We check it's the `Clone` trait. 
&& cx.tcx.lang_items().clone_trait().is_some_and(|id| id == trait_id) // no autoderefs diff --git a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs index 5ea4ada128a..bfb481f4fc0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs +++ b/src/tools/clippy/clippy_lints/src/methods/vec_resize_to_zero.rs @@ -18,7 +18,7 @@ pub(super) fn check<'tcx>( name_span: Span, ) { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(impl_id) = cx.tcx.impl_of_method(method_id) + && let Some(impl_id) = cx.tcx.impl_of_assoc(method_id) && is_type_diagnostic_item(cx, cx.tcx.type_of(impl_id).instantiate_identity(), sym::Vec) && let ExprKind::Lit(Spanned { node: LitKind::Int(Pu128(0), _), diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs index 760ecf07589..18e2b384a46 100644 --- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs +++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs @@ -7,9 +7,7 @@ use clippy_utils::{is_path_lang_item, sym}; use rustc_ast::LitKind; use rustc_data_structures::fx::FxHashSet; use rustc_hir::def::{DefKind, Res}; -use rustc_hir::{ - Block, Expr, ExprKind, Impl, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData, -}; +use rustc_hir::{Block, Expr, ExprKind, Impl, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{Ty, TypeckResults}; use rustc_session::declare_lint_pass; diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs index c4a3d10299b..5a5025973b5 100644 --- a/src/tools/clippy/clippy_lints/src/missing_inline.rs +++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint; use rustc_attr_data_structures::{AttributeKind, find_attr}; -use rustc_hir as hir; -use rustc_hir::Attribute; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, Attribute}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty::AssocItemContainer; use rustc_session::declare_lint_pass; @@ -97,11 +97,23 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { } match it.kind { hir::ItemKind::Fn { .. } => { + if fn_is_externally_exported(cx, it.owner_id.to_def_id()) { + return; + } + let desc = "a function"; let attrs = cx.tcx.hir_attrs(it.hir_id()); check_missing_inline_attrs(cx, attrs, it.span, desc); }, - hir::ItemKind::Trait(ref _constness, ref _is_auto, ref _unsafe, _ident, _generics, _bounds, trait_items) => { + hir::ItemKind::Trait( + ref _constness, + ref _is_auto, + ref _unsafe, + _ident, + _generics, + _bounds, + trait_items, + ) => { // note: we need to check if the trait is exported so we can't use // `LateLintPass::check_trait_item` here. 
for &tit in trait_items { @@ -173,3 +185,10 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { check_missing_inline_attrs(cx, attrs, impl_item.span, desc); } } + +/// Checks if this function is externally exported, where #[inline] wouldn't have the desired effect +/// and a rustc warning would be triggered, see #15301 +fn fn_is_externally_exported(cx: &LateContext<'_>, def_id: DefId) -> bool { + let attrs = cx.tcx.codegen_fn_attrs(def_id); + attrs.contains_extern_indicator() +} diff --git a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs index fa61d0fa11a..399bf4e1806 100644 --- a/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs +++ b/src/tools/clippy/clippy_lints/src/missing_trait_methods.rs @@ -66,7 +66,9 @@ impl<'tcx> LateLintPass<'tcx> for MissingTraitMethods { }) = item.kind && let Some(trait_id) = trait_ref.trait_def_id() { - let trait_item_ids: DefIdSet = cx.tcx.associated_items(item.owner_id) + let trait_item_ids: DefIdSet = cx + .tcx + .associated_items(item.owner_id) .in_definition_order() .filter_map(|assoc_item| assoc_item.trait_item_def_id) .collect(); diff --git a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs index d9f4fb271fb..a489c0a4a5a 100644 --- a/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs +++ b/src/tools/clippy/clippy_lints/src/mixed_read_write_in_expression.rs @@ -171,14 +171,11 @@ impl<'tcx> Visitor<'tcx> for DivergenceVisitor<'_, 'tcx> { ExprKind::Continue(_) | ExprKind::Break(_, _) | ExprKind::Ret(_) => self.report_diverging_sub_expr(e), ExprKind::Call(func, _) => { let typ = self.cx.typeck_results().expr_ty(func); - match typ.kind() { - ty::FnDef(..) | ty::FnPtr(..) => { - let sig = typ.fn_sig(self.cx.tcx); - if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { - self.report_diverging_sub_expr(e); - } - }, - _ => {}, + if typ.is_fn() { + let sig = typ.fn_sig(self.cx.tcx); + if self.cx.tcx.instantiate_bound_regions_with_erased(sig).output().kind() == &ty::Never { + self.report_diverging_sub_expr(e); + } } }, ExprKind::MethodCall(..) => { diff --git a/src/tools/clippy/clippy_lints/src/mut_reference.rs b/src/tools/clippy/clippy_lints/src/mut_reference.rs index 2f1ab3d2652..31f51b45754 100644 --- a/src/tools/clippy/clippy_lints/src/mut_reference.rs +++ b/src/tools/clippy/clippy_lints/src/mut_reference.rs @@ -79,7 +79,7 @@ fn check_arguments<'tcx>( name: &str, fn_kind: &str, ) { - if let ty::FnDef(..) | ty::FnPtr(..) 
= type_definition.kind() { + if type_definition.is_fn() { let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs(); for (argument, parameter) in iter::zip(arguments, parameters) { if let ty::Ref(_, _, Mutability::Not) | ty::RawPtr(_, Mutability::Not) = parameter.kind() diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs index 6a7c8436bad..a67545e419c 100644 --- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs +++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs @@ -6,7 +6,7 @@ use rustc_session::declare_lint_pass; use rustc_span::Span; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::snippet_with_applicability; +use clippy_utils::source::{snippet_with_applicability, snippet_with_context}; use clippy_utils::ty::has_iter_method; use clippy_utils::{is_trait_method, sym}; @@ -101,18 +101,23 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessForEach { let body_param_sugg = snippet_with_applicability(cx, body.params[0].pat.span, "..", &mut applicability); let for_each_rev_sugg = snippet_with_applicability(cx, for_each_recv.span, "..", &mut applicability); - let body_value_sugg = snippet_with_applicability(cx, body.value.span, "..", &mut applicability); + let (body_value_sugg, is_macro_call) = + snippet_with_context(cx, body.value.span, for_each_recv.span.ctxt(), "..", &mut applicability); let sugg = format!( "for {} in {} {}", body_param_sugg, for_each_rev_sugg, - match body.value.kind { - ExprKind::Block(block, _) if is_let_desugar(block) => { - format!("{{ {body_value_sugg} }}") - }, - ExprKind::Block(_, _) => body_value_sugg.to_string(), - _ => format!("{{ {body_value_sugg}; }}"), + if is_macro_call { + format!("{{ {body_value_sugg}; }}") + } else { + match body.value.kind { + ExprKind::Block(block, _) if is_let_desugar(block) => { + format!("{{ {body_value_sugg} }}") + }, + ExprKind::Block(_, _) => body_value_sugg.to_string(), + _ => format!("{{ {body_value_sugg}; }}"), + } } ); diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs index c97ecce75b4..7b057998063 100644 --- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs @@ -246,8 +246,10 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { for (span, suggestion) in clone_spans { diag.span_suggestion( span, - span.get_source_text(cx) - .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), + span.get_source_text(cx).map_or_else( + || "change the call to".to_owned(), + |src| format!("change `{src}` to"), + ), suggestion, Applicability::Unspecified, ); @@ -275,8 +277,10 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { for (span, suggestion) in clone_spans { diag.span_suggestion( span, - span.get_source_text(cx) - .map_or("change the call to".to_owned(), |src| format!("change `{src}` to")), + span.get_source_text(cx).map_or_else( + || "change the call to".to_owned(), + |src| format!("change `{src}` to"), + ), suggestion, Applicability::Unspecified, ); @@ -308,9 +312,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { /// Functions marked with these attributes must have the exact signature. 
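The `map_or` to `map_or_else` change above makes the fallback message lazy, so the default `String` is only built when no source text is available. The same idea in plain std terms (sketch):

    fn main() {
        let src: Option<&str> = None;
        // the closure form only allocates the fallback when `src` is `None`
        let label = src.map_or_else(
            || "change the call to".to_owned(),
            |s| format!("change `{s}` to"),
        );
        println!("{label}");
    }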
pub(crate) fn requires_exact_signature(attrs: &[Attribute]) -> bool { attrs.iter().any(|attr| { - [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive] - .iter() - .any(|&allow| attr.has_name(allow)) + attr.is_proc_macro_attr() }) } diff --git a/src/tools/clippy/clippy_lints/src/new_without_default.rs b/src/tools/clippy/clippy_lints/src/new_without_default.rs index 3b86f1d1f59..b598a390005 100644 --- a/src/tools/clippy/clippy_lints/src/new_without_default.rs +++ b/src/tools/clippy/clippy_lints/src/new_without_default.rs @@ -65,11 +65,16 @@ impl<'tcx> LateLintPass<'tcx> for NewWithoutDefault { .. }) = item.kind { - for assoc_item in cx.tcx.associated_items(item.owner_id.def_id) + for assoc_item in cx + .tcx + .associated_items(item.owner_id.def_id) .filter_by_name_unhygienic(sym::new) { if let AssocKind::Fn { has_self: false, .. } = assoc_item.kind { - let impl_item = cx.tcx.hir_node_by_def_id(assoc_item.def_id.expect_local()).expect_impl_item(); + let impl_item = cx + .tcx + .hir_node_by_def_id(assoc_item.def_id.expect_local()) + .expect_impl_item(); if impl_item.span.in_external_macro(cx.sess().source_map()) { return; } diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs index 5f10e1968f1..388c029c9ef 100644 --- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs +++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs @@ -338,7 +338,7 @@ impl<'tcx> NonCopyConst<'tcx> { tcx: TyCtxt<'tcx>, typing_env: TypingEnv<'tcx>, ty: Ty<'tcx>, - val: ConstValue<'tcx>, + val: ConstValue, ) -> Result<bool, ()> { let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty); match self.is_ty_freeze(tcx, typing_env, ty) { @@ -477,7 +477,7 @@ impl<'tcx> NonCopyConst<'tcx> { typing_env: TypingEnv<'tcx>, typeck: &'tcx TypeckResults<'tcx>, mut src_expr: &'tcx Expr<'tcx>, - mut val: ConstValue<'tcx>, + mut val: ConstValue, ) -> Result<Option<BorrowSource<'tcx>>, ()> { let mut parents = tcx.hir_parent_iter(src_expr.hir_id); let mut ty = typeck.expr_ty(src_expr); diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs index a78a342d4fe..466beb04b07 100644 --- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs +++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs @@ -3,12 +3,11 @@ use clippy_config::Conf; use clippy_utils::consts::{ConstEvalCtxt, Constant}; use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary}; +use clippy_utils::{expr_or_init, is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary, sym}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::symbol::sym; use rustc_span::{Span, Symbol}; use {rustc_ast as ast, rustc_hir as hir}; @@ -89,6 +88,18 @@ impl ArithmeticSideEffects { self.allowed_unary.contains(ty_string_elem) } + fn is_non_zero_u(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { + if let ty::Adt(adt, substs) = ty.kind() + && cx.tcx.is_diagnostic_item(sym::NonZero, adt.did()) + && let int_type = substs.type_at(0) + && matches!(int_type.kind(), ty::Uint(_)) + { + true + } else { + false + } + } + /// Verifies built-in types that have specific allowed operations fn 
has_specific_allowed_type_and_operation<'tcx>( cx: &LateContext<'tcx>, @@ -97,33 +108,12 @@ impl ArithmeticSideEffects { rhs_ty: Ty<'tcx>, ) -> bool { let is_div_or_rem = matches!(op, hir::BinOpKind::Div | hir::BinOpKind::Rem); - let is_non_zero_u = |cx: &LateContext<'tcx>, ty: Ty<'tcx>| { - let tcx = cx.tcx; - - let ty::Adt(adt, substs) = ty.kind() else { return false }; - - if !tcx.is_diagnostic_item(sym::NonZero, adt.did()) { - return false; - } - - let int_type = substs.type_at(0); - let unsigned_int_types = [ - tcx.types.u8, - tcx.types.u16, - tcx.types.u32, - tcx.types.u64, - tcx.types.u128, - tcx.types.usize, - ]; - - unsigned_int_types.contains(&int_type) - }; let is_sat_or_wrap = |ty: Ty<'_>| { is_type_diagnostic_item(cx, ty, sym::Saturating) || is_type_diagnostic_item(cx, ty, sym::Wrapping) }; // If the RHS is `NonZero<u*>`, then division or module by zero will never occur. - if is_non_zero_u(cx, rhs_ty) && is_div_or_rem { + if Self::is_non_zero_u(cx, rhs_ty) && is_div_or_rem { return true; } @@ -219,6 +209,18 @@ impl ArithmeticSideEffects { let (mut actual_rhs, rhs_ref_counter) = peel_hir_expr_refs(rhs); actual_lhs = expr_or_init(cx, actual_lhs); actual_rhs = expr_or_init(cx, actual_rhs); + + // `NonZeroU*.get() - 1`, will never overflow + if let hir::BinOpKind::Sub = op + && let hir::ExprKind::MethodCall(method, receiver, [], _) = actual_lhs.kind + && method.ident.name == sym::get + && let receiver_ty = cx.typeck_results().expr_ty(receiver).peel_refs() + && Self::is_non_zero_u(cx, receiver_ty) + && let Some(1) = Self::literal_integer(cx, actual_rhs) + { + return; + } + let lhs_ty = cx.typeck_results().expr_ty(actual_lhs).peel_refs(); let rhs_ty = cx.typeck_results().expr_ty_adjusted(actual_rhs).peel_refs(); if self.has_allowed_binary(lhs_ty, rhs_ty) { @@ -227,6 +229,7 @@ impl ArithmeticSideEffects { if Self::has_specific_allowed_type_and_operation(cx, lhs_ty, op, rhs_ty) { return; } + let has_valid_op = if Self::is_integral(lhs_ty) && Self::is_integral(rhs_ty) { if let hir::BinOpKind::Shl | hir::BinOpKind::Shr = op { // At least for integers, shifts are already handled by the CTFE diff --git a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs index 9b2cfd91b85..22ec4fe60fb 100644 --- a/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs +++ b/src/tools/clippy/clippy_lints/src/operators/cmp_owned.rs @@ -41,7 +41,7 @@ fn check_op(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool) ExprKind::MethodCall(_, arg, [], _) if typeck .type_dependent_def_id(expr.hir_id) - .and_then(|id| cx.tcx.trait_of_item(id)) + .and_then(|id| cx.tcx.trait_of_assoc(id)) .is_some_and(|id| matches!(cx.tcx.get_diagnostic_name(id), Some(sym::ToString | sym::ToOwned))) => { (arg, arg.span) diff --git a/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs b/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs index 821178a4315..55bb78cfce5 100644 --- a/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs +++ b/src/tools/clippy/clippy_lints/src/operators/manual_is_multiple_of.rs @@ -2,11 +2,12 @@ use clippy_utils::consts::is_zero_integer_const; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; +use clippy_utils::ty::expr_type_is_certain; use rustc_ast::BinOpKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use rustc_middle::ty; +use rustc_middle::ty::{self, 
Ty}; use super::MANUAL_IS_MULTIPLE_OF; @@ -22,9 +23,21 @@ pub(super) fn check<'tcx>( && let Some(operand) = uint_compare_to_zero(cx, op, lhs, rhs) && let ExprKind::Binary(operand_op, operand_left, operand_right) = operand.kind && operand_op.node == BinOpKind::Rem + && matches!( + cx.typeck_results().expr_ty_adjusted(operand_left).peel_refs().kind(), + ty::Uint(_) + ) + && matches!( + cx.typeck_results().expr_ty_adjusted(operand_right).peel_refs().kind(), + ty::Uint(_) + ) + && expr_type_is_certain(cx, operand_left) { let mut app = Applicability::MachineApplicable; - let divisor = Sugg::hir_with_applicability(cx, operand_right, "_", &mut app); + let divisor = deref_sugg( + Sugg::hir_with_applicability(cx, operand_right, "_", &mut app), + cx.typeck_results().expr_ty_adjusted(operand_right), + ); span_lint_and_sugg( cx, MANUAL_IS_MULTIPLE_OF, @@ -64,3 +77,11 @@ fn uint_compare_to_zero<'tcx>( matches!(cx.typeck_results().expr_ty_adjusted(operand).kind(), ty::Uint(_)).then_some(operand) } + +fn deref_sugg<'a>(sugg: Sugg<'a>, ty: Ty<'_>) -> Sugg<'a> { + if let ty::Ref(_, target_ty, _) = ty.kind() { + deref_sugg(sugg.deref(), *target_ty) + } else { + sugg + } +} diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs index 21e1ab0f4f2..0a1f2625f4c 100644 --- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs +++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs @@ -179,7 +179,7 @@ fn in_impl<'tcx>( bin_op: DefId, ) -> Option<(&'tcx rustc_hir::Ty<'tcx>, &'tcx rustc_hir::Ty<'tcx>)> { if let Some(block) = get_enclosing_block(cx, e.hir_id) - && let Some(impl_def_id) = cx.tcx.impl_of_method(block.hir_id.owner.to_def_id()) + && let Some(impl_def_id) = cx.tcx.impl_of_assoc(block.hir_id.owner.to_def_id()) && let item = cx.tcx.hir_expect_item(impl_def_id.expect_local()) && let ItemKind::Impl(item) = &item.kind && let Some(of_trait) = &item.of_trait diff --git a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs index 19d9acfc930..4197680dd04 100644 --- a/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs +++ b/src/tools/clippy/clippy_lints/src/pattern_type_mismatch.rs @@ -96,6 +96,12 @@ impl<'tcx> LateLintPass<'tcx> for PatternTypeMismatch { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if let ExprKind::Match(_, arms, _) = expr.kind { + // if the match is generated by an external macro, the writer does not control + // how the scrutinee (`match &scrutiny { ... }`) is matched + if expr.span.in_external_macro(cx.sess().source_map()) { + return; + } + for arm in arms { let pat = &arm.pat; if apply_lint(cx, pat, DerefPossible::Possible) { diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs index 94cdcf00054..b3058c51afd 100644 --- a/src/tools/clippy/clippy_lints/src/ptr.rs +++ b/src/tools/clippy/clippy_lints/src/ptr.rs @@ -584,7 +584,13 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &Body<'tcx>, args: &[ Some((Node::Stmt(_), _)) => (), Some((Node::LetStmt(l), _)) => { // Only trace simple bindings. e.g `let x = y;` - if let PatKind::Binding(BindingMode::NONE, id, _, None) = l.pat.kind { + if let PatKind::Binding(BindingMode::NONE, id, ident, None) = l.pat.kind + // Let's not lint for the current parameter. 
The user may still intend to mutate + // (or, if not mutate, then perhaps call a method that's not otherwise available + // for) the referenced value behind the parameter through this local let binding + // with the underscore being only temporary. + && !ident.name.as_str().starts_with('_') + { self.bindings.insert(id, args_idx); } else { set_skip_flag(); @@ -650,7 +656,14 @@ fn check_ptr_arg_usage<'tcx>(cx: &LateContext<'tcx>, body: &Body<'tcx>, args: &[ .filter_map(|(i, arg)| { let param = &body.params[arg.idx]; match param.pat.kind { - PatKind::Binding(BindingMode::NONE, id, _, None) if !is_lint_allowed(cx, PTR_ARG, param.hir_id) => { + PatKind::Binding(BindingMode::NONE, id, ident, None) + if !is_lint_allowed(cx, PTR_ARG, param.hir_id) + // Let's not lint for the current parameter. The user may still intend to mutate + // (or, if not mutate, then perhaps call a method that's not otherwise available + // for) the referenced value behind the parameter with the underscore being only + // temporary. + && !ident.name.as_str().starts_with('_') => + { Some((id, i)) }, _ => { diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs index d292ed86ea4..03d00ba849f 100644 --- a/src/tools/clippy/clippy_lints/src/ranges.rs +++ b/src/tools/clippy/clippy_lints/src/ranges.rs @@ -4,15 +4,20 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_the use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::{SpanRangeExt, snippet, snippet_with_applicability}; use clippy_utils::sugg::Sugg; -use clippy_utils::{get_parent_expr, higher, is_in_const_context, is_integer_const, path_to_local}; +use clippy_utils::ty::implements_trait; +use clippy_utils::{ + expr_use_ctxt, fn_def_id, get_parent_expr, higher, is_in_const_context, is_integer_const, is_path_lang_item, + path_to_local, +}; +use rustc_ast::Mutability; use rustc_ast::ast::RangeLimits; use rustc_errors::Applicability; -use rustc_hir::{BinOpKind, Expr, ExprKind, HirId}; -use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty; +use rustc_hir::{BinOpKind, Expr, ExprKind, HirId, LangItem, Node}; +use rustc_lint::{LateContext, LateLintPass, Lint}; +use rustc_middle::ty::{self, ClauseKind, GenericArgKind, PredicatePolarity, Ty}; use rustc_session::impl_lint_pass; -use rustc_span::Span; use rustc_span::source_map::Spanned; +use rustc_span::{Span, sym}; use std::cmp::Ordering; declare_clippy_lint! { @@ -24,6 +29,12 @@ declare_clippy_lint! { /// The code is more readable with an inclusive range /// like `x..=y`. /// + /// ### Limitations + /// The lint is conservative and will trigger only when switching + /// from an exclusive to an inclusive range is provably safe from + /// a typing point of view. This corresponds to situations where + /// the range is used as an iterator, or for indexing. + /// /// ### Known problems /// Will add unnecessary pair of parentheses when the /// expression is not wrapped in a pair but starts with an opening parenthesis @@ -34,11 +45,6 @@ declare_clippy_lint! { /// exclusive ranges, because they essentially add an extra branch that /// LLVM may fail to hoist out of the loop. /// - /// This will cause a warning that cannot be fixed if the consumer of the - /// range only accepts a specific range type, instead of the generic - /// `RangeBounds` trait - /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)). - /// /// ### Example /// ```no_run /// # let x = 0; @@ -71,11 +77,11 @@ declare_clippy_lint! 
{ /// The code is more readable with an exclusive range /// like `x..y`. /// - /// ### Known problems - /// This will cause a warning that cannot be fixed if - /// the consumer of the range only accepts a specific range type, instead of - /// the generic `RangeBounds` trait - /// ([#3307](https://github.com/rust-lang/rust-clippy/issues/3307)). + /// ### Limitations + /// The lint is conservative and will trigger only when switching + /// from an inclusive to an exclusive range is provably safe from + /// a typing point of view. This corresponds to situations where + /// the range is used as an iterator, or for indexing. /// /// ### Example /// ```no_run @@ -344,70 +350,188 @@ fn check_range_bounds<'a, 'tcx>(cx: &'a LateContext<'tcx>, ex: &'a Expr<'_>) -> None } -// exclusive range plus one: `x..(y+1)` -fn check_exclusive_range_plus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { - if expr.span.can_be_used_for_suggestions() - && let Some(higher::Range { - start, - end: Some(end), - limits: RangeLimits::HalfOpen, - }) = higher::Range::hir(expr) - && let Some(y) = y_plus_one(cx, end) +/// Check whether `expr` could switch range types without breaking the typing requirements. This is +/// generally the case when `expr` is used as an iterator for example, or as a slice or `&str` +/// index. +/// +/// FIXME: Note that the current implementation may still return false positives. A proper fix would +/// check that the obligations are still satisfied after switching the range type. +fn can_switch_ranges<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'_>, + original: RangeLimits, + inner_ty: Ty<'tcx>, +) -> bool { + let use_ctxt = expr_use_ctxt(cx, expr); + let (Node::Expr(parent_expr), false) = (use_ctxt.node, use_ctxt.is_ty_unified) else { + return false; + }; + + // Check if `expr` is the argument of a compiler-generated `IntoIter::into_iter(expr)` + if let ExprKind::Call(func, [arg]) = parent_expr.kind + && arg.hir_id == use_ctxt.child_id + && is_path_lang_item(cx, func, LangItem::IntoIterIntoIter) { - let span = expr.span; - span_lint_and_then( - cx, - RANGE_PLUS_ONE, - span, - "an inclusive range would be more readable", - |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_paren(); - match span.with_source_text(cx, |src| src.starts_with('(') && src.ends_with(')')) { - Some(true) => { - diag.span_suggestion(span, "use", format!("({start}..={end})"), Applicability::MaybeIncorrect); - }, - Some(false) => { - diag.span_suggestion( - span, - "use", - format!("{start}..={end}"), - Applicability::MachineApplicable, // snippet - ); - }, - None => {}, - } - }, - ); + return true; + } + + // Check if `expr` is used as the receiver of a method of the `Iterator`, `IntoIterator`, + // or `RangeBounds` traits. + if let ExprKind::MethodCall(_, receiver, _, _) = parent_expr.kind + && receiver.hir_id == use_ctxt.child_id + && let Some(method_did) = cx.typeck_results().type_dependent_def_id(parent_expr.hir_id) + && let Some(trait_did) = cx.tcx.trait_of_assoc(method_did) + && matches!( + cx.tcx.get_diagnostic_name(trait_did), + Some(sym::Iterator | sym::IntoIterator | sym::RangeBounds) + ) + { + return true; + } + + // Check if `expr` is an argument of a call which requires an `Iterator`, `IntoIterator`, + // or `RangeBounds` trait. 
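In user terms, the iterator and call-argument checks of `can_switch_ranges` above allow the switch only where the other range type still type-checks. A sketch (the helper `take_exact` is made up for illustration):

    fn take_exact(r: std::ops::Range<usize>) -> usize {
        r.len()
    }

    fn main() {
        let n = 4;
        // used as an iterator: `0..(n + 1)` can safely become `0..=n`
        let sum: usize = (0..(n + 1)).sum();
        // bound to a parameter that wants exactly `Range<usize>` (the old issue #3307 case):
        // `0..=n` would not compile, so no suggestion should be emitted
        let len = take_exact(0..(n + 1));
        println!("{sum} {len}");
    }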
+ if let ExprKind::Call(_, args) | ExprKind::MethodCall(_, _, args, _) = parent_expr.kind + && let Some(id) = fn_def_id(cx, parent_expr) + && let Some(arg_idx) = args.iter().position(|e| e.hir_id == use_ctxt.child_id) + { + let input_idx = if matches!(parent_expr.kind, ExprKind::MethodCall(..)) { + arg_idx + 1 + } else { + arg_idx + }; + let inputs = cx + .tcx + .liberate_late_bound_regions(id, cx.tcx.fn_sig(id).instantiate_identity()) + .inputs(); + let expr_ty = inputs[input_idx]; + // Check that the `expr` type is present only once, otherwise modifying just one of them might be + // risky if they are referenced using the same generic type for example. + if inputs.iter().enumerate().all(|(n, ty)| + n == input_idx + || !ty.walk().any(|arg| matches!(arg.kind(), + GenericArgKind::Type(ty) if ty == expr_ty))) + // Look for a clause requiring `Iterator`, `IntoIterator`, or `RangeBounds`, and resolving to `expr_type`. + && cx + .tcx + .param_env(id) + .caller_bounds() + .into_iter() + .any(|p| { + if let ClauseKind::Trait(t) = p.kind().skip_binder() + && t.polarity == PredicatePolarity::Positive + && matches!( + cx.tcx.get_diagnostic_name(t.trait_ref.def_id), + Some(sym::Iterator | sym::IntoIterator | sym::RangeBounds) + ) + { + t.self_ty() == expr_ty + } else { + false + } + }) + { + return true; + } + } + + // Check if `expr` is used for indexing, and if the switched range type could be used + // as well. + if let ExprKind::Index(outer_expr, index, _) = parent_expr.kind + && index.hir_id == expr.hir_id + // Build the switched range type (for example `RangeInclusive<usize>`). + && let Some(switched_range_def_id) = match original { + RangeLimits::HalfOpen => cx.tcx.lang_items().range_inclusive_struct(), + RangeLimits::Closed => cx.tcx.lang_items().range_struct(), + } + && let switched_range_ty = cx + .tcx + .type_of(switched_range_def_id) + .instantiate(cx.tcx, &[inner_ty.into()]) + // Check that the switched range type can be used for indexing the original expression + // through the `Index` or `IndexMut` trait. + && let ty::Ref(_, outer_ty, mutability) = cx.typeck_results().expr_ty_adjusted(outer_expr).kind() + && let Some(index_def_id) = match mutability { + Mutability::Not => cx.tcx.lang_items().index_trait(), + Mutability::Mut => cx.tcx.lang_items().index_mut_trait(), + } + && implements_trait(cx, *outer_ty, index_def_id, &[switched_range_ty.into()]) + // We could also check that the associated item of the `index_def_id` trait with the switched range type + // return the same type, but it is reasonable to expect so. We can't check that the result is identical + // in both `Index<Range<…>>` and `Index<RangeInclusive<…>>` anyway. + { + return true; } + + false +} + +// exclusive range plus one: `x..(y+1)` +fn check_exclusive_range_plus_one<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + check_range_switch( + cx, + expr, + RangeLimits::HalfOpen, + y_plus_one, + RANGE_PLUS_ONE, + "an inclusive range would be more readable", + "..=", + ); } // inclusive range minus one: `x..=(y-1)` -fn check_inclusive_range_minus_one(cx: &LateContext<'_>, expr: &Expr<'_>) { +fn check_inclusive_range_minus_one<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { + check_range_switch( + cx, + expr, + RangeLimits::Closed, + y_minus_one, + RANGE_MINUS_ONE, + "an exclusive range would be more readable", + "..", + ); +} + +/// Check for a `kind` of range in `expr`, check for `predicate` on the end, +/// and emit the `lint` with `msg` and the `operator`. 
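The indexing branch above additionally verifies that the indexed type implements `Index`/`IndexMut` for the switched range type. In user code this is the situation being allowed (sketch):

    fn main() {
        let v = [10, 20, 30, 40];
        let i = 2;
        // slices accept both `Range` and `RangeInclusive` indices,
        // so `0..(i + 1)` and `0..=i` are interchangeable here
        let a = &v[0..(i + 1)];
        let b = &v[0..=i];
        assert_eq!(a, b);
        println!("{a:?}");
    }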
+fn check_range_switch<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'_>, + kind: RangeLimits, + predicate: impl for<'hir> FnOnce(&LateContext<'_>, &Expr<'hir>) -> Option<&'hir Expr<'hir>>, + lint: &'static Lint, + msg: &'static str, + operator: &str, +) { if expr.span.can_be_used_for_suggestions() && let Some(higher::Range { start, end: Some(end), - limits: RangeLimits::Closed, + limits, }) = higher::Range::hir(expr) - && let Some(y) = y_minus_one(cx, end) + && limits == kind + && let Some(y) = predicate(cx, end) + && can_switch_ranges(cx, expr, kind, cx.typeck_results().expr_ty(y)) { - span_lint_and_then( - cx, - RANGE_MINUS_ONE, - expr.span, - "an exclusive range would be more readable", - |diag| { - let start = start.map_or(String::new(), |x| Sugg::hir(cx, x, "x").maybe_paren().to_string()); - let end = Sugg::hir(cx, y, "y").maybe_paren(); - diag.span_suggestion( - expr.span, - "use", - format!("{start}..{end}"), - Applicability::MachineApplicable, // snippet - ); - }, - ); + let span = expr.span; + span_lint_and_then(cx, lint, span, msg, |diag| { + let mut app = Applicability::MachineApplicable; + let start = start.map_or(String::new(), |x| { + Sugg::hir_with_applicability(cx, x, "<x>", &mut app) + .maybe_paren() + .to_string() + }); + let end = Sugg::hir_with_applicability(cx, y, "<y>", &mut app).maybe_paren(); + match span.with_source_text(cx, |src| src.starts_with('(') && src.ends_with(')')) { + Some(true) => { + diag.span_suggestion(span, "use", format!("({start}{operator}{end})"), app); + }, + Some(false) => { + diag.span_suggestion(span, "use", format!("{start}{operator}{end}"), app); + }, + None => {}, + } + }); } } @@ -494,7 +618,7 @@ fn check_reversed_empty_range(cx: &LateContext<'_>, expr: &Expr<'_>) { } } -fn y_plus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> { +fn y_plus_one<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { match expr.kind { ExprKind::Binary( Spanned { @@ -515,7 +639,7 @@ fn y_plus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<' } } -fn y_minus_one<'t>(cx: &LateContext<'_>, expr: &'t Expr<'_>) -> Option<&'t Expr<'t>> { +fn y_minus_one<'tcx>(cx: &LateContext<'_>, expr: &Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> { match expr.kind { ExprKind::Binary( Spanned { diff --git a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs index 25929b853af..3497216d1c5 100644 --- a/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs +++ b/src/tools/clippy/clippy_lints/src/return_self_not_must_use.rs @@ -113,7 +113,7 @@ impl<'tcx> LateLintPass<'tcx> for ReturnSelfNotMustUse { ) { if matches!(kind, FnKind::Method(_, _)) // We are only interested in methods, not in functions or associated functions. - && let Some(impl_def) = cx.tcx.impl_of_method(fn_def.to_def_id()) + && let Some(impl_def) = cx.tcx.impl_of_assoc(fn_def.to_def_id()) // We don't want this method to be te implementation of a trait because the // `#[must_use]` should be put on the trait definition directly. 
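For the `return_self_not_must_use` hunk above, the typical inherent-method case the lint targets looks like this sketch (`Builder` is an illustrative name):

    struct Builder {
        n: u32,
    }

    impl Builder {
        // an inherent method returning `Self`: the lint suggests `#[must_use]` here,
        // whereas for a trait method the attribute belongs on the trait definition
        fn with_n(mut self, n: u32) -> Self {
            self.n = n;
            self
        }
    }

    fn main() {
        let b = Builder { n: 0 }.with_n(3);
        println!("{}", b.n);
    }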
&& cx.tcx.trait_id_of_impl(impl_def).is_none() diff --git a/src/tools/clippy/clippy_lints/src/same_name_method.rs b/src/tools/clippy/clippy_lints/src/same_name_method.rs index 85fde780e68..67eb71f7d07 100644 --- a/src/tools/clippy/clippy_lints/src/same_name_method.rs +++ b/src/tools/clippy/clippy_lints/src/same_name_method.rs @@ -3,7 +3,7 @@ use rustc_data_structures::fx::FxHashMap; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{HirId, Impl, ItemKind, Node, Path, QPath, TraitRef, TyKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{AssocKind, AssocItem}; +use rustc_middle::ty::{AssocItem, AssocKind}; use rustc_session::declare_lint_pass; use rustc_span::Span; use rustc_span::symbol::Symbol; @@ -53,11 +53,7 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { for id in cx.tcx.hir_free_items() { if matches!(cx.tcx.def_kind(id.owner_id), DefKind::Impl { .. }) && let item = cx.tcx.hir_item(id) - && let ItemKind::Impl(Impl { - of_trait, - self_ty, - .. - }) = &item.kind + && let ItemKind::Impl(Impl { of_trait, self_ty, .. }) = &item.kind && let TyKind::Path(QPath::Resolved(_, Path { res, .. })) = self_ty.kind { if !map.contains_key(res) { @@ -127,7 +123,9 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { }, None => { for assoc_item in cx.tcx.associated_items(id.owner_id).in_definition_order() { - let AssocKind::Fn { name, .. } = assoc_item.kind else { continue }; + let AssocKind::Fn { name, .. } = assoc_item.kind else { + continue; + }; let impl_span = cx.tcx.def_span(assoc_item.def_id); let hir_id = cx.tcx.local_def_id_to_hir_id(assoc_item.def_id.expect_local()); if let Some(trait_spans) = existing_name.trait_methods.get(&name) { @@ -140,10 +138,7 @@ impl<'tcx> LateLintPass<'tcx> for SameNameMethod { |diag| { // TODO should we `span_note` on every trait? // iterate on trait_spans? 
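The `same_name_method` code above reports the kind of shadowing sketched below, where method resolution silently prefers the inherent method (illustrative names):

    trait Greet {
        fn name(&self) -> &'static str {
            "trait"
        }
    }

    struct S;

    impl Greet for S {}

    impl S {
        // same name as `Greet::name`: calls on `S` resolve to this inherent method
        fn name(&self) -> &'static str {
            "inherent"
        }
    }

    fn main() {
        println!("{}", S.name()); // prints "inherent"
    }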
- diag.span_note( - trait_spans[0], - format!("existing `{name}` defined here"), - ); + diag.span_note(trait_spans[0], format!("existing `{name}` defined here")); }, ); } diff --git a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs index d321c48f6af..dcddff557d1 100644 --- a/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs +++ b/src/tools/clippy/clippy_lints/src/unconditional_recursion.rs @@ -206,7 +206,7 @@ fn check_partial_eq(cx: &LateContext<'_>, method_span: Span, method_def_id: Loca let arg_ty = cx.typeck_results().expr_ty_adjusted(arg); if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && trait_id == trait_def_id && matches_ty(receiver_ty, arg_ty, self_arg, other_arg) { @@ -250,7 +250,7 @@ fn check_to_string(cx: &LateContext<'_>, method_span: Span, method_def_id: Local let is_bad = match expr.kind { ExprKind::MethodCall(segment, _receiver, &[_arg], _) if segment.ident.name == name.name => { if let Some(fn_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) - && let Some(trait_id) = cx.tcx.trait_of_item(fn_id) + && let Some(trait_id) = cx.tcx.trait_of_assoc(fn_id) && trait_id == trait_def_id { true @@ -318,7 +318,7 @@ where && let ExprKind::Path(qpath) = f.kind && is_default_method_on_current_ty(self.cx.tcx, qpath, self.implemented_ty_id) && let Some(method_def_id) = path_def_id(self.cx, f) - && let Some(trait_def_id) = self.cx.tcx.trait_of_item(method_def_id) + && let Some(trait_def_id) = self.cx.tcx.trait_of_assoc(method_def_id) && self.cx.tcx.is_diagnostic_item(sym::Default, trait_def_id) { span_error(self.cx, self.method_span, expr); @@ -426,7 +426,7 @@ fn check_from(cx: &LateContext<'_>, method_span: Span, method_def_id: LocalDefId if let Some((fn_def_id, node_args)) = fn_def_id_with_node_args(cx, expr) && let [s1, s2] = **node_args && let (Some(s1), Some(s2)) = (s1.as_type(), s2.as_type()) - && let Some(trait_def_id) = cx.tcx.trait_of_item(fn_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(fn_def_id) && cx.tcx.is_diagnostic_item(sym::Into, trait_def_id) && get_impl_trait_def_id(cx, method_def_id) == cx.tcx.get_diagnostic_item(sym::From) && s1 == sig.inputs()[0] diff --git a/src/tools/clippy/clippy_lints/src/unused_async.rs b/src/tools/clippy/clippy_lints/src/unused_async.rs index e67afc7f5a8..5a3e4b7adf6 100644 --- a/src/tools/clippy/clippy_lints/src/unused_async.rs +++ b/src/tools/clippy/clippy_lints/src/unused_async.rs @@ -1,8 +1,12 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; use clippy_utils::is_def_id_trait_method; +use clippy_utils::usage::is_todo_unimplemented_stub; use rustc_hir::def::DefKind; use rustc_hir::intravisit::{FnKind, Visitor, walk_expr, walk_fn}; -use rustc_hir::{Body, Defaultness, Expr, ExprKind, FnDecl, HirId, Node, TraitItem, YieldSource}; +use rustc_hir::{ + Body, Closure, ClosureKind, CoroutineDesugaring, CoroutineKind, Defaultness, Expr, ExprKind, FnDecl, HirId, Node, + TraitItem, YieldSource, +}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::impl_lint_pass; @@ -81,11 +85,8 @@ impl<'tcx> Visitor<'tcx> for AsyncFnVisitor<'_, 'tcx> { let is_async_block = matches!( ex.kind, - ExprKind::Closure(rustc_hir::Closure { - kind: rustc_hir::ClosureKind::Coroutine(rustc_hir::CoroutineKind::Desugared( - rustc_hir::CoroutineDesugaring::Async, - _ - )), + 
ExprKind::Closure(Closure { + kind: ClosureKind::Coroutine(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)), .. }) ); @@ -120,6 +121,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAsync { && fn_kind.asyncness().is_async() && !is_def_id_trait_method(cx, def_id) && !is_default_trait_impl(cx, def_id) + && !async_fn_contains_todo_unimplemented_macro(cx, body) { let mut visitor = AsyncFnVisitor { cx, @@ -203,3 +205,18 @@ fn is_default_trait_impl(cx: &LateContext<'_>, def_id: LocalDefId) -> bool { }) ) } + +fn async_fn_contains_todo_unimplemented_macro(cx: &LateContext<'_>, body: &Body<'_>) -> bool { + if let ExprKind::Closure(closure) = body.value.kind + && let ClosureKind::Coroutine(CoroutineKind::Desugared(CoroutineDesugaring::Async, _)) = closure.kind + && let body = cx.tcx.hir_body(closure.body) + && let ExprKind::Block(block, _) = body.value.kind + && block.stmts.is_empty() + && let Some(expr) = block.expr + && let ExprKind::DropTemps(inner) = expr.kind + { + return is_todo_unimplemented_stub(cx, inner); + } + + false +} diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs index 12cc1093899..f3cd3f1bb28 100644 --- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs +++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs @@ -84,7 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount { /// get desugared to match. fn check_block(&mut self, cx: &LateContext<'tcx>, block: &'tcx hir::Block<'tcx>) { let fn_def_id = block.hir_id.owner.to_def_id(); - if let Some(impl_id) = cx.tcx.impl_of_method(fn_def_id) + if let Some(impl_id) = cx.tcx.impl_of_assoc(fn_def_id) && let Some(trait_id) = cx.tcx.trait_id_of_impl(impl_id) { // We don't want to lint inside io::Read or io::Write implementations, as the author has more @@ -300,7 +300,7 @@ fn check_io_mode(cx: &LateContext<'_>, call: &hir::Expr<'_>) -> Option<IoOp> { }; if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(call.hir_id) - && let Some(trait_def_id) = cx.tcx.trait_of_item(method_def_id) + && let Some(trait_def_id) = cx.tcx.trait_of_assoc(method_def_id) { if let Some(diag_name) = cx.tcx.get_diagnostic_name(trait_def_id) { match diag_name { diff --git a/src/tools/clippy/clippy_lints/src/unused_self.rs b/src/tools/clippy/clippy_lints/src/unused_self.rs index 12da891a71b..dff39974a37 100644 --- a/src/tools/clippy/clippy_lints/src/unused_self.rs +++ b/src/tools/clippy/clippy_lints/src/unused_self.rs @@ -1,12 +1,10 @@ use clippy_config::Conf; use clippy_utils::diagnostics::span_lint_and_help; -use clippy_utils::macros::root_macro_call_first_node; -use clippy_utils::sym; +use clippy_utils::usage::is_todo_unimplemented_stub; use clippy_utils::visitors::is_local_used; -use rustc_hir::{Body, Impl, ImplItem, ImplItemKind, ItemKind}; +use rustc_hir::{Impl, ImplItem, ImplItemKind, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::impl_lint_pass; -use std::ops::ControlFlow; declare_clippy_lint! 
{ /// ### What it does @@ -60,18 +58,6 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { let parent = cx.tcx.hir_get_parent_item(impl_item.hir_id()).def_id; let parent_item = cx.tcx.hir_expect_item(parent); let assoc_item = cx.tcx.associated_item(impl_item.owner_id); - let contains_todo = |cx, body: &'_ Body<'_>| -> bool { - clippy_utils::visitors::for_each_expr_without_closures(body.value, |e| { - if let Some(macro_call) = root_macro_call_first_node(cx, e) - && cx.tcx.is_diagnostic_item(sym::todo_macro, macro_call.def_id) - { - ControlFlow::Break(()) - } else { - ControlFlow::Continue(()) - } - }) - .is_some() - }; if let ItemKind::Impl(Impl { of_trait: None, .. }) = parent_item.kind && assoc_item.is_method() && let ImplItemKind::Fn(.., body_id) = &impl_item.kind @@ -79,7 +65,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedSelf { && let body = cx.tcx.hir_body(*body_id) && let [self_param, ..] = body.params && !is_local_used(cx, body, self_param.pat.hir_id) - && !contains_todo(cx, body) + && !is_todo_unimplemented_stub(cx, body.value) { span_lint_and_help( cx, diff --git a/src/tools/clippy/clippy_lints/src/unused_trait_names.rs b/src/tools/clippy/clippy_lints/src/unused_trait_names.rs index b7a1d5b2123..12f2804dbaa 100644 --- a/src/tools/clippy/clippy_lints/src/unused_trait_names.rs +++ b/src/tools/clippy/clippy_lints/src/unused_trait_names.rs @@ -6,7 +6,7 @@ use clippy_utils::source::snippet_opt; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{Item, ItemKind, UseKind}; -use rustc_lint::{LateContext, LateLintPass, LintContext as _}; +use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Visibility; use rustc_session::impl_lint_pass; use rustc_span::symbol::kw; @@ -59,7 +59,7 @@ impl_lint_pass!(UnusedTraitNames => [UNUSED_TRAIT_NAMES]); impl<'tcx> LateLintPass<'tcx> for UnusedTraitNames { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { - if !item.span.in_external_macro(cx.sess().source_map()) + if !item.span.from_expansion() && let ItemKind::Use(path, UseKind::Single(ident)) = item.kind // Ignore imports that already use Underscore && ident.name != kw::Underscore diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs index 380ddea4e1e..e5b20c0e0a1 100644 --- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs +++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs @@ -176,6 +176,33 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { } }, + ExprKind::MethodCall(path, recv, [arg], _) => { + if matches!( + path.ident.name, + sym::map | sym::map_err | sym::map_break | sym::map_continue + ) && has_eligible_receiver(cx, recv, e) + && (is_trait_item(cx, arg, sym::Into) || is_trait_item(cx, arg, sym::From)) + && let ty::FnDef(_, args) = cx.typeck_results().expr_ty(arg).kind() + && let &[from_ty, to_ty] = args.into_type_list(cx.tcx).as_slice() + && same_type_and_consts(from_ty, to_ty) + { + span_lint_and_then( + cx, + USELESS_CONVERSION, + e.span.with_lo(recv.span.hi()), + format!("useless conversion to the same type: `{from_ty}`"), + |diag| { + diag.suggest_remove_item( + cx, + e.span.with_lo(recv.span.hi()), + "consider removing", + Applicability::MachineApplicable, + ); + }, + ); + } + }, + ExprKind::MethodCall(name, recv, [], _) => { if is_trait_method(cx, e, sym::Into) && name.ident.name == sym::into { let a = cx.typeck_results().expr_ty(e); @@ -412,32 +439,6 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { } } -/// Check if 
`arg` is a `Into::into` or `From::from` applied to `receiver` to give `expr`, through a -/// higher-order mapping function. -pub fn check_function_application(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) { - if has_eligible_receiver(cx, recv, expr) - && (is_trait_item(cx, arg, sym::Into) || is_trait_item(cx, arg, sym::From)) - && let ty::FnDef(_, args) = cx.typeck_results().expr_ty(arg).kind() - && let &[from_ty, to_ty] = args.into_type_list(cx.tcx).as_slice() - && same_type_and_consts(from_ty, to_ty) - { - span_lint_and_then( - cx, - USELESS_CONVERSION, - expr.span.with_lo(recv.span.hi()), - format!("useless conversion to the same type: `{from_ty}`"), - |diag| { - diag.suggest_remove_item( - cx, - expr.span.with_lo(recv.span.hi()), - "consider removing", - Applicability::MachineApplicable, - ); - }, - ); - } -} - fn has_eligible_receiver(cx: &LateContext<'_>, recv: &Expr<'_>, expr: &Expr<'_>) -> bool { if is_inherent_method_call(cx, expr) { matches!( diff --git a/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs b/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs index 88b099c477f..41fafc08c25 100644 --- a/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs +++ b/src/tools/clippy/clippy_lints_internal/src/derive_deserialize_allowing_unknown.rs @@ -2,6 +2,7 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::paths; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast::{AttrStyle, DelimArgs}; +use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_hir::def::Res; use rustc_hir::def_id::LocalDefId; use rustc_hir::{ @@ -11,7 +12,6 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_lint_defs::declare_tool_lint; use rustc_middle::ty::TyCtxt; use rustc_session::declare_lint_pass; -use rustc_span::sym; declare_tool_lint! { /// ### What it does @@ -88,7 +88,10 @@ impl<'tcx> LateLintPass<'tcx> for DeriveDeserializeAllowingUnknown { } // Is it derived? - if !find_attr!(cx.tcx.get_all_attrs(item.owner_id), AttributeKind::AutomaticallyDerived(..)) { + if !find_attr!( + cx.tcx.get_all_attrs(item.owner_id), + AttributeKind::AutomaticallyDerived(..) + ) { return; } diff --git a/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs index 45a866030b2..fda65bc84ed 100644 --- a/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs +++ b/src/tools/clippy/clippy_lints_internal/src/lint_without_lint_pass.rs @@ -1,7 +1,7 @@ use crate::internal_paths; use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; -use clippy_utils::is_lint_allowed; use clippy_utils::macros::root_macro_call_first_node; +use clippy_utils::{is_lint_allowed, sym}; use rustc_ast::ast::LitKind; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_hir as hir; @@ -12,9 +12,9 @@ use rustc_hir::{ExprKind, HirId, Item, MutTy, Mutability, Path, TyKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::Span; use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; -use rustc_span::{Span, sym}; declare_tool_lint! 
{ /// ### What it does @@ -160,9 +160,8 @@ impl<'tcx> LateLintPass<'tcx> for LintWithoutLintPass { let body = cx.tcx.hir_body_owned_by( impl_item_refs .iter() - .find(|iiref| iiref.ident.as_str() == "lint_vec") + .find(|&&iiref| cx.tcx.item_name(iiref.owner_id) == sym::lint_vec) .expect("LintPass needs to implement lint_vec") - .id .owner_id .def_id, ); diff --git a/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs index 70b3c03d2bb..66aeb910891 100644 --- a/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs +++ b/src/tools/clippy/clippy_lints_internal/src/msrv_attr_impl.rs @@ -1,6 +1,7 @@ use crate::internal_paths; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; +use clippy_utils::sym; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass, LintContext}; @@ -40,7 +41,9 @@ impl LateLintPass<'_> for MsrvAttrImpl { .filter(|t| matches!(t.kind(), GenericArgKind::Type(_))) .any(|t| internal_paths::MSRV_STACK.matches_ty(cx, t.expect_ty())) }) - && !items.iter().any(|item| item.ident.name.as_str() == "check_attributes") + && !items + .iter() + .any(|&item| cx.tcx.item_name(item.owner_id) == sym::check_attributes) { let span = cx.sess().source_map().span_through_char(item.span, '{'); span_lint_and_sugg( diff --git a/src/tools/clippy/clippy_utils/README.md b/src/tools/clippy/clippy_utils/README.md index 645b644d9f4..19e71f6af1d 100644 --- a/src/tools/clippy/clippy_utils/README.md +++ b/src/tools/clippy/clippy_utils/README.md @@ -8,7 +8,7 @@ This crate is only guaranteed to build with this `nightly` toolchain: <!-- begin autogenerated nightly --> ``` -nightly-2025-07-10 +nightly-2025-07-25 ``` <!-- end autogenerated nightly --> diff --git a/src/tools/clippy/clippy_utils/src/diagnostics.rs b/src/tools/clippy/clippy_utils/src/diagnostics.rs index 8453165818b..625e1eead21 100644 --- a/src/tools/clippy/clippy_utils/src/diagnostics.rs +++ b/src/tools/clippy/clippy_utils/src/diagnostics.rs @@ -22,10 +22,13 @@ fn docs_link(diag: &mut Diag<'_, ()>, lint: &'static Lint) { { diag.help(format!( "for further information visit https://rust-lang.github.io/rust-clippy/{}/index.html#{lint}", - &option_env!("RUST_RELEASE_NUM").map_or("master".to_string(), |n| { - // extract just major + minor version and ignore patch versions - format!("rust-{}", n.rsplit_once('.').unwrap().1) - }) + &option_env!("RUST_RELEASE_NUM").map_or_else( + || "master".to_string(), + |n| { + // extract just major + minor version and ignore patch versions + format!("rust-{}", n.rsplit_once('.').unwrap().1) + } + ) )); } } diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs index 9d38672efad..eb3f442ac75 100644 --- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs +++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs @@ -51,7 +51,7 @@ impl ops::BitOrAssign for EagernessSuggestion { fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: bool) -> EagernessSuggestion { use EagernessSuggestion::{Eager, Lazy, NoChange}; - let ty = match cx.tcx.impl_of_method(fn_id) { + let ty = match cx.tcx.impl_of_assoc(fn_id) { Some(id) => cx.tcx.type_of(id).instantiate_identity(), None => return Lazy, }; diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index ff1ee663f9b..67e09e772a7 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ 
b/src/tools/clippy/clippy_utils/src/lib.rs @@ -89,8 +89,8 @@ use std::sync::{Mutex, MutexGuard, OnceLock}; use itertools::Itertools; use rustc_abi::Integer; -use rustc_ast::join_path_syms; use rustc_ast::ast::{self, LitKind, RangeLimits}; +use rustc_ast::join_path_syms; use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::packed::Pu128; @@ -114,7 +114,7 @@ use rustc_middle::hir::nested_filter; use rustc_middle::hir::place::PlaceBase; use rustc_middle::lint::LevelAndSource; use rustc_middle::mir::{AggregateKind, Operand, RETURN_PLACE, Rvalue, StatementKind, TerminatorKind}; -use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow}; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, AutoBorrow, PointerCoercion}; use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{ self as rustc_ty, Binder, BorrowKind, ClosureKind, EarlyBinder, GenericArgKind, GenericArgsRef, IntTy, Ty, TyCtxt, @@ -349,7 +349,7 @@ pub fn is_ty_alias(qpath: &QPath<'_>) -> bool { /// Checks if the given method call expression calls an inherent method. pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { if let Some(method_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id) { - cx.tcx.trait_of_item(method_id).is_none() + cx.tcx.trait_of_assoc(method_id).is_none() } else { false } @@ -357,7 +357,7 @@ pub fn is_inherent_method_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// Checks if a method is defined in an impl of a diagnostic item pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool { - if let Some(impl_did) = cx.tcx.impl_of_method(def_id) + if let Some(impl_did) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { return cx.tcx.is_diagnostic_item(diag_item, adt.did()); @@ -367,7 +367,7 @@ pub fn is_diag_item_method(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbo /// Checks if a method is in a diagnostic item trait pub fn is_diag_trait_item(cx: &LateContext<'_>, def_id: DefId, diag_item: Symbol) -> bool { - if let Some(trait_did) = cx.tcx.trait_of_item(def_id) { + if let Some(trait_did) = cx.tcx.trait_of_assoc(def_id) { return cx.tcx.is_diagnostic_item(diag_item, trait_did); } false @@ -620,7 +620,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath< if let QPath::TypeRelative(_, method) = path && method.ident.name == sym::new - && let Some(impl_did) = cx.tcx.impl_of_method(def_id) + && let Some(impl_did) = cx.tcx.impl_of_assoc(def_id) && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def() { return std_types_symbols.iter().any(|&symbol| { @@ -1897,6 +1897,7 @@ pub fn is_must_use_func_call(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { /// * `|x| { return x }` /// * `|x| { return x; }` /// * `|(x, y)| (x, y)` +/// * `|[x, y]| [x, y]` /// /// Consider calling [`is_expr_untyped_identity_function`] or [`is_expr_identity_function`] instead. fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { @@ -1907,9 +1908,9 @@ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { .get(pat.hir_id) .is_some_and(|mode| matches!(mode.0, ByRef::Yes(_))) { - // If a tuple `(x, y)` is of type `&(i32, i32)`, then due to match ergonomics, - // the inner patterns become references. Don't consider this the identity function - // as that changes types. 
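The doc bullet added above advertises array identity closures such as `|[x, y]| [x, y]`; the matching `PatKind::Slice` arm follows below, and the match-ergonomics caveat still applies. A user-level sketch:

    fn main() {
        let rows = [[1, 2], [3, 4]];
        // `|[x, y]| [x, y]` is the new array form treated as an identity closure
        let same: Vec<[i32; 2]> = rows.into_iter().map(|[x, y]| [x, y]).collect();
        // had the closure received `&[i32; 2]` items, match ergonomics would bind
        // `x` and `y` as references and change the type, which is still rejected
        println!("{same:?}");
    }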
+ // If the parameter is `(x, y)` of type `&(T, T)`, or `[x, y]` of type `&[T; 2]`, then + // due to match ergonomics, the inner patterns become references. Don't consider this + // the identity function as that changes types. return false; } @@ -1922,6 +1923,13 @@ fn is_body_identity_function(cx: &LateContext<'_>, func: &Body<'_>) -> bool { { pats.iter().zip(tup).all(|(pat, expr)| check_pat(cx, pat, expr)) }, + (PatKind::Slice(before, slice, after), ExprKind::Array(arr)) + if slice.is_none() && before.len() + after.len() == arr.len() => + { + (before.iter().chain(after)) + .zip(arr) + .all(|(pat, expr)| check_pat(cx, pat, expr)) + }, _ => false, } } @@ -3269,15 +3277,13 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St if go_up_by > max_super { // `super` chain would be too long, just use the absolute path instead - join_path_syms( - once(kw::Crate).chain(to.data.iter().filter_map(|el| { - if let DefPathData::TypeNs(sym) = el.data { - Some(sym) - } else { - None - } - })) - ) + join_path_syms(once(kw::Crate).chain(to.data.iter().filter_map(|el| { + if let DefPathData::TypeNs(sym) = el.data { + Some(sym) + } else { + None + } + }))) } else { join_path_syms(repeat_n(kw::Super, go_up_by).chain(path)) } @@ -3560,3 +3566,14 @@ pub fn potential_return_of_enclosing_body(cx: &LateContext<'_>, expr: &Expr<'_>) // enclosing body. false } + +/// Checks if the expression has adjustments that require coercion, for example: dereferencing with +/// overloaded deref, coercing pointers and `dyn` objects. +pub fn expr_adjustment_requires_coercion(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + cx.typeck_results().expr_adjustments(expr).iter().any(|adj| { + matches!( + adj.kind, + Adjust::Deref(Some(_)) | Adjust::Pointer(PointerCoercion::Unsize) | Adjust::NeverToAny + ) + }) +} diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs index c681806517a..ea8cfc59356 100644 --- a/src/tools/clippy/clippy_utils/src/paths.rs +++ b/src/tools/clippy/clippy_utils/src/paths.rs @@ -308,10 +308,11 @@ fn local_item_child_by_name(tcx: TyCtxt<'_>, local_id: LocalDefId, ns: PathNS, n None } }), - ItemKind::Impl(..) | ItemKind::Trait(..) - => tcx.associated_items(local_id).filter_by_name_unhygienic(name) - .find(|assoc_item| ns.matches(Some(assoc_item.namespace()))) - .map(|assoc_item| assoc_item.def_id), + ItemKind::Impl(..) | ItemKind::Trait(..) => tcx + .associated_items(local_id) + .filter_by_name_unhygienic(name) + .find(|assoc_item| ns.matches(Some(assoc_item.namespace()))) + .map(|assoc_item| assoc_item.def_id), _ => None, } } diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index b3356450d38..11c17a77b15 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -420,7 +420,7 @@ pub fn is_stable_const_fn(cx: &LateContext<'_>, def_id: DefId, msrv: Msrv) -> bo .lookup_const_stability(def_id) .or_else(|| { cx.tcx - .trait_of_item(def_id) + .trait_of_assoc(def_id) .and_then(|trait_def_id| cx.tcx.lookup_const_stability(trait_def_id)) }) .is_none_or(|const_stab| { diff --git a/src/tools/clippy/clippy_utils/src/sym.rs b/src/tools/clippy/clippy_utils/src/sym.rs index 8a8218c6976..934be97d94e 100644 --- a/src/tools/clippy/clippy_utils/src/sym.rs +++ b/src/tools/clippy/clippy_utils/src/sym.rs @@ -98,6 +98,7 @@ generate! 
{ ceil_char_boundary, chain, chars, + check_attributes, checked_abs, checked_add, checked_isqrt, @@ -196,6 +197,7 @@ generate! { kw, last, lazy_static, + lint_vec, ln, lock, lock_api, @@ -261,6 +263,7 @@ generate! { read_to_end, read_to_string, read_unaligned, + redundant_imports, redundant_pub_crate, regex, rem_euclid, diff --git a/src/tools/clippy/clippy_utils/src/ty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/mod.rs index fe208c032f4..d70232ef3aa 100644 --- a/src/tools/clippy/clippy_utils/src/ty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/mod.rs @@ -492,10 +492,7 @@ pub fn peel_mid_ty_refs_is_mutable(ty: Ty<'_>) -> (Ty<'_>, usize, Mutability) { /// Returns `true` if the given type is an `unsafe` function. pub fn type_is_unsafe_function<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { - match ty.kind() { - ty::FnDef(..) | ty::FnPtr(..) => ty.fn_sig(cx.tcx).safety().is_unsafe(), - _ => false, - } + ty.is_fn() && ty.fn_sig(cx.tcx).safety().is_unsafe() } /// Returns the base type for HIR references and pointers. diff --git a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs index 84df36c75bf..d9c7e6eac9f 100644 --- a/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/type_certainty/mod.rs @@ -12,10 +12,11 @@ //! be considered a bug. use crate::paths::{PathNS, lookup_path}; +use rustc_ast::{LitFloatType, LitIntType, LitKind}; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{InferKind, Visitor, VisitorExt, walk_qpath, walk_ty}; -use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, GenericArgs, HirId, Node, PathSegment, QPath, TyKind}; +use rustc_hir::{self as hir, AmbigArg, Expr, ExprKind, GenericArgs, HirId, Node, Param, PathSegment, QPath, TyKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, AdtDef, GenericArgKind, Ty}; use rustc_span::Span; @@ -24,22 +25,24 @@ mod certainty; use certainty::{Certainty, Meet, join, meet}; pub fn expr_type_is_certain(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { - expr_type_certainty(cx, expr).is_certain() + expr_type_certainty(cx, expr, false).is_certain() } -fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { +/// Determine the type certainty of `expr`. `in_arg` indicates that the expression happens within +/// the evaluation of a function or method call argument. 
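A user-level sketch of why the new `in_arg` flag matters for literals (see the `ExprKind::Lit` arm further down): inside a call argument an unsuffixed literal is still open to inference, so its type is not certain on its own (`takes_u8` is an illustrative name):

    fn takes_u8(x: u8) -> u8 {
        x
    }

    fn main() {
        // as a call argument, the unsuffixed `7` takes its type from the parameter (u8 here)
        let a = takes_u8(7);
        // outside an argument position the same literal simply defaults to i32
        let b = 7;
        println!("{a} {b}");
    }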
+fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>, in_arg: bool) -> Certainty { let certainty = match &expr.kind { ExprKind::Unary(_, expr) | ExprKind::Field(expr, _) | ExprKind::Index(expr, _, _) - | ExprKind::AddrOf(_, _, expr) => expr_type_certainty(cx, expr), + | ExprKind::AddrOf(_, _, expr) => expr_type_certainty(cx, expr, in_arg), - ExprKind::Array(exprs) => join(exprs.iter().map(|expr| expr_type_certainty(cx, expr))), + ExprKind::Array(exprs) => join(exprs.iter().map(|expr| expr_type_certainty(cx, expr, in_arg))), ExprKind::Call(callee, args) => { - let lhs = expr_type_certainty(cx, callee); + let lhs = expr_type_certainty(cx, callee, false); let rhs = if type_is_inferable_from_arguments(cx, expr) { - meet(args.iter().map(|arg| expr_type_certainty(cx, arg))) + meet(args.iter().map(|arg| expr_type_certainty(cx, arg, true))) } else { Certainty::Uncertain }; @@ -47,7 +50,7 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { }, ExprKind::MethodCall(method, receiver, args, _) => { - let mut receiver_type_certainty = expr_type_certainty(cx, receiver); + let mut receiver_type_certainty = expr_type_certainty(cx, receiver, false); // Even if `receiver_type_certainty` is `Certain(Some(..))`, the `Self` type in the method // identified by `type_dependent_def_id(..)` can differ. This can happen as a result of a `deref`, // for example. So update the `DefId` in `receiver_type_certainty` (if any). @@ -59,7 +62,8 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { let lhs = path_segment_certainty(cx, receiver_type_certainty, method, false); let rhs = if type_is_inferable_from_arguments(cx, expr) { meet( - std::iter::once(receiver_type_certainty).chain(args.iter().map(|arg| expr_type_certainty(cx, arg))), + std::iter::once(receiver_type_certainty) + .chain(args.iter().map(|arg| expr_type_certainty(cx, arg, true))), ) } else { Certainty::Uncertain @@ -67,16 +71,39 @@ fn expr_type_certainty(cx: &LateContext<'_>, expr: &Expr<'_>) -> Certainty { lhs.join(rhs) }, - ExprKind::Tup(exprs) => meet(exprs.iter().map(|expr| expr_type_certainty(cx, expr))), + ExprKind::Tup(exprs) => meet(exprs.iter().map(|expr| expr_type_certainty(cx, expr, in_arg))), - ExprKind::Binary(_, lhs, rhs) => expr_type_certainty(cx, lhs).meet(expr_type_certainty(cx, rhs)), + ExprKind::Binary(_, lhs, rhs) => { + // If one of the side of the expression is uncertain, the certainty will come from the other side, + // with no information on the type. 
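A hypothetical illustration of the binary-expression rule described in the comment above (again, not part of the patch): when only one operand determines a type, the whole expression can still be treated as certain, just without an associated `DefId`:

fn main() {
    let n: u8 = 7;
    // `n` has a certain type (`u8`); the bare literal `3` does not pin one down
    // on its own, yet the sum is still known to be `u8`.
    let sum = n + 3;
    println!("{sum}");
}
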
+ match ( + expr_type_certainty(cx, lhs, in_arg), + expr_type_certainty(cx, rhs, in_arg), + ) { + (Certainty::Uncertain, Certainty::Certain(_)) | (Certainty::Certain(_), Certainty::Uncertain) => { + Certainty::Certain(None) + }, + (l, r) => l.meet(r), + } + }, - ExprKind::Lit(_) => Certainty::Certain(None), + ExprKind::Lit(lit) => { + if !in_arg + && matches!( + lit.node, + LitKind::Int(_, LitIntType::Unsuffixed) | LitKind::Float(_, LitFloatType::Unsuffixed) + ) + { + Certainty::Uncertain + } else { + Certainty::Certain(None) + } + }, ExprKind::Cast(_, ty) => type_certainty(cx, ty), ExprKind::If(_, if_expr, Some(else_expr)) => { - expr_type_certainty(cx, if_expr).join(expr_type_certainty(cx, else_expr)) + expr_type_certainty(cx, if_expr, in_arg).join(expr_type_certainty(cx, else_expr, in_arg)) }, ExprKind::Path(qpath) => qpath_certainty(cx, qpath, false), @@ -188,6 +215,20 @@ fn qpath_certainty(cx: &LateContext<'_>, qpath: &QPath<'_>, resolves_to_type: bo certainty } +/// Tries to tell whether `param` resolves to something certain, e.g., a non-wildcard type if +/// present. The certainty `DefId` is cleared before returning. +fn param_certainty(cx: &LateContext<'_>, param: &Param<'_>) -> Certainty { + let owner_did = cx.tcx.hir_enclosing_body_owner(param.hir_id); + let Some(fn_decl) = cx.tcx.hir_fn_decl_by_hir_id(cx.tcx.local_def_id_to_hir_id(owner_did)) else { + return Certainty::Uncertain; + }; + let inputs = fn_decl.inputs; + let body_params = cx.tcx.hir_body_owned_by(owner_did).params; + std::iter::zip(body_params, inputs) + .find(|(p, _)| p.hir_id == param.hir_id) + .map_or(Certainty::Uncertain, |(_, ty)| type_certainty(cx, ty).clear_def_id()) +} + fn path_segment_certainty( cx: &LateContext<'_>, parent_certainty: Certainty, @@ -240,15 +281,16 @@ fn path_segment_certainty( // `get_parent` because `hir_id` refers to a `Pat`, and we're interested in the node containing the `Pat`. Res::Local(hir_id) => match cx.tcx.parent_hir_node(hir_id) { - // An argument's type is always certain. - Node::Param(..) => Certainty::Certain(None), + // A parameter's type is not always certain, as it may come from an untyped closure definition, + // or from a wildcard in a typed closure definition. + Node::Param(param) => param_certainty(cx, param), // A local's type is certain if its type annotation is certain or it has an initializer whose // type is certain. Node::LetStmt(local) => { let lhs = local.ty.map_or(Certainty::Uncertain, |ty| type_certainty(cx, ty)); let rhs = local .init - .map_or(Certainty::Uncertain, |init| expr_type_certainty(cx, init)); + .map_or(Certainty::Uncertain, |init| expr_type_certainty(cx, init, false)); let certainty = lhs.join(rhs); if resolves_to_type { certainty diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs index 1b049b6d12c..76d43feee12 100644 --- a/src/tools/clippy/clippy_utils/src/usage.rs +++ b/src/tools/clippy/clippy_utils/src/usage.rs @@ -1,3 +1,4 @@ +use crate::macros::root_macro_call_first_node; use crate::visitors::{Descend, Visitable, for_each_expr, for_each_expr_without_closures}; use crate::{self as utils, get_enclosing_loop_or_multi_call_closure}; use core::ops::ControlFlow; @@ -9,6 +10,7 @@ use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; use rustc_middle::mir::FakeReadCause; use rustc_middle::ty; +use rustc_span::sym; /// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined. 
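To make the closure-parameter handling above concrete, here is a hypothetical example (not taken from the patch) of the situations `param_certainty` is meant to distinguish: a spelled-out parameter type, an inferred one, and a wildcard annotation:

fn main() {
    // Fully annotated parameter: its type is certain from the annotation alone.
    let typed = |x: u32| x + 1;
    // No annotation: the type of `y` is only known through inference.
    let inferred = |y| y + 1;
    // Wildcard annotation: the annotation alone does not determine the type.
    let wildcard = |v: &_| *v + 1;
    println!("{} {} {}", typed(1), inferred(2_u32), wildcard(&3_u32));
}
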
pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> { @@ -140,6 +142,46 @@ impl<'tcx> Visitor<'tcx> for BindingUsageFinder<'_, 'tcx> { } } +/// Checks if the given expression is a macro call to `todo!()` or `unimplemented!()`. +pub fn is_todo_unimplemented_macro(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + root_macro_call_first_node(cx, expr).is_some_and(|macro_call| { + [sym::todo_macro, sym::unimplemented_macro] + .iter() + .any(|&sym| cx.tcx.is_diagnostic_item(sym, macro_call.def_id)) + }) +} + +/// Checks if the given expression is a stub, i.e., a `todo!()` or `unimplemented!()` expression, +/// or a block whose last expression is a `todo!()` or `unimplemented!()`. +pub fn is_todo_unimplemented_stub(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool { + if let ExprKind::Block(block, _) = expr.kind { + if let Some(last_expr) = block.expr { + return is_todo_unimplemented_macro(cx, last_expr); + } + + return block.stmts.last().is_some_and(|stmt| { + if let hir::StmtKind::Expr(expr) | hir::StmtKind::Semi(expr) = stmt.kind { + return is_todo_unimplemented_macro(cx, expr); + } + false + }); + } + + is_todo_unimplemented_macro(cx, expr) +} + +/// Checks if the given expression contains macro call to `todo!()` or `unimplemented!()`. +pub fn contains_todo_unimplement_macro(cx: &LateContext<'_>, expr: &'_ Expr<'_>) -> bool { + for_each_expr_without_closures(expr, |e| { + if is_todo_unimplemented_macro(cx, e) { + ControlFlow::Break(()) + } else { + ControlFlow::Continue(()) + } + }) + .is_some() +} + pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool { for_each_expr_without_closures(expression, |e| { match e.kind { diff --git a/src/tools/clippy/rust-toolchain.toml b/src/tools/clippy/rust-toolchain.toml index f46e079db3f..0edb80edd04 100644 --- a/src/tools/clippy/rust-toolchain.toml +++ b/src/tools/clippy/rust-toolchain.toml @@ -1,6 +1,6 @@ [toolchain] # begin autogenerated nightly -channel = "nightly-2025-07-10" +channel = "nightly-2025-07-25" # end autogenerated nightly components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] profile = "minimal" diff --git a/src/tools/clippy/rustc_tools_util/src/lib.rs b/src/tools/clippy/rustc_tools_util/src/lib.rs index b45edf23455..194ed84d04c 100644 --- a/src/tools/clippy/rustc_tools_util/src/lib.rs +++ b/src/tools/clippy/rustc_tools_util/src/lib.rs @@ -157,7 +157,8 @@ pub fn get_commit_date() -> Option<String> { #[must_use] pub fn get_compiler_version() -> Option<String> { - get_output("rustc", &["-V"]) + let compiler = std::option_env!("RUSTC").unwrap_or("rustc"); + get_output(compiler, &["-V"]) } #[must_use] @@ -172,6 +173,8 @@ pub fn get_channel(compiler_version: Option<String>) -> String { return String::from("beta"); } else if rustc_output.contains("nightly") { return String::from("nightly"); + } else if rustc_output.contains("dev") { + return String::from("dev"); } } diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index 83f91ccaa7b..464efc45c6b 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -162,6 +162,10 @@ impl TestContext { // however has some staging logic that is hurting us here, so to work around // that we set both the "real" and "staging" rustc to TEST_RUSTC, including the // associated library paths. 
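As a rough sketch (not part of the patch) of what the `is_todo_unimplemented_stub` helper added above is meant to recognize, a body counts as a stub when it is a bare `todo!()`/`unimplemented!()` call or a block whose final expression is one of those macros:

fn stub_direct() -> u32 {
    todo!() // a bare `todo!()` body
}

fn stub_in_block(flag: bool) -> u32 {
    let _ready = flag;
    unimplemented!() // a block whose last expression is the macro
}

fn not_a_stub() -> u32 {
    let x = 1;
    x + 1 // ordinary logic
}

fn main() {
    // Only the non-stub function is called; the stubs would panic if invoked.
    let _ = not_a_stub();
}
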
+ #[expect( + clippy::option_env_unwrap, + reason = "TEST_RUSTC will ensure that the requested env vars are set during compile time" + )] if let Some(rustc) = option_env!("TEST_RUSTC") { let libdir = option_env!("TEST_RUSTC_LIB").unwrap(); let sysroot = option_env!("TEST_SYSROOT").unwrap(); @@ -169,10 +173,7 @@ impl TestContext { p.envs.push(("RUSTC_REAL_LIBDIR".into(), Some(libdir.into()))); p.envs.push(("RUSTC_SNAPSHOT".into(), Some(rustc.into()))); p.envs.push(("RUSTC_SNAPSHOT_LIBDIR".into(), Some(libdir.into()))); - p.envs.push(( - "RUSTC_SYSROOT".into(), - Some(sysroot.into()), - )); + p.envs.push(("RUSTC_SYSROOT".into(), Some(sysroot.into()))); } p }, diff --git a/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr b/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr index 8a85d38fba3..608264beb10 100644 --- a/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr +++ b/src/tools/clippy/tests/ui-toml/check_incompatible_msrv_in_tests/check_incompatible_msrv_in_tests.enabled.stderr @@ -18,6 +18,8 @@ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is | LL | sleep(Duration::new(1, 0)); | ^^^^^ + | + = note: you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr index 020b3cc7878..a5dfd7015a3 100644 --- a/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr +++ b/src/tools/clippy/tests/ui-toml/enum_variant_size/enum_variant_size.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 501]), LL + B(Box<[u8; 501]>), diff --git a/src/tools/clippy/tests/ui/approx_const.rs b/src/tools/clippy/tests/ui/approx_const.rs index 6461666be8f..fc493421a16 100644 --- a/src/tools/clippy/tests/ui/approx_const.rs +++ b/src/tools/clippy/tests/ui/approx_const.rs @@ -106,4 +106,19 @@ fn main() { //~^ approx_constant let no_tau = 6.3; + + // issue #15194 + #[allow(clippy::excessive_precision)] + let x: f64 = 3.1415926535897932384626433832; + //~^ approx_constant + + #[allow(clippy::excessive_precision)] + let _: f64 = 003.14159265358979311599796346854418516159057617187500; + //~^ approx_constant + + let almost_frac_1_sqrt_2 = 00.70711; + //~^ approx_constant + + let almost_frac_1_sqrt_2 = 00.707_11; + //~^ approx_constant } diff --git a/src/tools/clippy/tests/ui/approx_const.stderr b/src/tools/clippy/tests/ui/approx_const.stderr index f7bda0468cb..32a3517ff2e 100644 --- a/src/tools/clippy/tests/ui/approx_const.stderr +++ b/src/tools/clippy/tests/ui/approx_const.stderr @@ -184,5 +184,37 @@ LL | let almost_tau = 6.28; | = help: consider using the constant directly -error: aborting due to 23 previous errors +error: approximate value of `f{32, 64}::consts::PI` found + --> tests/ui/approx_const.rs:112:18 + | +LL | let x: f64 = 3.1415926535897932384626433832; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: 
consider using the constant directly + +error: approximate value of `f{32, 64}::consts::PI` found + --> tests/ui/approx_const.rs:116:18 + | +LL | let _: f64 = 003.14159265358979311599796346854418516159057617187500; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider using the constant directly + +error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found + --> tests/ui/approx_const.rs:119:32 + | +LL | let almost_frac_1_sqrt_2 = 00.70711; + | ^^^^^^^^ + | + = help: consider using the constant directly + +error: approximate value of `f{32, 64}::consts::FRAC_1_SQRT_2` found + --> tests/ui/approx_const.rs:122:32 + | +LL | let almost_frac_1_sqrt_2 = 00.707_11; + | ^^^^^^^^^ + | + = help: consider using the constant directly + +error: aborting due to 27 previous errors diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr index 5556b0df88c..ce726206b0c 100644 --- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr +++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr @@ -5,7 +5,7 @@ LL | let _ = Arc::new(RefCell::new(42)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<RefCell<i32>>` is not `Send` and `Sync` as `RefCell<i32>` is not `Sync` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `RefCell<i32>` `Send` and `Sync` or consider a wrapper type such as `Mutex` = note: `-D clippy::arc-with-non-send-sync` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::arc_with_non_send_sync)]` @@ -17,7 +17,7 @@ LL | let _ = Arc::new(mutex.lock().unwrap()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<MutexGuard<'_, i32>>` is not `Send` and `Sync` as `MutexGuard<'_, i32>` is not `Send` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `MutexGuard<'_, i32>` `Send` and `Sync` or consider a wrapper type such as `Mutex` error: usage of an `Arc` that is not `Send` and `Sync` @@ -27,7 +27,7 @@ LL | let _ = Arc::new(&42 as *const i32); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `Arc<*const i32>` is not `Send` and `Sync` as `*const i32` is neither `Send` nor `Sync` - = help: if the `Arc` will not used be across threads replace it with an `Rc` + = help: if the `Arc` will not be used across threads replace it with an `Rc` = help: otherwise make `*const i32` `Send` and `Sync` or consider a wrapper type such as `Mutex` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs index 21be2af201f..3245b2c983e 100644 --- a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs +++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs @@ -664,6 +664,20 @@ pub fn issue_12318() { //~^ arithmetic_side_effects } +pub fn issue_15225() { + use core::num::{NonZero, NonZeroU8}; + + let one = const { NonZeroU8::new(1).unwrap() }; + let _ = one.get() - 1; + + let one: NonZero<u8> = const { NonZero::new(1).unwrap() }; + let _ = one.get() - 1; + + type AliasedType = u8; + let one: NonZero<AliasedType> = const { NonZero::new(1).unwrap() }; + let _ = one.get() - 1; +} + pub fn explicit_methods() { use core::ops::Add; let one: i32 = 1; diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr 
b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr index e15fb612be5..4150493ba94 100644 --- a/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr +++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.stderr @@ -758,13 +758,13 @@ LL | one.sub_assign(1); | ^^^^^^^^^^^^^^^^^ error: arithmetic operation that can potentially result in unexpected side-effects - --> tests/ui/arithmetic_side_effects.rs:670:5 + --> tests/ui/arithmetic_side_effects.rs:684:5 | LL | one.add(&one); | ^^^^^^^^^^^^^ error: arithmetic operation that can potentially result in unexpected side-effects - --> tests/ui/arithmetic_side_effects.rs:672:5 + --> tests/ui/arithmetic_side_effects.rs:686:5 | LL | Box::new(one).add(one); | ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/assign_ops.fixed b/src/tools/clippy/tests/ui/assign_ops.fixed index eee61f949e7..3754b9dfe74 100644 --- a/src/tools/clippy/tests/ui/assign_ops.fixed +++ b/src/tools/clippy/tests/ui/assign_ops.fixed @@ -84,6 +84,7 @@ mod issue14871 { const ONE: Self; } + #[rustfmt::skip] // rustfmt doesn't understand the order of pub const on traits (yet) pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/assign_ops.rs b/src/tools/clippy/tests/ui/assign_ops.rs index 13ffcee0a3c..0b878d4f490 100644 --- a/src/tools/clippy/tests/ui/assign_ops.rs +++ b/src/tools/clippy/tests/ui/assign_ops.rs @@ -84,6 +84,7 @@ mod issue14871 { const ONE: Self; } + #[rustfmt::skip] // rustfmt doesn't understand the order of pub const on traits (yet) pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/auxiliary/external_item.rs b/src/tools/clippy/tests/ui/auxiliary/external_item.rs index ca4bc369e44..621e18f5c01 100644 --- a/src/tools/clippy/tests/ui/auxiliary/external_item.rs +++ b/src/tools/clippy/tests/ui/auxiliary/external_item.rs @@ -4,4 +4,4 @@ impl _ExternalStruct { pub fn _foo(self) {} } -pub fn _exernal_foo() {} +pub fn _external_foo() {} diff --git a/src/tools/clippy/tests/ui/checked_conversions.fixed b/src/tools/clippy/tests/ui/checked_conversions.fixed index 279a5b6e1ff..6175275ef04 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.fixed +++ b/src/tools/clippy/tests/ui/checked_conversions.fixed @@ -95,7 +95,7 @@ pub const fn issue_8898(i: u32) -> bool { #[clippy::msrv = "1.33"] fn msrv_1_33() { let value: i64 = 33; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; } #[clippy::msrv = "1.34"] diff --git a/src/tools/clippy/tests/ui/checked_conversions.rs b/src/tools/clippy/tests/ui/checked_conversions.rs index c339bc674bb..9ed0e8f660d 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.rs +++ b/src/tools/clippy/tests/ui/checked_conversions.rs @@ -95,13 +95,13 @@ pub const fn issue_8898(i: u32) -> bool { #[clippy::msrv = "1.33"] fn msrv_1_33() { let value: i64 = 33; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; } #[clippy::msrv = "1.34"] fn msrv_1_34() { let value: i64 = 34; - let _ = value <= (u32::MAX as i64) && value >= 0; + let _ = value <= (u32::max_value() as i64) && value >= 0; //~^ checked_conversions } diff --git a/src/tools/clippy/tests/ui/checked_conversions.stderr b/src/tools/clippy/tests/ui/checked_conversions.stderr index 3841b9d5a4d..624876dacb2 100644 --- a/src/tools/clippy/tests/ui/checked_conversions.stderr +++ b/src/tools/clippy/tests/ui/checked_conversions.stderr @@ -100,8 +100,8 @@ 
LL | let _ = value <= u16::MAX as u32 && value as i32 == 5; error: checked cast can be simplified --> tests/ui/checked_conversions.rs:104:13 | -LL | let _ = value <= (u32::MAX as i64) && value >= 0; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()` +LL | let _ = value <= (u32::max_value() as i64) && value >= 0; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `u32::try_from(value).is_ok()` error: aborting due to 17 previous errors diff --git a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr index a4bf0099244..26e360112b6 100644 --- a/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr +++ b/src/tools/clippy/tests/ui/checked_unwrap/simple_conditionals.stderr @@ -328,7 +328,7 @@ error: creating a shared reference to mutable static LL | if X.is_some() { | ^^^^^^^^^^^ shared reference to mutable static | - = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/static-mut-references.html> + = note: for more information, see <https://doc.rust-lang.org/edition-guide/rust-2024/static-mut-references.html> = note: shared references to mutable statics are dangerous; it's undefined behavior if the static is mutated or if a mutable reference is created for it while the shared reference lives = note: `#[deny(static_mut_refs)]` on by default diff --git a/src/tools/clippy/tests/ui/expect.rs b/src/tools/clippy/tests/ui/expect.rs index 8f7379f0021..1ab01ecfcfe 100644 --- a/src/tools/clippy/tests/ui/expect.rs +++ b/src/tools/clippy/tests/ui/expect.rs @@ -16,7 +16,26 @@ fn expect_result() { //~^ expect_used } +#[allow(clippy::ok_expect)] +#[allow(clippy::err_expect)] +fn issue_15247() { + let x: Result<u8, u8> = Err(0); + x.ok().expect("Huh"); + //~^ expect_used + + { x.ok() }.expect("..."); + //~^ expect_used + + let y: Result<u8, u8> = Ok(0); + y.err().expect("Huh"); + //~^ expect_used + + { y.err() }.expect("..."); + //~^ expect_used +} + fn main() { expect_option(); expect_result(); + issue_15247(); } diff --git a/src/tools/clippy/tests/ui/expect.stderr b/src/tools/clippy/tests/ui/expect.stderr index 70cf3072003..353fb776531 100644 --- a/src/tools/clippy/tests/ui/expect.stderr +++ b/src/tools/clippy/tests/ui/expect.stderr @@ -24,5 +24,37 @@ LL | let _ = res.expect_err(""); | = note: if this value is an `Ok`, it will panic -error: aborting due to 3 previous errors +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:23:5 + | +LL | x.ok().expect("Huh"); + | ^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:26:5 + | +LL | { x.ok() }.expect("..."); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:30:5 + | +LL | y.err().expect("Huh"); + | ^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: used `expect()` on an `Option` value + --> tests/ui/expect.rs:33:5 + | +LL | { y.err() }.expect("..."); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: if this value is `None`, it will panic + +error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/expect_fun_call.fixed b/src/tools/clippy/tests/ui/expect_fun_call.fixed index 73eaebf773c..b923521afde 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.fixed +++ b/src/tools/clippy/tests/ui/expect_fun_call.fixed @@ 
-90,17 +90,30 @@ fn main() { "foo" } - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + const fn const_evaluable() -> &'static str { + "foo" + } + + Some("foo").unwrap_or_else(|| panic!("{}", get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_string())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_static_str()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_static_str())); //~^ expect_fun_call - Some("foo").unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) }); + Some("foo").unwrap_or_else(|| panic!("{}", get_non_static_str(&0))); + //~^ expect_fun_call + + Some("foo").unwrap_or_else(|| panic!("{}", const_evaluable())); //~^ expect_fun_call + + const { + Some("foo").expect(const_evaluable()); + } + + Some("foo").expect(const { const_evaluable() }); } //Issue #3839 @@ -122,4 +135,15 @@ fn main() { let format_capture_and_value: Option<i32> = None; format_capture_and_value.unwrap_or_else(|| panic!("{error_code}, {}", 1)); //~^ expect_fun_call + + // Issue #15056 + let a = false; + Some(5).expect(if a { "a" } else { "b" }); + + let return_in_expect: Option<i32> = None; + return_in_expect.expect(if true { + "Error" + } else { + return; + }); } diff --git a/src/tools/clippy/tests/ui/expect_fun_call.rs b/src/tools/clippy/tests/ui/expect_fun_call.rs index ecebc9ebfb6..bc58d24bc81 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.rs +++ b/src/tools/clippy/tests/ui/expect_fun_call.rs @@ -90,6 +90,10 @@ fn main() { "foo" } + const fn const_evaluable() -> &'static str { + "foo" + } + Some("foo").expect(&get_string()); //~^ expect_fun_call Some("foo").expect(get_string().as_ref()); @@ -101,6 +105,15 @@ fn main() { //~^ expect_fun_call Some("foo").expect(get_non_static_str(&0)); //~^ expect_fun_call + + Some("foo").expect(const_evaluable()); + //~^ expect_fun_call + + const { + Some("foo").expect(const_evaluable()); + } + + Some("foo").expect(const { const_evaluable() }); } //Issue #3839 @@ -122,4 +135,15 @@ fn main() { let format_capture_and_value: Option<i32> = None; format_capture_and_value.expect(&format!("{error_code}, {}", 1)); //~^ expect_fun_call + + // Issue #15056 + let a = false; + Some(5).expect(if a { "a" } else { "b" }); + + let return_in_expect: Option<i32> = None; + return_in_expect.expect(if true { + "Error" + } else { + return; + }); } diff --git a/src/tools/clippy/tests/ui/expect_fun_call.stderr b/src/tools/clippy/tests/ui/expect_fun_call.stderr index 36713196cb9..0692ecb4862 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.stderr +++ b/src/tools/clippy/tests/ui/expect_fun_call.stderr @@ -38,58 +38,64 @@ LL | Some("foo").expect(format!("{} {}", 1, 2).as_ref()); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{} {}", 1, 2))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:93:21 + --> tests/ui/expect_fun_call.rs:97:21 | LL | Some("foo").expect(&get_string()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:95:21 + --> tests/ui/expect_fun_call.rs:99:21 | LL | Some("foo").expect(get_string().as_ref()); - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:97:21 + --> tests/ui/expect_fun_call.rs:101:21 | LL | Some("foo").expect(get_string().as_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_string()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:100:21 + --> tests/ui/expect_fun_call.rs:104:21 | LL | Some("foo").expect(get_static_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_static_str()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_static_str()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:102:21 + --> tests/ui/expect_fun_call.rs:106:21 | LL | Some("foo").expect(get_non_static_str(&0)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", get_non_static_str(&0)))` + +error: function call inside of `expect` + --> tests/ui/expect_fun_call.rs:109:21 + | +LL | Some("foo").expect(const_evaluable()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{}", const_evaluable()))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:107:16 + --> tests/ui/expect_fun_call.rs:120:16 | LL | Some(true).expect(&format!("key {}, {}", 1, 2)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:114:17 + --> tests/ui/expect_fun_call.rs:127:17 | LL | opt_ref.expect(&format!("{:?}", opt_ref)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{:?}", opt_ref))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:119:20 + --> tests/ui/expect_fun_call.rs:132:20 | LL | format_capture.expect(&format!("{error_code}")); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}"))` error: function call inside of `expect` - --> tests/ui/expect_fun_call.rs:123:30 + --> tests/ui/expect_fun_call.rs:136:30 | LL | format_capture_and_value.expect(&format!("{error_code}, {}", 1)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}, {}", 1))` -error: aborting due to 15 previous errors +error: aborting due to 16 previous errors diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.fixed b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed index b3e112f19eb..d370b85a67e 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.fixed +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.fixed @@ -89,3 +89,24 @@ fn issue11503() { let _: Vec<usize> = bools.iter().enumerate().filter(|&(i, b)| ****b).map(|(i, b)| i).collect(); //~^ filter_map_bool_then } + +fn issue15047() { + #[derive(Clone, Copy)] + enum MyEnum { + A, + B, + C, + } + + macro_rules! 
foo { + ($e:expr) => { + $e + 1 + }; + } + + let x = 1; + let _ = [(MyEnum::A, "foo", 1i32)] + .iter() + .filter(|&(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar"))).map(|(t, s, i)| foo!(x)); + //~^ filter_map_bool_then +} diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.rs b/src/tools/clippy/tests/ui/filter_map_bool_then.rs index d996b3cb3c5..12295cc2482 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.rs +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.rs @@ -89,3 +89,24 @@ fn issue11503() { let _: Vec<usize> = bools.iter().enumerate().filter_map(|(i, b)| b.then(|| i)).collect(); //~^ filter_map_bool_then } + +fn issue15047() { + #[derive(Clone, Copy)] + enum MyEnum { + A, + B, + C, + } + + macro_rules! foo { + ($e:expr) => { + $e + 1 + }; + } + + let x = 1; + let _ = [(MyEnum::A, "foo", 1i32)] + .iter() + .filter_map(|(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar")).then(|| foo!(x))); + //~^ filter_map_bool_then +} diff --git a/src/tools/clippy/tests/ui/filter_map_bool_then.stderr b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr index aeb1baeb35e..edf6c655939 100644 --- a/src/tools/clippy/tests/ui/filter_map_bool_then.stderr +++ b/src/tools/clippy/tests/ui/filter_map_bool_then.stderr @@ -61,5 +61,11 @@ error: usage of `bool::then` in `filter_map` LL | let _: Vec<usize> = bools.iter().enumerate().filter_map(|(i, b)| b.then(|| i)).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&(i, b)| ****b).map(|(i, b)| i)` -error: aborting due to 10 previous errors +error: usage of `bool::then` in `filter_map` + --> tests/ui/filter_map_bool_then.rs:110:10 + | +LL | .filter_map(|(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar")).then(|| foo!(x))); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `filter` then `map` instead: `filter(|&(t, s, i)| matches!(t, MyEnum::A if s.starts_with("bar"))).map(|(t, s, i)| foo!(x))` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/flat_map_identity.fixed b/src/tools/clippy/tests/ui/flat_map_identity.fixed index f6206232612..06a3eee9d84 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.fixed +++ b/src/tools/clippy/tests/ui/flat_map_identity.fixed @@ -16,3 +16,16 @@ fn main() { let _ = iterator.flatten(); //~^ flat_map_identity } + +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: this is an `Iterator<Item = &[i32, i32]>` + // match ergonomics makes the binding patterns into references + // so that its type changes to `Iterator<Item = [&i32, &i32]>` + let _ = x.iter().flat_map(|[x, y]| [x, y]); + let _ = x.iter().flat_map(|x| [x[0]]); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied().flatten(); + //~^ flat_map_identity +} diff --git a/src/tools/clippy/tests/ui/flat_map_identity.rs b/src/tools/clippy/tests/ui/flat_map_identity.rs index c59e749474e..1cab7d559d8 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.rs +++ b/src/tools/clippy/tests/ui/flat_map_identity.rs @@ -16,3 +16,16 @@ fn main() { let _ = iterator.flat_map(|x| return x); //~^ flat_map_identity } + +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: this is an `Iterator<Item = &[i32, i32]>` + // match ergonomics makes the binding patterns into references + // so that its type changes to `Iterator<Item = [&i32, &i32]>` + let _ = x.iter().flat_map(|[x, y]| [x, y]); + let _ = x.iter().flat_map(|x| [x[0]]); + + // no match ergonomics for `[i32, i32]` + let _ = 
x.iter().copied().flat_map(|[x, y]| [x, y]); + //~^ flat_map_identity +} diff --git a/src/tools/clippy/tests/ui/flat_map_identity.stderr b/src/tools/clippy/tests/ui/flat_map_identity.stderr index 75137f5d9e5..18c863bf96d 100644 --- a/src/tools/clippy/tests/ui/flat_map_identity.stderr +++ b/src/tools/clippy/tests/ui/flat_map_identity.stderr @@ -19,5 +19,11 @@ error: use of `flat_map` with an identity function LL | let _ = iterator.flat_map(|x| return x); | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()` -error: aborting due to 3 previous errors +error: use of `flat_map` with an identity function + --> tests/ui/flat_map_identity.rs:29:31 + | +LL | let _ = x.iter().copied().flat_map(|[x, y]| [x, y]); + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `flatten()` + +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.fixed b/src/tools/clippy/tests/ui/if_then_some_else_none.fixed index f774608712d..d14a805b666 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.fixed +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.fixed @@ -122,3 +122,46 @@ const fn issue12103(x: u32) -> Option<u32> { // Should not issue an error in `const` context if x > 42 { Some(150) } else { None } } + +mod issue15257 { + struct Range { + start: u8, + end: u8, + } + + fn can_be_safely_rewrite(rs: &[&Range]) -> Option<Vec<u8>> { + (rs.len() == 1 && rs[0].start == rs[0].end).then(|| vec![rs[0].start]) + } + + fn reborrow_as_ptr(i: *mut i32) -> Option<*const i32> { + let modulo = unsafe { *i % 2 }; + (modulo == 0).then_some(i) + } + + fn reborrow_as_fn_ptr(i: i32) { + fn do_something(fn_: Option<fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something((i % 2 == 0).then_some(item_fn)); + } + + fn reborrow_as_fn_unsafe(i: i32) { + fn do_something(fn_: Option<unsafe fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something((i % 2 == 0).then_some(item_fn)); + + let closure_fn = |i: i32| {}; + do_something((i % 2 == 0).then_some(closure_fn)); + } +} diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none.rs b/src/tools/clippy/tests/ui/if_then_some_else_none.rs index 8b8ff0a6ea0..bb0072f3157 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.rs +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.rs @@ -143,3 +143,71 @@ const fn issue12103(x: u32) -> Option<u32> { // Should not issue an error in `const` context if x > 42 { Some(150) } else { None } } + +mod issue15257 { + struct Range { + start: u8, + end: u8, + } + + fn can_be_safely_rewrite(rs: &[&Range]) -> Option<Vec<u8>> { + if rs.len() == 1 && rs[0].start == rs[0].end { + //~^ if_then_some_else_none + Some(vec![rs[0].start]) + } else { + None + } + } + + fn reborrow_as_ptr(i: *mut i32) -> Option<*const i32> { + let modulo = unsafe { *i % 2 }; + if modulo == 0 { + //~^ if_then_some_else_none + Some(i) + } else { + None + } + } + + fn reborrow_as_fn_ptr(i: i32) { + fn do_something(fn_: Option<fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(item_fn) + } else { + None + }); + } + + fn reborrow_as_fn_unsafe(i: i32) { + fn do_something(fn_: Option<unsafe fn(i32)>) { + todo!() + } + + fn item_fn(i: i32) { + todo!() + } + + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(item_fn) + } else { + None + }); + + let closure_fn = |i: i32| {}; + do_something(if i % 2 == 0 { + //~^ if_then_some_else_none + Some(closure_fn) + } else { + None + }); + } +} diff --git 
a/src/tools/clippy/tests/ui/if_then_some_else_none.stderr b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr index 71285574ef2..c2e624a0a73 100644 --- a/src/tools/clippy/tests/ui/if_then_some_else_none.stderr +++ b/src/tools/clippy/tests/ui/if_then_some_else_none.stderr @@ -58,5 +58,63 @@ error: this could be simplified with `bool::then` LL | if s == "1" { Some(true) } else { None } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(s == "1").then(|| true)` -error: aborting due to 6 previous errors +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none.rs:154:9 + | +LL | / if rs.len() == 1 && rs[0].start == rs[0].end { +LL | | +LL | | Some(vec![rs[0].start]) +LL | | } else { +LL | | None +LL | | } + | |_________^ help: try: `(rs.len() == 1 && rs[0].start == rs[0].end).then(|| vec![rs[0].start])` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:164:9 + | +LL | / if modulo == 0 { +LL | | +LL | | Some(i) +LL | | } else { +LL | | None +LL | | } + | |_________^ help: try: `(modulo == 0).then_some(i)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:181:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(item_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(item_fn)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:198:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(item_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(item_fn)` + +error: this could be simplified with `bool::then_some` + --> tests/ui/if_then_some_else_none.rs:206:22 + | +LL | do_something(if i % 2 == 0 { + | ______________________^ +LL | | +LL | | Some(closure_fn) +LL | | } else { +LL | | None +LL | | }); + | |_________^ help: try: `(i % 2 == 0).then_some(closure_fn)` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs new file mode 100644 index 00000000000..be04299a6ab --- /dev/null +++ b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.rs @@ -0,0 +1,35 @@ +#![warn(clippy::if_then_some_else_none)] +#![allow(clippy::manual_is_multiple_of)] + +mod issue15257 { + use std::pin::Pin; + + #[derive(Default)] + pub struct Foo {} + pub trait Bar {} + impl Bar for Foo {} + + fn pointer_unsized_coercion(i: u32) -> Option<Box<dyn Bar>> { + if i % 2 == 0 { + //~^ if_then_some_else_none + Some(Box::new(Foo::default())) + } else { + None + } + } + + fn reborrow_as_pin(i: Pin<&mut i32>) { + use std::ops::Rem; + + fn do_something(i: Option<&i32>) { + todo!() + } + + do_something(if i.rem(2) == 0 { + //~^ if_then_some_else_none + Some(&i) + } else { + None + }); + } +} diff --git a/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr new file mode 100644 index 00000000000..f77ce7910e7 --- /dev/null +++ b/src/tools/clippy/tests/ui/if_then_some_else_none_unfixable.stderr @@ -0,0 +1,28 @@ +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none_unfixable.rs:13:9 + | +LL | / if i % 2 == 0 { +LL | | +LL | | Some(Box::new(Foo::default())) +LL | | } else { +LL | | None +LL | | } + | |_________^ + | + = note: `-D 
clippy::if-then-some-else-none` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::if_then_some_else_none)]` + +error: this could be simplified with `bool::then` + --> tests/ui/if_then_some_else_none_unfixable.rs:28:22 + | +LL | do_something(if i.rem(2) == 0 { + | ______________________^ +LL | | +LL | | Some(&i) +LL | | } else { +LL | | None +LL | | }); + | |_________^ + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/incompatible_msrv.rs b/src/tools/clippy/tests/ui/incompatible_msrv.rs index 99101b2bb8f..f7f21e1850d 100644 --- a/src/tools/clippy/tests/ui/incompatible_msrv.rs +++ b/src/tools/clippy/tests/ui/incompatible_msrv.rs @@ -1,8 +1,10 @@ #![warn(clippy::incompatible_msrv)] #![feature(custom_inner_attributes)] -#![feature(panic_internals)] +#![allow(stable_features)] +#![feature(strict_provenance)] // For use in test #![clippy::msrv = "1.3.0"] +use std::cell::Cell; use std::collections::HashMap; use std::collections::hash_map::Entry; use std::future::Future; @@ -13,6 +15,8 @@ fn foo() { let mut map: HashMap<&str, u32> = HashMap::new(); assert_eq!(map.entry("poneyland").key(), &"poneyland"); //~^ incompatible_msrv + //~| NOTE: `-D clippy::incompatible-msrv` implied by `-D warnings` + //~| HELP: to override `-D warnings` add `#[allow(clippy::incompatible_msrv)]` if let Entry::Vacant(v) = map.entry("poneyland") { v.into_key(); @@ -23,6 +27,18 @@ fn foo() { //~^ incompatible_msrv } +#[clippy::msrv = "1.2.0"] +static NO_BODY_BAD_MSRV: Option<Duration> = None; +//~^ incompatible_msrv + +static NO_BODY_GOOD_MSRV: Option<Duration> = None; + +#[clippy::msrv = "1.2.0"] +fn bad_type_msrv() { + let _: Option<Duration> = None; + //~^ incompatible_msrv +} + #[test] fn test() { sleep(Duration::new(1, 0)); @@ -43,21 +59,22 @@ fn core_special_treatment(p: bool) { // But still lint code calling `core` functions directly if p { - core::panicking::panic("foo"); - //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + let _ = core::iter::once_with(|| 0); + //~^ incompatible_msrv } // Lint code calling `core` from non-`core` macros macro_rules! 
my_panic { ($msg:expr) => { - core::panicking::panic($msg) - }; //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + let _ = core::iter::once_with(|| $msg); + //~^ incompatible_msrv + }; } my_panic!("foo"); // Lint even when the macro comes from `core` and calls `core` functions - assert!(core::panicking::panic("out of luck")); - //~^ ERROR: is `1.3.0` but this item is stable since `1.6.0` + assert!(core::iter::once_with(|| 0).next().is_some()); + //~^ incompatible_msrv } #[clippy::msrv = "1.26.0"] @@ -70,7 +87,85 @@ fn lang_items() { #[clippy::msrv = "1.80.0"] fn issue14212() { let _ = std::iter::repeat_n((), 5); - //~^ ERROR: is `1.80.0` but this item is stable since `1.82.0` + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.0.0"] +fn cstr_and_cstring_ok() { + let _: Option<&'static std::ffi::CStr> = None; + let _: Option<std::ffi::CString> = None; +} + +fn local_msrv_change_suggestion() { + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + + #[cfg(any(test, not(test)))] + { + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + //~| NOTE: you may want to conditionally increase the MSRV + + // Emit the additional note only once + let _ = std::iter::repeat_n((), 5); + //~^ incompatible_msrv + } +} + +#[clippy::msrv = "1.78.0"] +fn feature_enable_14425(ptr: *const u8) -> usize { + // Do not warn, because it is enabled through a feature even though + // it is stabilized only since Rust 1.84.0. + let r = ptr.addr(); + + // Warn about this which has been introduced in the same Rust version + // but is not allowed through a feature. + r.isqrt() + //~^ incompatible_msrv +} + +fn non_fn_items() { + let _ = std::io::ErrorKind::CrossesDevices; + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.87.0"] +fn msrv_non_ok_in_const() { + { + let c = Cell::new(42); + _ = c.get(); + } + const { + let c = Cell::new(42); + _ = c.get(); + //~^ incompatible_msrv + } +} + +#[clippy::msrv = "1.88.0"] +fn msrv_ok_in_const() { + { + let c = Cell::new(42); + _ = c.get(); + } + const { + let c = Cell::new(42); + _ = c.get(); + } +} + +#[clippy::msrv = "1.86.0"] +fn enum_variant_not_ok() { + let _ = std::io::ErrorKind::InvalidFilename; + //~^ incompatible_msrv + let _ = const { std::io::ErrorKind::InvalidFilename }; + //~^ incompatible_msrv +} + +#[clippy::msrv = "1.87.0"] +fn enum_variant_ok() { + let _ = std::io::ErrorKind::InvalidFilename; + let _ = const { std::io::ErrorKind::InvalidFilename }; } fn main() {} diff --git a/src/tools/clippy/tests/ui/incompatible_msrv.stderr b/src/tools/clippy/tests/ui/incompatible_msrv.stderr index 5ea2bb9cc58..e42360d296f 100644 --- a/src/tools/clippy/tests/ui/incompatible_msrv.stderr +++ b/src/tools/clippy/tests/ui/incompatible_msrv.stderr @@ -1,5 +1,5 @@ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.10.0` - --> tests/ui/incompatible_msrv.rs:14:39 + --> tests/ui/incompatible_msrv.rs:16:39 | LL | assert_eq!(map.entry("poneyland").key(), &"poneyland"); | ^^^^^ @@ -8,45 +8,107 @@ LL | assert_eq!(map.entry("poneyland").key(), &"poneyland"); = help: to override `-D warnings` add `#[allow(clippy::incompatible_msrv)]` error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.12.0` - --> tests/ui/incompatible_msrv.rs:18:11 + --> tests/ui/incompatible_msrv.rs:22:11 | LL | v.into_key(); | ^^^^^^^^^^ error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.4.0` - --> tests/ui/incompatible_msrv.rs:22:5 + --> 
tests/ui/incompatible_msrv.rs:26:5 | LL | sleep(Duration::new(1, 0)); | ^^^^^ -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:46:9 +error: current MSRV (Minimum Supported Rust Version) is `1.2.0` but this item is stable since `1.3.0` + --> tests/ui/incompatible_msrv.rs:31:33 | -LL | core::panicking::panic("foo"); - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | static NO_BODY_BAD_MSRV: Option<Duration> = None; + | ^^^^^^^^ -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:53:13 +error: current MSRV (Minimum Supported Rust Version) is `1.2.0` but this item is stable since `1.3.0` + --> tests/ui/incompatible_msrv.rs:38:19 | -LL | core::panicking::panic($msg) - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | let _: Option<Duration> = None; + | ^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:62:17 + | +LL | let _ = core::iter::once_with(|| 0); + | ^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:69:21 + | +LL | let _ = core::iter::once_with(|| $msg); + | ^^^^^^^^^^^^^^^^^^^^^ ... LL | my_panic!("foo"); | ---------------- in this macro invocation | = note: this error originates in the macro `my_panic` (in Nightly builds, run with -Z macro-backtrace for more info) -error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.6.0` - --> tests/ui/incompatible_msrv.rs:59:13 +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.43.0` + --> tests/ui/incompatible_msrv.rs:76:13 | -LL | assert!(core::panicking::panic("out of luck")); - | ^^^^^^^^^^^^^^^^^^^^^^ +LL | assert!(core::iter::once_with(|| 0).next().is_some()); + | ^^^^^^^^^^^^^^^^^^^^^ error: current MSRV (Minimum Supported Rust Version) is `1.80.0` but this item is stable since `1.82.0` - --> tests/ui/incompatible_msrv.rs:72:13 + --> tests/ui/incompatible_msrv.rs:89:13 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:100:13 | LL | let _ = std::iter::repeat_n((), 5); | ^^^^^^^^^^^^^^^^^^^ -error: aborting due to 7 previous errors +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:105:17 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + | + = note: you may want to conditionally increase the MSRV considered by Clippy using the `clippy::msrv` attribute + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.82.0` + --> tests/ui/incompatible_msrv.rs:110:17 + | +LL | let _ = std::iter::repeat_n((), 5); + | ^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.78.0` but this item is stable since `1.84.0` + --> tests/ui/incompatible_msrv.rs:123:7 + | +LL | r.isqrt() + | ^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.3.0` but this item is stable since `1.85.0` + --> tests/ui/incompatible_msrv.rs:128:13 + | +LL | let _ = std::io::ErrorKind::CrossesDevices; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is 
`1.87.0` but this item is stable in a `const` context since `1.88.0` + --> tests/ui/incompatible_msrv.rs:140:15 + | +LL | _ = c.get(); + | ^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.86.0` but this item is stable since `1.87.0` + --> tests/ui/incompatible_msrv.rs:159:13 + | +LL | let _ = std::io::ErrorKind::InvalidFilename; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: current MSRV (Minimum Supported Rust Version) is `1.86.0` but this item is stable since `1.87.0` + --> tests/ui/incompatible_msrv.rs:161:21 + | +LL | let _ = const { std::io::ErrorKind::InvalidFilename }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 17 previous errors diff --git a/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr b/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr index 80ca5daa1d5..ac1ed27a6b3 100644 --- a/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr +++ b/src/tools/clippy/tests/ui/large_enum_variant.32bit.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([i32; 8000]), LL + B(Box<[i32; 8000]>), @@ -30,7 +30,7 @@ LL | | ContainingLargeEnum(LargeEnum), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingLargeEnum(LargeEnum), LL + ContainingLargeEnum(Box<LargeEnum>), @@ -49,7 +49,7 @@ LL | | StructLikeLittle { x: i32, y: i32 }, LL | | } | |_^ the entire enum is at least 70008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]), LL + ContainingMoreThanOneField(i32, Box<[i32; 8000]>, Box<[i32; 9500]>), @@ -67,7 +67,7 @@ LL | | StructLikeLarge { x: [i32; 8000], y: i32 }, LL | | } | |_^ the entire enum is at least 32008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge { x: [i32; 8000], y: i32 }, LL + StructLikeLarge { x: Box<[i32; 8000]>, y: i32 }, @@ -85,7 +85,7 @@ LL | | StructLikeLarge2 { x: [i32; 8000] }, LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge2 { x: [i32; 8000] }, LL + StructLikeLarge2 { x: Box<[i32; 8000]> }, @@ -104,7 +104,7 @@ LL | | C([u8; 200]), LL | | } | |_^ the entire enum is at least 1256 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 1255]), LL + B(Box<[u8; 1255]>), @@ -122,7 +122,7 @@ LL | | ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; LL | | } | |_^ the 
entire enum is at least 70132 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]), LL + ContainingMoreThanOneField(Box<[i32; 8000]>, [i32; 2], Box<[i32; 9500]>, [i32; 30]), @@ -140,7 +140,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -158,7 +158,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -176,7 +176,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -199,7 +199,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum CopyableLargeEnum { | ^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:118:5 | LL | B([u64; 8000]), @@ -222,7 +222,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum ManuallyCopyLargeEnum { | ^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:124:5 | LL | B([u64; 8000]), @@ -245,7 +245,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum SomeGenericPossiblyCopyEnum<T> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:138:5 | LL | B([u64; 4000]), @@ -263,7 +263,7 @@ LL | | Large((T, [u8; 512])), LL | | } | |_^ the entire enum is at least 512 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large((T, [u8; 512])), LL + Large(Box<(T, [u8; 512])>), @@ -281,7 +281,7 @@ LL | | Small(u8), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([Foo<u64>; 64]), LL + Large(Box<[Foo<u64>; 64]>), @@ -299,7 +299,7 @@ LL | | Error(PossiblyLargeEnumWithConst<256>), LL | | } | |_^ the entire enum is at least 514 bytes | -help: consider boxing the large fields to reduce the total 
size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(PossiblyLargeEnumWithConst<256>), LL + Error(Box<PossiblyLargeEnumWithConst<256>>), @@ -317,7 +317,7 @@ LL | | Recursive(Box<WithRecursion>), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([u64; 64]), LL + Large(Box<[u64; 64]>), @@ -335,7 +335,7 @@ LL | | Error(WithRecursionAndGenerics<u64>), LL | | } | |_^ the entire enum is at least 516 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(WithRecursionAndGenerics<u64>), LL + Error(Box<WithRecursionAndGenerics<u64>>), diff --git a/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr b/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr index 559bdf2a2f5..d8199f9090f 100644 --- a/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr +++ b/src/tools/clippy/tests/ui/large_enum_variant.64bit.stderr @@ -12,7 +12,7 @@ LL | | } | = note: `-D clippy::large-enum-variant` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([i32; 8000]), LL + B(Box<[i32; 8000]>), @@ -30,7 +30,7 @@ LL | | ContainingLargeEnum(LargeEnum), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingLargeEnum(LargeEnum), LL + ContainingLargeEnum(Box<LargeEnum>), @@ -49,7 +49,7 @@ LL | | StructLikeLittle { x: i32, y: i32 }, LL | | } | |_^ the entire enum is at least 70008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField(i32, [i32; 8000], [i32; 9500]), LL + ContainingMoreThanOneField(i32, Box<[i32; 8000]>, Box<[i32; 9500]>), @@ -67,7 +67,7 @@ LL | | StructLikeLarge { x: [i32; 8000], y: i32 }, LL | | } | |_^ the entire enum is at least 32008 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge { x: [i32; 8000], y: i32 }, LL + StructLikeLarge { x: Box<[i32; 8000]>, y: i32 }, @@ -85,7 +85,7 @@ LL | | StructLikeLarge2 { x: [i32; 8000] }, LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - StructLikeLarge2 { x: [i32; 8000] }, LL + StructLikeLarge2 { x: Box<[i32; 8000]> }, @@ -104,7 +104,7 @@ LL | | C([u8; 200]), LL | | } | |_^ the entire enum is at least 1256 bytes | -help: consider boxing the large 
fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B([u8; 1255]), LL + B(Box<[u8; 1255]>), @@ -122,7 +122,7 @@ LL | | ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; LL | | } | |_^ the entire enum is at least 70132 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - ContainingMoreThanOneField([i32; 8000], [i32; 2], [i32; 9500], [i32; 30]), LL + ContainingMoreThanOneField(Box<[i32; 8000]>, [i32; 2], Box<[i32; 9500]>, [i32; 30]), @@ -140,7 +140,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32004 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -158,7 +158,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -176,7 +176,7 @@ LL | | B(Struct2), LL | | } | |_^ the entire enum is at least 32000 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - B(Struct2), LL + B(Box<Struct2>), @@ -199,7 +199,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum CopyableLargeEnum { | ^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:118:5 | LL | B([u64; 8000]), @@ -222,7 +222,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum ManuallyCopyLargeEnum { | ^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:124:5 | LL | B([u64; 8000]), @@ -245,7 +245,7 @@ note: boxing a variant would require the type no longer be `Copy` | LL | enum SomeGenericPossiblyCopyEnum<T> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum --> tests/ui/large_enum_variant.rs:138:5 | LL | B([u64; 4000]), @@ -263,7 +263,7 @@ LL | | Large((T, [u8; 512])), LL | | } | |_^ the entire enum is at least 512 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large((T, [u8; 512])), LL + Large(Box<(T, [u8; 512])>), @@ -281,7 +281,7 @@ LL | | Small(u8), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large 
fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([Foo<u64>; 64]), LL + Large(Box<[Foo<u64>; 64]>), @@ -299,7 +299,7 @@ LL | | Error(PossiblyLargeEnumWithConst<256>), LL | | } | |_^ the entire enum is at least 514 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(PossiblyLargeEnumWithConst<256>), LL + Error(Box<PossiblyLargeEnumWithConst<256>>), @@ -317,7 +317,7 @@ LL | | Recursive(Box<WithRecursion>), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Large([u64; 64]), LL + Large(Box<[u64; 64]>), @@ -335,7 +335,7 @@ LL | | Error(WithRecursionAndGenerics<u64>), LL | | } | |_^ the entire enum is at least 520 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - Error(WithRecursionAndGenerics<u64>), LL + Error(Box<WithRecursionAndGenerics<u64>>), @@ -353,7 +353,7 @@ LL | | _SmallBoi(u8), LL | | } | |_____^ the entire enum is at least 296 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - BigBoi(PublishWithBytes), LL + BigBoi(Box<PublishWithBytes>), @@ -371,7 +371,7 @@ LL | | _SmallBoi(u8), LL | | } | |_____^ the entire enum is at least 224 bytes | -help: consider boxing the large fields to reduce the total size of the enum +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum | LL - BigBoi(PublishWithVec), LL + BigBoi(Box<PublishWithVec>), diff --git a/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs b/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs new file mode 100644 index 00000000000..ff0213155b6 --- /dev/null +++ b/src/tools/clippy/tests/ui/large_enum_variant_no_std.rs @@ -0,0 +1,8 @@ +#![no_std] +#![warn(clippy::large_enum_variant)] + +enum Myenum { + //~^ ERROR: large size difference between variants + Small(u8), + Large([u8; 1024]), +} diff --git a/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr b/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr new file mode 100644 index 00000000000..4f32e3e4835 --- /dev/null +++ b/src/tools/clippy/tests/ui/large_enum_variant_no_std.stderr @@ -0,0 +1,22 @@ +error: large size difference between variants + --> tests/ui/large_enum_variant_no_std.rs:4:1 + | +LL | / enum Myenum { +LL | | +LL | | Small(u8), + | | --------- the second-largest variant contains at least 1 bytes +LL | | Large([u8; 1024]), + | | ----------------- the largest variant contains at least 1024 bytes +LL | | } + | |_^ the entire enum is at least 1025 bytes + | +help: consider boxing the large fields or introducing indirection in some other way to reduce the total size of the enum + --> tests/ui/large_enum_variant_no_std.rs:7:5 + | +LL | Large([u8; 1024]), + | ^^^^^^^^^^^^^^^^^ + = note: `-D clippy::large-enum-variant` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::large_enum_variant)]` + +error: aborting due 
to 1 previous error + diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed b/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed index 30bb549a9d6..d90e7bec027 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.fixed @@ -79,9 +79,31 @@ fn main() { f64::consts::E; b!(); + std::primitive::i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead [(0, "", i128::MAX)]; //~^ ERROR: usage of a legacy numeric constant //~| HELP: use the associated constant instead + i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + assert_eq!(0, -i32::MAX); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + i128::MAX; + //~^ ERROR: usage of a legacy numeric constant + //~| HELP: use the associated constant instead + u32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + i32::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + type Ω = i32; + Ω::MAX; + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead } #[warn(clippy::legacy_numeric_constants)] diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.rs b/src/tools/clippy/tests/ui/legacy_numeric_constants.rs index d3878199055..4a2ef3f70c2 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.rs +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.rs @@ -79,9 +79,31 @@ fn main() { f64::consts::E; b!(); + <std::primitive::i32>::max_value(); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead [(0, "", std::i128::MAX)]; //~^ ERROR: usage of a legacy numeric constant //~| HELP: use the associated constant instead + (i32::max_value()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + assert_eq!(0, -(i32::max_value())); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + (std::i128::MAX); + //~^ ERROR: usage of a legacy numeric constant + //~| HELP: use the associated constant instead + (<u32>::max_value()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + ((i32::max_value)()); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead + type Ω = i32; + Ω::max_value(); + //~^ ERROR: usage of a legacy numeric method + //~| HELP: use the associated constant instead } #[warn(clippy::legacy_numeric_constants)] diff --git a/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr b/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr index 4d69b8165a3..0b4f32e0abc 100644 --- a/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr +++ b/src/tools/clippy/tests/ui/legacy_numeric_constants.stderr @@ -72,10 +72,10 @@ LL | u32::MAX; | +++++ error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:50:10 + --> tests/ui/legacy_numeric_constants.rs:50:5 | LL | i32::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -84,10 +84,10 @@ LL + i32::MAX; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:53:9 + --> tests/ui/legacy_numeric_constants.rs:53:5 | LL | u8::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ | help: use the associated 
constant instead | @@ -96,10 +96,10 @@ LL + u8::MAX; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:56:9 + --> tests/ui/legacy_numeric_constants.rs:56:5 | LL | u8::min_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -120,10 +120,10 @@ LL + u8::MIN; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:62:27 + --> tests/ui/legacy_numeric_constants.rs:62:5 | LL | ::std::primitive::u8::min_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -132,10 +132,10 @@ LL + ::std::primitive::u8::MIN; | error: usage of a legacy numeric method - --> tests/ui/legacy_numeric_constants.rs:65:26 + --> tests/ui/legacy_numeric_constants.rs:65:5 | LL | std::primitive::i32::max_value(); - | ^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | help: use the associated constant instead | @@ -171,8 +171,20 @@ LL - let x = std::u64::MAX; LL + let x = u64::MAX; | +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:82:5 + | +LL | <std::primitive::i32>::max_value(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - <std::primitive::i32>::max_value(); +LL + std::primitive::i32::MAX; + | + error: usage of a legacy numeric constant - --> tests/ui/legacy_numeric_constants.rs:82:14 + --> tests/ui/legacy_numeric_constants.rs:85:14 | LL | [(0, "", std::i128::MAX)]; | ^^^^^^^^^^^^^^ @@ -183,8 +195,80 @@ LL - [(0, "", std::i128::MAX)]; LL + [(0, "", i128::MAX)]; | +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:88:5 + | +LL | (i32::max_value()); + | ^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (i32::max_value()); +LL + i32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:91:20 + | +LL | assert_eq!(0, -(i32::max_value())); + | ^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - assert_eq!(0, -(i32::max_value())); +LL + assert_eq!(0, -i32::MAX); + | + +error: usage of a legacy numeric constant + --> tests/ui/legacy_numeric_constants.rs:94:5 + | +LL | (std::i128::MAX); + | ^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (std::i128::MAX); +LL + i128::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:97:5 + | +LL | (<u32>::max_value()); + | ^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - (<u32>::max_value()); +LL + u32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:100:5 + | +LL | ((i32::max_value)()); + | ^^^^^^^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - ((i32::max_value)()); +LL + i32::MAX; + | + +error: usage of a legacy numeric method + --> tests/ui/legacy_numeric_constants.rs:104:5 + | +LL | Ω::max_value(); + | ^^^^^^^^^^^^^^ + | +help: use the associated constant instead + | +LL - Ω::max_value(); +LL + Ω::MAX; + | + error: usage of a legacy numeric constant - --> tests/ui/legacy_numeric_constants.rs:116:5 + --> tests/ui/legacy_numeric_constants.rs:138:5 | LL | std::u32::MAX; | ^^^^^^^^^^^^^ @@ -195,5 +279,5 @@ LL - std::u32::MAX; LL + u32::MAX; | -error: aborting due to 16 previous errors +error: aborting due to 23 previous errors diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.fixed b/src/tools/clippy/tests/ui/manual_abs_diff.fixed index f1b1278ea6d..2766942140c 
100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.fixed +++ b/src/tools/clippy/tests/ui/manual_abs_diff.fixed @@ -104,3 +104,7 @@ fn non_primitive_ty() { let (a, b) = (S(10), S(20)); let _ = if a < b { b - a } else { a - b }; } + +fn issue15254(a: &usize, b: &usize) -> usize { + b.abs_diff(*a) +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.rs b/src/tools/clippy/tests/ui/manual_abs_diff.rs index 60ef819c12d..2c408f2be37 100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.rs +++ b/src/tools/clippy/tests/ui/manual_abs_diff.rs @@ -114,3 +114,12 @@ fn non_primitive_ty() { let (a, b) = (S(10), S(20)); let _ = if a < b { b - a } else { a - b }; } + +fn issue15254(a: &usize, b: &usize) -> usize { + if a < b { + //~^ manual_abs_diff + b - a + } else { + a - b + } +} diff --git a/src/tools/clippy/tests/ui/manual_abs_diff.stderr b/src/tools/clippy/tests/ui/manual_abs_diff.stderr index c14c1dc830f..bb6d312b435 100644 --- a/src/tools/clippy/tests/ui/manual_abs_diff.stderr +++ b/src/tools/clippy/tests/ui/manual_abs_diff.stderr @@ -79,5 +79,16 @@ error: manual absolute difference pattern without using `abs_diff` LL | let _ = if a > b { (a - b) as u32 } else { (b - a) as u32 }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace with `abs_diff`: `a.abs_diff(b)` -error: aborting due to 11 previous errors +error: manual absolute difference pattern without using `abs_diff` + --> tests/ui/manual_abs_diff.rs:119:5 + | +LL | / if a < b { +LL | | +LL | | b - a +LL | | } else { +LL | | a - b +LL | | } + | |_____^ help: replace with `abs_diff`: `b.abs_diff(*a)` + +error: aborting due to 12 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr index 8cedf2c6863..221cddf069d 100644 --- a/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr +++ b/src/tools/clippy/tests/ui/manual_assert.edition2018.stderr @@ -189,5 +189,23 @@ LL - }; LL + const BAR: () = assert!(!(N == 0), ); | -error: aborting due to 10 previous errors +error: only a `panic!` in `if`-then statement + --> tests/ui/manual_assert.rs:116:5 + | +LL | / if !is_x86_feature_detected!("ssse3") { +LL | | +LL | | panic!("SSSE3 is not supported"); +LL | | } + | |_____^ + | +help: try instead + | +LL - if !is_x86_feature_detected!("ssse3") { +LL - +LL - panic!("SSSE3 is not supported"); +LL - } +LL + assert!(is_x86_feature_detected!("ssse3"), "SSSE3 is not supported"); + | + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr index 8cedf2c6863..221cddf069d 100644 --- a/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr +++ b/src/tools/clippy/tests/ui/manual_assert.edition2021.stderr @@ -189,5 +189,23 @@ LL - }; LL + const BAR: () = assert!(!(N == 0), ); | -error: aborting due to 10 previous errors +error: only a `panic!` in `if`-then statement + --> tests/ui/manual_assert.rs:116:5 + | +LL | / if !is_x86_feature_detected!("ssse3") { +LL | | +LL | | panic!("SSSE3 is not supported"); +LL | | } + | |_____^ + | +help: try instead + | +LL - if !is_x86_feature_detected!("ssse3") { +LL - +LL - panic!("SSSE3 is not supported"); +LL - } +LL + assert!(is_x86_feature_detected!("ssse3"), "SSSE3 is not supported"); + | + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/manual_assert.rs b/src/tools/clippy/tests/ui/manual_assert.rs index 46a42c3d00a..ab02bd5f5e5 100644 --- 
a/src/tools/clippy/tests/ui/manual_assert.rs +++ b/src/tools/clippy/tests/ui/manual_assert.rs @@ -105,3 +105,17 @@ fn issue12505() { }; } } + +fn issue15227(left: u64, right: u64) -> u64 { + macro_rules! is_x86_feature_detected { + ($feature:literal) => { + $feature.len() > 0 && $feature.starts_with("ss") + }; + } + + if !is_x86_feature_detected!("ssse3") { + //~^ manual_assert + panic!("SSSE3 is not supported"); + } + unsafe { todo!() } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed b/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed index 6735b99f298..03f75e725ed 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.fixed @@ -23,3 +23,81 @@ fn f(a: u64, b: u64) { fn g(a: u64, b: u64) { let _ = a % b == 0; } + +fn needs_deref(a: &u64, b: &u64) { + let _ = a.is_multiple_of(*b); //~ manual_is_multiple_of +} + +fn closures(a: u64, b: u64) { + // Do not lint, types are ambiguous at this point + let cl = |a, b| a % b == 0; + let _ = cl(a, b); + + // Do not lint, types are ambiguous at this point + let cl = |a: _, b: _| a % b == 0; + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: u64, b| a.is_multiple_of(b); //~ manual_is_multiple_of + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: &u64, b| a.is_multiple_of(b); //~ manual_is_multiple_of + let _ = cl(&a, b); + + // Type of `b` is not enough + let cl = |a, b: u64| a % b == 0; + let _ = cl(&a, b); +} + +fn any_rem<T: std::ops::Rem<Output = u32>>(a: T, b: T) { + // An arbitrary `Rem` implementation should not lint + let _ = a % b == 0; +} + +mod issue15103 { + fn foo() -> Option<u64> { + let mut n: u64 = 150_000_000; + + (2..).find(|p| { + while n.is_multiple_of(*p) { + //~^ manual_is_multiple_of + n /= p; + } + n <= 1 + }) + } + + const fn generate_primes<const N: usize>() -> [u64; N] { + let mut result = [0; N]; + if N == 0 { + return result; + } + result[0] = 2; + if N == 1 { + return result; + } + let mut idx = 1; + let mut p = 3; + while idx < N { + let mut j = 0; + while j < idx && p % result[j] != 0 { + j += 1; + } + if j == idx { + result[idx] = p; + idx += 1; + } + p += 1; + } + result + } + + fn bar() -> u32 { + let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n.is_multiple_of(*i)).sum() }; + //~^ manual_is_multiple_of + + let d = |n| (1..=n / 2).filter(|i| n % i == 0).sum(); + (1..1_000).filter(|&i| i == d(d(i)) && i != d(i)).sum() + } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.rs b/src/tools/clippy/tests/ui/manual_is_multiple_of.rs index 00b638e4fd9..7b6fa64c843 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.rs +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.rs @@ -23,3 +23,81 @@ fn f(a: u64, b: u64) { fn g(a: u64, b: u64) { let _ = a % b == 0; } + +fn needs_deref(a: &u64, b: &u64) { + let _ = a % b == 0; //~ manual_is_multiple_of +} + +fn closures(a: u64, b: u64) { + // Do not lint, types are ambiguous at this point + let cl = |a, b| a % b == 0; + let _ = cl(a, b); + + // Do not lint, types are ambiguous at this point + let cl = |a: _, b: _| a % b == 0; + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: u64, b| a % b == 0; //~ manual_is_multiple_of + let _ = cl(a, b); + + // Type of `a` is enough + let cl = |a: &u64, b| a % b == 0; //~ manual_is_multiple_of + let _ = cl(&a, b); + + // Type of `b` is not enough + let cl = |a, b: u64| a % b == 0; + let _ = cl(&a, b); +} + +fn any_rem<T: std::ops::Rem<Output = u32>>(a: T, b: T) { + // An arbitrary `Rem` 
implementation should not lint + let _ = a % b == 0; +} + +mod issue15103 { + fn foo() -> Option<u64> { + let mut n: u64 = 150_000_000; + + (2..).find(|p| { + while n % p == 0 { + //~^ manual_is_multiple_of + n /= p; + } + n <= 1 + }) + } + + const fn generate_primes<const N: usize>() -> [u64; N] { + let mut result = [0; N]; + if N == 0 { + return result; + } + result[0] = 2; + if N == 1 { + return result; + } + let mut idx = 1; + let mut p = 3; + while idx < N { + let mut j = 0; + while j < idx && p % result[j] != 0 { + j += 1; + } + if j == idx { + result[idx] = p; + idx += 1; + } + p += 1; + } + result + } + + fn bar() -> u32 { + let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n % i == 0).sum() }; + //~^ manual_is_multiple_of + + let d = |n| (1..=n / 2).filter(|i| n % i == 0).sum(); + (1..1_000).filter(|&i| i == d(d(i)) && i != d(i)).sum() + } +} diff --git a/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr b/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr index 0b1ae70c2a7..8523599ec40 100644 --- a/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr +++ b/src/tools/clippy/tests/ui/manual_is_multiple_of.stderr @@ -37,5 +37,35 @@ error: manual implementation of `.is_multiple_of()` LL | let _ = 0 < a % b; | ^^^^^^^^^ help: replace with: `!a.is_multiple_of(b)` -error: aborting due to 6 previous errors +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:28:13 + | +LL | let _ = a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(*b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:41:26 + | +LL | let cl = |a: u64, b| a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:45:27 + | +LL | let cl = |a: &u64, b| a % b == 0; + | ^^^^^^^^^^ help: replace with: `a.is_multiple_of(b)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:63:19 + | +LL | while n % p == 0 { + | ^^^^^^^^^^ help: replace with: `n.is_multiple_of(*p)` + +error: manual implementation of `.is_multiple_of()` + --> tests/ui/manual_is_multiple_of.rs:97:58 + | +LL | let d = |n: u32| -> u32 { (1..=n / 2).filter(|i| n % i == 0).sum() }; + | ^^^^^^^^^^ help: replace with: `n.is_multiple_of(*i)` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/map_identity.fixed b/src/tools/clippy/tests/ui/map_identity.fixed index 83b2dac5fc5..b82d3e6d956 100644 --- a/src/tools/clippy/tests/ui/map_identity.fixed +++ b/src/tools/clippy/tests/ui/map_identity.fixed @@ -87,3 +87,15 @@ fn issue13904() { let _ = { it }.next(); //~^ map_identity } + +// same as `issue11764`, but for arrays +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: `&[i32; 2]` becomes `[&i32; 2]` + let _ = x.iter().map(|[x, y]| [x, y]); + let _ = x.iter().map(|x| [x[0]]).map(|[x]| x); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied(); + //~^ map_identity +} diff --git a/src/tools/clippy/tests/ui/map_identity.rs b/src/tools/clippy/tests/ui/map_identity.rs index e839c551364..c295bf87270 100644 --- a/src/tools/clippy/tests/ui/map_identity.rs +++ b/src/tools/clippy/tests/ui/map_identity.rs @@ -93,3 +93,15 @@ fn issue13904() { let _ = { it }.map(|x| x).next(); //~^ map_identity } + +// same as `issue11764`, but for arrays +fn issue15198() { + let x = [[1, 2], [3, 4]]; + // don't lint: `&[i32; 2]` becomes `[&i32; 2]` + let _ = x.iter().map(|[x, y]| [x, 
y]); + let _ = x.iter().map(|x| [x[0]]).map(|[x]| x); + + // no match ergonomics for `[i32, i32]` + let _ = x.iter().copied().map(|[x, y]| [x, y]); + //~^ map_identity +} diff --git a/src/tools/clippy/tests/ui/map_identity.stderr b/src/tools/clippy/tests/ui/map_identity.stderr index 9836f3b4cc5..9b624a0dc75 100644 --- a/src/tools/clippy/tests/ui/map_identity.stderr +++ b/src/tools/clippy/tests/ui/map_identity.stderr @@ -87,5 +87,11 @@ error: unnecessary map of the identity function LL | let _ = { it }.map(|x| x).next(); | ^^^^^^^^^^^ help: remove the call to `map` -error: aborting due to 13 previous errors +error: unnecessary map of the identity function + --> tests/ui/map_identity.rs:105:30 + | +LL | let _ = x.iter().copied().map(|[x, y]| [x, y]); + | ^^^^^^^^^^^^^^^^^^^^^ help: remove the call to `map` + +error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/missing_inline.rs b/src/tools/clippy/tests/ui/missing_inline.rs index c1801005b77..223c7447975 100644 --- a/src/tools/clippy/tests/ui/missing_inline.rs +++ b/src/tools/clippy/tests/ui/missing_inline.rs @@ -80,3 +80,20 @@ impl PubFoo { // do not lint this since users cannot control the external code #[derive(Debug)] pub struct S; + +pub mod issue15301 { + #[unsafe(no_mangle)] + pub extern "C" fn call_from_c() { + println!("Just called a Rust function from C!"); + } + + #[unsafe(no_mangle)] + pub extern "Rust" fn call_from_rust() { + println!("Just called a Rust function from Rust!"); + } + + #[unsafe(no_mangle)] + pub fn call_from_rust_no_extern() { + println!("Just called a Rust function from Rust!"); + } +} diff --git a/src/tools/clippy/tests/ui/module_name_repetitions.rs b/src/tools/clippy/tests/ui/module_name_repetitions.rs index 2fde98d7927..5d16858bf85 100644 --- a/src/tools/clippy/tests/ui/module_name_repetitions.rs +++ b/src/tools/clippy/tests/ui/module_name_repetitions.rs @@ -55,3 +55,21 @@ pub mod foo { } fn main() {} + +pub mod issue14095 { + pub mod widget { + #[macro_export] + macro_rules! define_widget { + ($id:ident) => { + /* ... */ + }; + } + + #[macro_export] + macro_rules! widget_impl { + ($id:ident) => { + /* ... 
*/ + }; + } + } +} diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed index 4c1d6b1ccb5..1e8589cf39d 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.fixed +++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed @@ -13,13 +13,15 @@ use std::sync::atomic::{AtomicBool, Ordering}; pub struct MyAtomic(AtomicBool); pub struct MyPure; -#[must_use] pub fn pure(i: u8) -> u8 { +#[must_use] +pub fn pure(i: u8) -> u8 { //~^ must_use_candidate i } impl MyPure { - #[must_use] pub fn inherent_pure(&self) -> u8 { + #[must_use] + pub fn inherent_pure(&self) -> u8 { //~^ must_use_candidate 0 } @@ -51,7 +53,8 @@ pub fn with_callback<F: Fn(u32) -> bool>(f: &F) -> bool { f(0) } -#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { +#[must_use] +pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { //~^ must_use_candidate true } @@ -64,7 +67,8 @@ pub fn atomics(b: &AtomicBool) -> bool { b.load(Ordering::SeqCst) } -#[must_use] pub fn rcd(_x: Rc<u32>) -> bool { +#[must_use] +pub fn rcd(_x: Rc<u32>) -> bool { //~^ must_use_candidate true } @@ -73,7 +77,8 @@ pub fn rcmut(_x: Rc<&mut u32>) -> bool { true } -#[must_use] pub fn arcd(_x: Arc<u32>) -> bool { +#[must_use] +pub fn arcd(_x: Arc<u32>) -> bool { //~^ must_use_candidate false } diff --git a/src/tools/clippy/tests/ui/must_use_candidates.stderr b/src/tools/clippy/tests/ui/must_use_candidates.stderr index 590253d95f9..5ddbd026062 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.stderr +++ b/src/tools/clippy/tests/ui/must_use_candidates.stderr @@ -1,35 +1,64 @@ error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:16:1 + --> tests/ui/must_use_candidates.rs:16:8 | LL | pub fn pure(i: u8) -> u8 { - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn pure(i: u8) -> u8` + | ^^^^ | = note: `-D clippy::must-use-candidate` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(clippy::must_use_candidate)]` +help: add the attribute + | +LL + #[must_use] +LL | pub fn pure(i: u8) -> u8 { + | error: this method could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:22:5 + --> tests/ui/must_use_candidates.rs:22:12 | LL | pub fn inherent_pure(&self) -> u8 { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn inherent_pure(&self) -> u8` + | ^^^^^^^^^^^^^ + | +help: add the attribute + | +LL ~ #[must_use] +LL ~ pub fn inherent_pure(&self) -> u8 { + | error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:54:1 + --> tests/ui/must_use_candidates.rs:54:8 + | +LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { + | ^^^^^^^^^^^ + | +help: add the attribute | +LL + #[must_use] LL | pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool` + | error: this function could have a `#[must_use]` attribute - --> tests/ui/must_use_candidates.rs:67:1 + --> tests/ui/must_use_candidates.rs:67:8 | LL | pub fn rcd(_x: Rc<u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn rcd(_x: Rc<u32>) -> bool` + | ^^^ + | +help: add the attribute + | +LL + #[must_use] +LL | pub fn rcd(_x: Rc<u32>) -> bool { + | error: this function could have a `#[must_use]` 
attribute - --> tests/ui/must_use_candidates.rs:76:1 + --> tests/ui/must_use_candidates.rs:76:8 | LL | pub fn arcd(_x: Arc<u32>) -> bool { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn arcd(_x: Arc<u32>) -> bool` + | ^^^^ + | +help: add the attribute + | +LL + #[must_use] +LL | pub fn arcd(_x: Arc<u32>) -> bool { + | error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed index a73aff55639..a6d64d9afc1 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.fixed @@ -143,3 +143,9 @@ mod issue14734 { //~^ needless_for_each } } + +fn issue15256() { + let vec: Vec<i32> = Vec::new(); + for v in vec.iter() { println!("{v}"); } + //~^ needless_for_each +} diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.rs b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs index d92f055d3f4..7e74d2b428f 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.rs +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.rs @@ -143,3 +143,9 @@ mod issue14734 { //~^ needless_for_each } } + +fn issue15256() { + let vec: Vec<i32> = Vec::new(); + vec.iter().for_each(|v| println!("{v}")); + //~^ needless_for_each +} diff --git a/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr index f8014456097..204cfa36b02 100644 --- a/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr +++ b/src/tools/clippy/tests/ui/needless_for_each_fixable.stderr @@ -148,5 +148,11 @@ error: needless use of `for_each` LL | rows.iter().for_each(|x| do_something(x, 1u8)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for x in rows.iter() { do_something(x, 1u8); }` -error: aborting due to 10 previous errors +error: needless use of `for_each` + --> tests/ui/needless_for_each_fixable.rs:149:5 + | +LL | vec.iter().for_each(|v| println!("{v}")); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `for v in vec.iter() { println!("{v}"); }` + +error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/needless_range_loop.rs b/src/tools/clippy/tests/ui/needless_range_loop.rs index 8a1c1be289c..70cf9fa7369 100644 --- a/src/tools/clippy/tests/ui/needless_range_loop.rs +++ b/src/tools/clippy/tests/ui/needless_range_loop.rs @@ -185,3 +185,28 @@ mod issue_2496 { unimplemented!() } } + +fn needless_loop() { + use std::hint::black_box; + let x = [0; 64]; + for i in 0..64 { + let y = [0; 64]; + + black_box(x[i]); + black_box(y[i]); + } + + for i in 0..64 { + black_box(x[i]); + black_box([0; 64][i]); + } + + for i in 0..64 { + black_box(x[i]); + black_box([1, 2, 3, 4, 5, 6, 7, 8][i]); + } + + for i in 0..64 { + black_box([1, 2, 3, 4, 5, 6, 7, 8][i]); + } +} diff --git a/src/tools/clippy/tests/ui/never_loop.rs b/src/tools/clippy/tests/ui/never_loop.rs index e0f54ef899b..48d4b8ad151 100644 --- a/src/tools/clippy/tests/ui/never_loop.rs +++ b/src/tools/clippy/tests/ui/never_loop.rs @@ -466,3 +466,35 @@ fn main() { test13(); test14(); } + +fn issue15059() { + 'a: for _ in 0..1 { + //~^ never_loop + break 'a; + } + + let mut b = 1; + 'a: for i in 0..1 { + //~^ never_loop + match i { + 0 => { + b *= 2; + break 'a; + }, + x => { + b += x; + break 'a; + }, + } + } + + #[allow(clippy::unused_unit)] + for v in 0..10 { + //~^ never_loop + break; + println!("{v}"); + // This is comment and should be kept + 
println!("This is a comment"); + () + } +} diff --git a/src/tools/clippy/tests/ui/never_loop.stderr b/src/tools/clippy/tests/ui/never_loop.stderr index bc9a7ec48b4..54b463266a3 100644 --- a/src/tools/clippy/tests/ui/never_loop.stderr +++ b/src/tools/clippy/tests/ui/never_loop.stderr @@ -176,8 +176,10 @@ LL | | } | help: if you need the first element of the iterator, try writing | -LL - for v in 0..10 { -LL + if let Some(v) = (0..10).next() { +LL ~ if let Some(v) = (0..10).next() { +LL | +LL ~ +LL ~ | error: this loop never actually loops @@ -232,5 +234,68 @@ LL | | break 'inner; LL | | } | |_________^ -error: aborting due to 21 previous errors +error: this loop never actually loops + --> tests/ui/never_loop.rs:471:5 + | +LL | / 'a: for _ in 0..1 { +LL | | +LL | | break 'a; +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(_) = (0..1).next() { +LL | +LL ~ + | + +error: this loop never actually loops + --> tests/ui/never_loop.rs:477:5 + | +LL | / 'a: for i in 0..1 { +LL | | +LL | | match i { +LL | | 0 => { +... | +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(i) = (0..1).next() { +LL | +... +LL | b *= 2; +LL ~ +LL | }, +LL | x => { +LL | b += x; +LL ~ + | + +error: this loop never actually loops + --> tests/ui/never_loop.rs:492:5 + | +LL | / for v in 0..10 { +LL | | +LL | | break; +LL | | println!("{v}"); +... | +LL | | () +LL | | } + | |_____^ + | +help: if you need the first element of the iterator, try writing + | +LL ~ if let Some(v) = (0..10).next() { +LL | +LL ~ +LL ~ +LL | // This is comment and should be kept +LL ~ +LL ~ + | + +error: aborting due to 24 previous errors diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed index bcd2602edb6..0a8525a12f5 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.fixed +++ b/src/tools/clippy/tests/ui/or_fun_call.fixed @@ -283,6 +283,8 @@ mod issue8993 { let _ = Some(4).map_or_else(g, f); //~^ or_fun_call let _ = Some(4).map_or(0, f); + let _ = Some(4).map_or_else(|| "asd".to_string().len() as i32, f); + //~^ or_fun_call } } @@ -426,6 +428,8 @@ mod result_map_or { let _ = x.map_or_else(|_| g(), f); //~^ or_fun_call let _ = x.map_or(0, f); + let _ = x.map_or_else(|_| "asd".to_string().len() as i32, f); + //~^ or_fun_call } } diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs index 8d1202ebf91..b4f9b950a7f 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.rs +++ b/src/tools/clippy/tests/ui/or_fun_call.rs @@ -283,6 +283,8 @@ mod issue8993 { let _ = Some(4).map_or(g(), f); //~^ or_fun_call let _ = Some(4).map_or(0, f); + let _ = Some(4).map_or("asd".to_string().len() as i32, f); + //~^ or_fun_call } } @@ -426,6 +428,8 @@ mod result_map_or { let _ = x.map_or(g(), f); //~^ or_fun_call let _ = x.map_or(0, f); + let _ = x.map_or("asd".to_string().len() as i32, f); + //~^ or_fun_call } } diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr index 585ee2d0e19..3e4df772668 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.stderr +++ b/src/tools/clippy/tests/ui/or_fun_call.stderr @@ -154,62 +154,68 @@ error: function call inside of `map_or` LL | let _ = Some(4).map_or(g(), f); | ^^^^^^^^^^^^^^ help: try: `map_or_else(g, f)` +error: function call inside of `map_or` + --> tests/ui/or_fun_call.rs:286:25 + | +LL | let _ = Some(4).map_or("asd".to_string().len() as i32, f); + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|| "asd".to_string().len() as i32, f)` + error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:315:18 + --> tests/ui/or_fun_call.rs:317:18 | LL | with_new.unwrap_or_else(Vec::new); | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:319:28 + --> tests/ui/or_fun_call.rs:321:28 | LL | with_default_trait.unwrap_or_else(Default::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:323:27 + --> tests/ui/or_fun_call.rs:325:27 | LL | with_default_type.unwrap_or_else(u64::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:327:22 + --> tests/ui/or_fun_call.rs:329:22 | LL | real_default.unwrap_or_else(<FakeDefault as Default>::default); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:331:23 + --> tests/ui/or_fun_call.rs:333:23 | LL | map.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert_with` to construct default value - --> tests/ui/or_fun_call.rs:335:25 + --> tests/ui/or_fun_call.rs:337:25 | LL | btree.entry(42).or_insert_with(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `unwrap_or_else` to construct default value - --> tests/ui/or_fun_call.rs:339:25 + --> tests/ui/or_fun_call.rs:341:25 | LL | let _ = stringy.unwrap_or_else(String::new); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:381:17 + --> tests/ui/or_fun_call.rs:383:17 | LL | let _ = opt.unwrap_or({ f() }); // suggest `.unwrap_or_else(f)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(f)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:386:17 + --> tests/ui/or_fun_call.rs:388:17 | LL | let _ = opt.unwrap_or(f() + 1); // suggest `.unwrap_or_else(|| f() + 1)` | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| f() + 1)` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:391:17 + --> tests/ui/or_fun_call.rs:393:17 | LL | let _ = opt.unwrap_or({ | _________________^ @@ -229,52 +235,58 @@ LL ~ }); | error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:397:17 + --> tests/ui/or_fun_call.rs:399:17 | LL | let _ = opt.map_or(f() + 1, |v| v); // suggest `.map_or_else(|| f() + 1, |v| v)` | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|| f() + 1, |v| v)` error: use of `unwrap_or` to construct default value - --> tests/ui/or_fun_call.rs:402:17 + --> tests/ui/or_fun_call.rs:404:17 | LL | let _ = opt.unwrap_or({ i32::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: function call inside of `unwrap_or` - --> tests/ui/or_fun_call.rs:409:21 + --> tests/ui/or_fun_call.rs:411:21 | LL | let _ = opt_foo.unwrap_or(Foo { val: String::default() }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| Foo { val: String::default() })` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:424:19 + --> tests/ui/or_fun_call.rs:426:19 | LL | let _ = x.map_or(g(), |v| v); | ^^^^^^^^^^^^^^^^^^ help: try: 
`map_or_else(|_| g(), |v| v)` error: function call inside of `map_or` - --> tests/ui/or_fun_call.rs:426:19 + --> tests/ui/or_fun_call.rs:428:19 | LL | let _ = x.map_or(g(), f); | ^^^^^^^^^^^^^^ help: try: `map_or_else(|_| g(), f)` +error: function call inside of `map_or` + --> tests/ui/or_fun_call.rs:431:19 + | +LL | let _ = x.map_or("asd".to_string().len() as i32, f); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(|_| "asd".to_string().len() as i32, f)` + error: function call inside of `get_or_insert` - --> tests/ui/or_fun_call.rs:438:15 + --> tests/ui/or_fun_call.rs:442:15 | LL | let _ = x.get_or_insert(g()); | ^^^^^^^^^^^^^^^^^^ help: try: `get_or_insert_with(g)` error: function call inside of `and` - --> tests/ui/or_fun_call.rs:448:15 + --> tests/ui/or_fun_call.rs:452:15 | LL | let _ = x.and(g()); | ^^^^^^^^ help: try: `and_then(|_| g())` error: function call inside of `and` - --> tests/ui/or_fun_call.rs:458:15 + --> tests/ui/or_fun_call.rs:462:15 | LL | let _ = x.and(g()); | ^^^^^^^^ help: try: `and_then(|_| g())` -error: aborting due to 43 previous errors +error: aborting due to 45 previous errors diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs new file mode 100644 index 00000000000..cd27c5c74aa --- /dev/null +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/auxiliary/external.rs @@ -0,0 +1,13 @@ +//! **FAKE** external macro crate. + +#[macro_export] +macro_rules! macro_with_match { + ( $p:pat ) => { + let something = (); + + match &something { + $p => true, + _ => false, + } + }; +} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs index 49ea1d3f7a6..aa988a577df 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.rs @@ -6,6 +6,9 @@ clippy::single_match )] +//@aux-build:external.rs +use external::macro_with_match; + fn main() {} fn syntax_match() { @@ -159,3 +162,9 @@ fn macro_expansion() { let value = &Some(23); matching_macro!(value); } + +fn external_macro_expansion() { + macro_with_match! 
{ + () + }; +} diff --git a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr index cd604d604c1..636841e0a21 100644 --- a/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr +++ b/src/tools/clippy/tests/ui/pattern_type_mismatch/syntax.stderr @@ -1,5 +1,5 @@ error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:16:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:19:9 | LL | Some(_) => (), | ^^^^^^^ @@ -9,7 +9,7 @@ LL | Some(_) => (), = help: to override `-D warnings` add `#[allow(clippy::pattern_type_mismatch)]` error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:36:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:39:12 | LL | if let Some(_) = ref_value {} | ^^^^^^^ @@ -17,7 +17,7 @@ LL | if let Some(_) = ref_value {} = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:48:15 + --> tests/ui/pattern_type_mismatch/syntax.rs:51:15 | LL | while let Some(_) = ref_value { | ^^^^^^^ @@ -25,7 +25,7 @@ LL | while let Some(_) = ref_value { = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:68:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:71:9 | LL | for (_a, _b) in slice.iter() {} | ^^^^^^^^ @@ -33,7 +33,7 @@ LL | for (_a, _b) in slice.iter() {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:79:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:82:9 | LL | let (_n, _m) = ref_value; | ^^^^^^^^ @@ -41,7 +41,7 @@ LL | let (_n, _m) = ref_value; = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:89:12 + --> tests/ui/pattern_type_mismatch/syntax.rs:92:12 | LL | fn foo((_a, _b): &(i32, i32)) {} | ^^^^^^^^ @@ -49,7 +49,7 @@ LL | fn foo((_a, _b): &(i32, i32)) {} = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:104:10 + --> tests/ui/pattern_type_mismatch/syntax.rs:107:10 | LL | foo(|(_a, _b)| ()); | ^^^^^^^^ @@ -57,7 +57,7 @@ LL | foo(|(_a, _b)| ()); = help: explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:121:9 + --> tests/ui/pattern_type_mismatch/syntax.rs:124:9 | LL | Some(_) => (), | ^^^^^^^ @@ -65,7 +65,7 @@ LL | Some(_) => (), = help: use `*` to dereference the match expression or explicitly match against a `&_` pattern and adjust the enclosed variable bindings error: type of pattern does not match the expression type - --> tests/ui/pattern_type_mismatch/syntax.rs:142:17 + --> tests/ui/pattern_type_mismatch/syntax.rs:145:17 | LL | Some(_) => (), | ^^^^^^^ diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs 
b/src/tools/clippy/tests/ui/ptr_arg.rs index 578641e910d..be14e0762ff 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.rs +++ b/src/tools/clippy/tests/ui/ptr_arg.rs @@ -123,7 +123,7 @@ fn test_cow_with_ref(c: &Cow<[i32]>) {} //~^ ptr_arg fn test_cow(c: Cow<[i32]>) { - let _c = c; + let d = c; } trait Foo2 { @@ -141,36 +141,36 @@ mod issue_5644 { use std::path::PathBuf; fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } - fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {} + fn some_allowed(#[allow(clippy::ptr_arg)] v: &Vec<u32>, s: &String) {} //~^ ptr_arg struct S; impl S { fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } } trait T { fn allowed( - #[allow(clippy::ptr_arg)] _v: &Vec<u32>, - #[allow(clippy::ptr_arg)] _s: &String, - #[allow(clippy::ptr_arg)] _p: &PathBuf, - #[allow(clippy::ptr_arg)] _c: &Cow<[i32]>, - #[expect(clippy::ptr_arg)] _expect: &Cow<[i32]>, + #[allow(clippy::ptr_arg)] v: &Vec<u32>, + #[allow(clippy::ptr_arg)] s: &String, + #[allow(clippy::ptr_arg)] p: &PathBuf, + #[allow(clippy::ptr_arg)] c: &Cow<[i32]>, + #[expect(clippy::ptr_arg)] expect: &Cow<[i32]>, ) { } } @@ -182,22 +182,22 @@ mod issue6509 { fn foo_vec(vec: &Vec<u8>) { //~^ ptr_arg - let _ = vec.clone().pop(); - let _ = vec.clone().clone(); + let a = vec.clone().pop(); + let b = vec.clone().clone(); } fn foo_path(path: &PathBuf) { //~^ ptr_arg - let _ = path.clone().pop(); - let _ = path.clone().clone(); + let c = path.clone().pop(); + let d = path.clone().clone(); } - fn foo_str(str: &PathBuf) { + fn foo_str(str: &String) { //~^ ptr_arg - let _ = str.clone().pop(); - let _ = str.clone().clone(); + let e = str.clone().pop(); + let f = str.clone().clone(); } } @@ -340,8 +340,8 @@ mod issue_13308 { ToOwned::clone_into(source, destination); } - fn h1(_: &<String as Deref>::Target) {} - fn h2<T: Deref>(_: T, _: &T::Target) {} + fn h1(x: &<String as Deref>::Target) {} + fn h2<T: Deref>(x: T, y: &T::Target) {} // Other cases that are still ok to lint and ideally shouldn't regress fn good(v1: &String, v2: &String) { @@ -352,3 +352,91 @@ mod issue_13308 { h2(String::new(), v2); } } + +mod issue_13489_and_13728 { + // This is a no-lint from now on. + fn foo(_x: &Vec<i32>) { + todo!(); + } + + // But this still gives us a lint. + fn foo_used(x: &Vec<i32>) { + //~^ ptr_arg + + todo!(); + } + + // This is also a no-lint from now on. + fn foo_local(x: &Vec<i32>) { + let _y = x; + + todo!(); + } + + // But this still gives us a lint. + fn foo_local_used(x: &Vec<i32>) { + //~^ ptr_arg + + let y = x; + + todo!(); + } + + // This only lints once from now on. + fn foofoo(_x: &Vec<i32>, y: &String) { + //~^ ptr_arg + + todo!(); + } + + // And this is also a no-lint from now on. 
+ fn foofoo_local(_x: &Vec<i32>, y: &String) { + let _z = y; + + todo!(); + } +} + +mod issue_13489_and_13728_mut { + // This is a no-lint from now on. + fn bar(_x: &mut Vec<u32>) { + todo!() + } + + // But this still gives us a lint. + fn bar_used(x: &mut Vec<u32>) { + //~^ ptr_arg + + todo!() + } + + // This is also a no-lint from now on. + fn bar_local(x: &mut Vec<u32>) { + let _y = x; + + todo!() + } + + // But this still gives us a lint. + fn bar_local_used(x: &mut Vec<u32>) { + //~^ ptr_arg + + let y = x; + + todo!() + } + + // This only lints once from now on. + fn barbar(_x: &mut Vec<u32>, y: &mut String) { + //~^ ptr_arg + + todo!() + } + + // And this is also a no-lint from now on. + fn barbar_local(_x: &mut Vec<u32>, y: &mut String) { + let _z = y; + + todo!() + } +} diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr index fd9ceddfe11..87235057349 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.stderr +++ b/src/tools/clippy/tests/ui/ptr_arg.stderr @@ -127,10 +127,10 @@ LL | fn test_cow_with_ref(c: &Cow<[i32]>) {} | ^^^^^^^^^^^ help: change this to: `&[i32]` error: writing `&String` instead of `&str` involves a new object where a slice will do - --> tests/ui/ptr_arg.rs:152:66 + --> tests/ui/ptr_arg.rs:152:64 | -LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {} - | ^^^^^^^ help: change this to: `&str` +LL | fn some_allowed(#[allow(clippy::ptr_arg)] v: &Vec<u32>, s: &String) {} + | ^^^^^^^ help: change this to: `&str` error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do --> tests/ui/ptr_arg.rs:182:21 @@ -143,8 +143,8 @@ help: change this to LL ~ fn foo_vec(vec: &[u8]) { LL | LL | -LL ~ let _ = vec.to_owned().pop(); -LL ~ let _ = vec.to_owned().clone(); +LL ~ let a = vec.to_owned().pop(); +LL ~ let b = vec.to_owned().clone(); | error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do @@ -158,23 +158,23 @@ help: change this to LL ~ fn foo_path(path: &Path) { LL | LL | -LL ~ let _ = path.to_path_buf().pop(); -LL ~ let _ = path.to_path_buf().clone(); +LL ~ let c = path.to_path_buf().pop(); +LL ~ let d = path.to_path_buf().clone(); | -error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do +error: writing `&String` instead of `&str` involves a new object where a slice will do --> tests/ui/ptr_arg.rs:196:21 | -LL | fn foo_str(str: &PathBuf) { - | ^^^^^^^^ +LL | fn foo_str(str: &String) { + | ^^^^^^^ | help: change this to | -LL ~ fn foo_str(str: &Path) { +LL ~ fn foo_str(str: &str) { LL | LL | -LL ~ let _ = str.to_path_buf().pop(); -LL ~ let _ = str.to_path_buf().clone(); +LL ~ let e = str.to_owned().pop(); +LL ~ let f = str.to_owned().clone(); | error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do @@ -231,6 +231,42 @@ error: writing `&String` instead of `&str` involves a new object where a slice w LL | fn good(v1: &String, v2: &String) { | ^^^^^^^ help: change this to: `&str` +error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:363:20 + | +LL | fn foo_used(x: &Vec<i32>) { + | ^^^^^^^^^ help: change this to: `&[i32]` + +error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:377:26 + | +LL | fn foo_local_used(x: &Vec<i32>) { + | ^^^^^^^^^ help: change this to: `&[i32]` + +error: writing `&String` instead of `&str` involves a new object where a slice will do + --> 
tests/ui/ptr_arg.rs:386:33 + | +LL | fn foofoo(_x: &Vec<i32>, y: &String) { + | ^^^^^^^ help: change this to: `&str` + +error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:407:20 + | +LL | fn bar_used(x: &mut Vec<u32>) { + | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` + +error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:421:26 + | +LL | fn bar_local_used(x: &mut Vec<u32>) { + | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` + +error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do + --> tests/ui/ptr_arg.rs:430:37 + | +LL | fn barbar(_x: &mut Vec<u32>, y: &mut String) { + | ^^^^^^^^^^^ help: change this to: `&mut str` + error: eliding a lifetime that's named elsewhere is confusing --> tests/ui/ptr_arg.rs:314:36 | @@ -248,5 +284,5 @@ help: consistently use `'a` LL | fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &'a str { | ++ -error: aborting due to 27 previous errors +error: aborting due to 33 previous errors diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed index 2033f31c1ee..71fea6144e7 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.fixed +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.fixed @@ -219,3 +219,11 @@ mod null_entire_infer { //~^ ptr_as_ptr } } + +#[allow(clippy::transmute_null_to_fn)] +fn issue15283() { + unsafe { + let _: fn() = std::mem::transmute(std::ptr::null::<u8>()); + //~^ ptr_as_ptr + } +} diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.rs b/src/tools/clippy/tests/ui/ptr_as_ptr.rs index 224d09b0eb6..4d507592a1e 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.rs +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.rs @@ -219,3 +219,11 @@ mod null_entire_infer { //~^ ptr_as_ptr } } + +#[allow(clippy::transmute_null_to_fn)] +fn issue15283() { + unsafe { + let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const u8); + //~^ ptr_as_ptr + } +} diff --git a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr index 66dae8e0135..adad159bb0f 100644 --- a/src/tools/clippy/tests/ui/ptr_as_ptr.stderr +++ b/src/tools/clippy/tests/ui/ptr_as_ptr.stderr @@ -201,5 +201,11 @@ error: `as` casting between raw pointers without changing their constness LL | core::ptr::null() as _ | ^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `core::ptr::null()` -error: aborting due to 33 previous errors +error: `as` casting between raw pointers without changing their constness + --> tests/ui/ptr_as_ptr.rs:226:43 + | +LL | let _: fn() = std::mem::transmute(std::ptr::null::<()>() as *const u8); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try call directly: `std::ptr::null::<u8>()` + +error: aborting due to 34 previous errors diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.fixed b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed index ee716ef3a6a..5c6da6d5aed 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.fixed +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.fixed @@ -1,5 +1,9 @@ +#![warn(clippy::range_minus_one, clippy::range_plus_one)] #![allow(unused_parens)] #![allow(clippy::iter_with_drain)] + +use std::ops::{Index, IndexMut, Range, RangeBounds, RangeInclusive}; + fn f() -> usize { 42 } @@ -20,8 +24,6 @@ macro_rules! 
macro_minus_one { }; } -#[warn(clippy::range_plus_one)] -#[warn(clippy::range_minus_one)] fn main() { for _ in 0..2 {} for _ in 0..=2 {} @@ -45,15 +47,13 @@ fn main() { //~^ range_plus_one for _ in 0..=(1 + f()) {} + // Those are not linted, as in the general case we cannot be sure that the exact type won't be + // important. let _ = ..11 - 1; - let _ = ..11; - //~^ range_minus_one - let _ = ..11; - //~^ range_minus_one - let _ = (1..=11); - //~^ range_plus_one - let _ = ((f() + 1)..=f()); - //~^ range_plus_one + let _ = ..=11 - 1; + let _ = ..=(11 - 1); + let _ = (1..11 + 1); + let _ = (f() + 1)..(f() + 1); const ONE: usize = 1; // integer consts are linted, too @@ -65,4 +65,118 @@ fn main() { macro_plus_one!(5); macro_minus_one!(5); + + // As an instance of `Iterator` + (1..=10).for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `IntoIterator` + #[allow(clippy::useless_conversion)] + (1..=10).into_iter().for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `RangeBounds` + { + let _ = (1..=10).start_bound(); + //~^ range_plus_one + } + + // As a `SliceIndex` + let a = [10, 20, 30]; + let _ = &a[1..=1]; + //~^ range_plus_one + + // As method call argument + vec.drain(2..=3); + //~^ range_plus_one + + // As function call argument + take_arg(10..=20); + //~^ range_plus_one + + // As function call argument inside a block + take_arg({ 10..=20 }); + //~^ range_plus_one + + // Do not lint in case types are unified + take_arg(if true { 10..20 } else { 10..20 + 1 }); + + // Do not lint, as the same type is used for both parameters + take_args(10..20 + 1, 10..21); + + // Do not lint, as the range type is also used indirectly in second parameter + take_arg_and_struct(10..20 + 1, S { t: 1..2 }); + + // As target of `IndexMut` + let mut a = [10, 20, 30]; + a[0..=2][0] = 1; + //~^ range_plus_one +} + +fn take_arg<T: Iterator<Item = u32>>(_: T) {} +fn take_args<T: Iterator<Item = u32>>(_: T, _: T) {} + +struct S<T> { + t: T, +} +fn take_arg_and_struct<T: Iterator<Item = u32>>(_: T, _: S<T>) {} + +fn no_index_by_range_inclusive(a: usize) { + struct S; + + impl Index<Range<usize>> for S { + type Output = [u32]; + fn index(&self, _: Range<usize>) -> &Self::Output { + &[] + } + } + + _ = &S[0..a + 1]; +} + +fn no_index_mut_with_switched_range(a: usize) { + struct S(u32); + + impl Index<Range<usize>> for S { + type Output = u32; + fn index(&self, _: Range<usize>) -> &Self::Output { + &self.0 + } + } + + impl IndexMut<Range<usize>> for S { + fn index_mut(&mut self, _: Range<usize>) -> &mut Self::Output { + &mut self.0 + } + } + + impl Index<RangeInclusive<usize>> for S { + type Output = u32; + fn index(&self, _: RangeInclusive<usize>) -> &Self::Output { + &self.0 + } + } + + S(2)[0..a + 1] = 3; +} + +fn issue9908() { + // Simplified test case + let _ = || 0..=1; + + // Original test case + let full_length = 1024; + let range = { + // do some stuff, omit here + None + }; + + let range = range.map(|(s, t)| s..=t).unwrap_or(0..=(full_length - 1)); + + assert_eq!(range, 0..=1023); +} + +fn issue9908_2(n: usize) -> usize { + (1..n).sum() + //~^ range_minus_one } diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.rs b/src/tools/clippy/tests/ui/range_plus_minus_one.rs index f2d5ae2c150..7172da6034b 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.rs +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.rs @@ -1,5 +1,9 @@ +#![warn(clippy::range_minus_one, clippy::range_plus_one)] #![allow(unused_parens)] #![allow(clippy::iter_with_drain)] + +use std::ops::{Index, IndexMut, 
Range, RangeBounds, RangeInclusive}; + fn f() -> usize { 42 } @@ -20,8 +24,6 @@ macro_rules! macro_minus_one { }; } -#[warn(clippy::range_plus_one)] -#[warn(clippy::range_minus_one)] fn main() { for _ in 0..2 {} for _ in 0..=2 {} @@ -45,15 +47,13 @@ fn main() { //~^ range_plus_one for _ in 0..=(1 + f()) {} + // Those are not linted, as in the general case we cannot be sure that the exact type won't be + // important. let _ = ..11 - 1; let _ = ..=11 - 1; - //~^ range_minus_one let _ = ..=(11 - 1); - //~^ range_minus_one let _ = (1..11 + 1); - //~^ range_plus_one let _ = (f() + 1)..(f() + 1); - //~^ range_plus_one const ONE: usize = 1; // integer consts are linted, too @@ -65,4 +65,118 @@ fn main() { macro_plus_one!(5); macro_minus_one!(5); + + // As an instance of `Iterator` + (1..10 + 1).for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `IntoIterator` + #[allow(clippy::useless_conversion)] + (1..10 + 1).into_iter().for_each(|_| {}); + //~^ range_plus_one + + // As an instance of `RangeBounds` + { + let _ = (1..10 + 1).start_bound(); + //~^ range_plus_one + } + + // As a `SliceIndex` + let a = [10, 20, 30]; + let _ = &a[1..1 + 1]; + //~^ range_plus_one + + // As method call argument + vec.drain(2..3 + 1); + //~^ range_plus_one + + // As function call argument + take_arg(10..20 + 1); + //~^ range_plus_one + + // As function call argument inside a block + take_arg({ 10..20 + 1 }); + //~^ range_plus_one + + // Do not lint in case types are unified + take_arg(if true { 10..20 } else { 10..20 + 1 }); + + // Do not lint, as the same type is used for both parameters + take_args(10..20 + 1, 10..21); + + // Do not lint, as the range type is also used indirectly in second parameter + take_arg_and_struct(10..20 + 1, S { t: 1..2 }); + + // As target of `IndexMut` + let mut a = [10, 20, 30]; + a[0..2 + 1][0] = 1; + //~^ range_plus_one +} + +fn take_arg<T: Iterator<Item = u32>>(_: T) {} +fn take_args<T: Iterator<Item = u32>>(_: T, _: T) {} + +struct S<T> { + t: T, +} +fn take_arg_and_struct<T: Iterator<Item = u32>>(_: T, _: S<T>) {} + +fn no_index_by_range_inclusive(a: usize) { + struct S; + + impl Index<Range<usize>> for S { + type Output = [u32]; + fn index(&self, _: Range<usize>) -> &Self::Output { + &[] + } + } + + _ = &S[0..a + 1]; +} + +fn no_index_mut_with_switched_range(a: usize) { + struct S(u32); + + impl Index<Range<usize>> for S { + type Output = u32; + fn index(&self, _: Range<usize>) -> &Self::Output { + &self.0 + } + } + + impl IndexMut<Range<usize>> for S { + fn index_mut(&mut self, _: Range<usize>) -> &mut Self::Output { + &mut self.0 + } + } + + impl Index<RangeInclusive<usize>> for S { + type Output = u32; + fn index(&self, _: RangeInclusive<usize>) -> &Self::Output { + &self.0 + } + } + + S(2)[0..a + 1] = 3; +} + +fn issue9908() { + // Simplified test case + let _ = || 0..=1; + + // Original test case + let full_length = 1024; + let range = { + // do some stuff, omit here + None + }; + + let range = range.map(|(s, t)| s..=t).unwrap_or(0..=(full_length - 1)); + + assert_eq!(range, 0..=1023); +} + +fn issue9908_2(n: usize) -> usize { + (1..=n - 1).sum() + //~^ range_minus_one } diff --git a/src/tools/clippy/tests/ui/range_plus_minus_one.stderr b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr index 9b23a8b8c0b..a419d935bd6 100644 --- a/src/tools/clippy/tests/ui/range_plus_minus_one.stderr +++ b/src/tools/clippy/tests/ui/range_plus_minus_one.stderr @@ -1,5 +1,5 @@ error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:29:14 + --> 
tests/ui/range_plus_minus_one.rs:31:14 | LL | for _ in 0..3 + 1 {} | ^^^^^^^^ help: use: `0..=3` @@ -8,55 +8,85 @@ LL | for _ in 0..3 + 1 {} = help: to override `-D warnings` add `#[allow(clippy::range_plus_one)]` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:33:14 + --> tests/ui/range_plus_minus_one.rs:35:14 | LL | for _ in 0..1 + 5 {} | ^^^^^^^^ help: use: `0..=5` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:37:14 + --> tests/ui/range_plus_minus_one.rs:39:14 | LL | for _ in 1..1 + 1 {} | ^^^^^^^^ help: use: `1..=1` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:44:14 + --> tests/ui/range_plus_minus_one.rs:46:14 | LL | for _ in 0..(1 + f()) {} | ^^^^^^^^^^^^ help: use: `0..=f()` -error: an exclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:49:13 +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:60:14 | -LL | let _ = ..=11 - 1; - | ^^^^^^^^^ help: use: `..11` +LL | for _ in 1..ONE + ONE {} + | ^^^^^^^^^^^^ help: use: `1..=ONE` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:70:5 | - = note: `-D clippy::range-minus-one` implied by `-D warnings` - = help: to override `-D warnings` add `#[allow(clippy::range_minus_one)]` +LL | (1..10 + 1).for_each(|_| {}); + | ^^^^^^^^^^^ help: use: `(1..=10)` -error: an exclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:51:13 +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:75:5 | -LL | let _ = ..=(11 - 1); - | ^^^^^^^^^^^ help: use: `..11` +LL | (1..10 + 1).into_iter().for_each(|_| {}); + | ^^^^^^^^^^^ help: use: `(1..=10)` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:53:13 + --> tests/ui/range_plus_minus_one.rs:80:17 | -LL | let _ = (1..11 + 1); - | ^^^^^^^^^^^ help: use: `(1..=11)` +LL | let _ = (1..10 + 1).start_bound(); + | ^^^^^^^^^^^ help: use: `(1..=10)` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:55:13 + --> tests/ui/range_plus_minus_one.rs:86:16 | -LL | let _ = (f() + 1)..(f() + 1); - | ^^^^^^^^^^^^^^^^^^^^ help: use: `((f() + 1)..=f())` +LL | let _ = &a[1..1 + 1]; + | ^^^^^^^^ help: use: `1..=1` error: an inclusive range would be more readable - --> tests/ui/range_plus_minus_one.rs:60:14 + --> tests/ui/range_plus_minus_one.rs:90:15 | -LL | for _ in 1..ONE + ONE {} - | ^^^^^^^^^^^^ help: use: `1..=ONE` +LL | vec.drain(2..3 + 1); + | ^^^^^^^^ help: use: `2..=3` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:94:14 + | +LL | take_arg(10..20 + 1); + | ^^^^^^^^^^ help: use: `10..=20` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:98:16 + | +LL | take_arg({ 10..20 + 1 }); + | ^^^^^^^^^^ help: use: `10..=20` + +error: an inclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:112:7 + | +LL | a[0..2 + 1][0] = 1; + | ^^^^^^^^ help: use: `0..=2` + +error: an exclusive range would be more readable + --> tests/ui/range_plus_minus_one.rs:180:5 + | +LL | (1..=n - 1).sum() + | ^^^^^^^^^^^ help: use: `(1..n)` + | + = note: `-D clippy::range-minus-one` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::range_minus_one)]` -error: aborting due to 9 previous errors +error: aborting due to 14 previous errors diff --git 
a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed new file mode 100644 index 00000000000..7a9f8063096 --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.fixed @@ -0,0 +1,53 @@ +#![feature(deref_patterns)] +#![allow( + incomplete_features, + clippy::eq_op, + clippy::op_ref, + clippy::deref_addrof, + clippy::borrow_deref_ref, + clippy::needless_if +)] +#![deny(clippy::single_match_else)] + +fn string() { + if *"" == *"" {} + + if *&*&*&*"" == *"" {} + + if ***&&"" == *"" {} + + if *&*&*"" == *"" {} + + if **&&*"" == *"" {} +} + +fn int() { + if &&&1 == &&&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &&1 == &&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &&1 == &&2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &1 == &2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if &1 == &2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if 1 == 2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else + if 1 == 2 { unreachable!() } else { + // ok + } + //~^^^^^^ single_match_else +} diff --git a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs new file mode 100644 index 00000000000..ef19c7cbde2 --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.rs @@ -0,0 +1,94 @@ +#![feature(deref_patterns)] +#![allow( + incomplete_features, + clippy::eq_op, + clippy::op_ref, + clippy::deref_addrof, + clippy::borrow_deref_ref, + clippy::needless_if +)] +#![deny(clippy::single_match_else)] + +fn string() { + match *"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match *&*&*&*"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match ***&&"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match *&*&*"" { + //~^ single_match + "" => {}, + _ => {}, + } + + match **&&*"" { + //~^ single_match + "" => {}, + _ => {}, + } +} + +fn int() { + match &&&1 { + &&&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + &&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + &&2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + &2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + &2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&&1 { + 2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else + match &&1 { + 2 => unreachable!(), + _ => { + // ok + }, + } + //~^^^^^^ single_match_else +} diff --git a/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr new file mode 100644 index 00000000000..a47df55459b --- /dev/null +++ b/src/tools/clippy/tests/ui/single_match_else_deref_patterns.stderr @@ -0,0 +1,188 @@ +error: you seem to be trying to use `match` for an equality check. 
Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:13:5 + | +LL | / match *"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + = note: `-D clippy::single-match` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(clippy::single_match)]` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:19:5 + | +LL | / match *&*&*&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *&*&*&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:25:5 + | +LL | / match ***&&"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if ***&&"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:31:5 + | +LL | / match *&*&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if *&*&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:37:5 + | +LL | / match **&&*"" { +LL | | +LL | | "" => {}, +LL | | _ => {}, +LL | | } + | |_____^ help: try: `if **&&*"" == *"" {}` + | + = note: you might want to preserve the comments from inside the `match` + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:45:5 + | +LL | / match &&&1 { +LL | | &&&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +note: the lint level is defined here + --> tests/ui/single_match_else_deref_patterns.rs:10:9 + | +LL | #![deny(clippy::single_match_else)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^ +help: try + | +LL ~ if &&&1 == &&&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:52:5 + | +LL | / match &&&1 { +LL | | &&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &&1 == &&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:59:5 + | +LL | / match &&1 { +LL | | &&2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &&1 == &&2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:66:5 + | +LL | / match &&&1 { +LL | | &2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if &1 == &2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:73:5 + | +LL | / match &&1 { +LL | | &2 => unreachable!(), +LL | | _ => { +... 
| +LL | | } + | |_____^ + | +help: try + | +LL ~ if &1 == &2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:80:5 + | +LL | / match &&&1 { +LL | | 2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if 1 == 2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: you seem to be trying to use `match` for an equality check. Consider using `if` + --> tests/ui/single_match_else_deref_patterns.rs:87:5 + | +LL | / match &&1 { +LL | | 2 => unreachable!(), +LL | | _ => { +... | +LL | | } + | |_____^ + | +help: try + | +LL ~ if 1 == 2 { unreachable!() } else { +LL + // ok +LL + } + | + +error: aborting due to 12 previous errors + diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs index 14371bc203b..d0022f3b6d9 100644 --- a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs +++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.rs @@ -82,3 +82,32 @@ impl H { } fn main() {} + +mod issue15120 { + macro_rules! uns { + ($e:expr) => { + unsafe { $e } + }; + } + + #[derive(serde::Deserialize)] + struct Foo; + + impl Foo { + fn foo(&self) { + // Do not lint if `unsafe` comes from the `core::pin::pin!()` macro + std::pin::pin!(()); + } + } + + //~v unsafe_derive_deserialize + #[derive(serde::Deserialize)] + struct Bar; + + impl Bar { + fn bar(&self) { + // Lint if `unsafe` comes from another macro + _ = uns!(42); + } + } +} diff --git a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr index f2d4429f707..4b5dd6e61fc 100644 --- a/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr +++ b/src/tools/clippy/tests/ui/unsafe_derive_deserialize.stderr @@ -36,5 +36,14 @@ LL | #[derive(Deserialize)] = help: consider implementing `serde::Deserialize` manually. See https://serde.rs/impl-deserialize.html = note: this error originates in the derive macro `Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info) -error: aborting due to 4 previous errors +error: you are deriving `serde::Deserialize` on a type that has methods using `unsafe` + --> tests/ui/unsafe_derive_deserialize.rs:104:14 + | +LL | #[derive(serde::Deserialize)] + | ^^^^^^^^^^^^^^^^^^ + | + = help: consider implementing `serde::Deserialize` manually.
See https://serde.rs/impl-deserialize.html + = note: this error originates in the derive macro `serde::Deserialize` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/unused_async.rs b/src/tools/clippy/tests/ui/unused_async.rs index 433459253dd..7a0be825a2d 100644 --- a/src/tools/clippy/tests/ui/unused_async.rs +++ b/src/tools/clippy/tests/ui/unused_async.rs @@ -127,3 +127,13 @@ mod issue14704 { async fn cancel(self: Arc<Self>) {} } } + +mod issue15305 { + async fn todo_task() -> Result<(), String> { + todo!("Implement task"); + } + + async fn unimplemented_task() -> Result<(), String> { + unimplemented!("Implement task"); + } +} diff --git a/src/tools/clippy/tests/ui/unused_trait_names.fixed b/src/tools/clippy/tests/ui/unused_trait_names.fixed index 17e32ddfd9d..6abbed01bb0 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.fixed +++ b/src/tools/clippy/tests/ui/unused_trait_names.fixed @@ -200,11 +200,11 @@ fn msrv_1_33() { MyStruct.do_things(); } +// Linting inside macro expansion is no longer supported mod lint_inside_macro_expansion_bad { macro_rules! foo { () => { - use std::any::Any as _; - //~^ unused_trait_names + use std::any::Any; fn bar() { "bar".type_id(); } diff --git a/src/tools/clippy/tests/ui/unused_trait_names.rs b/src/tools/clippy/tests/ui/unused_trait_names.rs index 3cf8597e535..4a06f062dc3 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.rs +++ b/src/tools/clippy/tests/ui/unused_trait_names.rs @@ -200,11 +200,11 @@ fn msrv_1_33() { MyStruct.do_things(); } +// Linting inside macro expansion is no longer supported mod lint_inside_macro_expansion_bad { macro_rules! foo { () => { use std::any::Any; - //~^ unused_trait_names fn bar() { "bar".type_id(); } diff --git a/src/tools/clippy/tests/ui/unused_trait_names.stderr b/src/tools/clippy/tests/ui/unused_trait_names.stderr index 3183289d853..28067e17414 100644 --- a/src/tools/clippy/tests/ui/unused_trait_names.stderr +++ b/src/tools/clippy/tests/ui/unused_trait_names.stderr @@ -58,16 +58,5 @@ error: importing trait that is only used anonymously LL | use simple_trait::{MyStruct, MyTrait}; | ^^^^^^^ help: use: `MyTrait as _` -error: importing trait that is only used anonymously - --> tests/ui/unused_trait_names.rs:206:27 - | -LL | use std::any::Any; - | ^^^ help: use: `Any as _` -... -LL | foo!(); - | ------ in this macro invocation - | - = note: this error originates in the macro `foo` (in Nightly builds, run with -Z macro-backtrace for more info) - -error: aborting due to 10 previous errors +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/used_underscore_items.rs b/src/tools/clippy/tests/ui/used_underscore_items.rs index 7e8289f1406..aecdd32693c 100644 --- a/src/tools/clippy/tests/ui/used_underscore_items.rs +++ b/src/tools/clippy/tests/ui/used_underscore_items.rs @@ -62,13 +62,13 @@ fn main() { //~^ used_underscore_items } -// should not lint exteranl crate. +// should not lint external crate. // user cannot control how others name their items fn external_item_call() { let foo_struct3 = external_item::_ExternalStruct {}; foo_struct3._foo(); - external_item::_exernal_foo(); + external_item::_external_foo(); } // should not lint foreign functions. 
diff --git a/src/tools/clippy/tests/ui/useless_attribute.fixed b/src/tools/clippy/tests/ui/useless_attribute.fixed index 930bc1eaecf..be4fb55ddfb 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.fixed +++ b/src/tools/clippy/tests/ui/useless_attribute.fixed @@ -146,3 +146,15 @@ pub mod unknown_namespace { #[allow(rustc::non_glob_import_of_type_ir_inherent)] use some_module::SomeType; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/15316 +pub mod redundant_imports_issue { + macro_rules! empty { + () => {}; + } + + #[expect(redundant_imports)] + pub(crate) use empty; + + empty!(); +} diff --git a/src/tools/clippy/tests/ui/useless_attribute.rs b/src/tools/clippy/tests/ui/useless_attribute.rs index 50fafd478e5..5a1bcf97a5b 100644 --- a/src/tools/clippy/tests/ui/useless_attribute.rs +++ b/src/tools/clippy/tests/ui/useless_attribute.rs @@ -146,3 +146,15 @@ pub mod unknown_namespace { #[allow(rustc::non_glob_import_of_type_ir_inherent)] use some_module::SomeType; } + +// Regression test for https://github.com/rust-lang/rust-clippy/issues/15316 +pub mod redundant_imports_issue { + macro_rules! empty { + () => {}; + } + + #[expect(redundant_imports)] + pub(crate) use empty; + + empty!(); +} diff --git a/src/tools/clippy/triagebot.toml b/src/tools/clippy/triagebot.toml index 805baf2af6d..a62b6269a3b 100644 --- a/src/tools/clippy/triagebot.toml +++ b/src/tools/clippy/triagebot.toml @@ -54,6 +54,7 @@ contributing_url = "https://github.com/rust-lang/rust-clippy/blob/master/CONTRIB users_on_vacation = [ "matthiaskrgr", "Manishearth", + "samueltardieu", ] [assign.owners] diff --git a/src/tools/clippy/util/gh-pages/index_template.html b/src/tools/clippy/util/gh-pages/index_template.html index 6f380ec8fee..5d65ea585df 100644 --- a/src/tools/clippy/util/gh-pages/index_template.html +++ b/src/tools/clippy/util/gh-pages/index_template.html @@ -49,9 +49,7 @@ Otherwise, have a great day =^.^= <script src="theme.js"></script> {# #} <div class="container"> {# #} - <div class="page-header"> {# #} - <h1>Clippy Lints <span id="lint-count" class="badge"></span></h1> {# #} - </div> {# #} + <h1 class="page-header">Clippy Lints <span id="lint-count" class="badge"></span></h1> {# #} <noscript> {# #} <div class="alert alert-danger" role="alert"> {# #} @@ -59,143 +57,141 @@ Otherwise, have a great day =^.^= </div> {# #} </noscript> {# #} - <div> {# #} - <div class="panel panel-default" id="menu-filters"> {# #} - <div class="panel-body row"> {# #} - <div id="upper-filters" class="col-12 col-md-5"> {# #} - <div class="btn-group" id="lint-levels" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Lint levels <span class="badge">4</span> <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-levels-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('levels_filter', true)">All</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('levels_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} - <div class="btn-group" id="lint-groups" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Lint groups <span class="badge">9</span> <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-groups-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('groups_filter', true)">All</button> 
{# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="resetGroupsToDefault()">Default</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('groups_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} - <div class="btn-group" id="version-filter" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Version {#+ #} - <span id="version-filter-count" class="badge">0</span> {#+ #} - <span class="caret"></span> {# #} - </button> {# #} - <ul id="version-filter-selector" class="dropdown-menu"> {# #} - <li class="checkbox"> {# #} - <button onclick="clearVersionFilters()">Clear filters</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} - <div class="btn-group" id="lint-applicabilities" tabindex="-1"> {# #} - <button type="button" class="btn btn-default dropdown-toggle"> {# #} - Applicability {#+ #} - <span class="badge">4</span> {#+ #} - <span class="caret"></span> {# #} - </button> {# #} - <ul class="dropdown-menu" id="lint-applicabilities-selector"> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('applicabilities_filter', true)">All</button> {# #} - </li> {# #} - <li class="checkbox"> {# #} - <button onclick="toggleElements('applicabilities_filter', false)">None</button> {# #} - </li> {# #} - <li role="separator" class="divider"></li> {# #} - </ul> {# #} - </div> {# #} + <div id="menu-filters"> {# #} + <div class="panel-body row"> {# #} + <div id="upper-filters" class="col-12 col-md-5"> {# #} + <div class="btn-group" id="lint-levels" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Lint levels <span class="badge">4</span> <span class="caret"></span> {# #} + </button> {# #} + <ul class="dropdown-menu" id="lint-levels-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('levels_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('levels_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} </div> {# #} - <div class="col-12 col-md-5 search-control"> {# #} - <div class="input-group"> {# #} - <label class="input-group-addon" id="filter-label" for="search-input">Filter:</label> {# #} - <input type="text" class="form-control filter-input" placeholder="Keywords or search string (`S` or `/` to focus)" id="search-input" /> {# #} - <span class="input-group-btn"> {# #} - <button class="filter-clear btn" type="button" onclick="searchState.clearInput(event)"> {# #} - Clear {# #} - </button> {# #} - </span> {# #} - </div> {# #} + <div class="btn-group" id="lint-groups" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Lint groups <span class="badge">9</span> <span class="caret"></span> {# #} + </button> {# #} + <ul class="dropdown-menu" id="lint-groups-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('groups_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="resetGroupsToDefault()">Default</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('groups_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} </div> {# #} - <div class="col-12 col-md-2 
btn-group expansion-group"> {# #} - <button title="Collapse All" class="btn btn-default expansion-control" type="button" id="collapse-all"> {# #} - <span class="glyphicon glyphicon-collapse-up"></span> {# #} + <div class="btn-group" id="version-filter" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Version {#+ #} + <span id="version-filter-count" class="badge">0</span> {#+ #} + <span class="caret"></span> {# #} </button> {# #} - <button title="Expand All" class="btn btn-default expansion-control" type="button" id="expand-all"> {# #} - <span class="glyphicon glyphicon-collapse-down"></span> {# #} + <ul id="version-filter-selector" class="dropdown-menu"> {# #} + <li class="checkbox"> {# #} + <button onclick="clearVersionFilters()">Clear filters</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} + </div> {# #} + <div class="btn-group" id="lint-applicabilities" tabindex="-1"> {# #} + <button type="button" class="btn btn-default dropdown-toggle"> {# #} + Applicability {#+ #} + <span class="badge">4</span> {#+ #} + <span class="caret"></span> {# #} </button> {# #} + <ul class="dropdown-menu" id="lint-applicabilities-selector"> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('applicabilities_filter', true)">All</button> {# #} + </li> {# #} + <li class="checkbox"> {# #} + <button onclick="toggleElements('applicabilities_filter', false)">None</button> {# #} + </li> {# #} + <li role="separator" class="divider"></li> {# #} + </ul> {# #} + </div> {# #} + </div> {# #} + <div class="col-12 col-md-5 search-control"> {# #} + <div class="input-group"> {# #} + <label class="input-group-addon" id="filter-label" for="search-input">Filter:</label> {# #} + <input type="text" class="form-control filter-input" placeholder="Keywords or search string (`S` or `/` to focus)" id="search-input" /> {# #} + <span class="input-group-btn"> {# #} + <button class="filter-clear btn" type="button" onclick="searchState.clearInput(event)"> {# #} + Clear {# #} + </button> {# #} + </span> {# #} </div> {# #} </div> {# #} - </div> - {% for lint in lints %} - <article class="panel panel-default" id="{{lint.id}}"> {# #} - <input id="label-{{lint.id}}" type="checkbox"> {# #} - <label for="label-{{lint.id}}"> {# #} - <h2 class="lint-title"> {# #} - <div class="panel-title-name" id="lint-{{lint.id}}"> {# #} - {{lint.id +}} - <a href="#{{lint.id}}" class="anchor label label-default">¶</a> {#+ #} - <a href="" class="copy-to-clipboard anchor label label-default"> {# #} - 📋 {# #} - </a> {# #} - </div> {# #} + <div class="col-12 col-md-2 btn-group expansion-group"> {# #} + <button title="Collapse All" class="btn btn-default expansion-control" type="button" id="collapse-all"> {# #} + <span class="glyphicon glyphicon-collapse-up"></span> {# #} + </button> {# #} + <button title="Expand All" class="btn btn-default expansion-control" type="button" id="expand-all"> {# #} + <span class="glyphicon glyphicon-collapse-down"></span> {# #} + </button> {# #} + </div> {# #} + </div> {# #} + </div> + {% for lint in lints %} + <article id="{{lint.id}}"> {# #} + <input id="label-{{lint.id}}" type="checkbox"> {# #} + <label for="label-{{lint.id}}"> {# #} + <h2 class="lint-title"> {# #} + <div class="panel-title-name" id="lint-{{lint.id}}"> {# #} + {{lint.id ~}} + <a href="#{{lint.id}}" class="anchor label label-default">¶</a> {#+ #} + <a href="" class="copy-to-clipboard anchor label label-default"> {# #} + 📋 {# #} + </a> {# #} + </div> {# #} - <span 
class="label label-lint-group label-default label-group-{{lint.group}}">{{lint.group}}</span> {#+ #} + <span class="label label-default lint-group group-{{lint.group}}">{{lint.group}}</span> {#+ #} - <span class="label label-lint-level label-lint-level-{{lint.level}}">{{lint.level}}</span> {#+ #} + <span class="label lint-level level-{{lint.level}}">{{lint.level}}</span> {#+ #} - <span class="label label-doc-folding"></span> {# #} - </h2> {# #} - </label> {# #} + <span class="label doc-folding"></span> {# #} + </h2> {# #} + </label> {# #} - <div class="list-group lint-docs"> {# #} - <div class="list-group-item lint-doc-md">{{Self::markdown(lint.docs)}}</div> {# #} - <div class="lint-additional-info-container"> - {# Applicability #} - <div> {# #} - Applicability: {#+ #} - <span class="label label-default label-applicability">{{ lint.applicability_str() }}</span> {# #} - <a href="https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint_defs/enum.Applicability.html#variants">(?)</a> {# #} - </div> - {# Clippy version #} - <div> {# #} - {% if lint.group == "deprecated" %}Deprecated{% else %} Added{% endif +%} in: {#+ #} - <span class="label label-default label-version">{{lint.version}}</span> {# #} - </div> - {# Open related issues #} + <div class="lint-docs"> {# #} + <div class="lint-doc-md">{{Self::markdown(lint.docs)}}</div> {# #} + <div class="lint-additional-info"> + {# Applicability #} + <div> {# #} + Applicability: {#+ #} + <span class="label label-default applicability">{{ lint.applicability_str() }}</span> {# #} + <a href="https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint_defs/enum.Applicability.html#variants">(?)</a> {# #} + </div> + {# Clippy version #} + <div> {# #} + {% if lint.group == "deprecated" %}Deprecated{% else %} Added{% endif +%} in: {#+ #} + <span class="label label-default label-version">{{lint.version}}</span> {# #} + </div> + {# Open related issues #} + <div> {# #} + <a href="https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+{{lint.id}}">Related Issues</a> {# #} + </div> + + {# Jump to source #} + {% if let Some(id_location) = lint.id_location %} <div> {# #} - <a href="https://github.com/rust-lang/rust-clippy/issues?q=is%3Aissue+{{lint.id}}">Related Issues</a> {# #} + <a href="https://github.com/rust-lang/rust-clippy/blob/master/{{id_location}}">View Source</a> {# #} </div> - - {# Jump to source #} - {% if let Some(id_location) = lint.id_location %} - <div> {# #} - <a href="https://github.com/rust-lang/rust-clippy/blob/master/{{id_location}}">View Source</a> {# #} - </div> - {% endif %} - </div> {# #} + {% endif %} </div> {# #} - </article> - {% endfor %} - </div> {# #} + </div> {# #} + </article> + {% endfor %} </div> {# #} <a {#+ #} diff --git a/src/tools/clippy/util/gh-pages/script.js b/src/tools/clippy/util/gh-pages/script.js index ee13f1c0cd8..d3204967531 100644 --- a/src/tools/clippy/util/gh-pages/script.js +++ b/src/tools/clippy/util/gh-pages/script.js @@ -208,7 +208,6 @@ const LEVEL_FILTERS_DEFAULT = { allow: true, warn: true, deny: true, - none: true, }; const APPLICABILITIES_FILTER_DEFAULT = { Unspecified: true, @@ -250,10 +249,10 @@ window.filters = { } return { elem: elem, - group: elem.querySelector(".label-lint-group").innerText, - level: elem.querySelector(".label-lint-level").innerText, + group: elem.querySelector(".lint-group").innerText, + level: elem.querySelector(".lint-level").innerText, version: parseInt(version.split(".")[1]), - applicability: elem.querySelector(".label-applicability").innerText, + applicability: 
elem.querySelector(".applicability").innerText, filteredOut: false, searchFilteredOut: false, }; @@ -594,19 +593,19 @@ disableShortcutsButton.checked = disableShortcuts; addListeners(); highlightLazily(); -generateSettings(); -generateSearch(); -parseURLFilters(); -scrollToLintByURL(); -filters.filterLints(); -updateLintCount(); - function updateLintCount() { const allLints = filters.getAllLints().filter(lint => lint.group != "deprecated"); const totalLints = allLints.length; - + const countElement = document.getElementById("lint-count"); if (countElement) { countElement.innerText = `Total number: ${totalLints}`; } } + +generateSettings(); +generateSearch(); +parseURLFilters(); +scrollToLintByURL(); +filters.filterLints(); +updateLintCount(); diff --git a/src/tools/clippy/util/gh-pages/style.css b/src/tools/clippy/util/gh-pages/style.css index 022ea875200..66abf4598b0 100644 --- a/src/tools/clippy/util/gh-pages/style.css +++ b/src/tools/clippy/util/gh-pages/style.css @@ -30,17 +30,25 @@ blockquote { font-size: 1em; } background-color: var(--theme-hover); } -div.panel div.panel-body button { +.container > * { + margin-bottom: 20px; + border-radius: 4px; + background: var(--bg); + border: 1px solid var(--theme-popup-border); + box-shadow: 0 1px 1px rgba(0,0,0,.05); +} + +div.panel-body button { background: var(--searchbar-bg); color: var(--searchbar-fg); border-color: var(--theme-popup-border); } -div.panel div.panel-body button:hover { +div.panel-body button:hover { box-shadow: 0 0 3px var(--searchbar-shadow-color); } -div.panel div.panel-body button.open { +div.panel-body button.open { filter: brightness(90%); } @@ -48,8 +56,6 @@ div.panel div.panel-body button.open { background-color: #777; } -.panel-heading { cursor: pointer; } - .lint-title { cursor: pointer; margin-top: 0; @@ -70,8 +76,8 @@ div.panel div.panel-body button.open { .panel-title-name { flex: 1; min-width: 400px;} -.panel .panel-title-name .anchor { display: none; } -.panel:hover .panel-title-name .anchor { display: inline;} +.panel-title-name .anchor { display: none; } +article:hover .panel-title-name .anchor { display: inline;} .search-control { margin-top: 15px; @@ -111,40 +117,48 @@ div.panel div.panel-body button.open { padding-bottom: 0.3em; } -.label-lint-group { - min-width: 8em; -} -.label-lint-level { +.lint-level { min-width: 4em; } - -.label-lint-level-allow { +.level-allow { background-color: #5cb85c; } -.label-lint-level-warn { +.level-warn { background-color: #f0ad4e; } -.label-lint-level-deny { +.level-deny { background-color: #d9534f; } -.label-lint-level-none { +.level-none { background-color: #777777; opacity: 0.5; } -.label-group-deprecated { +.lint-group { + min-width: 8em; +} +.group-deprecated { opacity: 0.5; } -.label-doc-folding { +.doc-folding { color: #000; background-color: #fff; border: 1px solid var(--theme-popup-border); } -.label-doc-folding:hover { +.doc-folding:hover { background-color: #e6e6e6; } +.lint-doc-md { + position: relative; + display: block; + padding: 10px 15px; + margin-bottom: -1px; + background: 0%; + border-bottom: 1px solid var(--theme-popup-border); + border-top: 1px solid var(--theme-popup-border); +} .lint-doc-md > h3 { border-top: 1px solid var(--theme-popup-border); padding: 10px 15px; @@ -157,32 +171,32 @@ div.panel div.panel-body button.open { } @media (max-width:749px) { - .lint-additional-info-container { + .lint-additional-info { display: flex; flex-flow: column; } - .lint-additional-info-container > div + div { + .lint-additional-info > div + div { border-top: 
1px solid var(--theme-popup-border); } } @media (min-width:750px) { - .lint-additional-info-container { + .lint-additional-info { display: flex; flex-flow: row; } - .lint-additional-info-container > div + div { + .lint-additional-info > div + div { border-left: 1px solid var(--theme-popup-border); } } -.lint-additional-info-container > div { +.lint-additional-info > div { display: inline-flex; min-width: 200px; flex-grow: 1; padding: 9px 5px 5px 15px; } -.label-applicability { +.applicability { background-color: #777777; margin: auto 5px; } @@ -332,21 +346,12 @@ L4.75,12h2.5l0.5393066-2.1572876 c0.2276001-0.1062012,0.4459839-0.2269287,0.649 border: 1px solid var(--theme-popup-border); } .page-header { - border-color: var(--theme-popup-border); -} -.panel-default .panel-heading { - background: var(--theme-hover); - color: var(--fg); - border: 1px solid var(--theme-popup-border); -} -.panel-default .panel-heading:hover { - filter: brightness(90%); -} -.list-group-item { - background: 0%; - border: 1px solid var(--theme-popup-border); + border: 0; + border-bottom: 1px solid var(--theme-popup-border); + padding-bottom: 19px; + border-radius: 0; } -.panel, pre, hr { +pre, hr { background: var(--bg); border: 1px solid var(--theme-popup-border); } @@ -442,14 +447,15 @@ article > label { article > input[type="checkbox"] { display: none; } -article > input[type="checkbox"] + label .label-doc-folding::before { +article > input[type="checkbox"] + label .doc-folding::before { content: "+"; } -article > input[type="checkbox"]:checked + label .label-doc-folding::before { +article > input[type="checkbox"]:checked + label .doc-folding::before { content: "−"; } .lint-docs { display: none; + margin-bottom: 0; } article > input[type="checkbox"]:checked ~ .lint-docs { display: block; diff --git a/src/tools/compiletest/src/directives.rs b/src/tools/compiletest/src/directives.rs index 1397c87ab07..54511f4fd08 100644 --- a/src/tools/compiletest/src/directives.rs +++ b/src/tools/compiletest/src/directives.rs @@ -12,6 +12,9 @@ use tracing::*; use crate::common::{CodegenBackend, Config, Debugger, FailMode, PassMode, RunFailMode, TestMode}; use crate::debuggers::{extract_cdb_version, extract_gdb_version}; use crate::directives::auxiliary::{AuxProps, parse_and_update_aux}; +use crate::directives::directive_names::{ + KNOWN_DIRECTIVE_NAMES, KNOWN_HTMLDOCCK_DIRECTIVE_NAMES, KNOWN_JSONDOCCK_DIRECTIVE_NAMES, +}; use crate::directives::needs::CachedNeedsConditions; use crate::errors::ErrorKind; use crate::executor::{CollectedTestDesc, ShouldPanic}; @@ -20,6 +23,7 @@ use crate::util::static_regex; pub(crate) mod auxiliary; mod cfg; +mod directive_names; mod needs; #[cfg(test)] mod tests; @@ -59,9 +63,9 @@ impl EarlyProps { &mut poisoned, testfile, rdr, - &mut |DirectiveLine { raw_directive: ln, .. }| { - parse_and_update_aux(config, ln, &mut props.aux); - config.parse_and_update_revisions(testfile, ln, &mut props.revisions); + &mut |DirectiveLine { line_number, raw_directive: ln, .. }| { + parse_and_update_aux(config, ln, testfile, line_number, &mut props.aux); + config.parse_and_update_revisions(testfile, line_number, ln, &mut props.revisions); }, ); @@ -351,7 +355,7 @@ impl TestProps { &mut poisoned, testfile, file, - &mut |directive @ DirectiveLine { raw_directive: ln, .. }| { + &mut |directive @ DirectiveLine { line_number, raw_directive: ln, .. 
}| { if !directive.applies_to_test_revision(test_revision) { return; } @@ -361,17 +365,28 @@ impl TestProps { config.push_name_value_directive( ln, ERROR_PATTERN, + testfile, + line_number, &mut self.error_patterns, |r| r, ); config.push_name_value_directive( ln, REGEX_ERROR_PATTERN, + testfile, + line_number, &mut self.regex_error_patterns, |r| r, ); - config.push_name_value_directive(ln, DOC_FLAGS, &mut self.doc_flags, |r| r); + config.push_name_value_directive( + ln, + DOC_FLAGS, + testfile, + line_number, + &mut self.doc_flags, + |r| r, + ); fn split_flags(flags: &str) -> Vec<String> { // Individual flags can be single-quoted to preserve spaces; see @@ -386,7 +401,9 @@ impl TestProps { .collect::<Vec<_>>() } - if let Some(flags) = config.parse_name_value_directive(ln, COMPILE_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, COMPILE_FLAGS, testfile, line_number) + { let flags = split_flags(&flags); for flag in &flags { if flag == "--edition" || flag.starts_with("--edition=") { @@ -395,25 +412,40 @@ impl TestProps { } self.compile_flags.extend(flags); } - if config.parse_name_value_directive(ln, INCORRECT_COMPILER_FLAGS).is_some() { + if config + .parse_name_value_directive( + ln, + INCORRECT_COMPILER_FLAGS, + testfile, + line_number, + ) + .is_some() + { panic!("`compiler-flags` directive should be spelled `compile-flags`"); } - if let Some(edition) = config.parse_edition(ln) { + if let Some(edition) = config.parse_edition(ln, testfile, line_number) { // The edition is added at the start, since flags from //@compile-flags must // be passed to rustc last. self.compile_flags.insert(0, format!("--edition={}", edition.trim())); has_edition = true; } - config.parse_and_update_revisions(testfile, ln, &mut self.revisions); + config.parse_and_update_revisions( + testfile, + line_number, + ln, + &mut self.revisions, + ); - if let Some(flags) = config.parse_name_value_directive(ln, RUN_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, RUN_FLAGS, testfile, line_number) + { self.run_flags.extend(split_flags(&flags)); } if self.pp_exact.is_none() { - self.pp_exact = config.parse_pp_exact(ln, testfile); + self.pp_exact = config.parse_pp_exact(ln, testfile, line_number); } config.set_name_directive(ln, SHOULD_ICE, &mut self.should_ice); @@ -435,7 +467,9 @@ impl TestProps { ); config.set_name_directive(ln, NO_PREFER_DYNAMIC, &mut self.no_prefer_dynamic); - if let Some(m) = config.parse_name_value_directive(ln, PRETTY_MODE) { + if let Some(m) = + config.parse_name_value_directive(ln, PRETTY_MODE, testfile, line_number) + { self.pretty_mode = m; } @@ -446,35 +480,45 @@ impl TestProps { ); // Call a helper method to deal with aux-related directives. 
- parse_and_update_aux(config, ln, &mut self.aux); + parse_and_update_aux(config, ln, testfile, line_number, &mut self.aux); config.push_name_value_directive( ln, EXEC_ENV, + testfile, + line_number, &mut self.exec_env, Config::parse_env, ); config.push_name_value_directive( ln, UNSET_EXEC_ENV, + testfile, + line_number, &mut self.unset_exec_env, |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, RUSTC_ENV, + testfile, + line_number, &mut self.rustc_env, Config::parse_env, ); config.push_name_value_directive( ln, UNSET_RUSTC_ENV, + testfile, + line_number, &mut self.unset_rustc_env, |r| r.trim().to_owned(), ); config.push_name_value_directive( ln, FORBID_OUTPUT, + testfile, + line_number, &mut self.forbid_output, |r| r, ); @@ -510,7 +554,7 @@ impl TestProps { } if let Some(code) = config - .parse_name_value_directive(ln, FAILURE_STATUS) + .parse_name_value_directive(ln, FAILURE_STATUS, testfile, line_number) .and_then(|code| code.trim().parse::<i32>().ok()) { self.failure_status = Some(code); @@ -531,6 +575,8 @@ impl TestProps { config.set_name_value_directive( ln, ASSEMBLY_OUTPUT, + testfile, + line_number, &mut self.assembly_output, |r| r.trim().to_string(), ); @@ -543,7 +589,9 @@ impl TestProps { // Unlike the other `name_value_directive`s this needs to be handled manually, // because it sets a `bool` flag. - if let Some(known_bug) = config.parse_name_value_directive(ln, KNOWN_BUG) { + if let Some(known_bug) = + config.parse_name_value_directive(ln, KNOWN_BUG, testfile, line_number) + { let known_bug = known_bug.trim(); if known_bug == "unknown" || known_bug.split(',').all(|issue_ref| { @@ -571,16 +619,25 @@ impl TestProps { config.set_name_value_directive( ln, TEST_MIR_PASS, + testfile, + line_number, &mut self.mir_unit_test, |s| s.trim().to_string(), ); config.set_name_directive(ln, REMAP_SRC_BASE, &mut self.remap_src_base); - if let Some(flags) = config.parse_name_value_directive(ln, LLVM_COV_FLAGS) { + if let Some(flags) = + config.parse_name_value_directive(ln, LLVM_COV_FLAGS, testfile, line_number) + { self.llvm_cov_flags.extend(split_flags(&flags)); } - if let Some(flags) = config.parse_name_value_directive(ln, FILECHECK_FLAGS) { + if let Some(flags) = config.parse_name_value_directive( + ln, + FILECHECK_FLAGS, + testfile, + line_number, + ) { self.filecheck_flags.extend(split_flags(&flags)); } @@ -588,9 +645,12 @@ impl TestProps { self.update_add_core_stubs(ln, config); - if let Some(err_kind) = - config.parse_name_value_directive(ln, DONT_REQUIRE_ANNOTATIONS) - { + if let Some(err_kind) = config.parse_name_value_directive( + ln, + DONT_REQUIRE_ANNOTATIONS, + testfile, + line_number, + ) { self.dont_require_annotations .insert(ErrorKind::expect_from_user_str(err_kind.trim())); } @@ -769,294 +829,6 @@ fn line_directive<'line>( Some(DirectiveLine { line_number, revision, raw_directive }) } -/// This was originally generated by collecting directives from ui tests and then extracting their -/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is -/// a best-effort approximation for diagnostics. Add new directives to this list when needed. 
-const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ - // tidy-alphabetical-start - "add-core-stubs", - "assembly-output", - "aux-bin", - "aux-build", - "aux-codegen-backend", - "aux-crate", - "build-aux-docs", - "build-fail", - "build-pass", - "check-fail", - "check-pass", - "check-run-results", - "check-stdout", - "check-test-line-numbers-match", - "compile-flags", - "doc-flags", - "dont-check-compiler-stderr", - "dont-check-compiler-stdout", - "dont-check-failure-status", - "dont-require-annotations", - "edition", - "error-pattern", - "exact-llvm-major-version", - "exec-env", - "failure-status", - "filecheck-flags", - "forbid-output", - "force-host", - "ignore-16bit", - "ignore-32bit", - "ignore-64bit", - "ignore-aarch64", - "ignore-aarch64-pc-windows-msvc", - "ignore-aarch64-unknown-linux-gnu", - "ignore-aix", - "ignore-android", - "ignore-apple", - "ignore-arm", - "ignore-arm-unknown-linux-gnueabi", - "ignore-arm-unknown-linux-gnueabihf", - "ignore-arm-unknown-linux-musleabi", - "ignore-arm-unknown-linux-musleabihf", - "ignore-auxiliary", - "ignore-avr", - "ignore-backends", - "ignore-beta", - "ignore-cdb", - "ignore-compare-mode-next-solver", - "ignore-compare-mode-polonius", - "ignore-coverage-map", - "ignore-coverage-run", - "ignore-cross-compile", - "ignore-eabi", - "ignore-elf", - "ignore-emscripten", - "ignore-endian-big", - "ignore-enzyme", - "ignore-freebsd", - "ignore-fuchsia", - "ignore-gdb", - "ignore-gdb-version", - "ignore-gnu", - "ignore-haiku", - "ignore-horizon", - "ignore-i686-pc-windows-gnu", - "ignore-i686-pc-windows-msvc", - "ignore-illumos", - "ignore-ios", - "ignore-linux", - "ignore-lldb", - "ignore-llvm-version", - "ignore-loongarch32", - "ignore-loongarch64", - "ignore-macabi", - "ignore-macos", - "ignore-msp430", - "ignore-msvc", - "ignore-musl", - "ignore-netbsd", - "ignore-nightly", - "ignore-none", - "ignore-nto", - "ignore-nvptx64", - "ignore-nvptx64-nvidia-cuda", - "ignore-openbsd", - "ignore-pass", - "ignore-powerpc", - "ignore-remote", - "ignore-riscv64", - "ignore-rustc-debug-assertions", - "ignore-rustc_abi-x86-sse2", - "ignore-s390x", - "ignore-sgx", - "ignore-sparc64", - "ignore-spirv", - "ignore-stable", - "ignore-stage1", - "ignore-stage2", - "ignore-std-debug-assertions", - "ignore-test", - "ignore-thumb", - "ignore-thumbv8m.base-none-eabi", - "ignore-thumbv8m.main-none-eabi", - "ignore-tvos", - "ignore-unix", - "ignore-unknown", - "ignore-uwp", - "ignore-visionos", - "ignore-vxworks", - "ignore-wasi", - "ignore-wasm", - "ignore-wasm32", - "ignore-wasm32-bare", - "ignore-wasm64", - "ignore-watchos", - "ignore-windows", - "ignore-windows-gnu", - "ignore-windows-msvc", - "ignore-x32", - "ignore-x86", - "ignore-x86_64", - "ignore-x86_64-apple-darwin", - "ignore-x86_64-pc-windows-gnu", - "ignore-x86_64-unknown-linux-gnu", - "incremental", - "known-bug", - "llvm-cov-flags", - "max-llvm-major-version", - "min-cdb-version", - "min-gdb-version", - "min-lldb-version", - "min-llvm-version", - "min-system-llvm-version", - "needs-asm-support", - "needs-backends", - "needs-crate-type", - "needs-deterministic-layouts", - "needs-dlltool", - "needs-dynamic-linking", - "needs-enzyme", - "needs-force-clang-based-tests", - "needs-git-hash", - "needs-llvm-components", - "needs-llvm-zstd", - "needs-profiler-runtime", - "needs-relocation-model-pic", - "needs-run-enabled", - "needs-rust-lld", - "needs-rustc-debug-assertions", - "needs-sanitizer-address", - "needs-sanitizer-cfi", - "needs-sanitizer-dataflow", - "needs-sanitizer-hwaddress", - "needs-sanitizer-kcfi", - 
"needs-sanitizer-leak", - "needs-sanitizer-memory", - "needs-sanitizer-memtag", - "needs-sanitizer-safestack", - "needs-sanitizer-shadow-call-stack", - "needs-sanitizer-support", - "needs-sanitizer-thread", - "needs-std-debug-assertions", - "needs-subprocess", - "needs-symlink", - "needs-target-has-atomic", - "needs-target-std", - "needs-threads", - "needs-unwind", - "needs-wasmtime", - "needs-xray", - "no-auto-check-cfg", - "no-prefer-dynamic", - "normalize-stderr", - "normalize-stderr-32bit", - "normalize-stderr-64bit", - "normalize-stdout", - "only-16bit", - "only-32bit", - "only-64bit", - "only-aarch64", - "only-aarch64-apple-darwin", - "only-aarch64-unknown-linux-gnu", - "only-apple", - "only-arm", - "only-avr", - "only-beta", - "only-bpf", - "only-cdb", - "only-dist", - "only-elf", - "only-emscripten", - "only-gnu", - "only-i686-pc-windows-gnu", - "only-i686-pc-windows-msvc", - "only-i686-unknown-linux-gnu", - "only-ios", - "only-linux", - "only-loongarch32", - "only-loongarch64", - "only-loongarch64-unknown-linux-gnu", - "only-macos", - "only-mips", - "only-mips64", - "only-msp430", - "only-msvc", - "only-musl", - "only-nightly", - "only-nvptx64", - "only-powerpc", - "only-riscv64", - "only-rustc_abi-x86-sse2", - "only-s390x", - "only-sparc", - "only-sparc64", - "only-stable", - "only-thumb", - "only-tvos", - "only-unix", - "only-visionos", - "only-wasm32", - "only-wasm32-bare", - "only-wasm32-wasip1", - "only-watchos", - "only-windows", - "only-windows-gnu", - "only-windows-msvc", - "only-x86", - "only-x86_64", - "only-x86_64-apple-darwin", - "only-x86_64-fortanix-unknown-sgx", - "only-x86_64-pc-windows-gnu", - "only-x86_64-pc-windows-msvc", - "only-x86_64-unknown-linux-gnu", - "pp-exact", - "pretty-compare-only", - "pretty-mode", - "proc-macro", - "reference", - "regex-error-pattern", - "remap-src-base", - "revisions", - "run-crash", - "run-fail", - "run-fail-or-crash", - "run-flags", - "run-pass", - "run-rustfix", - "rustc-env", - "rustfix-only-machine-applicable", - "should-fail", - "should-ice", - "stderr-per-bitwidth", - "test-mir-pass", - "unique-doc-out-dir", - "unset-exec-env", - "unset-rustc-env", - // Used by the tidy check `unknown_revision`. - "unused-revision-names", - // tidy-alphabetical-end -]; - -const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[ - "count", - "!count", - "files", - "!files", - "has", - "!has", - "has-dir", - "!has-dir", - "hasraw", - "!hasraw", - "matches", - "!matches", - "matchesraw", - "!matchesraw", - "snapshot", - "!snapshot", -]; - -const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] = - &["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"]; - /// The (partly) broken-down contents of a line containing a test directive, /// which [`iter_directives`] passes to its callback function. 
/// @@ -1204,6 +976,7 @@ impl Config { fn parse_and_update_revisions( &self, testfile: &Utf8Path, + line_number: usize, line: &str, existing: &mut Vec<String>, ) { @@ -1217,7 +990,8 @@ impl Config { const FILECHECK_FORBIDDEN_REVISION_NAMES: [&str; 9] = ["CHECK", "COM", "NEXT", "SAME", "EMPTY", "NOT", "COUNT", "DAG", "LABEL"]; - if let Some(raw) = self.parse_name_value_directive(line, "revisions") { + if let Some(raw) = self.parse_name_value_directive(line, "revisions", testfile, line_number) + { if self.mode == TestMode::RunMake { panic!("`run-make` tests do not support revisions: {}", testfile); } @@ -1262,8 +1036,13 @@ impl Config { (name.to_owned(), value.to_owned()) } - fn parse_pp_exact(&self, line: &str, testfile: &Utf8Path) -> Option<Utf8PathBuf> { - if let Some(s) = self.parse_name_value_directive(line, "pp-exact") { + fn parse_pp_exact( + &self, + line: &str, + testfile: &Utf8Path, + line_number: usize, + ) -> Option<Utf8PathBuf> { + if let Some(s) = self.parse_name_value_directive(line, "pp-exact", testfile, line_number) { Some(Utf8PathBuf::from(&s)) } else if self.parse_name_directive(line, "pp-exact") { testfile.file_name().map(Utf8PathBuf::from) @@ -1304,19 +1083,31 @@ impl Config { line.starts_with("no-") && self.parse_name_directive(&line[3..], directive) } - pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> { + pub fn parse_name_value_directive( + &self, + line: &str, + directive: &str, + testfile: &Utf8Path, + line_number: usize, + ) -> Option<String> { let colon = directive.len(); if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') { let value = line[(colon + 1)..].to_owned(); debug!("{}: {}", directive, value); - Some(expand_variables(value, self)) + let value = expand_variables(value, self); + if value.is_empty() { + error!("{testfile}:{line_number}: empty value for directive `{directive}`"); + help!("expected syntax is: `{directive}: value`"); + panic!("empty directive value detected"); + } + Some(value) } else { None } } - fn parse_edition(&self, line: &str) -> Option<String> { - self.parse_name_value_directive(line, "edition") + fn parse_edition(&self, line: &str, testfile: &Utf8Path, line_number: usize) -> Option<String> { + self.parse_name_value_directive(line, "edition", testfile, line_number) } fn set_name_directive(&self, line: &str, directive: &str, value: &mut bool) { @@ -1338,11 +1129,14 @@ impl Config { &self, line: &str, directive: &str, + testfile: &Utf8Path, + line_number: usize, value: &mut Option<T>, parse: impl FnOnce(String) -> T, ) { if value.is_none() { - *value = self.parse_name_value_directive(line, directive).map(parse); + *value = + self.parse_name_value_directive(line, directive, testfile, line_number).map(parse); } } @@ -1350,10 +1144,14 @@ impl Config { &self, line: &str, directive: &str, + testfile: &Utf8Path, + line_number: usize, values: &mut Vec<T>, parse: impl FnOnce(String) -> T, ) { - if let Some(value) = self.parse_name_value_directive(line, directive).map(parse) { + if let Some(value) = + self.parse_name_value_directive(line, directive, testfile, line_number).map(parse) + { values.push(value); } } @@ -1670,9 +1468,9 @@ pub(crate) fn make_test_description<R: Read>( decision!(cfg::handle_ignore(config, ln)); decision!(cfg::handle_only(config, ln)); decision!(needs::handle_needs(&cache.needs, config, ln)); - decision!(ignore_llvm(config, path, ln)); - decision!(ignore_backends(config, path, ln)); - decision!(needs_backends(config, path, ln)); + decision!(ignore_llvm(config, 
path, ln, line_number)); + decision!(ignore_backends(config, path, ln, line_number)); + decision!(needs_backends(config, path, ln, line_number)); decision!(ignore_cdb(config, ln)); decision!(ignore_gdb(config, ln)); decision!(ignore_lldb(config, ln)); @@ -1799,8 +1597,15 @@ fn ignore_lldb(config: &Config, line: &str) -> IgnoreDecision { IgnoreDecision::Continue } -fn ignore_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { - if let Some(backends_to_ignore) = config.parse_name_value_directive(line, "ignore-backends") { +fn ignore_backends( + config: &Config, + path: &Utf8Path, + line: &str, + line_number: usize, +) -> IgnoreDecision { + if let Some(backends_to_ignore) = + config.parse_name_value_directive(line, "ignore-backends", path, line_number) + { for backend in backends_to_ignore.split_whitespace().map(|backend| { match CodegenBackend::try_from(backend) { Ok(backend) => backend, @@ -1819,8 +1624,15 @@ fn ignore_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecisi IgnoreDecision::Continue } -fn needs_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { - if let Some(needed_backends) = config.parse_name_value_directive(line, "needs-backends") { +fn needs_backends( + config: &Config, + path: &Utf8Path, + line: &str, + line_number: usize, +) -> IgnoreDecision { + if let Some(needed_backends) = + config.parse_name_value_directive(line, "needs-backends", path, line_number) + { if !needed_backends .split_whitespace() .map(|backend| match CodegenBackend::try_from(backend) { @@ -1842,9 +1654,9 @@ fn needs_backends(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecisio IgnoreDecision::Continue } -fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { +fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str, line_number: usize) -> IgnoreDecision { if let Some(needed_components) = - config.parse_name_value_directive(line, "needs-llvm-components") + config.parse_name_value_directive(line, "needs-llvm-components", path, line_number) { let components: HashSet<_> = config.llvm_components.split_whitespace().collect(); if let Some(missing_component) = needed_components @@ -1865,7 +1677,9 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { if let Some(actual_version) = &config.llvm_version { // Note that these `min` versions will check for not just major versions. - if let Some(version_string) = config.parse_name_value_directive(line, "min-llvm-version") { + if let Some(version_string) = + config.parse_name_value_directive(line, "min-llvm-version", path, line_number) + { let min_version = extract_llvm_version(&version_string); // Ignore if actual version is smaller than the minimum required version. if *actual_version < min_version { @@ -1876,7 +1690,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_string) = - config.parse_name_value_directive(line, "max-llvm-major-version") + config.parse_name_value_directive(line, "max-llvm-major-version", path, line_number) { let max_version = extract_llvm_version(&version_string); // Ignore if actual major version is larger than the maximum required major version. 
@@ -1890,7 +1704,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_string) = - config.parse_name_value_directive(line, "min-system-llvm-version") + config.parse_name_value_directive(line, "min-system-llvm-version", path, line_number) { let min_version = extract_llvm_version(&version_string); // Ignore if using system LLVM and actual version @@ -1903,7 +1717,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { }; } } else if let Some(version_range) = - config.parse_name_value_directive(line, "ignore-llvm-version") + config.parse_name_value_directive(line, "ignore-llvm-version", path, line_number) { // Syntax is: "ignore-llvm-version: <version1> [- <version2>]" let (v_min, v_max) = @@ -1929,7 +1743,7 @@ fn ignore_llvm(config: &Config, path: &Utf8Path, line: &str) -> IgnoreDecision { } } } else if let Some(version_string) = - config.parse_name_value_directive(line, "exact-llvm-major-version") + config.parse_name_value_directive(line, "exact-llvm-major-version", path, line_number) { // Syntax is "exact-llvm-major-version: <version>" let version = extract_llvm_version(&version_string); diff --git a/src/tools/compiletest/src/directives/auxiliary.rs b/src/tools/compiletest/src/directives/auxiliary.rs index cdb75f6ffa9..7c1ed2e7006 100644 --- a/src/tools/compiletest/src/directives/auxiliary.rs +++ b/src/tools/compiletest/src/directives/auxiliary.rs @@ -3,6 +3,8 @@ use std::iter; +use camino::Utf8Path; + use super::directives::{AUX_BIN, AUX_BUILD, AUX_CODEGEN_BACKEND, AUX_CRATE, PROC_MACRO}; use crate::common::Config; @@ -41,17 +43,42 @@ impl AuxProps { /// If the given test directive line contains an `aux-*` directive, parse it /// and update [`AuxProps`] accordingly. 
-pub(super) fn parse_and_update_aux(config: &Config, ln: &str, aux: &mut AuxProps) { +pub(super) fn parse_and_update_aux( + config: &Config, + ln: &str, + testfile: &Utf8Path, + line_number: usize, + aux: &mut AuxProps, +) { if !(ln.starts_with("aux-") || ln.starts_with("proc-macro")) { return; } - config.push_name_value_directive(ln, AUX_BUILD, &mut aux.builds, |r| r.trim().to_string()); - config.push_name_value_directive(ln, AUX_BIN, &mut aux.bins, |r| r.trim().to_string()); - config.push_name_value_directive(ln, AUX_CRATE, &mut aux.crates, parse_aux_crate); - config - .push_name_value_directive(ln, PROC_MACRO, &mut aux.proc_macros, |r| r.trim().to_string()); - if let Some(r) = config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND) { + config.push_name_value_directive(ln, AUX_BUILD, testfile, line_number, &mut aux.builds, |r| { + r.trim().to_string() + }); + config.push_name_value_directive(ln, AUX_BIN, testfile, line_number, &mut aux.bins, |r| { + r.trim().to_string() + }); + config.push_name_value_directive( + ln, + AUX_CRATE, + testfile, + line_number, + &mut aux.crates, + parse_aux_crate, + ); + config.push_name_value_directive( + ln, + PROC_MACRO, + testfile, + line_number, + &mut aux.proc_macros, + |r| r.trim().to_string(), + ); + if let Some(r) = + config.parse_name_value_directive(ln, AUX_CODEGEN_BACKEND, testfile, line_number) + { aux.codegen_backend = Some(r.trim().to_owned()); } } diff --git a/src/tools/compiletest/src/directives/cfg.rs b/src/tools/compiletest/src/directives/cfg.rs index 35f6a9e1644..802a1d63d1f 100644 --- a/src/tools/compiletest/src/directives/cfg.rs +++ b/src/tools/compiletest/src/directives/cfg.rs @@ -285,6 +285,11 @@ fn parse_cfg_name_directive<'a>( if name == "gdb-version" { outcome = MatchOutcome::External; } + + // Don't error out for ignore-backends,as it is handled elsewhere. + if name == "backends" { + outcome = MatchOutcome::External; + } } ParsedNameDirective { diff --git a/src/tools/compiletest/src/directives/directive_names.rs b/src/tools/compiletest/src/directives/directive_names.rs new file mode 100644 index 00000000000..7fc76a42e0c --- /dev/null +++ b/src/tools/compiletest/src/directives/directive_names.rs @@ -0,0 +1,289 @@ +/// This was originally generated by collecting directives from ui tests and then extracting their +/// directive names. This is **not** an exhaustive list of all possible directives. Instead, this is +/// a best-effort approximation for diagnostics. Add new directives to this list when needed. 
+pub(crate) const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ + // tidy-alphabetical-start + "add-core-stubs", + "assembly-output", + "aux-bin", + "aux-build", + "aux-codegen-backend", + "aux-crate", + "build-aux-docs", + "build-fail", + "build-pass", + "check-fail", + "check-pass", + "check-run-results", + "check-stdout", + "check-test-line-numbers-match", + "compile-flags", + "doc-flags", + "dont-check-compiler-stderr", + "dont-check-compiler-stdout", + "dont-check-failure-status", + "dont-require-annotations", + "edition", + "error-pattern", + "exact-llvm-major-version", + "exec-env", + "failure-status", + "filecheck-flags", + "forbid-output", + "force-host", + "ignore-16bit", + "ignore-32bit", + "ignore-64bit", + "ignore-aarch64", + "ignore-aarch64-pc-windows-msvc", + "ignore-aarch64-unknown-linux-gnu", + "ignore-aix", + "ignore-android", + "ignore-apple", + "ignore-arm", + "ignore-arm-unknown-linux-gnueabi", + "ignore-arm-unknown-linux-gnueabihf", + "ignore-arm-unknown-linux-musleabi", + "ignore-arm-unknown-linux-musleabihf", + "ignore-auxiliary", + "ignore-avr", + "ignore-backends", + "ignore-beta", + "ignore-cdb", + "ignore-compare-mode-next-solver", + "ignore-compare-mode-polonius", + "ignore-coverage-map", + "ignore-coverage-run", + "ignore-cross-compile", + "ignore-eabi", + "ignore-elf", + "ignore-emscripten", + "ignore-endian-big", + "ignore-enzyme", + "ignore-freebsd", + "ignore-fuchsia", + "ignore-gdb", + "ignore-gdb-version", + "ignore-gnu", + "ignore-haiku", + "ignore-horizon", + "ignore-i686-pc-windows-gnu", + "ignore-i686-pc-windows-msvc", + "ignore-illumos", + "ignore-ios", + "ignore-linux", + "ignore-lldb", + "ignore-llvm-version", + "ignore-loongarch32", + "ignore-loongarch64", + "ignore-macabi", + "ignore-macos", + "ignore-msp430", + "ignore-msvc", + "ignore-musl", + "ignore-netbsd", + "ignore-nightly", + "ignore-none", + "ignore-nto", + "ignore-nvptx64", + "ignore-nvptx64-nvidia-cuda", + "ignore-openbsd", + "ignore-pass", + "ignore-powerpc", + "ignore-powerpc64", + "ignore-remote", + "ignore-riscv64", + "ignore-rustc-debug-assertions", + "ignore-rustc_abi-x86-sse2", + "ignore-s390x", + "ignore-sgx", + "ignore-sparc64", + "ignore-spirv", + "ignore-stable", + "ignore-stage1", + "ignore-stage2", + "ignore-std-debug-assertions", + "ignore-test", + "ignore-thumb", + "ignore-thumbv8m.base-none-eabi", + "ignore-thumbv8m.main-none-eabi", + "ignore-tvos", + "ignore-unix", + "ignore-unknown", + "ignore-uwp", + "ignore-visionos", + "ignore-vxworks", + "ignore-wasi", + "ignore-wasm", + "ignore-wasm32", + "ignore-wasm32-bare", + "ignore-wasm64", + "ignore-watchos", + "ignore-windows", + "ignore-windows-gnu", + "ignore-windows-msvc", + "ignore-x32", + "ignore-x86", + "ignore-x86_64", + "ignore-x86_64-apple-darwin", + "ignore-x86_64-pc-windows-gnu", + "ignore-x86_64-unknown-linux-gnu", + "incremental", + "known-bug", + "llvm-cov-flags", + "max-llvm-major-version", + "min-cdb-version", + "min-gdb-version", + "min-lldb-version", + "min-llvm-version", + "min-system-llvm-version", + "needs-asm-support", + "needs-backends", + "needs-crate-type", + "needs-deterministic-layouts", + "needs-dlltool", + "needs-dynamic-linking", + "needs-enzyme", + "needs-force-clang-based-tests", + "needs-git-hash", + "needs-llvm-components", + "needs-llvm-zstd", + "needs-profiler-runtime", + "needs-relocation-model-pic", + "needs-run-enabled", + "needs-rust-lld", + "needs-rustc-debug-assertions", + "needs-sanitizer-address", + "needs-sanitizer-cfi", + "needs-sanitizer-dataflow", + "needs-sanitizer-hwaddress", + 
"needs-sanitizer-kcfi", + "needs-sanitizer-leak", + "needs-sanitizer-memory", + "needs-sanitizer-memtag", + "needs-sanitizer-safestack", + "needs-sanitizer-shadow-call-stack", + "needs-sanitizer-support", + "needs-sanitizer-thread", + "needs-std-debug-assertions", + "needs-subprocess", + "needs-symlink", + "needs-target-has-atomic", + "needs-target-std", + "needs-threads", + "needs-unwind", + "needs-wasmtime", + "needs-xray", + "no-auto-check-cfg", + "no-prefer-dynamic", + "normalize-stderr", + "normalize-stderr-32bit", + "normalize-stderr-64bit", + "normalize-stdout", + "only-16bit", + "only-32bit", + "only-64bit", + "only-aarch64", + "only-aarch64-apple-darwin", + "only-aarch64-unknown-linux-gnu", + "only-apple", + "only-arm", + "only-avr", + "only-beta", + "only-bpf", + "only-cdb", + "only-dist", + "only-elf", + "only-emscripten", + "only-gnu", + "only-i686-pc-windows-gnu", + "only-i686-pc-windows-msvc", + "only-i686-unknown-linux-gnu", + "only-ios", + "only-linux", + "only-loongarch32", + "only-loongarch64", + "only-loongarch64-unknown-linux-gnu", + "only-macos", + "only-mips", + "only-mips64", + "only-msp430", + "only-msvc", + "only-musl", + "only-nightly", + "only-nvptx64", + "only-powerpc", + "only-riscv64", + "only-rustc_abi-x86-sse2", + "only-s390x", + "only-sparc", + "only-sparc64", + "only-stable", + "only-thumb", + "only-tvos", + "only-uefi", + "only-unix", + "only-visionos", + "only-wasm32", + "only-wasm32-bare", + "only-wasm32-wasip1", + "only-watchos", + "only-windows", + "only-windows-gnu", + "only-windows-msvc", + "only-x86", + "only-x86_64", + "only-x86_64-apple-darwin", + "only-x86_64-fortanix-unknown-sgx", + "only-x86_64-pc-windows-gnu", + "only-x86_64-pc-windows-msvc", + "only-x86_64-unknown-linux-gnu", + "pp-exact", + "pretty-compare-only", + "pretty-mode", + "proc-macro", + "reference", + "regex-error-pattern", + "remap-src-base", + "revisions", + "run-crash", + "run-fail", + "run-fail-or-crash", + "run-flags", + "run-pass", + "run-rustfix", + "rustc-env", + "rustfix-only-machine-applicable", + "should-fail", + "should-ice", + "stderr-per-bitwidth", + "test-mir-pass", + "unique-doc-out-dir", + "unset-exec-env", + "unset-rustc-env", + // Used by the tidy check `unknown_revision`. 
+ "unused-revision-names", + // tidy-alphabetical-end +]; + +pub(crate) const KNOWN_HTMLDOCCK_DIRECTIVE_NAMES: &[&str] = &[ + "count", + "!count", + "files", + "!files", + "has", + "!has", + "has-dir", + "!has-dir", + "hasraw", + "!hasraw", + "matches", + "!matches", + "matchesraw", + "!matchesraw", + "snapshot", + "!snapshot", +]; + +pub(crate) const KNOWN_JSONDOCCK_DIRECTIVE_NAMES: &[&str] = + &["count", "!count", "has", "!has", "is", "!is", "ismany", "!ismany", "set", "!set"]; diff --git a/src/tools/compiletest/src/runtest/debugger.rs b/src/tools/compiletest/src/runtest/debugger.rs index a4103c5b4a9..ba824124e87 100644 --- a/src/tools/compiletest/src/runtest/debugger.rs +++ b/src/tools/compiletest/src/runtest/debugger.rs @@ -47,10 +47,14 @@ impl DebuggerCommands { continue; }; - if let Some(command) = config.parse_name_value_directive(&line, &command_directive) { + if let Some(command) = + config.parse_name_value_directive(&line, &command_directive, file, line_no) + { commands.push(command); } - if let Some(pattern) = config.parse_name_value_directive(&line, &check_directive) { + if let Some(pattern) = + config.parse_name_value_directive(&line, &check_directive, file, line_no) + { check_lines.push((line_no, pattern)); } } diff --git a/src/tools/generate-copyright/Cargo.toml b/src/tools/generate-copyright/Cargo.toml index e420a450d42..bcb3165de45 100644 --- a/src/tools/generate-copyright/Cargo.toml +++ b/src/tools/generate-copyright/Cargo.toml @@ -9,7 +9,7 @@ description = "Produces a manifest of all the copyrighted materials in the Rust [dependencies] anyhow = "1.0.65" askama = "0.14.0" -cargo_metadata = "0.18.1" +cargo_metadata = "0.21" serde = { version = "1.0.147", features = ["derive"] } serde_json = "1.0.85" thiserror = "1" diff --git a/src/tools/generate-copyright/src/cargo_metadata.rs b/src/tools/generate-copyright/src/cargo_metadata.rs index 3fae26bda47..87cd85c8def 100644 --- a/src/tools/generate-copyright/src/cargo_metadata.rs +++ b/src/tools/generate-copyright/src/cargo_metadata.rs @@ -92,7 +92,8 @@ pub fn get_metadata( continue; } // otherwise it's an out-of-tree dependency - let package_id = Package { name: package.name, version: package.version.to_string() }; + let package_id = + Package { name: package.name.to_string(), version: package.version.to_string() }; output.insert( package_id, PackageMetadata { diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml index 7123d43eb56..fb5bff3fe63 100644 --- a/src/tools/linkchecker/Cargo.toml +++ b/src/tools/linkchecker/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "linkchecker" version = "0.1.0" -edition = "2021" +edition = "2024" [[bin]] name = "linkchecker" diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 84cba3f8c44..1dc45728c90 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -17,12 +17,13 @@ //! should catch the majority of "broken link" cases. use std::cell::{Cell, RefCell}; +use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; -use std::io::ErrorKind; +use std::fs; +use std::iter::once; use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use std::time::Instant; -use std::{env, fs}; use html5ever::tendril::ByteTendril; use html5ever::tokenizer::{ @@ -110,10 +111,25 @@ macro_rules! 
t { }; } +struct Cli { + docs: PathBuf, + link_targets_dirs: Vec<PathBuf>, +} + fn main() { - let docs = env::args_os().nth(1).expect("doc path should be first argument"); - let docs = env::current_dir().unwrap().join(docs); - let mut checker = Checker { root: docs.clone(), cache: HashMap::new() }; + let cli = match parse_cli() { + Ok(cli) => cli, + Err(err) => { + eprintln!("error: {err}"); + usage_and_exit(1); + } + }; + + let mut checker = Checker { + root: cli.docs.clone(), + link_targets_dirs: cli.link_targets_dirs, + cache: HashMap::new(), + }; let mut report = Report { errors: 0, start: Instant::now(), @@ -125,7 +141,7 @@ fn main() { intra_doc_exceptions: 0, has_broken_urls: false, }; - checker.walk(&docs, &mut report); + checker.walk(&cli.docs, &mut report); report.report(); if report.errors != 0 { println!("found some broken links"); @@ -133,8 +149,50 @@ fn main() { } } +fn parse_cli() -> Result<Cli, String> { + fn to_absolute_path(arg: &str) -> Result<PathBuf, String> { + std::path::absolute(arg).map_err(|e| format!("could not convert to absolute {arg}: {e}")) + } + + let mut verbatim = false; + let mut docs = None; + let mut link_targets_dirs = Vec::new(); + + let mut args = std::env::args().skip(1); + while let Some(arg) = args.next() { + if !verbatim && arg == "--" { + verbatim = true; + } else if !verbatim && (arg == "-h" || arg == "--help") { + usage_and_exit(0) + } else if !verbatim && arg == "--link-targets-dir" { + link_targets_dirs.push(to_absolute_path( + &args.next().ok_or("missing value for --link-targets-dir")?, + )?); + } else if !verbatim && let Some(value) = arg.strip_prefix("--link-targets-dir=") { + link_targets_dirs.push(to_absolute_path(value)?); + } else if !verbatim && arg.starts_with('-') { + return Err(format!("unknown flag: {arg}")); + } else if docs.is_none() { + docs = Some(arg); + } else { + return Err("too many positional arguments".into()); + } + } + + Ok(Cli { + docs: to_absolute_path(&docs.ok_or("missing first positional argument")?)?, + link_targets_dirs, + }) +} + +fn usage_and_exit(code: i32) -> ! { + eprintln!("usage: linkchecker PATH [--link-targets-dir=PATH ...]"); + std::process::exit(code) +} + struct Checker { root: PathBuf, + link_targets_dirs: Vec<PathBuf>, cache: Cache, } @@ -420,37 +478,34 @@ impl Checker { /// Load a file from disk, or from the cache if available. fn load_file(&mut self, file: &Path, report: &mut Report) -> (String, &FileEntry) { - // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- - #[cfg(windows)] - const ERROR_INVALID_NAME: i32 = 123; - let pretty_path = file.strip_prefix(&self.root).unwrap_or(file).to_str().unwrap().to_string(); - let entry = - self.cache.entry(pretty_path.clone()).or_insert_with(|| match fs::metadata(file) { + for base in once(&self.root).chain(self.link_targets_dirs.iter()) { + let entry = self.cache.entry(pretty_path.clone()); + if let Entry::Occupied(e) = &entry + && !matches!(e.get(), FileEntry::Missing) + { + break; + } + + let file = base.join(&pretty_path); + entry.insert_entry(match fs::metadata(&file) { Ok(metadata) if metadata.is_dir() => FileEntry::Dir, Ok(_) => { if file.extension().and_then(|s| s.to_str()) != Some("html") { FileEntry::OtherFile } else { report.html_files += 1; - load_html_file(file, report) + load_html_file(&file, report) } } - Err(e) if e.kind() == ErrorKind::NotFound => FileEntry::Missing, - Err(e) => { - // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` rather than `NotFound`. 
- // Explicitly check for that so that the broken link can be allowed in `LINKCHECK_EXCEPTIONS`. - #[cfg(windows)] - if e.raw_os_error() == Some(ERROR_INVALID_NAME) - && file.as_os_str().to_str().map_or(false, |s| s.contains("::")) - { - return FileEntry::Missing; - } - panic!("unexpected read error for {}: {}", file.display(), e); - } + Err(e) if is_not_found_error(&file, &e) => FileEntry::Missing, + Err(e) => panic!("unexpected read error for {}: {}", file.display(), e), }); + } + + let entry = self.cache.get(&pretty_path).unwrap(); (pretty_path, entry) } } @@ -629,3 +684,16 @@ fn parse_ids(ids: &mut HashSet<String>, file: &str, source: &str, report: &mut R ids.insert(encoded); } } + +fn is_not_found_error(path: &Path, error: &std::io::Error) -> bool { + // https://docs.microsoft.com/en-us/windows/win32/debug/system-error-codes--0-499- + const WINDOWS_ERROR_INVALID_NAME: i32 = 123; + + error.kind() == std::io::ErrorKind::NotFound + // If a broken intra-doc link contains `::`, on windows, it will cause `ERROR_INVALID_NAME` + // rather than `NotFound`. Explicitly check for that so that the broken link can be allowed + // in `LINKCHECK_EXCEPTIONS`. + || (cfg!(windows) + && error.raw_os_error() == Some(WINDOWS_ERROR_INVALID_NAME) + && path.as_os_str().to_str().map_or(false, |s| s.contains("::"))) +} diff --git a/src/tools/miri/.github/workflows/ci.yml b/src/tools/miri/.github/workflows/ci.yml index 11c0f08debe..c47f9695624 100644 --- a/src/tools/miri/.github/workflows/ci.yml +++ b/src/tools/miri/.github/workflows/ci.yml @@ -45,11 +45,17 @@ jobs: os: macos-latest - host_target: i686-pc-windows-msvc os: windows-latest + - host_target: aarch64-pc-windows-msvc + os: windows-11-arm runs-on: ${{ matrix.os }} env: HOST_TARGET: ${{ matrix.host_target }} steps: - uses: actions/checkout@v4 + - name: apt update + if: ${{ startsWith(matrix.os, 'ubuntu') }} + # The runners seem to have outdated apt repos sometimes + run: sudo apt update - name: install qemu if: ${{ matrix.qemu }} run: sudo apt install qemu-user qemu-user-binfmt @@ -63,6 +69,12 @@ jobs: sudo apt update # Install needed packages sudo apt install $(echo "libatomic1: zlib1g-dev:" | sed 's/:/:${{ matrix.multiarch }}/g') + - name: Install rustup on Windows ARM + if: ${{ matrix.os == 'windows-11-arm' }} + run: | + curl -LOs https://static.rust-lang.org/rustup/dist/aarch64-pc-windows-msvc/rustup-init.exe + ./rustup-init.exe -y --no-modify-path + echo "$USERPROFILE/.cargo/bin" >> "$GITHUB_PATH" - uses: ./.github/workflows/setup with: toolchain_flags: "--host ${{ matrix.host_target }}" @@ -147,35 +159,48 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 256 # get a bit more of the history - - name: install josh-proxy - run: cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 + - name: install josh-sync + run: cargo +stable install --locked --git https://github.com/rust-lang/josh-sync - name: setup bot git name and email run: | git config --global user.name 'The Miri Cronjob Bot' git config --global user.email 'miri@cron.bot' - name: Install nightly toolchain run: rustup toolchain install nightly --profile minimal - - name: get changes from rustc - run: ./miri rustc-pull - name: Install rustup-toolchain-install-master run: cargo install -f rustup-toolchain-install-master - - name: format changes (if any) + - name: Push changes to a branch and create PR run: | + # Make it easier to see what happens. 
+ set -x + # Temporarily disable early exit to examine the status code of rustc-josh-sync + set +e + rustc-josh-sync pull + exitcode=$? + set -e + + # If there were no changes to pull, rustc-josh-sync returns status code 2. + # In that case, skip the rest of the job. + if [ $exitcode -eq 2 ]; then + echo "Nothing changed in rustc, skipping PR" + exit 0 + elif [ $exitcode -ne 0 ]; then + # If return code was not 0 or 2, rustc-josh-sync actually failed + echo "error: rustc-josh-sync failed" + exit ${exitcode} + fi + + # Format changes ./miri toolchain ./miri fmt --check || (./miri fmt && git commit -am "fmt") - - name: Push changes to a branch and create PR - run: | - # `git diff --exit-code` "succeeds" if the diff is empty. - if git diff --exit-code HEAD^; then echo "Nothing changed in rustc, skipping PR"; exit 0; fi - # The diff is non-empty, create a PR. + + # Create a PR BRANCH="rustup-$(date -u +%Y-%m-%d)" git switch -c $BRANCH git push -u origin $BRANCH gh pr create -B master --title 'Automatic Rustup' --body 'Please close and re-open this PR to trigger CI, then enable auto-merge.' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - ZULIP_BOT_EMAIL: ${{ secrets.ZULIP_BOT_EMAIL }} - ZULIP_API_TOKEN: ${{ secrets.ZULIP_API_TOKEN }} cron-fail-notify: name: cronjob failure notification @@ -198,7 +223,7 @@ jobs: It would appear that the [Miri cron job build]('"https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID"') failed. This likely means that rustc changed the miri directory and - we now need to do a [`./miri rustc-pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). + we now need to do a [`rustc-josh-sync pull`](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#importing-changes-from-the-rustc-repo). Would you mind investigating this issue? diff --git a/src/tools/miri/.gitignore b/src/tools/miri/.gitignore index ed2d0ba7ba0..4a238dc0313 100644 --- a/src/tools/miri/.gitignore +++ b/src/tools/miri/.gitignore @@ -1,5 +1,4 @@ target -/doc tex/*/out *.dot *.out diff --git a/src/tools/miri/CONTRIBUTING.md b/src/tools/miri/CONTRIBUTING.md index 637c0dd2fdf..7d78fdddbad 100644 --- a/src/tools/miri/CONTRIBUTING.md +++ b/src/tools/miri/CONTRIBUTING.md @@ -297,14 +297,14 @@ You can also directly run Miri on a Rust source file: ## Advanced topic: Syncing with the rustc repo -We use the [`josh` proxy](https://github.com/josh-project/josh) to transmit changes between the +We use the [`josh-sync`](https://github.com/rust-lang/josh-sync) tool to transmit changes between the rustc and Miri repositories. You can install it as follows: ```sh -cargo +stable install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 +cargo install --locked --git https://github.com/rust-lang/josh-sync ``` -Josh will automatically be started and stopped by `./miri`. +The commands below will automatically install and manage the [Josh](https://github.com/josh-project/josh) proxy that performs the actual work. ### Importing changes from the rustc repo @@ -312,10 +312,12 @@ Josh will automatically be started and stopped by `./miri`. We assume we start on an up-to-date master branch in the Miri repo. +1) First, create a branch for the pull, e.g. `git checkout -b rustup` +2) Then run the following: ```sh # Fetch and merge rustc side of the history. Takes ca 5 min the first time. # This will also update the `rustc-version` file. -./miri rustc-pull +rustc-josh-sync pull # Update local toolchain and apply formatting. 
./miri toolchain && ./miri fmt git commit -am "rustup" @@ -328,12 +330,12 @@ needed. ### Exporting changes to the rustc repo -We will use the josh proxy to push to your fork of rustc. Run the following in the Miri repo, +We will use the `josh-sync` tool to push to your fork of rustc. Run the following in the Miri repo, assuming we are on an up-to-date master branch: ```sh # Push the Miri changes to your rustc fork (substitute your github handle for YOUR_NAME). -./miri rustc-push YOUR_NAME miri +rustc-josh-sync push miri YOUR_NAME ``` This will create a new branch called `miri` in your fork, and the output should include a link that diff --git a/src/tools/miri/Cargo.lock b/src/tools/miri/Cargo.lock index 0af4181dc15..b46f0f83420 100644 --- a/src/tools/miri/Cargo.lock +++ b/src/tools/miri/Cargo.lock @@ -166,10 +166,12 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.30" +version = "1.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deec109607ca693028562ed836a5f1c4b8bd77755c4e132fc5ce11b0b6211ae7" +checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c" dependencies = [ + "jobserver", + "libc", "shlex", ] @@ -215,6 +217,52 @@ dependencies = [ ] [[package]] +name = "clap" +version = "4.5.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d" +dependencies = [ + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_lex" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" + +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + +[[package]] +name = "codespan-reporting" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" +dependencies = [ + "serde", + "termcolor", + "unicode-width 0.2.1", +] + +[[package]] name = "color-eyre" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -314,6 +362,68 @@ dependencies = [ ] [[package]] +name = "cxx" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3523cc02ad831111491dd64b27ad999f1ae189986728e477604e61b81f828df" +dependencies = [ + "cc", + "cxxbridge-cmd", + "cxxbridge-flags", + "cxxbridge-macro", + "foldhash", + "link-cplusplus", +] + +[[package]] +name = "cxx-build" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "212b754247a6f07b10fa626628c157593f0abf640a3dd04cce2760eca970f909" +dependencies = [ + "cc", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "scratch", + "syn", +] + +[[package]] +name = "cxxbridge-cmd" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f426a20413ec2e742520ba6837c9324b55ffac24ead47491a6e29f933c5b135a" +dependencies = [ + "clap", + "codespan-reporting", + "indexmap", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = 
"cxxbridge-flags" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258b6069020b4e5da6415df94a50ee4f586a6c38b037a180e940a43d06a070d" + +[[package]] +name = "cxxbridge-macro" +version = "1.0.161" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8dec184b52be5008d6eaf7e62fc1802caf1ad1227d11b3b7df2c409c7ffc3f4" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "rustversion", + "syn", +] + +[[package]] name = "directories" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -335,12 +445,29 @@ dependencies = [ ] [[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "encode_unicode" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] name = "errno" version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -373,6 +500,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] name = "generic-array" version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -383,6 +525,17 @@ dependencies = [ ] [[package]] +name = "genmc-sys" +version = "0.1.0" +dependencies = [ + "cc", + "cmake", + "cxx", + "cxx-build", + "git2", +] + +[[package]] name = "getrandom" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -412,12 +565,150 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] +name = "git2" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] name = "indenter" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] name = "indicatif" version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -464,6 +755,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] name = "js-sys" version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -511,6 +812,19 @@ dependencies = [ ] [[package]] +name = "libgit2-sys" +version = "0.18.2+1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" +dependencies = [ + "cc", + 
"libc", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] name = "libloading" version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -531,12 +845,39 @@ dependencies = [ ] [[package]] +name = "libz-sys" +version = "1.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b70e7a7df205e92a1a4cd9aaae7898dac0aa555503cc0a649494d0d60e7651d" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a6f6da007f968f9def0d65a05b187e2960183de70c160204ecfccf0ee330212" +dependencies = [ + "cc", +] + +[[package]] name = "linux-raw-sys" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" [[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] name = "lock_api" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -612,6 +953,7 @@ dependencies = [ "chrono-tz", "colored 3.0.0", "directories", + "genmc-sys", "getrandom 0.3.3", "ipc-channel", "libc", @@ -673,6 +1015,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] name = "option-ext" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -723,6 +1083,12 @@ dependencies = [ ] [[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] name = "perf-event-open-sys" version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -756,12 +1122,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] name = "portable-atomic" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" [[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] name = "ppv-lite86" version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -947,6 +1328,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] +name = "scratch" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f6280af86e5f559536da57a45ebc84948833b3bee313a7dd25232e09c878a52" + +[[package]] name = "semver" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1026,6 +1413,18 @@ dependencies = [ ] [[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] name = "syn" version = "2.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1037,6 +1436,17 @@ dependencies = [ ] [[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] name = "tempfile" version = "3.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1050,6 +1460,15 @@ dependencies = [ ] [[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] name = "thiserror" version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1109,6 +1528,16 @@ dependencies = [ ] [[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] name = "tracing" version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1200,6 +1629,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] name = "uuid" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1217,6 +1663,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" [[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] name = "version_check" version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1306,6 +1758,15 @@ dependencies = [ ] [[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] name = "windows" version = "0.58.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1525,6 +1986,36 @@ dependencies = [ ] [[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] name = "zerocopy" version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1543,3 +2034,57 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml index d293af5cea2..91dadf78a2f 100644 --- a/src/tools/miri/Cargo.toml +++ b/src/tools/miri/Cargo.toml @@ -48,6 +48,10 @@ nix = { version = "0.30.1", features = ["mman", "ptrace", "signal"], optional = ipc-channel = { version = "0.20.0", optional = true } capstone = { version = "0.13", optional = true } +# FIXME(genmc,macos): Add `target_os = "macos"` once https://github.com/dtolnay/cxx/issues/1535 is fixed. +[target.'cfg(all(target_os = "linux", target_pointer_width = "64", target_endian = "little"))'.dependencies] +genmc-sys = { path = "./genmc-sys/", version = "0.1.0", optional = true } + [dev-dependencies] ui_test = "0.30.2" colored = "3" @@ -66,7 +70,7 @@ harness = false [features] default = ["stack-cache", "native-lib"] -genmc = [] +genmc = ["dep:genmc-sys"] # this enables a GPL dependency! 
stack-cache = [] stack-cache-consistency-check = ["stack-cache"] tracing = ["serde_json"] diff --git a/src/tools/miri/cargo-miri/src/phases.rs b/src/tools/miri/cargo-miri/src/phases.rs index b72b974bdbd..efb9053f69a 100644 --- a/src/tools/miri/cargo-miri/src/phases.rs +++ b/src/tools/miri/cargo-miri/src/phases.rs @@ -1,9 +1,9 @@ //! Implements the various phases of `cargo miri run/test`. use std::env; -use std::fs::{self, File}; +use std::fs::File; use std::io::BufReader; -use std::path::{Path, PathBuf}; +use std::path::{self, Path, PathBuf}; use std::process::Command; use rustc_version::VersionMeta; @@ -222,12 +222,12 @@ pub fn phase_cargo_miri(mut args: impl Iterator<Item = String>) { // that to be the Miri driver, but acting as rustc, in host mode. // // In `main`, we need the value of `RUSTC` to distinguish RUSTC_WRAPPER invocations from rustdoc - // or TARGET_RUNNER invocations, so we canonicalize it here to make it exceedingly unlikely that + // or TARGET_RUNNER invocations, so we make it absolute to make it exceedingly unlikely that // there would be a collision with other invocations of cargo-miri (as rustdoc or as runner). We // explicitly do this even if RUSTC_STAGE is set, since for these builds we do *not* want the // bootstrap `rustc` thing in our way! Instead, we have MIRI_HOST_SYSROOT to use for host // builds. - cmd.env("RUSTC", fs::canonicalize(find_miri()).unwrap()); + cmd.env("RUSTC", path::absolute(find_miri()).unwrap()); // In case we get invoked as RUSTC without the wrapper, let's be a host rustc. This makes no // sense for cross-interpretation situations, but without the wrapper, this will use the host // sysroot, so asking it to behave like a target build makes even less sense. diff --git a/src/tools/miri/ci/ci.sh b/src/tools/miri/ci/ci.sh index 5767d178279..b66530e77b8 100755 --- a/src/tools/miri/ci/ci.sh +++ b/src/tools/miri/ci/ci.sh @@ -142,7 +142,6 @@ case $HOST_TARGET in # Host GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests # Extra tier 1 - MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests MANY_SEEDS=64 TEST_TARGET=x86_64-apple-darwin run_tests MANY_SEEDS=64 TEST_TARGET=x86_64-pc-windows-gnu run_tests ;; @@ -161,8 +160,6 @@ case $HOST_TARGET in aarch64-unknown-linux-gnu) # Host GC_STRESS=1 MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests - # Extra tier 1 candidate - MANY_SEEDS=64 TEST_TARGET=aarch64-pc-windows-msvc run_tests # Extra tier 2 MANY_SEEDS=16 TEST_TARGET=arm-unknown-linux-gnueabi run_tests # 32bit ARM MANY_SEEDS=16 TEST_TARGET=aarch64-pc-windows-gnullvm run_tests # gnullvm ABI @@ -189,13 +186,20 @@ case $HOST_TARGET in ;; i686-pc-windows-msvc) # Host - # Without GC_STRESS as this is the slowest runner. + # Without GC_STRESS as this is a very slow runner. MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 run_tests # Extra tier 1 # We really want to ensure a Linux target works on a Windows host, # and a 64bit target works on a 32bit host. TEST_TARGET=x86_64-unknown-linux-gnu run_tests ;; + aarch64-pc-windows-msvc) + # Host + # Without GC_STRESS as this is a very slow runner. 
+        MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests
+        # Extra tier 1
+        MANY_SEEDS=64 TEST_TARGET=i686-unknown-linux-gnu run_tests
+        ;;
    *)
        echo "FATAL: unknown host target: $HOST_TARGET"
        exit 1
diff --git a/src/tools/miri/doc/genmc.md b/src/tools/miri/doc/genmc.md
new file mode 100644
index 00000000000..5aabe90b5da
--- /dev/null
+++ b/src/tools/miri/doc/genmc.md
@@ -0,0 +1,62 @@
+# **(WIP)** Documentation for Miri-GenMC
+
+[GenMC](https://github.com/MPI-SWS/genmc) is a stateless model checker for exploring concurrent executions of a program.
+Miri-GenMC integrates that model checker into Miri.
+
+**NOTE: Currently, no actual GenMC functionality is part of Miri; this is still WIP.**
+
+<!-- FIXME(genmc): add explanation. -->
+
+## Usage
+
+**IMPORTANT: The license of GenMC and thus the `genmc-sys` crate in the Miri repo is currently "GPL-3.0-or-later", so a binary produced with the `genmc` feature is subject to the requirements of the GPL. As long as that remains the case, the `genmc` feature of Miri is OFF-BY-DEFAULT and must be OFF for all Miri releases.**
+
+For testing/developing Miri-GenMC (while keeping in mind the licensing issues):
+- clone the Miri repo.
+- build Miri-GenMC with `./miri build --features=genmc`.
+- OR: install Miri-GenMC on the current system with `./miri install --features=genmc`.
+
+Basic usage:
+```shell
+MIRIFLAGS="-Zmiri-genmc" cargo miri run
+```
+
+<!-- FIXME(genmc): explain options. -->
+
+<!-- FIXME(genmc): explain Miri-GenMC specific functions. -->
+
+## Tips
+
+<!-- FIXME(genmc): add tips for using Miri-GenMC more efficiently. -->
+
+## Limitations
+
+Some or all of these limitations might get removed in the future:
+
+- Borrow tracking (Stacked/Tree Borrows) is currently incompatible with GenMC mode.
+- Only Linux is supported for now.
+- No support for 32-bit or big-endian targets.
+- No cross-target interpretation.
+
+<!-- FIXME(genmc): document remaining limitations -->
+
+## Development
+
+GenMC is written in C++, which complicates development a bit.
+The prerequisites for building Miri-GenMC are:
+- A compiler with C++23 support.
+- LLVM development headers and clang.
+  <!-- FIXME(genmc,llvm): remove once LLVM dependency is no longer required. -->
+
+The actual code for GenMC is not contained in the Miri repo itself, but in a [separate GenMC repo](https://github.com/MPI-SWS/genmc) (with its own maintainers).
+These sources need to be available to build Miri-GenMC.
+The process for obtaining them is as follows:
+- By default, a fixed commit of GenMC is downloaded to `genmc-sys/genmc-src` and built automatically.
+  (The commit is determined by `GENMC_COMMIT` in `genmc-sys/build.rs`.)
+- If you want to override that, set the `GENMC_SRC_PATH` environment variable to a path that contains the GenMC sources.
+  If you place this directory inside the Miri folder, it is recommended to call it `genmc-src`, as that tells `./miri fmt` to avoid
+  formatting the Rust files inside that folder.
+
+<!-- FIXME(genmc): explain how submitting code to GenMC should be handled. -->
+
+<!-- FIXME(genmc): explain development.
-->
diff --git a/src/tools/miri/etc/rust_analyzer_helix.toml b/src/tools/miri/etc/rust_analyzer_helix.toml
index 91e4070478c..c46b246049f 100644
--- a/src/tools/miri/etc/rust_analyzer_helix.toml
+++ b/src/tools/miri/etc/rust_analyzer_helix.toml
@@ -5,6 +5,7 @@ source = "discover"
linkedProjects = [
    "Cargo.toml",
    "cargo-miri/Cargo.toml",
+    "genmc-sys/Cargo.toml",
    "miri-script/Cargo.toml",
]
diff --git a/src/tools/miri/etc/rust_analyzer_vscode.json b/src/tools/miri/etc/rust_analyzer_vscode.json
index 6917c6a1fd8..8e647f5331f 100644
--- a/src/tools/miri/etc/rust_analyzer_vscode.json
+++ b/src/tools/miri/etc/rust_analyzer_vscode.json
@@ -3,6 +3,7 @@
    "rust-analyzer.linkedProjects": [
        "Cargo.toml",
        "cargo-miri/Cargo.toml",
+        "genmc-sys/Cargo.toml",
        "miri-script/Cargo.toml",
    ],
    "rust-analyzer.check.invocationStrategy": "once",
diff --git a/src/tools/miri/genmc-sys/.gitignore b/src/tools/miri/genmc-sys/.gitignore
new file mode 100644
index 00000000000..276a053cd05
--- /dev/null
+++ b/src/tools/miri/genmc-sys/.gitignore
@@ -0,0 +1 @@
+genmc-src*/
diff --git a/src/tools/miri/genmc-sys/Cargo.toml b/src/tools/miri/genmc-sys/Cargo.toml
new file mode 100644
index 00000000000..737ab9073bf
--- /dev/null
+++ b/src/tools/miri/genmc-sys/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+authors = ["Miri Team"]
+# The parts in this repo are MIT OR Apache-2.0, but we are linking in
+# code from https://github.com/MPI-SWS/genmc which is GPL-3.0-or-later.
+license = "(MIT OR Apache-2.0) AND GPL-3.0-or-later"
+name = "genmc-sys"
+version = "0.1.0"
+edition = "2024"
+
+[dependencies]
+cxx = { version = "1.0.160", features = ["c++20"] }
+
+[build-dependencies]
+cc = "1.2.16"
+cmake = "0.1.54"
+git2 = { version = "0.20.2", default-features = false, features = ["https"] }
+cxx-build = { version = "1.0.160", features = ["parallel"] }
diff --git a/src/tools/miri/genmc-sys/build.rs b/src/tools/miri/genmc-sys/build.rs
new file mode 100644
index 00000000000..479a3bd7186
--- /dev/null
+++ b/src/tools/miri/genmc-sys/build.rs
@@ -0,0 +1,269 @@
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+// Build script for running Miri with GenMC.
+// Check out doc/genmc.md for more info.
+
+/// Path where the downloaded GenMC repository will be stored (relative to the `genmc-sys` directory).
+/// Note that this directory is *not* cleaned up automatically by `cargo clean`.
+const GENMC_DOWNLOAD_PATH: &str = "./genmc-src/";
+
+/// Name of the library of the GenMC model checker.
+const GENMC_MODEL_CHECKER: &str = "genmc_lib";
+
+/// Path where the `cxx_bridge!` macro is used to define the Rust-C++ interface.
+const RUST_CXX_BRIDGE_FILE_PATH: &str = "src/lib.rs";
+
+/// The profile with which to build GenMC.
+const GENMC_CMAKE_PROFILE: &str = "RelWithDebInfo";
+
+mod downloading {
+    use std::path::PathBuf;
+    use std::str::FromStr;
+
+    use git2::{Commit, Oid, Remote, Repository, StatusOptions};
+
+    use super::GENMC_DOWNLOAD_PATH;
+
+    /// The GenMC repository that we get our commit from.
+    pub(crate) const GENMC_GITHUB_URL: &str = "https://github.com/MPI-SWS/genmc.git";
+    /// The GenMC commit we depend on. It must be available on the specified GenMC repository.
+ pub(crate) const GENMC_COMMIT: &str = "3438dd2c1202cd4a47ed7881d099abf23e4167ab"; + + pub(crate) fn download_genmc() -> PathBuf { + let Ok(genmc_download_path) = PathBuf::from_str(GENMC_DOWNLOAD_PATH); + let commit_oid = Oid::from_str(GENMC_COMMIT).expect("Commit should be valid."); + + match Repository::open(&genmc_download_path) { + Ok(repo) => { + assert_repo_unmodified(&repo); + let commit = update_local_repo(&repo, commit_oid); + checkout_commit(&repo, &commit); + } + Err(_) => { + let repo = clone_remote_repo(&genmc_download_path); + let Ok(commit) = repo.find_commit(commit_oid) else { + panic!( + "Cloned GenMC repository does not contain required commit '{GENMC_COMMIT}'" + ); + }; + checkout_commit(&repo, &commit); + } + }; + + genmc_download_path + } + + fn get_remote(repo: &Repository) -> Remote<'_> { + let remote = repo.find_remote("origin").unwrap_or_else(|e| { + panic!( + "Could not load commit ({GENMC_COMMIT}) from remote repository '{GENMC_GITHUB_URL}'. Error: {e}" + ); + }); + + // Ensure that the correct remote URL is set. + let remote_url = remote.url(); + if let Some(remote_url) = remote_url + && remote_url == GENMC_GITHUB_URL + { + return remote; + } + + // Update remote URL. + println!( + "cargo::warning=GenMC repository remote URL has changed from '{remote_url:?}' to '{GENMC_GITHUB_URL}'" + ); + repo.remote_set_url("origin", GENMC_GITHUB_URL) + .expect("cannot rename url of remote 'origin'"); + + // Reacquire the `Remote`, since `remote_set_url` doesn't update Remote objects already in memory. + repo.find_remote("origin").unwrap() + } + + // Check if the required commit exists already, otherwise try fetching it. + fn update_local_repo(repo: &Repository, commit_oid: Oid) -> Commit<'_> { + repo.find_commit(commit_oid).unwrap_or_else(|_find_error| { + println!("GenMC repository at path '{GENMC_DOWNLOAD_PATH}' does not contain commit '{GENMC_COMMIT}'."); + // The commit is not in the checkout. Try `git fetch` and hope that we find the commit then. + let mut remote = get_remote(repo); + remote.fetch(&[GENMC_COMMIT], None, None).expect("Failed to fetch from remote."); + + repo.find_commit(commit_oid) + .expect("Remote repository should contain expected commit") + }) + } + + fn clone_remote_repo(genmc_download_path: &PathBuf) -> Repository { + Repository::clone(GENMC_GITHUB_URL, &genmc_download_path).unwrap_or_else(|e| { + panic!("Cannot clone GenMC repo from '{GENMC_GITHUB_URL}': {e:?}"); + }) + } + + /// Set the state of the repo to a specific commit + fn checkout_commit(repo: &Repository, commit: &Commit<'_>) { + repo.checkout_tree(commit.as_object(), None).expect("Failed to checkout"); + repo.set_head_detached(commit.id()).expect("Failed to set HEAD"); + println!("Successfully set checked out commit {commit:?}"); + } + + /// Check that the downloaded repository is unmodified. + /// If it is modified, explain that it shouldn't be, and hint at how to do local development with GenMC. + /// We don't overwrite any changes made to the directory, to prevent data loss. + fn assert_repo_unmodified(repo: &Repository) { + let statuses = repo + .statuses(Some( + StatusOptions::new() + .include_untracked(true) + .include_ignored(false) + .include_unmodified(false), + )) + .expect("should be able to get repository status"); + if statuses.is_empty() { + return; + } + + panic!( + "Downloaded GenMC repository at path '{GENMC_DOWNLOAD_PATH}' has been modified. 
Please undo any changes made, or delete the '{GENMC_DOWNLOAD_PATH}' directory to have it downloaded again.\n\
+            HINT: For local development, set the environment variable 'GENMC_SRC_PATH' to the path of a GenMC repository."
+        );
+    }
+}
+
+// FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed.
+/// The linked LLVM version is in the generated `config.h` file, which we parse and use to link to LLVM.
+/// Returns C++ compiler definitions required for building with/including LLVM, and the include path for LLVM headers.
+fn link_to_llvm(config_file: &Path) -> (String, String) {
+    /// Search a string for a line matching `//@VARIABLE_NAME: VARIABLE CONTENT`
+    fn extract_value<'a>(input: &'a str, name: &str) -> Option<&'a str> {
+        input
+            .lines()
+            .find_map(|line| line.strip_prefix("//@")?.strip_prefix(name)?.strip_prefix(": "))
+    }
+
+    let file_content = std::fs::read_to_string(&config_file).unwrap_or_else(|err| {
+        panic!("GenMC config file ({}) should exist, but got error {err:?}", config_file.display())
+    });
+
+    let llvm_definitions = extract_value(&file_content, "LLVM_DEFINITIONS")
+        .expect("Config file should contain LLVM_DEFINITIONS");
+    let llvm_include_dirs = extract_value(&file_content, "LLVM_INCLUDE_DIRS")
+        .expect("Config file should contain LLVM_INCLUDE_DIRS");
+    let llvm_library_dir = extract_value(&file_content, "LLVM_LIBRARY_DIR")
+        .expect("Config file should contain LLVM_LIBRARY_DIR");
+    let llvm_config_path = extract_value(&file_content, "LLVM_CONFIG_PATH")
+        .expect("Config file should contain LLVM_CONFIG_PATH");
+
+    // Add linker search path.
+    let lib_dir = PathBuf::from_str(llvm_library_dir).unwrap();
+    println!("cargo::rustc-link-search=native={}", lib_dir.display());
+
+    // Add libraries to link.
+    let output = std::process::Command::new(llvm_config_path)
+        .arg("--libs") // Print the libraries to link to (space-separated list)
+        .output()
+        .expect("failed to execute llvm-config");
+    let llvm_link_libs =
+        String::try_from(output.stdout).expect("llvm-config output should be a valid string");
+
+    for link_lib in llvm_link_libs.trim().split(" ") {
+        let link_lib =
+            link_lib.strip_prefix("-l").expect("Linker parameter should start with \"-l\"");
+        println!("cargo::rustc-link-lib=dylib={link_lib}");
+    }
+
+    (llvm_definitions.to_string(), llvm_include_dirs.to_string())
+}
+
+/// Build the GenMC model checker library and the Rust-C++ interop library with cxx.rs
+fn compile_cpp_dependencies(genmc_path: &Path) {
+    // Part 1:
+    // Compile the GenMC library using cmake.
+
+    let cmakelists_path = genmc_path.join("CMakeLists.txt");
+
+    // FIXME(genmc,cargo): Switch to using `CARGO_CFG_DEBUG_ASSERTIONS` once https://github.com/rust-lang/cargo/issues/15760 is completed.
+ // Enable/disable additional debug checks, prints and options for GenMC, based on the Rust profile (debug/release) + let enable_genmc_debug = matches!(std::env::var("PROFILE").as_deref().unwrap(), "debug"); + + let mut config = cmake::Config::new(cmakelists_path); + config.profile(GENMC_CMAKE_PROFILE); + config.define("GENMC_DEBUG", if enable_genmc_debug { "ON" } else { "OFF" }); + + // The actual compilation happens here: + let genmc_install_dir = config.build(); + + // Add the model checker library to be linked and tell rustc where to find it: + let cmake_lib_dir = genmc_install_dir.join("lib").join("genmc"); + println!("cargo::rustc-link-search=native={}", cmake_lib_dir.display()); + println!("cargo::rustc-link-lib=static={GENMC_MODEL_CHECKER}"); + + // FIXME(genmc,llvm): Remove once the LLVM dependency of the GenMC model checker is removed. + let config_file = genmc_install_dir.join("include").join("genmc").join("config.h"); + let (llvm_definitions, llvm_include_dirs) = link_to_llvm(&config_file); + + // Part 2: + // Compile the cxx_bridge (the link between the Rust and C++ code). + + let genmc_include_dir = genmc_install_dir.join("include").join("genmc"); + + // FIXME(genmc,llvm): remove once LLVM dependency is removed. + // These definitions are parsed into a cmake list and then printed to the config.h file, so they are ';' separated. + let definitions = llvm_definitions.split(";"); + + let mut bridge = cxx_build::bridge("src/lib.rs"); + // FIXME(genmc,cmake): Remove once the GenMC debug setting is available in the config.h file. + if enable_genmc_debug { + bridge.define("ENABLE_GENMC_DEBUG", None); + } + for definition in definitions { + bridge.flag(definition); + } + bridge + .opt_level(2) + .debug(true) // Same settings that GenMC uses (default for cmake `RelWithDebInfo`) + .warnings(false) // NOTE: enabling this produces a lot of warnings. + .std("c++23") + .include(genmc_include_dir) + .include(llvm_include_dirs) + .include("./src_cpp") + .file("./src_cpp/MiriInterface.hpp") + .file("./src_cpp/MiriInterface.cpp") + .compile("genmc_interop"); + + // Link the Rust-C++ interface library generated by cxx_build: + println!("cargo::rustc-link-lib=static=genmc_interop"); +} + +fn main() { + // Make sure we don't accidentally distribute a binary with GPL code. + if option_env!("RUSTC_STAGE").is_some() { + panic!( + "genmc should not be enabled in the rustc workspace since it includes a GPL dependency" + ); + } + + // Select which path to use for the GenMC repo: + let genmc_path = if let Ok(genmc_src_path) = std::env::var("GENMC_SRC_PATH") { + let genmc_src_path = + PathBuf::from_str(&genmc_src_path).expect("GENMC_SRC_PATH should contain a valid path"); + assert!( + genmc_src_path.exists(), + "GENMC_SRC_PATH={} does not exist!", + genmc_src_path.display() + ); + genmc_src_path + } else { + downloading::download_genmc() + }; + + // Build all required components: + compile_cpp_dependencies(&genmc_path); + + // Only rebuild if anything changes: + // Note that we don't add the downloaded GenMC repo, since that should never be modified + // manually. Adding that path here would also trigger an unnecessary rebuild after the repo is + // cloned (since cargo detects that as a file modification). 
+ println!("cargo::rerun-if-changed={RUST_CXX_BRIDGE_FILE_PATH}"); + println!("cargo::rerun-if-changed=./src"); + println!("cargo::rerun-if-changed=./src_cpp"); +} diff --git a/src/tools/miri/genmc-sys/src/lib.rs b/src/tools/miri/genmc-sys/src/lib.rs new file mode 100644 index 00000000000..ab46d729ea1 --- /dev/null +++ b/src/tools/miri/genmc-sys/src/lib.rs @@ -0,0 +1,30 @@ +pub use self::ffi::*; + +impl Default for GenmcParams { + fn default() -> Self { + Self { + print_random_schedule_seed: false, + do_symmetry_reduction: false, + // FIXME(GenMC): Add defaults for remaining parameters + } + } +} + +#[cxx::bridge] +mod ffi { + /// Parameters that will be given to GenMC for setting up the model checker. + /// (The fields of this struct are visible to both Rust and C++) + #[derive(Clone, Debug)] + struct GenmcParams { + pub print_random_schedule_seed: bool, + pub do_symmetry_reduction: bool, + // FIXME(GenMC): Add remaining parameters. + } + unsafe extern "C++" { + include!("MiriInterface.hpp"); + + type MiriGenMCShim; + + fn createGenmcHandle(config: &GenmcParams) -> UniquePtr<MiriGenMCShim>; + } +} diff --git a/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp new file mode 100644 index 00000000000..0827bb3d407 --- /dev/null +++ b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.cpp @@ -0,0 +1,50 @@ +#include "MiriInterface.hpp" + +#include "genmc-sys/src/lib.rs.h" + +auto MiriGenMCShim::createHandle(const GenmcParams &config) + -> std::unique_ptr<MiriGenMCShim> +{ + auto conf = std::make_shared<Config>(); + + // Miri needs all threads to be replayed, even fully completed ones. + conf->replayCompletedThreads = true; + + // We only support the RC11 memory model for Rust. + conf->model = ModelType::RC11; + + conf->printRandomScheduleSeed = config.print_random_schedule_seed; + + // FIXME(genmc): disable any options we don't support currently: + conf->ipr = false; + conf->disableBAM = true; + conf->instructionCaching = false; + + ERROR_ON(config.do_symmetry_reduction, "Symmetry reduction is currently unsupported in GenMC mode."); + conf->symmetryReduction = config.do_symmetry_reduction; + + // FIXME(genmc): Should there be a way to change this option from Miri? + conf->schedulePolicy = SchedulePolicy::WF; + + // FIXME(genmc): implement estimation mode: + conf->estimate = false; + conf->estimationMax = 1000; + const auto mode = conf->estimate ? GenMCDriver::Mode(GenMCDriver::EstimationMode{}) + : GenMCDriver::Mode(GenMCDriver::VerificationMode{}); + + // Running Miri-GenMC without race detection is not supported. + // Disabling this option also changes the behavior of the replay scheduler to only schedule at atomic operations, which is required with Miri. + // This happens because Miri can generate multiple GenMC events for a single MIR terminator. Without this option, + // the scheduler might incorrectly schedule an atomic MIR terminator because the first event it creates is a non-atomic (e.g., `StorageLive`). + conf->disableRaceDetection = false; + + // Miri can already check for unfreed memory. Also, GenMC cannot distinguish between memory + // that is allowed to leak and memory that is not. 
+ conf->warnUnfreedMemory = false; + + // FIXME(genmc): check config: + // checkConfigOptions(*conf); + + auto driver = std::make_unique<MiriGenMCShim>(std::move(conf), mode); + return driver; +} diff --git a/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp new file mode 100644 index 00000000000..e55522ef418 --- /dev/null +++ b/src/tools/miri/genmc-sys/src_cpp/MiriInterface.hpp @@ -0,0 +1,44 @@ +#ifndef GENMC_MIRI_INTERFACE_HPP +#define GENMC_MIRI_INTERFACE_HPP + +#include "rust/cxx.h" + +#include "config.h" + +#include "Config/Config.hpp" +#include "Verification/GenMCDriver.hpp" + +#include <iostream> + +/**** Types available to Miri ****/ + +// Config struct defined on the Rust side and translated to C++ by cxx.rs: +struct GenmcParams; + +struct MiriGenMCShim : private GenMCDriver +{ + +public: + MiriGenMCShim(std::shared_ptr<const Config> conf, Mode mode /* = VerificationMode{} */) + : GenMCDriver(std::move(conf), nullptr, mode) + { + std::cerr << "C++: GenMC handle created!" << std::endl; + } + + virtual ~MiriGenMCShim() + { + std::cerr << "C++: GenMC handle destroyed!" << std::endl; + } + + static std::unique_ptr<MiriGenMCShim> createHandle(const GenmcParams &config); +}; + +/**** Functions available to Miri ****/ + +// NOTE: CXX doesn't support exposing static methods to Rust currently, so we expose this function instead. +static inline auto createGenmcHandle(const GenmcParams &config) -> std::unique_ptr<MiriGenMCShim> +{ + return MiriGenMCShim::createHandle(config); +} + +#endif /* GENMC_MIRI_INTERFACE_HPP */ diff --git a/src/tools/miri/josh-sync.toml b/src/tools/miri/josh-sync.toml new file mode 100644 index 00000000000..86208b3742d --- /dev/null +++ b/src/tools/miri/josh-sync.toml @@ -0,0 +1,2 @@ +repo = "miri" +filter = ":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri" diff --git a/src/tools/miri/miri-script/Cargo.lock b/src/tools/miri/miri-script/Cargo.lock index a049bfcbccd..044a678869e 100644 --- a/src/tools/miri/miri-script/Cargo.lock +++ b/src/tools/miri/miri-script/Cargo.lock @@ -117,27 +117,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] -name = "directories" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.60.2", -] - -[[package]] name = "dunce" version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -167,17 +146,6 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "getrandom" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.11.1+wasi-snapshot-preview1", -] - -[[package]] -name = "getrandom" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" @@ -185,7 +153,7 @@ dependencies = [ "cfg-if", "libc", "r-efi", - 
"wasi 0.14.2+wasi-0.2.4", + "wasi", ] [[package]] @@ -222,16 +190,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" [[package]] -name = "libredox" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1580801010e535496706ba011c15f8532df6b42297d2e471fec38ceadd8c0638" -dependencies = [ - "bitflags", - "libc", -] - -[[package]] name = "linux-raw-sys" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -249,7 +207,6 @@ version = "0.1.0" dependencies = [ "anyhow", "clap", - "directories", "dunce", "itertools", "path_macro", @@ -276,12 +233,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" [[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] name = "path_macro" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -312,17 +263,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] -name = "redox_users" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" -dependencies = [ - "getrandom 0.2.16", - "libredox", - "thiserror", -] - -[[package]] name = "rustc_version" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -427,33 +367,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" dependencies = [ "fastrand", - "getrandom 0.3.3", + "getrandom", "once_cell", "rustix", "windows-sys 0.59.0", ] [[package]] -name = "thiserror" -version = "2.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] name = "unicode-ident" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -477,12 +397,6 @@ dependencies = [ [[package]] name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasi" version = "0.14.2+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" diff --git a/src/tools/miri/miri-script/Cargo.toml b/src/tools/miri/miri-script/Cargo.toml index b3f82cd1d50..39858880e8c 100644 --- a/src/tools/miri/miri-script/Cargo.toml +++ b/src/tools/miri/miri-script/Cargo.toml @@ -22,7 +22,6 @@ anyhow = "1.0" xshell = "0.2.6" rustc_version = "0.4" dunce = "1.0.4" -directories = "6" serde = "1" serde_json = "1" serde_derive = "1" diff --git a/src/tools/miri/miri-script/src/commands.rs 
b/src/tools/miri/miri-script/src/commands.rs index 9aaad9ca04a..ee09b9b4b73 100644 --- a/src/tools/miri/miri-script/src/commands.rs +++ b/src/tools/miri/miri-script/src/commands.rs @@ -2,11 +2,9 @@ use std::collections::BTreeMap; use std::ffi::{OsStr, OsString}; use std::fmt::Write as _; use std::fs::{self, File}; -use std::io::{self, BufRead, BufReader, BufWriter, Write as _}; -use std::ops::Not; +use std::io::{self, BufRead, BufReader, BufWriter}; use std::path::PathBuf; -use std::time::Duration; -use std::{env, net, process}; +use std::{env, process}; use anyhow::{Context, Result, anyhow, bail}; use path_macro::path; @@ -18,11 +16,6 @@ use xshell::{Shell, cmd}; use crate::Command; use crate::util::*; -/// Used for rustc syncs. -const JOSH_FILTER: &str = - ":rev(75dd959a3a40eb5b4574f8d2e23aa6efbeb33573:prefix=src/tools/miri):/src/tools/miri"; -const JOSH_PORT: u16 = 42042; - impl MiriEnv { /// Prepares the environment: builds miri and cargo-miri and a sysroot. /// Returns the location of the sysroot. @@ -99,66 +92,6 @@ impl Command { Ok(()) } - fn start_josh() -> Result<impl Drop> { - // Determine cache directory. - let local_dir = { - let user_dirs = - directories::ProjectDirs::from("org", "rust-lang", "miri-josh").unwrap(); - user_dirs.cache_dir().to_owned() - }; - - // Start josh, silencing its output. - let mut cmd = process::Command::new("josh-proxy"); - cmd.arg("--local").arg(local_dir); - cmd.arg("--remote").arg("https://github.com"); - cmd.arg("--port").arg(JOSH_PORT.to_string()); - cmd.arg("--no-background"); - cmd.stdout(process::Stdio::null()); - cmd.stderr(process::Stdio::null()); - let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?; - - // Create a wrapper that stops it on drop. - struct Josh(process::Child); - impl Drop for Josh { - fn drop(&mut self) { - #[cfg(unix)] - { - // Try to gracefully shut it down. - process::Command::new("kill") - .args(["-s", "INT", &self.0.id().to_string()]) - .output() - .expect("failed to SIGINT josh-proxy"); - // Sadly there is no "wait with timeout"... so we just give it some time to finish. - std::thread::sleep(Duration::from_millis(100)); - // Now hopefully it is gone. - if self.0.try_wait().expect("failed to wait for josh-proxy").is_some() { - return; - } - } - // If that didn't work (or we're not on Unix), kill it hard. - eprintln!( - "I have to kill josh-proxy the hard way, let's hope this does not break anything." - ); - self.0.kill().expect("failed to SIGKILL josh-proxy"); - } - } - - // Wait until the port is open. We try every 10ms until 1s passed. - for _ in 0..100 { - // This will generally fail immediately when the port is still closed. - let josh_ready = net::TcpStream::connect_timeout( - &net::SocketAddr::from(([127, 0, 0, 1], JOSH_PORT)), - Duration::from_millis(1), - ); - if josh_ready.is_ok() { - return Ok(Josh(josh)); - } - // Not ready yet. - std::thread::sleep(Duration::from_millis(10)); - } - bail!("Even after waiting for 1s, josh-proxy is still not available.") - } - pub fn exec(self) -> Result<()> { // First, and crucially only once, run the auto-actions -- but not for all commands. match &self { @@ -170,11 +103,7 @@ impl Command { | Command::Fmt { .. } | Command::Doc { .. } | Command::Clippy { .. } => Self::auto_actions()?, - | Command::Toolchain { .. } - | Command::Bench { .. } - | Command::RustcPull { .. } - | Command::RustcPush { .. } - | Command::Squash => {} + | Command::Toolchain { .. } | Command::Bench { .. } | Command::Squash => {} } // Then run the actual command. 
match self { @@ -191,8 +120,6 @@ impl Command { Command::Bench { target, no_install, save_baseline, load_baseline, benches } => Self::bench(target, no_install, save_baseline, load_baseline, benches), Command::Toolchain { flags } => Self::toolchain(flags), - Command::RustcPull { commit } => Self::rustc_pull(commit.clone()), - Command::RustcPush { github_user, branch } => Self::rustc_push(github_user, branch), Command::Squash => Self::squash(), } } @@ -233,156 +160,6 @@ impl Command { Ok(()) } - fn rustc_pull(commit: Option<String>) -> Result<()> { - let sh = Shell::new()?; - sh.change_dir(miri_dir()?); - let commit = commit.map(Result::Ok).unwrap_or_else(|| { - let rust_repo_head = - cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?; - rust_repo_head - .split_whitespace() - .next() - .map(|front| front.trim().to_owned()) - .ok_or_else(|| anyhow!("Could not obtain Rust repo HEAD from remote.")) - })?; - // Make sure the repo is clean. - if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - bail!("working directory must be clean before running `./miri rustc-pull`"); - } - // Make sure josh is running. - let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git"); - - // Update rust-version file. As a separate commit, since making it part of - // the merge has confused the heck out of josh in the past. - // We pass `--no-verify` to avoid running git hooks like `./miri fmt` that could in turn - // trigger auto-actions. - // We do this before the merge so that if there are merge conflicts, we have - // the right rust-version file while resolving them. - sh.write_file("rust-version", format!("{commit}\n"))?; - const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rustc"; - cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}") - .run() - .context("FAILED to commit rust-version file, something went wrong")?; - - // Fetch given rustc commit. - cmd!(sh, "git fetch {josh_url}") - .run() - .inspect_err(|_| { - // Try to un-do the previous `git commit`, to leave the repo in the state we found it. - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - }) - .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; - - // This should not add any new root commits. So count those before and after merging. - let num_roots = || -> Result<u32> { - Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count") - .read() - .context("failed to determine the number of root commits")? - .parse::<u32>()?) - }; - let num_roots_before = num_roots()?; - - // Merge the fetched commit. - const MERGE_COMMIT_MESSAGE: &str = "Merge from rustc"; - cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}") - .run() - .context("FAILED to merge new commits, something went wrong")?; - - // Check that the number of roots did not increase. - if num_roots()? != num_roots_before { - bail!("Josh created a new root commit. This is probably not the history you want."); - } - - drop(josh); - Ok(()) - } - - fn rustc_push(github_user: String, branch: String) -> Result<()> { - let sh = Shell::new()?; - sh.change_dir(miri_dir()?); - let base = sh.read_file("rust-version")?.trim().to_owned(); - // Make sure the repo is clean. 
- if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - bail!("working directory must be clean before running `./miri rustc-push`"); - } - // Make sure josh is running. - let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/{github_user}/rust.git{JOSH_FILTER}.git"); - - // Find a repo we can do our preparation in. - if let Ok(rustc_git) = env::var("RUSTC_GIT") { - // If rustc_git is `Some`, we'll use an existing fork for the branch updates. - sh.change_dir(rustc_git); - } else { - // Otherwise, do this in the local Miri repo. - println!( - "This will pull a copy of the rust-lang/rust history into this Miri checkout, growing it by about 1GB." - ); - print!( - "To avoid that, abort now and set the `RUSTC_GIT` environment variable to an existing rustc checkout. Proceed? [y/N] " - ); - std::io::stdout().flush()?; - let mut answer = String::new(); - std::io::stdin().read_line(&mut answer)?; - if answer.trim().to_lowercase() != "y" { - std::process::exit(1); - } - }; - // Prepare the branch. Pushing works much better if we use as base exactly - // the commit that we pulled from last time, so we use the `rust-version` - // file to find out which commit that would be. - println!("Preparing {github_user}/rust (base: {base})..."); - if cmd!(sh, "git fetch https://github.com/{github_user}/rust {branch}") - .ignore_stderr() - .read() - .is_ok() - { - println!( - "The branch '{branch}' seems to already exist in 'https://github.com/{github_user}/rust'. Please delete it and try again." - ); - std::process::exit(1); - } - cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?; - cmd!(sh, "git push https://github.com/{github_user}/rust {base}:refs/heads/{branch}") - .ignore_stdout() - .ignore_stderr() // silence the "create GitHub PR" message - .run()?; - println!(); - - // Do the actual push. - sh.change_dir(miri_dir()?); - println!("Pushing miri changes..."); - cmd!(sh, "git push {josh_url} HEAD:{branch}").run()?; - println!(); - - // Do a round-trip check to make sure the push worked as expected. - cmd!(sh, "git fetch {josh_url} {branch}").ignore_stderr().read()?; - let head = cmd!(sh, "git rev-parse HEAD").read()?; - let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; - if head != fetch_head { - bail!( - "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ - Expected {head}, got {fetch_head}." - ); - } - println!( - "Confirmed that the push round-trips back to Miri properly. Please create a rustc PR:" - ); - println!( - // Open PR with `subtree update` title to silence the `no-merges` triagebot check - // See https://github.com/rust-lang/rust/pull/114157 - " https://github.com/rust-lang/rust/compare/{github_user}:{branch}?quick_pull=1&title=Miri+subtree+update&body=r?+@ghost" - ); - - drop(josh); - Ok(()) - } - fn squash() -> Result<()> { let sh = Shell::new()?; sh.change_dir(miri_dir()?); @@ -757,8 +534,8 @@ impl Command { if ty.is_file() { name.ends_with(".rs") } else { - // dir or symlink. skip `target` and `.git`. - &name != "target" && &name != ".git" + // dir or symlink. 
skip `target`, `.git` and `genmc-src*` + &name != "target" && &name != ".git" && !name.starts_with("genmc-src") } }) .filter_ok(|item| item.file_type().is_file()) diff --git a/src/tools/miri/miri-script/src/main.rs b/src/tools/miri/miri-script/src/main.rs index e41df662ca2..761ec5979fa 100644 --- a/src/tools/miri/miri-script/src/main.rs +++ b/src/tools/miri/miri-script/src/main.rs @@ -142,25 +142,6 @@ pub enum Command { #[arg(trailing_var_arg = true, allow_hyphen_values = true)] flags: Vec<String>, }, - /// Pull and merge Miri changes from the rustc repo. - /// - /// The fetched commit is stored in the `rust-version` file, so the next `./miri toolchain` will - /// install the rustc that just got pulled. - RustcPull { - /// The commit to fetch (default: latest rustc commit). - commit: Option<String>, - }, - /// Push Miri changes back to the rustc repo. - /// - /// This will pull a copy of the rustc history into the Miri repo, unless you set the RUSTC_GIT - /// env var to an existing clone of the rustc repo. - RustcPush { - /// The Github user that owns the rustc fork to which we should push. - github_user: String, - /// The branch to push to. - #[arg(default_value = "miri-sync")] - branch: String, - }, /// Squash the commits of the current feature branch into one. Squash, } @@ -184,8 +165,7 @@ impl Command { flags.extend(remainder); Ok(()) } - Self::Bench { .. } | Self::RustcPull { .. } | Self::RustcPush { .. } | Self::Squash => - bail!("unexpected \"--\" found in arguments"), + Self::Bench { .. } | Self::Squash => bail!("unexpected \"--\" found in arguments"), } } } diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index d734ec333a5..2178caf6396 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -6707bf0f59485cf054ac1095725df43220e4be20 +733dab558992d902d6d17576de1da768094e2cf3 diff --git a/src/tools/miri/src/bin/log/setup.rs b/src/tools/miri/src/bin/log/setup.rs index da0ba528b2c..a9392d010f8 100644 --- a/src/tools/miri/src/bin/log/setup.rs +++ b/src/tools/miri/src/bin/log/setup.rs @@ -60,7 +60,7 @@ fn init_logger_once(early_dcx: &EarlyDiagCtxt) { #[cfg(not(feature = "tracing"))] { crate::fatal_error!( - "fatal error: cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature" + "Cannot enable MIRI_TRACING since Miri was not built with the \"tracing\" feature" ); } diff --git a/src/tools/miri/src/bin/log/tracing_chrome.rs b/src/tools/miri/src/bin/log/tracing_chrome.rs index 5a96633c99e..3379816550c 100644 --- a/src/tools/miri/src/bin/log/tracing_chrome.rs +++ b/src/tools/miri/src/bin/log/tracing_chrome.rs @@ -12,6 +12,7 @@ //! ```rust //! tracing::info_span!("my_span", tracing_separate_thread = tracing::field::Empty, /* ... */) //! ``` +//! - use i64 instead of u64 for the "id" in [ChromeLayer::get_root_id] to be compatible with Perfetto //! //! Depending on the tracing-chrome crate from crates.io is unfortunately not possible, since it //! 
depends on `tracing_core` which conflicts with rustc_private's `tracing_core` (meaning it would @@ -285,9 +286,9 @@ struct Callsite { } enum Message { - Enter(f64, Callsite, Option<u64>), + Enter(f64, Callsite, Option<i64>), Event(f64, Callsite), - Exit(f64, Callsite, Option<u64>), + Exit(f64, Callsite, Option<i64>), NewThread(usize, String), Flush, Drop, @@ -519,14 +520,17 @@ where } } - fn get_root_id(&self, span: SpanRef<S>) -> Option<u64> { + fn get_root_id(&self, span: SpanRef<S>) -> Option<i64> { + // Returns `Option<i64>` instead of `Option<u64>` because apparently Perfetto gives an + // error if an id does not fit in a 64-bit signed integer in 2's complement. We cast the + // span id from `u64` to `i64` with wraparound, since negative values are fine. match self.trace_style { TraceStyle::Threaded => { if span.fields().field("tracing_separate_thread").is_some() { // assign an independent "id" to spans with argument "tracing_separate_thread", // so they appear a separate trace line in trace visualization tools, see // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#heading=h.jh64i9l3vwa1 - Some(span.id().into_u64()) + Some(span.id().into_u64().cast_signed()) // the comment above explains the cast } else { None } @@ -539,6 +543,7 @@ where .unwrap_or(span) .id() .into_u64() + .cast_signed() // the comment above explains the cast ), } } diff --git a/src/tools/miri/src/bin/miri.rs b/src/tools/miri/src/bin/miri.rs index 89fa980ff64..ae1b25f8857 100644 --- a/src/tools/miri/src/bin/miri.rs +++ b/src/tools/miri/src/bin/miri.rs @@ -67,8 +67,6 @@ use crate::log::setup::{deinit_loggers, init_early_loggers, init_late_loggers}; struct MiriCompilerCalls { miri_config: Option<MiriConfig>, many_seeds: Option<ManySeedsConfig>, - /// Settings for using GenMC with Miri. - genmc_config: Option<GenmcConfig>, } struct ManySeedsConfig { @@ -77,12 +75,8 @@ struct ManySeedsConfig { } impl MiriCompilerCalls { - fn new( - miri_config: MiriConfig, - many_seeds: Option<ManySeedsConfig>, - genmc_config: Option<GenmcConfig>, - ) -> Self { - Self { miri_config: Some(miri_config), many_seeds, genmc_config } + fn new(miri_config: MiriConfig, many_seeds: Option<ManySeedsConfig>) -> Self { + Self { miri_config: Some(miri_config), many_seeds } } } @@ -192,8 +186,8 @@ impl rustc_driver::Callbacks for MiriCompilerCalls { optimizations is usually marginal at best."); } - if let Some(genmc_config) = &self.genmc_config { - let _genmc_ctx = Rc::new(GenmcCtx::new(&config, genmc_config)); + if let Some(_genmc_config) = &config.genmc_config { + let _genmc_ctx = Rc::new(GenmcCtx::new(&config)); todo!("GenMC mode not yet implemented"); }; @@ -487,7 +481,6 @@ fn main() { let mut many_seeds_keep_going = false; let mut miri_config = MiriConfig::default(); miri_config.env = env_snapshot; - let mut genmc_config = None; let mut rustc_args = vec![]; let mut after_dashdash = false; @@ -603,9 +596,9 @@ fn main() { } else if arg == "-Zmiri-many-seeds-keep-going" { many_seeds_keep_going = true; } else if let Some(trimmed_arg) = arg.strip_prefix("-Zmiri-genmc") { - // FIXME(GenMC): Currently, GenMC mode is incompatible with aliasing model checking. 
- miri_config.borrow_tracker = None; - GenmcConfig::parse_arg(&mut genmc_config, trimmed_arg); + if let Err(msg) = GenmcConfig::parse_arg(&mut miri_config.genmc_config, trimmed_arg) { + fatal_error!("{msg}"); + } } else if let Some(param) = arg.strip_prefix("-Zmiri-env-forward=") { miri_config.forwarded_env_vars.push(param.to_owned()); } else if let Some(param) = arg.strip_prefix("-Zmiri-env-set=") { @@ -740,13 +733,18 @@ fn main() { many_seeds.map(|seeds| ManySeedsConfig { seeds, keep_going: many_seeds_keep_going }); // Validate settings for data race detection and GenMC mode. - assert_eq!(genmc_config.is_some(), miri_config.genmc_mode); - if genmc_config.is_some() { + if miri_config.genmc_config.is_some() { if !miri_config.data_race_detector { fatal_error!("Cannot disable data race detection in GenMC mode (currently)"); } else if !miri_config.weak_memory_emulation { fatal_error!("Cannot disable weak memory emulation in GenMC mode"); } + if miri_config.borrow_tracker.is_some() { + eprintln!( + "warning: borrow tracking has been disabled, it is not (yet) supported in GenMC mode." + ); + miri_config.borrow_tracker = None; + } } else if miri_config.weak_memory_emulation && !miri_config.data_race_detector { fatal_error!( "Weak memory emulation cannot be enabled when the data race detector is disabled" @@ -765,8 +763,5 @@ fn main() { ); } } - run_compiler_and_exit( - &rustc_args, - &mut MiriCompilerCalls::new(miri_config, many_seeds, genmc_config), - ) + run_compiler_and_exit(&rustc_args, &mut MiriCompilerCalls::new(miri_config, many_seeds)) } diff --git a/src/tools/miri/src/concurrency/genmc/config.rs b/src/tools/miri/src/concurrency/genmc/config.rs index f91211a670f..c56adab90fe 100644 --- a/src/tools/miri/src/concurrency/genmc/config.rs +++ b/src/tools/miri/src/concurrency/genmc/config.rs @@ -1,19 +1,35 @@ -use crate::MiriConfig; +use super::GenmcParams; +/// Configuration for GenMC mode. +/// The `params` field is shared with the C++ side. +/// The remaining options are kept on the Rust side. #[derive(Debug, Default, Clone)] pub struct GenmcConfig { - // TODO: add fields + pub(super) params: GenmcParams, + do_estimation: bool, + // FIXME(GenMC): add remaining options. } impl GenmcConfig { /// Function for parsing command line options for GenMC mode. + /// /// All GenMC arguments start with the string "-Zmiri-genmc". + /// Passing any GenMC argument will enable GenMC mode. /// - /// `trimmed_arg` should be the argument to be parsed, with the suffix "-Zmiri-genmc" removed - pub fn parse_arg(genmc_config: &mut Option<GenmcConfig>, trimmed_arg: &str) { + /// `trimmed_arg` should be the argument to be parsed, with the suffix "-Zmiri-genmc" removed. + pub fn parse_arg( + genmc_config: &mut Option<GenmcConfig>, + trimmed_arg: &str, + ) -> Result<(), String> { + // FIXME(genmc): Ensure host == target somewhere. + if genmc_config.is_none() { *genmc_config = Some(Default::default()); } - todo!("implement parsing of GenMC options") + if trimmed_arg.is_empty() { + return Ok(()); // this corresponds to "-Zmiri-genmc" + } + // FIXME(GenMC): implement remaining parameters. 
+        todo!();
    }
}
diff --git a/src/tools/miri/src/concurrency/genmc/dummy.rs b/src/tools/miri/src/concurrency/genmc/dummy.rs
index 3d0558fb685..79d27c4be15 100644
--- a/src/tools/miri/src/concurrency/genmc/dummy.rs
+++ b/src/tools/miri/src/concurrency/genmc/dummy.rs
@@ -16,7 +16,7 @@ pub struct GenmcCtx {}
pub struct GenmcConfig {}

impl GenmcCtx {
-    pub fn new(_miri_config: &MiriConfig, _genmc_config: &GenmcConfig) -> Self {
+    pub fn new(_miri_config: &MiriConfig) -> Self {
        unreachable!()
    }

@@ -227,10 +227,15 @@ impl VisitProvenance for GenmcCtx {
}

impl GenmcConfig {
-    pub fn parse_arg(_genmc_config: &mut Option<GenmcConfig>, trimmed_arg: &str) {
-        unimplemented!(
-            "GenMC feature im Miri is disabled, cannot handle argument: \"-Zmiri-genmc{trimmed_arg}\""
-        );
+    pub fn parse_arg(
+        _genmc_config: &mut Option<GenmcConfig>,
+        trimmed_arg: &str,
+    ) -> Result<(), String> {
+        if cfg!(feature = "genmc") {
+            Err(format!("GenMC is not supported on this target"))
+        } else {
+            Err(format!("GenMC is disabled in this build of Miri"))
+        }
    }

    pub fn should_print_graph(&self, _rep: usize) -> bool {
diff --git a/src/tools/miri/src/concurrency/genmc/mod.rs b/src/tools/miri/src/concurrency/genmc/mod.rs
index 0dfd4b9b80f..3617775e27e 100644
--- a/src/tools/miri/src/concurrency/genmc/mod.rs
+++ b/src/tools/miri/src/concurrency/genmc/mod.rs
@@ -2,6 +2,7 @@

use std::cell::Cell;

+use genmc_sys::{GenmcParams, createGenmcHandle};
use rustc_abi::{Align, Size};
use rustc_const_eval::interpret::{InterpCx, InterpResult, interp_ok};
use rustc_middle::mir;
@@ -24,9 +25,19 @@ pub struct GenmcCtx {

impl GenmcCtx {
    /// Create a new `GenmcCtx` from a given config.
-    pub fn new(miri_config: &MiriConfig, genmc_config: &GenmcConfig) -> Self {
-        assert!(miri_config.genmc_mode);
-        todo!()
+    pub fn new(miri_config: &MiriConfig) -> Self {
+        let genmc_config = miri_config.genmc_config.as_ref().unwrap();
+
+        let handle = createGenmcHandle(&genmc_config.params);
+        assert!(!handle.is_null());
+
+        eprintln!("Miri: GenMC handle creation successful!");
+
+        drop(handle);
+        eprintln!("Miri: Dropping GenMC handle successful!");
+
+        // FIXME(GenMC): implement
+        std::process::exit(0);
    }

    pub fn get_stuck_execution_count(&self) -> usize {
diff --git a/src/tools/miri/src/concurrency/mod.rs b/src/tools/miri/src/concurrency/mod.rs
index c2ea8a00dec..435615efd9f 100644
--- a/src/tools/miri/src/concurrency/mod.rs
+++ b/src/tools/miri/src/concurrency/mod.rs
@@ -8,7 +8,17 @@ mod vector_clock;
pub mod weak_memory;

// Import either the real genmc adapter or a dummy module.
-#[cfg_attr(not(feature = "genmc"), path = "genmc/dummy.rs")]
+// On unsupported platforms, we always include the dummy module, even if the `genmc` feature is enabled.
+// FIXME(genmc,macos): Add `target_os = "macos"` once `https://github.com/dtolnay/cxx/issues/1535` is fixed.
+#[cfg_attr(
+    not(all(
+        feature = "genmc",
+        target_os = "linux",
+        target_pointer_width = "64",
+        target_endian = "little"
+    )),
+    path = "genmc/dummy.rs"
+)]
mod genmc;

pub use self::data_race_handler::{AllocDataRaceHandler, GlobalDataRaceHandler};
diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs
index 878afdf2517..fe1ef86ccd3 100644
--- a/src/tools/miri/src/concurrency/thread.rs
+++ b/src/tools/miri/src/concurrency/thread.rs
@@ -375,7 +375,7 @@ impl Timeout {
}

/// The clock to use for the timeout you are asking for.
-#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, PartialEq)] pub enum TimeoutClock { Monotonic, RealTime, diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index 3c80e60b772..4c531a8d1f5 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -125,8 +125,8 @@ pub struct MiriConfig { pub data_race_detector: bool, /// Determine if weak memory emulation should be enabled. Requires data race detection to be enabled. pub weak_memory_emulation: bool, - /// Determine if we are running in GenMC mode. In this mode, Miri will explore multiple concurrent executions of the given program. - pub genmc_mode: bool, + /// Determine if we are running in GenMC mode and with which settings. In GenMC mode, Miri will explore multiple concurrent executions of the given program. + pub genmc_config: Option<GenmcConfig>, /// Track when an outdated (weak memory) load happens. pub track_outdated_loads: bool, /// Rate of spurious failures for compare_exchange_weak atomic operations, @@ -192,7 +192,7 @@ impl Default for MiriConfig { track_alloc_accesses: false, data_race_detector: true, weak_memory_emulation: true, - genmc_mode: false, + genmc_config: None, track_outdated_loads: false, cmpxchg_weak_failure_rate: 0.8, // 80% measureme_out: None, @@ -334,8 +334,8 @@ pub fn create_ecx<'tcx>( helpers::try_resolve_path(tcx, &["core", "ascii", "escape_default"], Namespace::ValueNS); if !matches!(sentinel, Some(s) if tcx.is_mir_available(s.def.def_id())) { tcx.dcx().fatal( - "the current sysroot was built without `-Zalways-encode-mir`, or libcore seems missing. \ - Use `cargo miri setup` to prepare a sysroot that is suitable for Miri." + "the current sysroot was built without `-Zalways-encode-mir`, or libcore seems missing.\n\ + Note that directly invoking the `miri` binary is not supported; please use `cargo miri` instead." ); } diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index ccfff7fa94b..ab7e35710d3 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -3,7 +3,7 @@ use std::time::Duration; use std::{cmp, iter}; use rand::RngCore; -use rustc_abi::{Align, CanonAbi, ExternAbi, FieldIdx, FieldsShape, Size, Variants}; +use rustc_abi::{Align, ExternAbi, FieldIdx, FieldsShape, Size, Variants}; use rustc_apfloat::Float; use rustc_apfloat::ieee::{Double, Half, Quad, Single}; use rustc_hir::Safety; @@ -14,11 +14,10 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::exported_symbols::ExportedSymbol; use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout}; -use rustc_middle::ty::{self, Binder, FloatTy, FnSig, IntTy, Ty, TyCtxt, UintTy}; +use rustc_middle::ty::{self, FloatTy, IntTy, Ty, TyCtxt, UintTy}; use rustc_session::config::CrateType; use rustc_span::{Span, Symbol}; use rustc_symbol_mangling::mangle_internal_symbol; -use rustc_target::callconv::FnAbi; use crate::*; @@ -437,7 +436,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// For now, arguments must be scalars (so that the caller does not have to know the layout). /// /// If you do not provide a return place, a dangling zero-sized place will be created - /// for your convenience. + /// for your convenience. This is only valid if the return type is `()`. 
fn call_function( &mut self, f: ty::Instance<'tcx>, @@ -452,7 +451,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mir = this.load_mir(f.def, None)?; let dest = match dest { Some(dest) => dest.clone(), - None => MPlaceTy::fake_alloc_zst(this.layout_of(mir.return_ty())?), + None => MPlaceTy::fake_alloc_zst(this.machine.layouts.unit), }; // Construct a function pointer type representing the caller perspective. @@ -465,6 +464,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ); let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?; + // This will also show proper errors if there is any ABI mismatch. this.init_stack_frame( f, mir, @@ -929,21 +929,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { self.read_c_str_with_char_size(ptr, wchar_t.size, wchar_t.align.abi) } - /// Check that the calling convention is what we expect. - fn check_callconv<'a>( - &self, - fn_abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - ) -> InterpResult<'a, ()> { - if fn_abi.conv != exp_abi { - throw_ub_format!( - r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, - fn_abi.conv - ); - } - interp_ok(()) - } - fn frame_in_std(&self) -> bool { let this = self.eval_context_ref(); let frame = this.frame(); @@ -967,161 +952,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { crate_name == "std" || crate_name == "std_miri_test" } - fn check_abi_and_shim_symbol_clash( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - ) -> InterpResult<'tcx, ()> { - self.check_callconv(abi, exp_abi)?; - if let Some((body, instance)) = self.eval_context_mut().lookup_exported_symbol(link_name)? { - // If compiler-builtins is providing the symbol, then don't treat it as a clash. - // We'll use our built-in implementation in `emulate_foreign_item_inner` for increased - // performance. Note that this means we won't catch any undefined behavior in - // compiler-builtins when running other crates, but Miri can still be run on - // compiler-builtins itself (or any crate that uses it as a normal dependency) - if self.eval_context_ref().tcx.is_compiler_builtins(instance.def_id().krate) { - return interp_ok(()); - } - - throw_machine_stop!(TerminationInfo::SymbolShimClashing { - link_name, - span: body.span.data(), - }) - } - interp_ok(()) - } - - fn check_shim<'a, const N: usize>( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?; - - if abi.c_variadic { - throw_ub_format!( - "calling a non-variadic function with a variadic caller-side signature" - ); - } - if let Ok(ops) = args.try_into() { - return interp_ok(ops); - } - throw_ub_format!( - "incorrect number of arguments for `{link_name}`: got {}, expected {}", - args.len(), - N - ) - } - - /// Check that the given `caller_fn_abi` matches the expected ABI described by - /// `callee_abi`, `callee_input_tys`, `callee_output_ty`, and then returns the list of - /// arguments. 
- fn check_shim_abi<'a, const N: usize>( - &mut self, - link_name: Symbol, - caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>, - callee_abi: ExternAbi, - callee_input_tys: [Ty<'tcx>; N], - callee_output_ty: Ty<'tcx>, - caller_args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - let this = self.eval_context_mut(); - let mut inputs_and_output = callee_input_tys.to_vec(); - inputs_and_output.push(callee_output_ty); - let fn_sig_binder = Binder::dummy(FnSig { - inputs_and_output: this.machine.tcx.mk_type_list(&inputs_and_output), - c_variadic: false, - // This does not matter for the ABI. - safety: Safety::Safe, - abi: callee_abi, - }); - let callee_fn_abi = this.fn_abi_of_fn_ptr(fn_sig_binder, Default::default())?; - - this.check_abi_and_shim_symbol_clash(caller_fn_abi, callee_fn_abi.conv, link_name)?; - - if caller_fn_abi.c_variadic { - throw_ub_format!( - "ABI mismatch: calling a non-variadic function with a variadic caller-side signature" - ); - } - - if callee_fn_abi.fixed_count != caller_fn_abi.fixed_count { - throw_ub_format!( - "ABI mismatch: expected {} arguments, found {} arguments ", - callee_fn_abi.fixed_count, - caller_fn_abi.fixed_count - ); - } - - if callee_fn_abi.can_unwind && !caller_fn_abi.can_unwind { - throw_ub_format!( - "ABI mismatch: callee may unwind, but caller-side signature prohibits unwinding", - ); - } - - if !this.check_argument_compat(&caller_fn_abi.ret, &callee_fn_abi.ret)? { - throw_ub!(AbiMismatchReturn { - caller_ty: caller_fn_abi.ret.layout.ty, - callee_ty: callee_fn_abi.ret.layout.ty - }); - } - - if let Some(index) = caller_fn_abi - .args - .iter() - .zip(callee_fn_abi.args.iter()) - .map(|(caller_arg, callee_arg)| this.check_argument_compat(caller_arg, callee_arg)) - .collect::<InterpResult<'tcx, Vec<bool>>>()? - .into_iter() - .position(|b| !b) - { - throw_ub!(AbiMismatchArgument { - caller_ty: caller_fn_abi.args[index].layout.ty, - callee_ty: callee_fn_abi.args[index].layout.ty - }); - } - - if let Ok(ops) = caller_args.try_into() { - return interp_ok(ops); - } - unreachable!() - } - - /// Check shim for variadic function. - /// Returns a tuple that consisting of an array of fixed args, and a slice of varargs. - fn check_shim_variadic<'a, const N: usize>( - &mut self, - abi: &FnAbi<'tcx, Ty<'tcx>>, - exp_abi: CanonAbi, - link_name: Symbol, - args: &'a [OpTy<'tcx>], - ) -> InterpResult<'tcx, (&'a [OpTy<'tcx>; N], &'a [OpTy<'tcx>])> - where - &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, - { - self.check_abi_and_shim_symbol_clash(abi, exp_abi, link_name)?; - - if !abi.c_variadic { - throw_ub_format!( - "calling a variadic function with a non-variadic caller-side signature" - ); - } - if abi.fixed_count != u32::try_from(N).unwrap() { - throw_ub_format!( - "incorrect number of fixed arguments for variadic function `{}`: got {}, expected {N}", - link_name.as_str(), - abi.fixed_count - ) - } - if let Some(args) = args.split_first_chunk() { - return interp_ok(args); - } - panic!("mismatch between signature and `args` slice"); - } - /// Mark a machine allocation that was just created as immutable. fn mark_immutable(&mut self, mplace: &MPlaceTy<'tcx>) { let this = self.eval_context_mut(); @@ -1257,8 +1087,21 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "failed to evaluate static in required link_section: {def_id:?}\n{err:?}" ) }); - let val = this.read_immediate(&const_val)?; - array.push(val); + match const_val.layout.ty.kind() { + ty::FnPtr(..) 
=> { + array.push(this.read_immediate(&const_val)?); + } + ty::Array(elem_ty, _) if matches!(elem_ty.kind(), ty::FnPtr(..)) => { + let mut elems = this.project_array_fields(&const_val)?; + while let Some((_idx, elem)) = elems.next(this)? { + array.push(this.read_immediate(&elem)?); + } + } + _ => + throw_unsup_format!( + "only function pointers and arrays of function pointers are supported in well-known linker sections" + ), + } } interp_ok(()) })?; @@ -1317,39 +1160,6 @@ impl<'tcx> MiriMachine<'tcx> { } } -/// Check that the number of args is what we expect. -pub fn check_intrinsic_arg_count<'a, 'tcx, const N: usize>( - args: &'a [OpTy<'tcx>], -) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> -where - &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, -{ - if let Ok(ops) = args.try_into() { - return interp_ok(ops); - } - throw_ub_format!( - "incorrect number of arguments for intrinsic: got {}, expected {}", - args.len(), - N - ) -} - -/// Check that the number of varargs is at least the minimum what we expect. -/// Fixed args should not be included. -pub fn check_min_vararg_count<'a, 'tcx, const N: usize>( - name: &'a str, - args: &'a [OpTy<'tcx>], -) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { - if let Some((ops, _)) = args.split_first_chunk() { - return interp_ok(ops); - } - throw_ub_format!( - "not enough variadic arguments for `{name}`: got {}, expected at least {}", - args.len(), - N - ) -} - pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> { throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!( "{name} not available when isolation is enabled", @@ -1465,7 +1275,7 @@ pub struct MaybeEnteredTraceSpan { #[macro_export] macro_rules! enter_trace_span { ($name:ident :: $subname:ident $($tt:tt)*) => {{ - enter_trace_span!(stringify!($name), $name = %stringify!(subname) $($tt)*) + enter_trace_span!(stringify!($name), $name = %stringify!($subname) $($tt)*) }}; ($($tt:tt)*) => { diff --git a/src/tools/miri/src/intrinsics/atomic.rs b/src/tools/miri/src/intrinsics/atomic.rs index 0a59a707a10..bcc3e9ec885 100644 --- a/src/tools/miri/src/intrinsics/atomic.rs +++ b/src/tools/miri/src/intrinsics/atomic.rs @@ -2,7 +2,7 @@ use rustc_middle::mir::BinOp; use rustc_middle::ty::AtomicOrdering; use rustc_middle::{mir, ty}; -use self::helpers::check_intrinsic_arg_count; +use super::check_intrinsic_arg_count; use crate::*; pub enum AtomicOp { diff --git a/src/tools/miri/src/intrinsics/mod.rs b/src/tools/miri/src/intrinsics/mod.rs index 4efa7dd4dcf..b5e81460773 100644 --- a/src/tools/miri/src/intrinsics/mod.rs +++ b/src/tools/miri/src/intrinsics/mod.rs @@ -14,11 +14,28 @@ use rustc_middle::ty::{self, FloatTy, ScalarInt}; use rustc_span::{Symbol, sym}; use self::atomic::EvalContextExt as _; -use self::helpers::{ToHost, ToSoft, check_intrinsic_arg_count}; +use self::helpers::{ToHost, ToSoft}; use self::simd::EvalContextExt as _; use crate::math::{IeeeExt, apply_random_float_error_ulp}; use crate::*; +/// Check that the number of args is what we expect. 
+fn check_intrinsic_arg_count<'a, 'tcx, const N: usize>( + args: &'a [OpTy<'tcx>], +) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> +where + &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, +{ + if let Ok(ops) = args.try_into() { + return interp_ok(ops); + } + throw_ub_format!( + "incorrect number of arguments for intrinsic: got {}, expected {}", + args.len(), + N + ) +} + impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { fn call_intrinsic( @@ -114,7 +131,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )); } "catch_unwind" => { - this.handle_catch_unwind(args, dest, ret)?; + let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; + this.handle_catch_unwind(try_fn, data, catch_fn, dest, ret)?; // This pushed a stack frame, don't jump to `ret`. return interp_ok(EmulateItemResult::AlreadyJumped); } diff --git a/src/tools/miri/src/intrinsics/simd.rs b/src/tools/miri/src/intrinsics/simd.rs index e63992aa95f..b26516c0ff0 100644 --- a/src/tools/miri/src/intrinsics/simd.rs +++ b/src/tools/miri/src/intrinsics/simd.rs @@ -6,9 +6,8 @@ use rustc_middle::ty::FloatTy; use rustc_middle::{mir, ty}; use rustc_span::{Symbol, sym}; -use crate::helpers::{ - ToHost, ToSoft, bool_to_simd_element, check_intrinsic_arg_count, simd_element_to_bool, -}; +use super::check_intrinsic_arg_count; +use crate::helpers::{ToHost, ToSoft, bool_to_simd_element, simd_element_to_bool}; use crate::*; #[derive(Copy, Clone)] diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index ae70257653c..507d4f7b428 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -7,6 +7,7 @@ #![feature(never_type)] #![feature(try_blocks)] #![feature(io_error_more)] +#![feature(if_let_guard)] #![feature(variant_count)] #![feature(yeet_expr)] #![feature(nonzero_ops)] @@ -158,6 +159,7 @@ pub use crate::shims::foreign_items::{DynSym, EvalContextExt as _}; pub use crate::shims::io_error::{EvalContextExt as _, IoError, LibcError}; pub use crate::shims::os_str::EvalContextExt as _; pub use crate::shims::panic::EvalContextExt as _; +pub use crate::shims::sig::EvalContextExt as _; pub use crate::shims::time::EvalContextExt as _; pub use crate::shims::tls::TlsData; pub use crate::shims::unwind::{CatchUnwindData, EvalContextExt as _}; diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index ce33c870b4b..8f0814a070c 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -4,7 +4,6 @@ use std::any::Any; use std::borrow::Cow; use std::cell::{Cell, RefCell}; -use std::collections::hash_map::Entry; use std::path::Path; use std::rc::Rc; use std::{fmt, process}; @@ -70,12 +69,6 @@ pub struct FrameExtra<'tcx> { /// This is used by `MiriMachine::current_span` and `MiriMachine::caller_span` pub is_user_relevant: bool, - /// We have a cache for the mapping from [`mir::Const`] to resulting [`AllocId`]. - /// However, we don't want all frames to always get the same result, so we insert - /// an additional bit of "salt" into the cache key. This salt is fixed per-frame - /// so that within a call, a const will have a stable address. - salt: usize, - /// Data race detector per-frame data. pub data_race: Option<data_race::FrameState>, } @@ -83,19 +76,12 @@ pub struct FrameExtra<'tcx> { impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { // Omitting `timing`, it does not support `Debug`. 
- let FrameExtra { - borrow_tracker, - catch_unwind, - timing: _, - is_user_relevant, - salt, - data_race, - } = self; + let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } = + self; f.debug_struct("FrameData") .field("borrow_tracker", borrow_tracker) .field("catch_unwind", catch_unwind) .field("is_user_relevant", is_user_relevant) - .field("salt", salt) .field("data_race", data_race) .finish() } @@ -108,7 +94,6 @@ impl VisitProvenance for FrameExtra<'_> { borrow_tracker, timing: _, is_user_relevant: _, - salt: _, data_race: _, } = self; @@ -578,11 +563,6 @@ pub struct MiriMachine<'tcx> { /// diagnostics. pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>, - /// Maps MIR consts to their evaluated result. We combine the const with a "salt" (`usize`) - /// that is fixed per stack frame; this lets us have sometimes different results for the - /// same const while ensuring consistent results within a single call. - const_cache: RefCell<FxHashMap<(mir::Const<'tcx>, usize), OpTy<'tcx>>>, - /// For each allocation, an offset inside that allocation that was deemed aligned even for /// symbolic alignment checks. This cannot be stored in `AllocExtra` since it needs to be /// tracked for vtables and function allocations as well as regular allocations. @@ -622,6 +602,9 @@ pub struct MiriMachine<'tcx> { } impl<'tcx> MiriMachine<'tcx> { + /// Create a new MiriMachine. + /// + /// Invariant: `genmc_ctx.is_some() == config.genmc_config.is_some()` pub(crate) fn new( config: &MiriConfig, layout_cx: LayoutCx<'tcx>, @@ -645,7 +628,7 @@ impl<'tcx> MiriMachine<'tcx> { }); let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0)); let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config)); - let data_race = if config.genmc_mode { + let data_race = if config.genmc_config.is_some() { // `genmc_ctx` persists across executions, so we don't create a new one here. 
GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap()) } else if config.data_race_detector { @@ -764,7 +747,6 @@ impl<'tcx> MiriMachine<'tcx> { stack_size, collect_leak_backtraces: config.collect_leak_backtraces, allocation_spans: RefCell::new(FxHashMap::default()), - const_cache: RefCell::new(FxHashMap::default()), symbolic_alignment: RefCell::new(FxHashMap::default()), union_data_ranges: FxHashMap::default(), pthread_mutex_sanity: Cell::new(false), @@ -941,7 +923,6 @@ impl VisitProvenance for MiriMachine<'_> { stack_size: _, collect_leak_backtraces: _, allocation_spans: _, - const_cache: _, symbolic_alignment: _, union_data_ranges: _, pthread_mutex_sanity: _, @@ -1578,7 +1559,6 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { catch_unwind: None, timing, is_user_relevant: ecx.machine.is_user_relevant(&frame), - salt: ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL), data_race: ecx .machine .data_race @@ -1737,33 +1717,6 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { interp_ok(()) } - fn eval_mir_constant<F>( - ecx: &InterpCx<'tcx, Self>, - val: mir::Const<'tcx>, - span: Span, - layout: Option<TyAndLayout<'tcx>>, - eval: F, - ) -> InterpResult<'tcx, OpTy<'tcx>> - where - F: Fn( - &InterpCx<'tcx, Self>, - mir::Const<'tcx>, - Span, - Option<TyAndLayout<'tcx>>, - ) -> InterpResult<'tcx, OpTy<'tcx>>, - { - let frame = ecx.active_thread_stack().last().unwrap(); - let mut cache = ecx.machine.const_cache.borrow_mut(); - match cache.entry((val, frame.extra.salt)) { - Entry::Vacant(ve) => { - let op = eval(ecx, val, span, layout)?; - ve.insert(op.clone()); - interp_ok(op) - } - Entry::Occupied(oe) => interp_ok(oe.get().clone()), - } - } - fn get_global_alloc_salt( ecx: &InterpCx<'tcx, Self>, instance: Option<ty::Instance<'tcx>>, diff --git a/src/tools/miri/src/shims/aarch64.rs b/src/tools/miri/src/shims/aarch64.rs index 44ad5081ad5..6e422b4ab71 100644 --- a/src/tools/miri/src/shims/aarch64.rs +++ b/src/tools/miri/src/shims/aarch64.rs @@ -20,7 +20,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let unprefixed_name = link_name.as_str().strip_prefix("llvm.aarch64.").unwrap(); match unprefixed_name { "isb" => { - let [arg] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [arg] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let arg = this.read_scalar(arg)?.to_i32()?; match arg { // SY ("full system scope") @@ -38,7 +38,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `left` input, the second half of the output from the `right` input. 
// https://developer.arm.com/architectures/instruction-sets/intrinsics/vpmaxq_u8 "neon.umaxp.v16i8" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; diff --git a/src/tools/miri/src/shims/backtrace.rs b/src/tools/miri/src/shims/backtrace.rs index 18d60915d20..bd3914b652a 100644 --- a/src/tools/miri/src/shims/backtrace.rs +++ b/src/tools/miri/src/shims/backtrace.rs @@ -15,7 +15,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let [flags] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [flags] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; if flags != 0 { @@ -37,7 +37,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let ptr_ty = this.machine.layouts.mut_raw_ptr.ty; let ptr_layout = this.layout_of(ptr_ty)?; - let [flags, buf] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [flags, buf] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; let buf_place = this.deref_pointer_as(buf, ptr_layout)?; @@ -117,7 +117,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { let this = self.eval_context_mut(); - let [ptr, flags] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, flags] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; @@ -195,7 +195,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let this = self.eval_context_mut(); let [ptr, flags, name_ptr, filename_ptr] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let flags = this.read_scalar(flags)?.to_u64()?; if flags != 0 { diff --git a/src/tools/miri/src/shims/files.rs b/src/tools/miri/src/shims/files.rs index 606d1ffbea6..0d4642c6ad0 100644 --- a/src/tools/miri/src/shims/files.rs +++ b/src/tools/miri/src/shims/files.rs @@ -1,7 +1,7 @@ use std::any::Any; use std::collections::BTreeMap; use std::fs::{File, Metadata}; -use std::io::{IsTerminal, Seek, SeekFrom, Write}; +use std::io::{ErrorKind, IsTerminal, Seek, SeekFrom, Write}; use std::marker::CoercePointee; use std::ops::Deref; use std::rc::{Rc, Weak}; @@ -167,6 +167,11 @@ pub trait FileDescription: std::fmt::Debug + FileDescriptionExt { throw_unsup_format!("cannot write to {}", self.name()); } + /// Determines whether this FD non-deterministically has its reads and writes shortened. + fn nondet_short_accesses(&self) -> bool { + true + } + /// Seeks to the given offset (which can be relative to the beginning, end, or current position). /// Returns the new position from the start of the stream. fn seek<'tcx>( @@ -334,6 +339,15 @@ impl FileDescription for FileHandle { ) -> InterpResult<'tcx> { assert!(communicate_allowed, "isolation should have prevented even opening a file"); + if !self.writable { + // Linux hosts return EBADF here which we can't translate via the platform-independent + // code since it does not map to any `io::ErrorKind` -- so if we don't do anything + // special, we'd throw an "unsupported error code" here. 
Windows returns something that + // gets translated to `PermissionDenied`. That seems like a good value so let's just use + // this everywhere, even if it means behavior on Unix targets does not match the real + // thing. + return finish.call(ecx, Err(ErrorKind::PermissionDenied.into())); + } let result = ecx.write_to_host(&self.file, len, ptr)?; finish.call(ecx, result) } diff --git a/src/tools/miri/src/shims/foreign_items.rs b/src/tools/miri/src/shims/foreign_items.rs index 94cda57658a..21545b68029 100644 --- a/src/tools/miri/src/shims/foreign_items.rs +++ b/src/tools/miri/src/shims/foreign_items.rs @@ -288,16 +288,17 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Miri-specific extern functions "miri_start_unwind" => { - let [payload] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [payload] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } "miri_run_provenance_gc" => { - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; this.run_provenance_gc(); } "miri_get_alloc_id" => { - let [ptr] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let (alloc_id, _, _) = this.ptr_get_alloc_id(ptr, 0).map_err_kind(|_e| { err_machine_stop!(TerminationInfo::Abort(format!( @@ -307,7 +308,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u64(alloc_id.0.get()), dest)?; } "miri_print_borrow_state" => { - let [id, show_unnamed] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [id, show_unnamed] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let id = this.read_scalar(id)?.to_u64()?; let show_unnamed = this.read_scalar(show_unnamed)?.to_bool()?; if let Some(id) = std::num::NonZero::new(id).map(AllocId) @@ -322,7 +324,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // This associates a name to a tag. Very useful for debugging, and also makes // tests more strict. 
let [ptr, nth_parent, name] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let nth_parent = this.read_scalar(nth_parent)?.to_u8()?; let name = this.read_immediate(name)?; @@ -335,7 +337,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.give_pointer_debug_name(ptr, nth_parent, &name)?; } "miri_static_root" => { - let [ptr] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let (alloc_id, offset, _) = this.ptr_get_alloc_id(ptr, 0)?; if offset != Size::ZERO { @@ -346,7 +348,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.machine.static_roots.push(alloc_id); } "miri_host_to_target_path" => { - let [ptr, out, out_size] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, out, out_size] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let out = this.read_pointer(out)?; let out_size = this.read_scalar(out_size)?.to_target_usize(this)?; @@ -382,7 +385,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Writes some bytes to the interpreter's stdout/stderr. See the // README for details. "miri_write_to_stdout" | "miri_write_to_stderr" => { - let [msg] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [msg] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let msg = this.read_immediate(msg)?; let msg = this.read_byte_slice(&msg)?; // Note: we're ignoring errors writing to host stdout/stderr. @@ -396,7 +399,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { "miri_promise_symbolic_alignment" => { use rustc_abi::AlignFromBytesError; - let [ptr, align] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [ptr, align] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let align = this.read_target_usize(align)?; if !align.is_power_of_two() { @@ -437,12 +441,12 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Aborting the process. 
"exit" => { - let [code] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [code] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let code = this.read_scalar(code)?.to_i32()?; throw_machine_stop!(TerminationInfo::Exit { code, leak_check: false }); } "abort" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; throw_machine_stop!(TerminationInfo::Abort( "the program aborted execution".to_owned() )) @@ -450,7 +454,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Standard C allocation "malloc" => { - let [size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [size] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let size = this.read_target_usize(size)?; if size <= this.max_size_of_val().bytes() { let res = this.malloc(size, AllocInit::Uninit)?; @@ -464,7 +468,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "calloc" => { - let [items, elem_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [items, elem_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let items = this.read_target_usize(items)?; let elem_size = this.read_target_usize(elem_size)?; if let Some(size) = this.compute_size_in_bytes(Size::from_bytes(elem_size), items) { @@ -479,12 +484,13 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "free" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; this.free(ptr)?; } "realloc" => { - let [old_ptr, new_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [old_ptr, new_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let old_ptr = this.read_pointer(old_ptr)?; let new_size = this.read_target_usize(new_size)?; if new_size <= this.max_size_of_val().bytes() { @@ -504,7 +510,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { let default = |ecx: &mut MiriInterpCx<'tcx>| { // Only call `check_shim` when `#[global_allocator]` isn't used. When that // macro is used, we act like no shim exists, so that the exported function can run. - let [size, align] = ecx.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [size, align] = + ecx.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let size = ecx.read_target_usize(size)?; let align = ecx.read_target_usize(align)?; @@ -537,7 +544,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.emulate_allocator(|this| { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. - let [size, align] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [size, align] = + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let size = this.read_target_usize(size)?; let align = this.read_target_usize(align)?; @@ -559,7 +567,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. 
let [ptr, old_size, align] = - ecx.check_shim(abi, CanonAbi::Rust, link_name, args)?; + ecx.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = ecx.read_pointer(ptr)?; let old_size = ecx.read_target_usize(old_size)?; let align = ecx.read_target_usize(align)?; @@ -590,7 +598,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // See the comment for `__rust_alloc` why `check_shim` is only called in the // default case. let [ptr, old_size, align, new_size] = - this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let ptr = this.read_pointer(ptr)?; let old_size = this.read_target_usize(old_size)?; let align = this.read_target_usize(align)?; @@ -613,20 +621,21 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } name if name == this.mangle_internal_symbol("__rust_no_alloc_shim_is_unstable_v2") => { // This is a no-op shim that only exists to prevent making the allocator shims instantly stable. - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; } name if name == this.mangle_internal_symbol("__rust_alloc_error_handler_should_panic_v2") => { // Gets the value of the `oom` option. - let [] = this.check_shim(abi, CanonAbi::Rust, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::Rust, link_name, args)?; let val = this.tcx.sess.opts.unstable_opts.oom.should_panic(); this.write_int(val, dest)?; } // C memory handling functions "memcmp" => { - let [left, right, n] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, n] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_pointer(left)?; let right = this.read_pointer(right)?; let n = Size::from_bytes(this.read_target_usize(n)?); @@ -650,7 +659,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_i32(result), dest)?; } "memrchr" => { - let [ptr, val, num] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, val, num] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let val = this.read_scalar(val)?.to_i32()?; let num = this.read_target_usize(num)?; @@ -676,7 +686,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "memchr" => { - let [ptr, val, num] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, val, num] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let val = this.read_scalar(val)?.to_i32()?; let num = this.read_target_usize(num)?; @@ -699,7 +710,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } } "strlen" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; // This reads at least 1 byte, so we are already enforcing that this is a valid pointer. let n = this.read_c_str(ptr)?.len(); @@ -709,7 +720,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "wcslen" => { - let [ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; // This reads at least 1 byte, so we are already enforcing that this is a valid pointer. 
let n = this.read_wchar_t_str(ptr)?.len(); @@ -719,7 +730,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "memcpy" => { - let [ptr_dest, ptr_src, n] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr_dest, ptr_src, n] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr_dest = this.read_pointer(ptr_dest)?; let ptr_src = this.read_pointer(ptr_src)?; let n = this.read_target_usize(n)?; @@ -733,7 +745,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(ptr_dest, dest)?; } "strcpy" => { - let [ptr_dest, ptr_src] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr_dest, ptr_src] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr_dest = this.read_pointer(ptr_dest)?; let ptr_src = this.read_pointer(ptr_src)?; @@ -764,7 +777,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "erff" | "erfcf" => { - let [f] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f = this.read_scalar(f)?.to_f32()?; // Using host floats (but it's fine, these operations do not have guaranteed precision). let f_host = f.to_host(); @@ -802,7 +815,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "atan2f" | "fdimf" => { - let [f1, f2] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f1, f2] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f1 = this.read_scalar(f1)?.to_f32()?; let f2 = this.read_scalar(f2)?.to_f32()?; // underscore case for windows, here and below @@ -841,7 +854,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "erf" | "erfc" => { - let [f] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f = this.read_scalar(f)?.to_f64()?; // Using host floats (but it's fine, these operations do not have guaranteed precision). let f_host = f.to_host(); @@ -879,7 +892,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "atan2" | "fdim" => { - let [f1, f2] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [f1, f2] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; let f1 = this.read_scalar(f1)?.to_f64()?; let f2 = this.read_scalar(f2)?.to_f64()?; // underscore case for windows, here and below @@ -908,7 +921,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { | "ldexp" | "scalbn" => { - let [x, exp] = this.check_shim(abi, CanonAbi::C , link_name, args)?; + let [x, exp] = this.check_shim_sig_lenient(abi, CanonAbi::C , link_name, args)?; // For radix-2 (binary) systems, `ldexp` and `scalbn` are the same. 
let x = this.read_scalar(x)?.to_f64()?; let exp = this.read_scalar(exp)?.to_i32()?; @@ -918,7 +931,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "lgammaf_r" => { - let [x, signp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [x, signp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let x = this.read_scalar(x)?.to_f32()?; let signp = this.deref_pointer_as(signp, this.machine.layouts.i32)?; @@ -934,7 +947,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "lgamma_r" => { - let [x, signp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [x, signp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let x = this.read_scalar(x)?.to_f64()?; let signp = this.deref_pointer_as(signp, this.machine.layouts.i32)?; @@ -952,7 +965,8 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // LLVM intrinsics "llvm.prefetch" => { - let [p, rw, loc, ty] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [p, rw, loc, ty] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let _ = this.read_pointer(p)?; let rw = this.read_scalar(rw)?.to_i32()?; @@ -979,7 +993,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the x86 `_mm{,256,512}_popcnt_epi{8,16,32,64}` and wasm // `{i,u}8x16_popcnt` functions. name if name.starts_with("llvm.ctpop.v") => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, op_len) = this.project_to_simd(op)?; let (dest, dest_len) = this.project_to_simd(dest)?; @@ -1015,7 +1029,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } // FIXME: Move this to an `arm` submodule. "llvm.arm.hint" if this.tcx.sess.target.arch == "arm" => { - let [arg] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [arg] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let arg = this.read_scalar(arg)?.to_i32()?; // Note that different arguments might have different target feature requirements. match arg { diff --git a/src/tools/miri/src/shims/mod.rs b/src/tools/miri/src/shims/mod.rs index 2a7709829ee..7f594d4fdd6 100644 --- a/src/tools/miri/src/shims/mod.rs +++ b/src/tools/miri/src/shims/mod.rs @@ -18,6 +18,7 @@ pub mod global_ctor; pub mod io_error; pub mod os_str; pub mod panic; +pub mod sig; pub mod time; pub mod tls; pub mod unwind; diff --git a/src/tools/miri/src/shims/sig.rs b/src/tools/miri/src/shims/sig.rs new file mode 100644 index 00000000000..bc5e7f584f5 --- /dev/null +++ b/src/tools/miri/src/shims/sig.rs @@ -0,0 +1,266 @@ +//! Everything related to checking the signature of shim invocations. + +use rustc_abi::{CanonAbi, ExternAbi}; +use rustc_hir::Safety; +use rustc_middle::ty::{Binder, FnSig, Ty}; +use rustc_span::Symbol; +use rustc_target::callconv::FnAbi; + +use crate::*; + +/// Describes the expected signature of a shim. +pub struct ShimSig<'tcx, const ARGS: usize> { + pub abi: ExternAbi, + pub args: [Ty<'tcx>; ARGS], + pub ret: Ty<'tcx>, +} + +/// Construct a `ShimSig` with convenient syntax: +/// ```rust,ignore +/// shim_sig!(this, extern "C" fn (*const T, i32) -> usize) +/// ``` +#[macro_export] +macro_rules! 
shim_sig { + (extern $abi:literal fn($($arg:ty),*) -> $ret:ty) => { + |this| $crate::shims::sig::ShimSig { + abi: std::str::FromStr::from_str($abi).expect("incorrect abi specified"), + args: [$(shim_sig_arg!(this, $arg)),*], + ret: shim_sig_arg!(this, $ret), + } + }; +} + +/// Helper for `shim_sig!`. +#[macro_export] +macro_rules! shim_sig_arg { + // Unfortunately we cannot take apart a `ty`-typed token at compile time, + // so we have to stringify it and match at runtime. + ($this:ident, $x:ty) => {{ + match stringify!($x) { + "i8" => $this.tcx.types.i8, + "i16" => $this.tcx.types.i16, + "i32" => $this.tcx.types.i32, + "i64" => $this.tcx.types.i64, + "i128" => $this.tcx.types.i128, + "isize" => $this.tcx.types.isize, + "u8" => $this.tcx.types.u8, + "u16" => $this.tcx.types.u16, + "u32" => $this.tcx.types.u32, + "u64" => $this.tcx.types.u64, + "u128" => $this.tcx.types.u128, + "usize" => $this.tcx.types.usize, + "()" => $this.tcx.types.unit, + "*const _" => $this.machine.layouts.const_raw_ptr.ty, + "*mut _" => $this.machine.layouts.mut_raw_ptr.ty, + ty if let Some(libc_ty) = ty.strip_prefix("libc::") => $this.libc_ty_layout(libc_ty).ty, + ty => panic!("unsupported signature type {ty:?}"), + } + }}; +} + +/// Helper function to compare two ABIs. +fn check_shim_abi<'tcx>( + this: &MiriInterpCx<'tcx>, + callee_abi: &FnAbi<'tcx, Ty<'tcx>>, + caller_abi: &FnAbi<'tcx, Ty<'tcx>>, +) -> InterpResult<'tcx> { + if callee_abi.conv != caller_abi.conv { + throw_ub_format!( + r#"calling a function with calling convention "{callee}" using caller calling convention "{caller}""#, + callee = callee_abi.conv, + caller = caller_abi.conv, + ); + } + if callee_abi.can_unwind && !caller_abi.can_unwind { + throw_ub_format!( + "ABI mismatch: callee may unwind, but caller-side signature prohibits unwinding", + ); + } + if caller_abi.c_variadic && !callee_abi.c_variadic { + throw_ub_format!( + "ABI mismatch: calling a non-variadic function with a variadic caller-side signature" + ); + } + if !caller_abi.c_variadic && callee_abi.c_variadic { + throw_ub_format!( + "ABI mismatch: calling a variadic function with a non-variadic caller-side signature" + ); + } + + if callee_abi.fixed_count != caller_abi.fixed_count { + throw_ub_format!( + "ABI mismatch: expected {} arguments, found {} arguments ", + callee_abi.fixed_count, + caller_abi.fixed_count + ); + } + + if !this.check_argument_compat(&caller_abi.ret, &callee_abi.ret)? { + throw_ub!(AbiMismatchReturn { + caller_ty: caller_abi.ret.layout.ty, + callee_ty: callee_abi.ret.layout.ty + }); + } + + for (idx, (caller_arg, callee_arg)) in + caller_abi.args.iter().zip(callee_abi.args.iter()).enumerate() + { + if !this.check_argument_compat(caller_arg, callee_arg)? { + throw_ub!(AbiMismatchArgument { + arg_idx: idx, + caller_ty: caller_abi.args[idx].layout.ty, + callee_ty: callee_abi.args[idx].layout.ty + }); + } + } + + interp_ok(()) +} + +fn check_shim_symbol_clash<'tcx>( + this: &mut MiriInterpCx<'tcx>, + link_name: Symbol, +) -> InterpResult<'tcx, ()> { + if let Some((body, instance)) = this.lookup_exported_symbol(link_name)? { + // If compiler-builtins is providing the symbol, then don't treat it as a clash. + // We'll use our built-in implementation in `emulate_foreign_item_inner` for increased + // performance.
Note that this means we won't catch any undefined behavior in + // compiler-builtins when running other crates, but Miri can still be run on + // compiler-builtins itself (or any crate that uses it as a normal dependency) + if this.tcx.is_compiler_builtins(instance.def_id().krate) { + return interp_ok(()); + } + + throw_machine_stop!(TerminationInfo::SymbolShimClashing { + link_name, + span: body.span.data(), + }) + } + interp_ok(()) +} + +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + fn check_shim_sig_lenient<'a, const N: usize>( + &mut self, + abi: &FnAbi<'tcx, Ty<'tcx>>, + exp_abi: CanonAbi, + link_name: Symbol, + args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + let this = self.eval_context_mut(); + check_shim_symbol_clash(this, link_name)?; + + if abi.conv != exp_abi { + throw_ub_format!( + r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, + abi.conv + ); + } + if abi.c_variadic { + throw_ub_format!( + "calling a non-variadic function with a variadic caller-side signature" + ); + } + + if let Ok(ops) = args.try_into() { + return interp_ok(ops); + } + throw_ub_format!( + "incorrect number of arguments for `{link_name}`: got {}, expected {}", + args.len(), + N + ) + } + + /// Check that the given `caller_fn_abi` matches the expected ABI described by `shim_sig`, and + /// then returns the list of arguments. + fn check_shim_sig<'a, const N: usize>( + &mut self, + shim_sig: fn(&MiriInterpCx<'tcx>) -> ShimSig<'tcx, N>, + link_name: Symbol, + caller_fn_abi: &FnAbi<'tcx, Ty<'tcx>>, + caller_args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + let this = self.eval_context_mut(); + let shim_sig = shim_sig(this); + + // Compute full callee ABI. + let mut inputs_and_output = Vec::with_capacity(N.strict_add(1)); + inputs_and_output.extend(&shim_sig.args); + inputs_and_output.push(shim_sig.ret); + let fn_sig_binder = Binder::dummy(FnSig { + inputs_and_output: this.machine.tcx.mk_type_list(&inputs_and_output), + c_variadic: false, + // This does not matter for the ABI. + safety: Safety::Safe, + abi: shim_sig.abi, + }); + let callee_fn_abi = this.fn_abi_of_fn_ptr(fn_sig_binder, Default::default())?; + + // Check everything. + check_shim_abi(this, callee_fn_abi, caller_fn_abi)?; + check_shim_symbol_clash(this, link_name)?; + + // Return arguments. + if let Ok(ops) = caller_args.try_into() { + return interp_ok(ops); + } + unreachable!() + } + + /// Check shim for variadic function. + /// Returns a tuple consisting of an array of fixed args, and a slice of varargs.
+ fn check_shim_sig_variadic_lenient<'a, const N: usize>( + &mut self, + abi: &FnAbi<'tcx, Ty<'tcx>>, + exp_abi: CanonAbi, + link_name: Symbol, + args: &'a [OpTy<'tcx>], + ) -> InterpResult<'tcx, (&'a [OpTy<'tcx>; N], &'a [OpTy<'tcx>])> + where + &'a [OpTy<'tcx>; N]: TryFrom<&'a [OpTy<'tcx>]>, + { + let this = self.eval_context_mut(); + check_shim_symbol_clash(this, link_name)?; + + if abi.conv != exp_abi { + throw_ub_format!( + r#"calling a function with calling convention "{exp_abi}" using caller calling convention "{}""#, + abi.conv + ); + } + if !abi.c_variadic { + throw_ub_format!( + "calling a variadic function with a non-variadic caller-side signature" + ); + } + if abi.fixed_count != u32::try_from(N).unwrap() { + throw_ub_format!( + "incorrect number of fixed arguments for variadic function `{}`: got {}, expected {N}", + link_name.as_str(), + abi.fixed_count + ) + } + if let Some(args) = args.split_first_chunk() { + return interp_ok(args); + } + panic!("mismatch between signature and `args` slice"); + } +} + +/// Check that the number of varargs is at least the minimum we expect. +/// Fixed args should not be included. +pub fn check_min_vararg_count<'a, 'tcx, const N: usize>( + name: &'a str, + args: &'a [OpTy<'tcx>], +) -> InterpResult<'tcx, &'a [OpTy<'tcx>; N]> { + if let Some((ops, _)) = args.split_first_chunk() { + return interp_ok(ops); + } + throw_ub_format!( + "not enough variadic arguments for `{name}`: got {}, expected at least {}", + args.len(), + N + ) +} diff --git a/src/tools/miri/src/shims/time.rs b/src/tools/miri/src/shims/time.rs index eb21abc2a45..b5b35797fec 100644 --- a/src/tools/miri/src/shims/time.rs +++ b/src/tools/miri/src/shims/time.rs @@ -17,73 +17,71 @@ pub fn system_time_to_duration<'tcx>(time: &SystemTime) -> InterpResult<'tcx, Du impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { - fn clock_gettime( - &mut self, - clk_id_op: &OpTy<'tcx>, - tp_op: &OpTy<'tcx>, - dest: &MPlaceTy<'tcx>, - ) -> InterpResult<'tcx> { + fn parse_clockid(&self, clk_id: Scalar) -> Option<TimeoutClock> { // This clock support is deliberately minimal because a lot of clock types have fiddly // properties (is it possible for Miri to be suspended independently of the host?). If you // have a use for another clock type, please open an issue. + let this = self.eval_context_ref(); - let this = self.eval_context_mut(); - - this.assert_target_os_is_unix("clock_gettime"); - let clockid_t_size = this.libc_ty_layout("clockid_t").size; - - let clk_id = this.read_scalar(clk_id_op)?.to_int(clockid_t_size)?; - let tp = this.deref_pointer_as(tp_op, this.libc_ty_layout("timespec"))?; - - let absolute_clocks; - let mut relative_clocks; + // Portable names that exist everywhere. + if clk_id == this.eval_libc("CLOCK_REALTIME") { + return Some(TimeoutClock::RealTime); + } else if clk_id == this.eval_libc("CLOCK_MONOTONIC") { + return Some(TimeoutClock::Monotonic); + } + // Some further platform-specific names we support. match this.tcx.sess.target.os.as_ref() { "linux" | "freebsd" | "android" => { - // Linux, Android, and FreeBSD have two main kinds of clocks. REALTIME clocks return the actual time since the - // Unix epoch, including effects which may cause time to move backwards such as NTP. // Linux further distinguishes regular and "coarse" clocks, but the "coarse" version - // is just specified to be "faster and less precise", so we implement both the same way.
- absolute_clocks = vec![ - this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?, - this.eval_libc("CLOCK_REALTIME_COARSE").to_int(clockid_t_size)?, - ]; - // The second kind is MONOTONIC clocks for which 0 is an arbitrary time point, but they are - // never allowed to go backwards. We don't need to do any additional monotonicity - // enforcement because std::time::Instant already guarantees that it is monotonic. - relative_clocks = vec![ - this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?, - this.eval_libc("CLOCK_MONOTONIC_COARSE").to_int(clockid_t_size)?, - ]; + // is just specified to be "faster and less precise", so we treat it like normal + // clocks. + if clk_id == this.eval_libc("CLOCK_REALTIME_COARSE") { + return Some(TimeoutClock::RealTime); + } else if clk_id == this.eval_libc("CLOCK_MONOTONIC_COARSE") { + return Some(TimeoutClock::Monotonic); + } } "macos" => { - absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; - relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; // `CLOCK_UPTIME_RAW` supposed to not increment while the system is asleep... but // that's not really something a program running inside Miri can tell, anyway. // We need to support it because std uses it. - relative_clocks.push(this.eval_libc("CLOCK_UPTIME_RAW").to_int(clockid_t_size)?); - } - "solaris" | "illumos" => { - // The REALTIME clock returns the actual time since the Unix epoch. - absolute_clocks = vec![this.eval_libc("CLOCK_REALTIME").to_int(clockid_t_size)?]; - // MONOTONIC, in the other hand, is the high resolution, non-adjustable - // clock from an arbitrary time in the past. - // Note that the man page mentions HIGHRES but it is just - // an alias of MONOTONIC and the libc crate does not expose it anyway. - // https://docs.oracle.com/cd/E23824_01/html/821-1465/clock-gettime-3c.html - relative_clocks = vec![this.eval_libc("CLOCK_MONOTONIC").to_int(clockid_t_size)?]; + if clk_id == this.eval_libc("CLOCK_UPTIME_RAW") { + return Some(TimeoutClock::Monotonic); + } } - target => throw_unsup_format!("`clock_gettime` is not supported on target OS {target}"), + _ => {} } - let duration = if absolute_clocks.contains(&clk_id) { - this.check_no_isolation("`clock_gettime` with `REALTIME` clocks")?; - system_time_to_duration(&SystemTime::now())? - } else if relative_clocks.contains(&clk_id) { - this.machine.monotonic_clock.now().duration_since(this.machine.monotonic_clock.epoch()) - } else { - return this.set_last_error_and_return(LibcError("EINVAL"), dest); + None + } + + fn clock_gettime( + &mut self, + clk_id_op: &OpTy<'tcx>, + tp_op: &OpTy<'tcx>, + dest: &MPlaceTy<'tcx>, + ) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + this.assert_target_os_is_unix("clock_gettime"); + + let clk_id = this.read_scalar(clk_id_op)?; + let tp = this.deref_pointer_as(tp_op, this.libc_ty_layout("timespec"))?; + + let duration = match this.parse_clockid(clk_id) { + Some(TimeoutClock::RealTime) => { + this.check_no_isolation("`clock_gettime` with `REALTIME` clocks")?; + system_time_to_duration(&SystemTime::now())? 
+ } + Some(TimeoutClock::Monotonic) => + this.machine + .monotonic_clock + .now() + .duration_since(this.machine.monotonic_clock.epoch()), + None => { + return this.set_last_error_and_return(LibcError("EINVAL"), dest); + } }; let tv_sec = duration.as_secs(); diff --git a/src/tools/miri/src/shims/unix/android/foreign_items.rs b/src/tools/miri/src/shims/unix/android/foreign_items.rs index 690b5295681..04c5d28838b 100644 --- a/src/tools/miri/src/shims/unix/android/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/android/foreign_items.rs @@ -26,29 +26,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // epoll, eventfd "epoll_create1" => { - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Miscellaneous "__errno" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } diff --git a/src/tools/miri/src/shims/unix/android/thread.rs b/src/tools/miri/src/shims/unix/android/thread.rs index 5d17d6c8517..4e7b21d7d94 100644 --- a/src/tools/miri/src/shims/unix/android/thread.rs +++ b/src/tools/miri/src/shims/unix/android/thread.rs @@ -3,7 +3,7 @@ use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::thread::{EvalContextExt as _, ThreadNameResult}; use crate::*; @@ -16,7 +16,7 @@ pub fn prctl<'tcx>( args: &[OpTy<'tcx>], dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { - let ([op], varargs) = ecx.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + let ([op], varargs) = ecx.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; // FIXME: Use constants once https://github.com/rust-lang/libc/pull/3941 backported to the 0.2 branch. 
let pr_set_name = 15; diff --git a/src/tools/miri/src/shims/unix/fd.rs b/src/tools/miri/src/shims/unix/fd.rs index 71102d9f2f3..e226a55d8b1 100644 --- a/src/tools/miri/src/shims/unix/fd.rs +++ b/src/tools/miri/src/shims/unix/fd.rs @@ -4,10 +4,11 @@ use std::io; use std::io::ErrorKind; +use rand::Rng; use rustc_abi::Size; -use crate::helpers::check_min_vararg_count; use crate::shims::files::FileDescription; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::linux_like::epoll::EpollReadyEvents; use crate::shims::unix::*; use crate::*; @@ -263,9 +264,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.set_last_error_and_return(LibcError("EBADF"), dest); }; + // Non-deterministically decide to further reduce the count, simulating a partial read (but + // never to 0, that has different behavior). + let count = + if fd.nondet_short_accesses() && count >= 2 && this.machine.rng.get_mut().random() { + count / 2 + } else { + count + }; + trace!("read: FD mapped to {fd:?}"); // We want to read at most `count` bytes. We are sure that `count` is not negative - // because it was a target's `usize`. Also we are sure that its smaller than + // because it was a target's `usize`. Also we are sure that it's smaller than // `usize::MAX` because it is bounded by the host's `isize`. let finish = { @@ -328,6 +338,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return this.set_last_error_and_return(LibcError("EBADF"), dest); }; + // Non-deterministically decide to further reduce the count, simulating a partial write (but + // never to 0, that has different behavior). + let count = + if fd.nondet_short_accesses() && count >= 2 && this.machine.rng.get_mut().random() { + count / 2 + } else { + count + }; + let finish = { let dest = dest.clone(); callback!( diff --git a/src/tools/miri/src/shims/unix/foreign_items.rs b/src/tools/miri/src/shims/unix/foreign_items.rs index 548eabb1b9f..55906f4eb95 100644 --- a/src/tools/miri/src/shims/unix/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/foreign_items.rs @@ -1,7 +1,7 @@ use std::ffi::OsStr; use std::str; -use rustc_abi::{CanonAbi, ExternAbi, Size}; +use rustc_abi::{CanonAbi, Size}; use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; @@ -14,7 +14,7 @@ use self::shims::unix::solarish::foreign_items as solarish; use crate::concurrency::cpu_affinity::CpuAffinityMask; use crate::shims::alloc::EvalContextExt as _; use crate::shims::unix::*; -use crate::*; +use crate::{shim_sig, *}; pub fn is_dyn_sym(name: &str, target_os: &str) -> bool { match name { @@ -111,40 +111,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Environment related shims "getenv" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.getenv(name)?; this.write_pointer(result, dest)?; } "unsetenv" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.unsetenv(name)?; this.write_scalar(result, dest)?; } "setenv" => { - let [name, value, overwrite] = this.check_shim_abi( + let [name, value, overwrite] = this.check_shim_sig( 
+ shim_sig!(extern "C" fn(*const _, *const _, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.machine.layouts.const_raw_ptr.ty, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.i32, - ], - this.tcx.types.i32, args, )?; this.read_scalar(overwrite)?.to_i32()?; @@ -152,48 +142,40 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(result, dest)?; } "getcwd" => { - let [buf, size] = this.check_shim_abi( + let [buf, size] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, usize) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.getcwd(buf, size)?; this.write_pointer(result, dest)?; } "chdir" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.chdir(path)?; this.write_scalar(result, dest)?; } "getpid" => { - let [] = this.check_shim_abi( + let [] = this.check_shim_sig( + shim_sig!(extern "C" fn() -> libc::pid_t), link_name, abi, - ExternAbi::C { unwind: false }, - [], - this.libc_ty_layout("pid_t").ty, args, )?; let result = this.getpid()?; this.write_scalar(result, dest)?; } "sysconf" => { - let [val] = this.check_shim_abi( + let [val] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.isize, args, )?; let result = this.sysconf(val)?; @@ -201,12 +183,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // File descriptors "read" => { - let [fd, buf, count] = this.check_shim_abi( + let [fd, buf, count] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.usize], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -215,16 +195,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.read(fd, buf, count, None, dest)?; } "write" => { - let [fd, buf, n] = this.check_shim_abi( + let [fd, buf, n] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -234,98 +208,64 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write(fd, buf, count, None, dest)?; } "pread" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, buf, count, offset] = this.check_shim_abi( + let [fd, buf, count, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize, libc::off_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - off_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite" => { - let off_t = 
this.libc_ty_layout("off_t"); - let [fd, buf, n, offset] = this.check_shim_abi( + let [fd, buf, n, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize, libc::off_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - off_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; trace!("Called pwrite({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } "pread64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, buf, count, offset] = this.check_shim_abi( + let [fd, buf, count, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *mut _, usize, libc::off64_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - off64_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(count)?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; this.read(fd, buf, count, Some(offset), dest)?; } "pwrite64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, buf, n, offset] = this.check_shim_abi( + let [fd, buf, n, offset] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, *const _, usize, libc::off64_t) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.machine.layouts.const_raw_ptr.ty, - this.tcx.types.usize, - off64_t.ty, - ], - this.tcx.types.isize, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; let buf = this.read_pointer(buf)?; let count = this.read_target_usize(n)?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; trace!("Called pwrite64({:?}, {:?}, {:?}, {:?})", fd, buf, count, offset); this.write(fd, buf, count, Some(offset), dest)?; } "close" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.close(fd)?; @@ -333,17 +273,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "fcntl" => { let ([fd_num, cmd], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.fcntl(fd_num, cmd, varargs)?; this.write_scalar(result, dest)?; } "dup" => { - let [old_fd] = this.check_shim_abi( + let [old_fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; @@ -351,12 +289,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(new_fd, dest)?; } "dup2" => { - let [old_fd, new_fd] = this.check_shim_abi( + let [old_fd, new_fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, 
this.tcx.types.i32], - this.tcx.types.i32, args, )?; let old_fd = this.read_scalar(old_fd)?.to_i32()?; @@ -367,12 +303,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "flock" => { // Currently this function does not exist on all Unixes, e.g. on Solaris. this.check_target_os(&["linux", "freebsd", "macos", "illumos"], link_name)?; - let [fd, op] = this.check_shim_abi( + let [fd, op] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, this.tcx.types.i32], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; @@ -386,230 +320,187 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `open` is variadic, the third argument is only present when the second argument // has O_CREAT (or on linux O_TMPFILE, but miri doesn't support that) set let ([path_raw, flag], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.open(path_raw, flag, varargs)?; this.write_scalar(result, dest)?; } "unlink" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.unlink(path)?; this.write_scalar(result, dest)?; } "symlink" => { - let [target, linkpath] = this.check_shim_abi( + let [target, linkpath] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.symlink(target, linkpath)?; this.write_scalar(result, dest)?; } "rename" => { - let [oldpath, newpath] = this.check_shim_abi( + let [oldpath, newpath] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.rename(oldpath, newpath)?; this.write_scalar(result, dest)?; } "mkdir" => { - let [path, mode] = this.check_shim_abi( + let [path, mode] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, libc::mode_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.libc_ty_layout("mode_t").ty], - this.tcx.types.i32, args, )?; let result = this.mkdir(path, mode)?; this.write_scalar(result, dest)?; } "rmdir" => { - let [path] = this.check_shim_abi( + let [path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.rmdir(path)?; this.write_scalar(result, dest)?; } "opendir" => { - let [name] = this.check_shim_abi( + let [name] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.opendir(name)?; this.write_scalar(result, dest)?; } "closedir" => { - let [dirp] = this.check_shim_abi( + let [dirp] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - 
[this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.closedir(dirp)?; this.write_scalar(result, dest)?; } "lseek64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, offset, whence] = this.check_shim_abi( + let [fd, offset, whence] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off64_t, i32) -> libc::off64_t), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off64_t.ty, this.tcx.types.i32], - off64_t.ty, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_int(off64_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; let whence = this.read_scalar(whence)?.to_i32()?; this.lseek64(fd, offset, whence, dest)?; } "lseek" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, offset, whence] = this.check_shim_abi( + let [fd, offset, whence] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t, i32) -> libc::off_t), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty, this.tcx.types.i32], - off_t.ty, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let offset = this.read_scalar(offset)?.to_int(off_t.size)?; + let offset = this.read_scalar(offset)?.to_int(offset.layout.size)?; let whence = this.read_scalar(whence)?.to_i32()?; this.lseek64(fd, offset, whence, dest)?; } "ftruncate64" => { - let off64_t = this.libc_ty_layout("off64_t"); - let [fd, length] = this.check_shim_abi( + let [fd, length] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off64_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off64_t.ty], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_int(off64_t.size)?; + let length = this.read_scalar(length)?.to_int(length.layout.size)?; let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "ftruncate" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, length] = this.check_shim_abi( + let [fd, length] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty], - this.tcx.types.i32, args, )?; let fd = this.read_scalar(fd)?.to_i32()?; - let length = this.read_scalar(length)?.to_int(off_t.size)?; + let length = this.read_scalar(length)?.to_int(length.layout.size)?; let result = this.ftruncate64(fd, length)?; this.write_scalar(result, dest)?; } "fsync" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.fsync(fd)?; this.write_scalar(result, dest)?; } "fdatasync" => { - let [fd] = this.check_shim_abi( + let [fd] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.fdatasync(fd)?; this.write_scalar(result, dest)?; } "readlink" => { - let [pathname, buf, bufsize] = this.check_shim_abi( + let [pathname, buf, bufsize] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _, usize) -> isize), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.machine.layouts.const_raw_ptr.ty, - this.machine.layouts.mut_raw_ptr.ty, - this.tcx.types.usize, - ], - this.tcx.types.isize, args, )?; let result = this.readlink(pathname, 
buf, bufsize)?; this.write_scalar(Scalar::from_target_isize(result, this), dest)?; } "posix_fadvise" => { - let off_t = this.libc_ty_layout("off_t"); - let [fd, offset, len, advice] = this.check_shim_abi( + let [fd, offset, len, advice] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, libc::off_t, libc::off_t, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.tcx.types.i32, off_t.ty, off_t.ty, this.tcx.types.i32], - this.tcx.types.i32, args, )?; this.read_scalar(fd)?.to_i32()?; - this.read_scalar(offset)?.to_int(off_t.size)?; - this.read_scalar(len)?.to_int(off_t.size)?; + this.read_scalar(offset)?.to_int(offset.layout.size)?; + this.read_scalar(len)?.to_int(len.layout.size)?; this.read_scalar(advice)?.to_i32()?; // fadvise is only informational, we can ignore it. this.write_null(dest)?; } "realpath" => { - let [path, resolved_path] = this.check_shim_abi( + let [path, resolved_path] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.realpath(path, resolved_path)?; this.write_scalar(result, dest)?; } "mkstemp" => { - let [template] = this.check_shim_abi( + let [template] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.mkstemp(template)?; @@ -618,29 +509,20 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Unnamed sockets and pipes "socketpair" => { - let [domain, type_, protocol, sv] = this.check_shim_abi( + let [domain, type_, protocol, sv] = this.check_shim_sig( + shim_sig!(extern "C" fn(i32, i32, i32, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [ - this.tcx.types.i32, - this.tcx.types.i32, - this.tcx.types.i32, - this.machine.layouts.mut_raw_ptr.ty, - ], - this.tcx.types.i32, args, )?; let result = this.socketpair(domain, type_, protocol, sv)?; this.write_scalar(result, dest)?; } "pipe" => { - let [pipefd] = this.check_shim_abi( + let [pipefd] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.pipe2(pipefd, /*flags*/ None)?; @@ -649,12 +531,10 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "pipe2" => { // Currently this function does not exist on all Unixes, e.g. on macOS. 
this.check_target_os(&["linux", "freebsd", "solaris", "illumos"], link_name)?; - let [pipefd, flags] = this.check_shim_abi( + let [pipefd, flags] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, i32) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.tcx.types.i32], - this.tcx.types.i32, args, )?; let result = this.pipe2(pipefd, Some(flags))?; @@ -663,36 +543,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time "gettimeofday" => { - let [tv, tz] = this.check_shim_abi( + let [tv, tz] = this.check_shim_sig( + shim_sig!(extern "C" fn(*mut _, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.mut_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; let result = this.gettimeofday(tv, tz)?; this.write_scalar(result, dest)?; } "localtime_r" => { - let [timep, result_op] = this.check_shim_abi( + let [timep, result_op] = this.check_shim_sig( + shim_sig!(extern "C" fn(*const _, *mut _) -> *mut _), link_name, abi, - ExternAbi::C { unwind: false }, - [this.machine.layouts.const_raw_ptr.ty, this.machine.layouts.mut_raw_ptr.ty], - this.machine.layouts.mut_raw_ptr.ty, args, )?; let result = this.localtime_r(timep, result_op)?; this.write_pointer(result, dest)?; } "clock_gettime" => { - let [clk_id, tp] = this.check_shim_abi( + let [clk_id, tp] = this.check_shim_sig( + shim_sig!(extern "C" fn(libc::clockid_t, *mut _) -> i32), link_name, abi, - ExternAbi::C { unwind: false }, - [this.libc_ty_layout("clockid_t").ty, this.machine.layouts.mut_raw_ptr.ty], - this.tcx.types.i32, args, )?; this.clock_gettime(clk_id, tp, dest)?; @@ -700,20 +574,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Allocation "posix_memalign" => { - let [memptr, align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [memptr, align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.posix_memalign(memptr, align, size)?; this.write_scalar(result, dest)?; } "mmap" => { let [addr, length, prot, flags, fd, offset] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let offset = this.read_scalar(offset)?.to_int(this.libc_ty_layout("off_t").size)?; let ptr = this.mmap(addr, length, prot, flags, fd, offset)?; this.write_scalar(ptr, dest)?; } "munmap" => { - let [addr, length] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [addr, length] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.munmap(addr, length)?; this.write_scalar(result, dest)?; } @@ -721,7 +597,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "reallocarray" => { // Currently this function does not exist on all Unixes, e.g. on macOS. this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [ptr, nmemb, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, nmemb, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let nmemb = this.read_target_usize(nmemb)?; let size = this.read_target_usize(size)?; @@ -744,14 +621,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "aligned_alloc" => { // This is a C11 function, we assume all Unixes have it. // (MSVC explicitly does not support this.) 
- let [align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.aligned_alloc(align, size)?; this.write_pointer(res, dest)?; } // Dynamic symbol loading "dlsym" => { - let [handle, symbol] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [handle, symbol] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(handle)?; let symbol = this.read_pointer(symbol)?; let name = this.read_c_str(symbol)?; @@ -767,7 +646,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Thread-local storage "pthread_key_create" => { - let [key, dtor] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key, dtor] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key_place = this.deref_pointer_as(key, this.libc_ty_layout("pthread_key_t"))?; let dtor = this.read_pointer(dtor)?; @@ -795,21 +674,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_null(dest)?; } "pthread_key_delete" => { - let [key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; this.machine.tls.delete_tls_key(key)?; // Return success (0) this.write_null(dest)?; } "pthread_getspecific" => { - let [key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; let active_thread = this.active_thread(); let ptr = this.machine.tls.load_tls(key, active_thread, this)?; this.write_scalar(ptr, dest)?; } "pthread_setspecific" => { - let [key, new_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [key, new_ptr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let key = this.read_scalar(key)?.to_bits(key.layout.size)?; let active_thread = this.active_thread(); let new_data = this.read_scalar(new_ptr)?; @@ -821,117 +701,124 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "pthread_mutexattr_init" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutexattr_init(attr)?; this.write_null(dest)?; } "pthread_mutexattr_settype" => { - let [attr, kind] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, kind] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutexattr_settype(attr, kind)?; this.write_scalar(result, dest)?; } "pthread_mutexattr_destroy" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutexattr_destroy(attr)?; this.write_null(dest)?; } "pthread_mutex_init" => { - let [mutex, attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex, attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_init(mutex, attr)?; this.write_null(dest)?; } "pthread_mutex_lock" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_lock(mutex, dest)?; } "pthread_mutex_trylock" => { - let [mutex] = this.check_shim(abi, 
CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutex_trylock(mutex)?; this.write_scalar(result, dest)?; } "pthread_mutex_unlock" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_mutex_unlock(mutex)?; this.write_scalar(result, dest)?; } "pthread_mutex_destroy" => { - let [mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [mutex] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_mutex_destroy(mutex)?; this.write_int(0, dest)?; } "pthread_rwlock_rdlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_rdlock(rwlock, dest)?; } "pthread_rwlock_tryrdlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_rwlock_tryrdlock(rwlock)?; this.write_scalar(result, dest)?; } "pthread_rwlock_wrlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_wrlock(rwlock, dest)?; } "pthread_rwlock_trywrlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_rwlock_trywrlock(rwlock)?; this.write_scalar(result, dest)?; } "pthread_rwlock_unlock" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_unlock(rwlock)?; this.write_null(dest)?; } "pthread_rwlock_destroy" => { - let [rwlock] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [rwlock] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_rwlock_destroy(rwlock)?; this.write_null(dest)?; } "pthread_condattr_init" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_init(attr)?; this.write_null(dest)?; } "pthread_condattr_setclock" => { - let [attr, clock_id] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, clock_id] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.pthread_condattr_setclock(attr, clock_id)?; this.write_scalar(result, dest)?; } "pthread_condattr_getclock" => { - let [attr, clock_id] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr, clock_id] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_getclock(attr, clock_id)?; this.write_null(dest)?; } "pthread_condattr_destroy" => { - let [attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [attr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_condattr_destroy(attr)?; this.write_null(dest)?; } "pthread_cond_init" => { - let [cond, attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_init(cond, attr)?; this.write_null(dest)?; } "pthread_cond_signal" => { - let [cond] = 
this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_signal(cond)?; this.write_null(dest)?; } "pthread_cond_broadcast" => { - let [cond] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_broadcast(cond)?; this.write_null(dest)?; } "pthread_cond_wait" => { - let [cond, mutex] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, mutex] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_wait(cond, mutex, dest)?; } "pthread_cond_timedwait" => { - let [cond, mutex, abstime] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond, mutex, abstime] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_timedwait(cond, mutex, abstime, dest)?; } "pthread_cond_destroy" => { - let [cond] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cond] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_cond_destroy(cond)?; this.write_null(dest)?; } @@ -939,31 +826,33 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "pthread_create" => { let [thread, attr, start, arg] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_create(thread, attr, start, arg)?; this.write_null(dest)?; } "pthread_join" => { - let [thread, retval] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, retval] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.pthread_join(thread, retval, dest)?; } "pthread_detach" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.pthread_detach(thread)?; this.write_scalar(res, dest)?; } "pthread_self" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.pthread_self()?; this.write_scalar(res, dest)?; } "sched_yield" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.sched_yield()?; this.write_null(dest)?; } "nanosleep" => { - let [duration, rem] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [duration, rem] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.nanosleep(duration, rem)?; this.write_scalar(result, dest)?; } @@ -974,14 +863,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { link_name, )?; let [clock_id, flags, req, rem] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.clock_nanosleep(clock_id, flags, req, rem)?; this.write_scalar(result, dest)?; } "sched_getaffinity" => { // Currently this function does not exist on all Unixes, e.g. on macOS. 
this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [pid, cpusetsize, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pid, cpusetsize, mask] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let pid = this.read_scalar(pid)?.to_u32()?; let cpusetsize = this.read_target_usize(cpusetsize)?; let mask = this.read_pointer(mask)?; @@ -1018,7 +908,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "sched_setaffinity" => { // Currently this function does not exist on all Unixes, e.g. on macOS. this.check_target_os(&["linux", "freebsd", "android"], link_name)?; - let [pid, cpusetsize, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pid, cpusetsize, mask] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let pid = this.read_scalar(pid)?.to_u32()?; let cpusetsize = this.read_target_usize(cpusetsize)?; let mask = this.read_pointer(mask)?; @@ -1058,13 +949,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "isatty" => { - let [fd] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.isatty(fd)?; this.write_scalar(result, dest)?; } "pthread_atfork" => { let [prepare, parent, child] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_pointer(prepare)?; this.read_pointer(parent)?; this.read_pointer(child)?; @@ -1078,7 +969,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &["linux", "macos", "freebsd", "illumos", "solaris", "android"], link_name, )?; - let [buf, bufsize] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [buf, bufsize] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let buf = this.read_pointer(buf)?; let bufsize = this.read_target_usize(bufsize)?; @@ -1096,7 +988,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "strerror_r" => { - let [errnum, buf, buflen] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [errnum, buf, buflen] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.strerror_r(errnum, buf, buflen)?; this.write_scalar(result, dest)?; } @@ -1108,7 +1001,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &["linux", "freebsd", "illumos", "solaris", "android"], link_name, )?; - let [ptr, len, flags] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, len, flags] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; let _flags = this.read_scalar(flags)?.to_i32()?; @@ -1120,7 +1014,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // This function is non-standard but exists with the same signature and // same behavior (eg never fails) on FreeBSD and Solaris/Illumos. this.check_target_os(&["freebsd", "illumos", "solaris"], link_name)?; - let [ptr, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; this.gen_random(ptr, len)?; @@ -1144,12 +1038,12 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { link_name, )?; // This function looks and behaves excatly like miri_start_unwind. 
- let [payload] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [payload] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } "getuid" | "geteuid" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // For now, just pretend we always have this fixed UID. this.write_int(UID, dest)?; } @@ -1157,7 +1051,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "pthread_attr_getguardsize" if this.frame_in_std() => { - let [_attr, guard_size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_attr, guard_size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let guard_size_layout = this.machine.layouts.usize; let guard_size = this.deref_pointer_as(guard_size, guard_size_layout)?; this.write_scalar( @@ -1170,11 +1065,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "pthread_attr_init" | "pthread_attr_destroy" if this.frame_in_std() => { - let [_] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "pthread_attr_setstacksize" if this.frame_in_std() => { - let [_, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } @@ -1182,7 +1077,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // We don't support "pthread_attr_setstack", so we just pretend all stacks have the same values here. // Hence we can mostly ignore the input `attr_place`. 
let [attr_place, addr_place, size_place] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let _attr_place = this.deref_pointer_as(attr_place, this.libc_ty_layout("pthread_attr_t"))?; let addr_place = this.deref_pointer_as(addr_place, this.machine.layouts.usize)?; @@ -1202,18 +1097,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "signal" | "sigaltstack" if this.frame_in_std() => { - let [_, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "sigaction" | "mprotect" if this.frame_in_std() => { - let [_, _, _] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, _, _] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } "getpwuid_r" | "__posix_getpwuid_r" if this.frame_in_std() => { // getpwuid_r is the standard name, __posix_getpwuid_r is used on solarish let [uid, pwd, buf, buflen, result] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.check_no_isolation("`getpwuid_r`")?; let uid = this.read_scalar(uid)?.to_u32()?; diff --git a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs index 33564a2f84c..9e247053fbc 100644 --- a/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/freebsd/foreign_items.rs @@ -24,7 +24,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let max_len = u64::MAX; // FreeBSD does not seem to have a limit. let res = match this.pthread_setname_np( this.read_scalar(thread)?, @@ -39,7 +40,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // FreeBSD's pthread_getname_np uses strlcpy, which truncates the resulting value, // but always adds a null terminator (except for zero-sized buffers). // https://github.com/freebsd/freebsd-src/blob/c2d93a803acef634bd0eede6673aeea59e90c277/lib/libthr/thread/thr_info.c#L119-L144 @@ -57,7 +59,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getthreadid_np" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.unix_gettid(link_name.as_str())?; this.write_scalar(result, dest)?; } @@ -65,7 +67,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "cpuset_getaffinity" => { // The "same" kind of api as `sched_getaffinity` but more fine grained control for FreeBSD specifically. 
let [level, which, id, set_size, mask] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let level = this.read_scalar(level)?.to_i32()?; let which = this.read_scalar(which)?.to_i32()?; @@ -129,7 +131,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "_umtx_op" => { let [obj, op, val, uaddr, uaddr2] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this._umtx_op(obj, op, val, uaddr, uaddr2, dest)?; } @@ -137,29 +139,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // For those, we both intercept `func` and `call@FBSD_1.0` symbols cases // since freebsd 12 the former form can be expected. "stat" | "stat@FBSD_1.0" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat@FBSD_1.0" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat@FBSD_1.0" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "readdir_r" | "readdir_r@FBSD_1.0" => { - let [dirp, entry, result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp, entry, result] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_readdir_r(dirp, entry, result)?; this.write_scalar(result, dest)?; } // Miscellaneous "__error" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } @@ -167,7 +170,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. 
"pthread_attr_get_np" if this.frame_in_std() => { - let [_thread, _attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_thread, _attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } diff --git a/src/tools/miri/src/shims/unix/freebsd/sync.rs b/src/tools/miri/src/shims/unix/freebsd/sync.rs index f4e7d9e58f9..13d30e05573 100644 --- a/src/tools/miri/src/shims/unix/freebsd/sync.rs +++ b/src/tools/miri/src/shims/unix/freebsd/sync.rs @@ -228,26 +228,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let abs_time_flag = flags == abs_time; let clock_id_place = this.project_field(ut, FieldIdx::from_u32(2))?; - let clock_id = this.read_scalar(&clock_id_place)?.to_i32()?; - let timeout_clock = this.translate_umtx_time_clock_id(clock_id)?; + let clock_id = this.read_scalar(&clock_id_place)?; + let Some(timeout_clock) = this.parse_clockid(clock_id) else { + throw_unsup_format!("unsupported clock") + }; + if timeout_clock == TimeoutClock::RealTime { + this.check_no_isolation("`_umtx_op` with `CLOCK_REALTIME`")?; + } interp_ok(Some(UmtxTime { timeout: duration, abs_time: abs_time_flag, timeout_clock })) } - - /// Translate raw FreeBSD clockid to a Miri TimeoutClock. - /// FIXME: share this code with the pthread and clock_gettime shims. - fn translate_umtx_time_clock_id(&mut self, raw_id: i32) -> InterpResult<'tcx, TimeoutClock> { - let this = self.eval_context_mut(); - - let timeout = if raw_id == this.eval_libc_i32("CLOCK_REALTIME") { - // RealTime clock can't be used in isolation mode. - this.check_no_isolation("`_umtx_op` with `CLOCK_REALTIME` timeout")?; - TimeoutClock::RealTime - } else if raw_id == this.eval_libc_i32("CLOCK_MONOTONIC") { - TimeoutClock::Monotonic - } else { - throw_unsup_format!("unsupported clock id {raw_id}"); - }; - interp_ok(timeout) - } } diff --git a/src/tools/miri/src/shims/unix/fs.rs b/src/tools/miri/src/shims/unix/fs.rs index 0f2878ad26c..f9bcacf64c4 100644 --- a/src/tools/miri/src/shims/unix/fs.rs +++ b/src/tools/miri/src/shims/unix/fs.rs @@ -13,9 +13,9 @@ use rustc_abi::Size; use rustc_data_structures::fx::FxHashMap; use self::shims::time::system_time_to_duration; -use crate::helpers::check_min_vararg_count; use crate::shims::files::FileHandle; use crate::shims::os_str::bytes_to_os_str; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::fd::{FlockOp, UnixFileDescription}; use crate::*; diff --git a/src/tools/miri/src/shims/unix/linux/foreign_items.rs b/src/tools/miri/src/shims/unix/linux/foreign_items.rs index b3e99e6cc68..e7e0c3b6ecd 100644 --- a/src/tools/miri/src/shims/unix/linux/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/linux/foreign_items.rs @@ -37,48 +37,50 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // File related shims "readdir64" => { - let [dirp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.linux_solarish_readdir64("dirent64", dirp)?; this.write_scalar(result, dest)?; } "sync_file_range" => { let [fd, offset, nbytes, flags] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.sync_file_range(fd, offset, nbytes, flags)?; this.write_scalar(result, dest)?; } "statx" => { let [dirfd, pathname, flags, mask, statxbuf] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, 
CanonAbi::C, link_name, args)?; let result = this.linux_statx(dirfd, pathname, flags, mask, statxbuf)?; this.write_scalar(result, dest)?; } // epoll, eventfd "epoll_create1" => { - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = match this.pthread_setname_np( this.read_scalar(thread)?, this.read_scalar(name)?, @@ -93,7 +95,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The function's behavior isn't portable between platforms. // In case of glibc, the length of the output buffer must // be not shorter than TASK_COMM_LEN. 
@@ -116,7 +119,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "gettid" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.unix_gettid(link_name.as_str())?; this.write_scalar(result, dest)?; } @@ -129,34 +132,35 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "mmap64" => { let [addr, length, prot, flags, fd, offset] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let offset = this.read_scalar(offset)?.to_i64()?; let ptr = this.mmap(addr, length, prot, flags, fd, offset.into())?; this.write_scalar(ptr, dest)?; } "mremap" => { let ([old_address, old_size, new_size, flags], _) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let ptr = this.mremap(old_address, old_size, new_size, flags)?; this.write_scalar(ptr, dest)?; } "__xpg_strerror_r" => { - let [errnum, buf, buflen] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [errnum, buf, buflen] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.strerror_r(errnum, buf, buflen)?; this.write_scalar(result, dest)?; } "__errno_location" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } "__libc_current_sigrtmin" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_int(SIGRTMIN, dest)?; } "__libc_current_sigrtmax" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_int(SIGRTMAX, dest)?; } @@ -164,7 +168,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "pthread_getattr_np" if this.frame_in_std() => { - let [_thread, _attr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_thread, _attr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_null(dest)?; } diff --git a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs index ee7deb8d383..2d35ef064db 100644 --- a/src/tools/miri/src/shims/unix/linux_like/eventfd.rs +++ b/src/tools/miri/src/shims/unix/linux_like/eventfd.rs @@ -37,6 +37,11 @@ impl FileDescription for EventFd { "event" } + fn nondet_short_accesses(&self) -> bool { + // We always read and write exactly one `u64`. 
+ false + } + fn close<'tcx>( self, _communicate_allowed: bool, diff --git a/src/tools/miri/src/shims/unix/linux_like/sync.rs b/src/tools/miri/src/shims/unix/linux_like/sync.rs index 9fad74c0241..5f032c52dee 100644 --- a/src/tools/miri/src/shims/unix/linux_like/sync.rs +++ b/src/tools/miri/src/shims/unix/linux_like/sync.rs @@ -1,5 +1,5 @@ use crate::concurrency::sync::FutexRef; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::*; struct LinuxFutex { diff --git a/src/tools/miri/src/shims/unix/linux_like/syscall.rs b/src/tools/miri/src/shims/unix/linux_like/syscall.rs index d3534e6e1bc..106e6c448d0 100644 --- a/src/tools/miri/src/shims/unix/linux_like/syscall.rs +++ b/src/tools/miri/src/shims/unix/linux_like/syscall.rs @@ -3,7 +3,7 @@ use rustc_middle::ty::Ty; use rustc_span::Symbol; use rustc_target::callconv::FnAbi; -use crate::helpers::check_min_vararg_count; +use crate::shims::sig::check_min_vararg_count; use crate::shims::unix::env::EvalContextExt; use crate::shims::unix::linux_like::eventfd::EvalContextExt as _; use crate::shims::unix::linux_like::sync::futex; @@ -16,7 +16,7 @@ pub fn syscall<'tcx>( args: &[OpTy<'tcx>], dest: &MPlaceTy<'tcx>, ) -> InterpResult<'tcx> { - let ([op], varargs) = ecx.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + let ([op], varargs) = ecx.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; // The syscall variadic function is legal to call with more arguments than needed, // extra arguments are simply ignored. The important check is that when we use an // argument, we have to also check all arguments *before* it to ensure that they diff --git a/src/tools/miri/src/shims/unix/macos/foreign_items.rs b/src/tools/miri/src/shims/unix/macos/foreign_items.rs index 23303718091..297d903c6ba 100644 --- a/src/tools/miri/src/shims/unix/macos/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/macos/foreign_items.rs @@ -35,64 +35,67 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // errno "__error" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } // File related shims "close$NOCANCEL" => { - let [result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [result] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.close(result)?; this.write_scalar(result, dest)?; } "stat" | "stat64" | "stat$INODE64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat64" | "lstat$INODE64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat64" | "fstat$INODE64" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "opendir$INODE64" => { - let [name] = this.check_shim(abi, CanonAbi::C, link_name, 
args)?; + let [name] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.opendir(name)?; this.write_scalar(result, dest)?; } "readdir_r" | "readdir_r$INODE64" => { - let [dirp, entry, result] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp, entry, result] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_readdir_r(dirp, entry, result)?; this.write_scalar(result, dest)?; } "realpath$DARWIN_EXTSN" => { - let [path, resolved_path] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, resolved_path] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.realpath(path, resolved_path)?; this.write_scalar(result, dest)?; } "ioctl" => { let ([fd_num, cmd], varargs) = - this.check_shim_variadic(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_variadic_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.ioctl(fd_num, cmd, varargs)?; this.write_scalar(result, dest)?; } // Environment related shims "_NSGetEnviron" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let environ = this.machine.env_vars.unix().environ(); this.write_pointer(environ, dest)?; } // Random data generation "CCRandomGenerateBytes" => { - let [bytes, count] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [bytes, count] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let bytes = this.read_pointer(bytes)?; let count = this.read_target_usize(count)?; let success = this.eval_libc_i32("kCCSuccess"); @@ -102,28 +105,29 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time related shims "mach_absolute_time" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.mach_absolute_time()?; this.write_scalar(result, dest)?; } "mach_timebase_info" => { - let [info] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [info] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.mach_timebase_info(info)?; this.write_scalar(result, dest)?; } // Access to command-line arguments "_NSGetArgc" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_pointer(this.machine.argc.expect("machine must be initialized"), dest)?; } "_NSGetArgv" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.write_pointer(this.machine.argv.expect("machine must be initialized"), dest)?; } "_NSGetExecutablePath" => { - let [buf, bufsize] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [buf, bufsize] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.check_no_isolation("`_NSGetExecutablePath`")?; let buf_ptr = this.read_pointer(buf)?; @@ -148,7 +152,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Thread-local storage "_tlv_atexit" => { - let [dtor, data] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dtor, data] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let dtor = this.read_pointer(dtor)?; let dtor = this.get_ptr_fn(dtor)?.as_instance()?; let data = this.read_scalar(data)?; @@ -158,13 +163,13 @@ pub trait EvalContextExt<'tcx>: 
crate::MiriInterpCxExt<'tcx> { // Querying system information "pthread_get_stackaddr_np" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(thread)?; let stack_addr = Scalar::from_uint(this.machine.stack_addr, this.pointer_size()); this.write_scalar(stack_addr, dest)?; } "pthread_get_stacksize_np" => { - let [thread] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.read_target_usize(thread)?; let stack_size = Scalar::from_uint(this.machine.stack_size, this.pointer_size()); this.write_scalar(stack_size, dest)?; @@ -172,7 +177,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "pthread_setname_np" => { - let [name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [name] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The real implementation has logic in two places: // * in userland at https://github.com/apple-oss-distributions/libpthread/blob/c032e0b076700a0a47db75528a282b8d3a06531a/src/pthread.c#L1178-L1200, @@ -199,7 +204,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // The function's behavior isn't portable between platforms. // In case of macOS, a truncated name (due to a too small buffer) @@ -223,7 +229,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_threadid_np" => { - let [thread, tid_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, tid_ptr] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.apple_pthread_threadip_np(thread, tid_ptr)?; this.write_scalar(res, dest)?; } @@ -231,7 +238,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "os_sync_wait_on_address" => { let [addr_op, value_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -243,7 +250,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wait_on_address_with_deadline" => { let [addr_op, value_op, size_op, flags_op, clock_op, timeout_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -255,7 +262,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wait_on_address_with_timeout" => { let [addr_op, value_op, size_op, flags_op, clock_op, timeout_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wait_on_address( addr_op, value_op, @@ -267,36 +274,36 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "os_sync_wake_by_address_any" => { let [addr_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wake_by_address( addr_op, size_op, flags_op, /* all */ false, dest, )?; } "os_sync_wake_by_address_all" => 
{ let [addr_op, size_op, flags_op] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_sync_wake_by_address( addr_op, size_op, flags_op, /* all */ true, dest, )?; } "os_unfair_lock_lock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_lock(lock_op)?; } "os_unfair_lock_trylock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_trylock(lock_op, dest)?; } "os_unfair_lock_unlock" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_unlock(lock_op)?; } "os_unfair_lock_assert_owner" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_assert_owner(lock_op)?; } "os_unfair_lock_assert_not_owner" => { - let [lock_op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [lock_op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.os_unfair_lock_assert_not_owner(lock_op)?; } diff --git a/src/tools/miri/src/shims/unix/solarish/foreign_items.rs b/src/tools/miri/src/shims/unix/solarish/foreign_items.rs index e3d15b89be6..d7033a65fe2 100644 --- a/src/tools/miri/src/shims/unix/solarish/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/solarish/foreign_items.rs @@ -27,32 +27,34 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // epoll, eventfd (NOT available on Solaris!) 
"epoll_create1" => { this.assert_target_os("illumos", "epoll_create1"); - let [flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_create1(flag)?; this.write_scalar(result, dest)?; } "epoll_ctl" => { this.assert_target_os("illumos", "epoll_ctl"); - let [epfd, op, fd, event] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [epfd, op, fd, event] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.epoll_ctl(epfd, op, fd, event)?; this.write_scalar(result, dest)?; } "epoll_wait" => { this.assert_target_os("illumos", "epoll_wait"); let [epfd, events, maxevents, timeout] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.epoll_wait(epfd, events, maxevents, timeout, dest)?; } "eventfd" => { this.assert_target_os("illumos", "eventfd"); - let [val, flag] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val, flag] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.eventfd(val, flag)?; this.write_scalar(result, dest)?; } // Threading "pthread_setname_np" => { - let [thread, name] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // THREAD_NAME_MAX allows a thread name of 31+1 length // https://github.com/illumos/illumos-gate/blob/7671517e13b8123748eda4ef1ee165c6d9dba7fe/usr/src/uts/common/sys/thread.h#L613 let max_len = 32; @@ -70,7 +72,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "pthread_getname_np" => { - let [thread, name, len] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [thread, name, len] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // See https://illumos.org/man/3C/pthread_getname_np for the error codes. 
let res = match this.pthread_getname_np( this.read_scalar(thread)?, @@ -87,22 +90,22 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // File related shims "stat" | "stat64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_stat(path, buf)?; this.write_scalar(result, dest)?; } "lstat" | "lstat64" => { - let [path, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [path, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_lstat(path, buf)?; this.write_scalar(result, dest)?; } "fstat" | "fstat64" => { - let [fd, buf] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [fd, buf] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.macos_fbsd_solarish_fstat(fd, buf)?; this.write_scalar(result, dest)?; } "readdir" => { - let [dirp] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [dirp] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.linux_solarish_readdir64("dirent", dirp)?; this.write_scalar(result, dest)?; } @@ -110,20 +113,20 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Sockets and pipes "__xnet_socketpair" => { let [domain, type_, protocol, sv] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.socketpair(domain, type_, protocol, sv)?; this.write_scalar(result, dest)?; } // Miscellaneous "___errno" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let errno_place = this.last_error_place()?; this.write_scalar(errno_place.to_ref(this).to_scalar(), dest)?; } "stack_getbounds" => { - let [stack] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [stack] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let stack = this.deref_pointer_as(stack, this.libc_ty_layout("stack_t"))?; this.write_int_fields_named( @@ -141,7 +144,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "pset_info" => { - let [pset, tpe, cpus, list] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [pset, tpe, cpus, list] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // We do not need to handle the current process cpu mask, available_parallelism // implementation pass null anyway. We only care for the number of // cpus. 
@@ -170,7 +174,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "__sysconf_xpg7" => { - let [val] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [val] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.sysconf(val)?; this.write_scalar(result, dest)?; } diff --git a/src/tools/miri/src/shims/unix/sync.rs b/src/tools/miri/src/shims/unix/sync.rs index e20e3b79c3b..5ad4fd501a6 100644 --- a/src/tools/miri/src/shims/unix/sync.rs +++ b/src/tools/miri/src/shims/unix/sync.rs @@ -297,14 +297,13 @@ fn condattr_clock_offset<'tcx>(ecx: &MiriInterpCx<'tcx>) -> InterpResult<'tcx, u fn condattr_get_clock_id<'tcx>( ecx: &MiriInterpCx<'tcx>, attr_ptr: &OpTy<'tcx>, -) -> InterpResult<'tcx, i32> { +) -> InterpResult<'tcx, Scalar> { ecx.deref_pointer_and_read( attr_ptr, condattr_clock_offset(ecx)?, ecx.libc_ty_layout("pthread_condattr_t"), ecx.machine.layouts.i32, - )? - .to_i32() + ) } fn condattr_set_clock_id<'tcx>( @@ -321,20 +320,6 @@ fn condattr_set_clock_id<'tcx>( ) } -/// Translates the clock from what is stored in pthread_condattr_t to our enum. -fn condattr_translate_clock_id<'tcx>( - ecx: &MiriInterpCx<'tcx>, - raw_id: i32, -) -> InterpResult<'tcx, ClockId> { - interp_ok(if raw_id == ecx.eval_libc_i32("CLOCK_REALTIME") { - ClockId::Realtime - } else if raw_id == ecx.eval_libc_i32("CLOCK_MONOTONIC") { - ClockId::Monotonic - } else { - throw_unsup_format!("unsupported clock id: {raw_id}"); - }) -} - // # pthread_cond_t // We store some data directly inside the type, ignoring the platform layout: // - init: u32 @@ -363,22 +348,16 @@ fn cond_init_offset<'tcx>(ecx: &MiriInterpCx<'tcx>) -> InterpResult<'tcx, Size> interp_ok(offset) } -#[derive(Debug, Clone, Copy)] -enum ClockId { - Realtime, - Monotonic, -} - #[derive(Debug, Clone)] struct PthreadCondvar { condvar_ref: CondvarRef, - clock: ClockId, + clock: TimeoutClock, } fn cond_create<'tcx>( ecx: &mut MiriInterpCx<'tcx>, cond_ptr: &OpTy<'tcx>, - clock: ClockId, + clock: TimeoutClock, ) -> InterpResult<'tcx, PthreadCondvar> { let cond = ecx.deref_pointer_as(cond_ptr, ecx.libc_ty_layout("pthread_cond_t"))?; let data = PthreadCondvar { condvar_ref: CondvarRef::new(), clock }; @@ -407,7 +386,10 @@ where throw_unsup_format!("unsupported static initializer used for `pthread_cond_t`"); } // This used the static initializer. The clock there is always CLOCK_REALTIME. 
- interp_ok(PthreadCondvar { condvar_ref: CondvarRef::new(), clock: ClockId::Realtime }) + interp_ok(PthreadCondvar { + condvar_ref: CondvarRef::new(), + clock: TimeoutClock::RealTime, + }) }, ) } @@ -742,11 +724,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ) -> InterpResult<'tcx, Scalar> { let this = self.eval_context_mut(); - let clock_id = this.read_scalar(clock_id_op)?.to_i32()?; - if clock_id == this.eval_libc_i32("CLOCK_REALTIME") - || clock_id == this.eval_libc_i32("CLOCK_MONOTONIC") - { - condattr_set_clock_id(this, attr_op, clock_id)?; + let clock_id = this.read_scalar(clock_id_op)?; + if this.parse_clockid(clock_id).is_some() { + condattr_set_clock_id(this, attr_op, clock_id.to_i32()?)?; } else { let einval = this.eval_libc_i32("EINVAL"); return interp_ok(Scalar::from_i32(einval)); @@ -764,7 +744,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let clock_id = condattr_get_clock_id(this, attr_op)?; this.write_scalar( - Scalar::from_i32(clock_id), + clock_id, &this.deref_pointer_as(clk_id_op, this.libc_ty_layout("clockid_t"))?, )?; @@ -799,13 +779,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let attr = this.read_pointer(attr_op)?; // Default clock if `attr` is null, and on macOS where there is no clock attribute. let clock_id = if this.ptr_is_null(attr)? || this.tcx.sess.target.os == "macos" { - this.eval_libc_i32("CLOCK_REALTIME") + this.eval_libc("CLOCK_REALTIME") } else { condattr_get_clock_id(this, attr_op)? }; - let clock_id = condattr_translate_clock_id(this, clock_id)?; + let Some(clock) = this.parse_clockid(clock_id) else { + // This is UB since this situation cannot arise when using pthread_condattr_setclock. + throw_ub_format!("pthread_cond_init: invalid attributes (unsupported clock)") + }; - cond_create(this, cond_op, clock_id)?; + cond_create(this, cond_op, clock)?; interp_ok(()) } @@ -870,18 +853,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(()); } }; - let timeout_clock = match data.clock { - ClockId::Realtime => { - this.check_no_isolation("`pthread_cond_timedwait` with `CLOCK_REALTIME`")?; - TimeoutClock::RealTime - } - ClockId::Monotonic => TimeoutClock::Monotonic, - }; + if data.clock == TimeoutClock::RealTime { + this.check_no_isolation("`pthread_cond_timedwait` with `CLOCK_REALTIME`")?; + } this.condvar_wait( data.condvar_ref, mutex_ref, - Some((timeout_clock, TimeoutAnchor::Absolute, duration)), + Some((data.clock, TimeoutAnchor::Absolute, duration)), Scalar::from_i32(0), this.eval_libc("ETIMEDOUT"), // retval_timeout dest.clone(), diff --git a/src/tools/miri/src/shims/unwind.rs b/src/tools/miri/src/shims/unwind.rs index ba0c50b54b4..0dd2b20487d 100644 --- a/src/tools/miri/src/shims/unwind.rs +++ b/src/tools/miri/src/shims/unwind.rs @@ -16,7 +16,6 @@ use rustc_abi::ExternAbi; use rustc_middle::mir; use rustc_target::spec::PanicStrategy; -use self::helpers::check_intrinsic_arg_count; use crate::*; /// Holds all of the relevant data for when unwinding hits a `try` frame. @@ -60,7 +59,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { /// Handles the `catch_unwind` intrinsic. fn handle_catch_unwind( &mut self, - args: &[OpTy<'tcx>], + try_fn: &OpTy<'tcx>, + data: &OpTy<'tcx>, + catch_fn: &OpTy<'tcx>, dest: &MPlaceTy<'tcx>, ret: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { @@ -78,7 +79,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // a pointer to `Box<dyn Any + Send + 'static>`. // Get all the arguments. 
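The `pthread_cond*` hunks above drop Miri's private `ClockId` enum and its `condattr_translate_clock_id` helper in favour of the shared `TimeoutClock` type plus a `parse_clockid` check, so the raw clockid is validated when the attribute is set (or when the condvar is initialised) and the resulting clock is stored directly in `PthreadCondvar`. A minimal standalone model of that flow, with hypothetical types and constants rather than Miri's real API or `InterpResult` plumbing:

    // Stand-in for the shared timeout clock used by the condvar shims.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TimeoutClock {
        RealTime,
        Monotonic,
    }

    // Hypothetical raw clockid values; the real ones come from libc.
    const CLOCK_REALTIME: i32 = 0;
    const CLOCK_MONOTONIC: i32 = 1;

    // Stand-in for `parse_clockid`: map a raw clockid to a supported clock, or None.
    fn parse_clockid(raw: i32) -> Option<TimeoutClock> {
        match raw {
            CLOCK_REALTIME => Some(TimeoutClock::RealTime),
            CLOCK_MONOTONIC => Some(TimeoutClock::Monotonic),
            _ => None,
        }
    }

    fn main() {
        // `pthread_condattr_setclock` path: an unknown clock is rejected (EINVAL).
        assert!(parse_clockid(42).is_none());

        // `pthread_cond_timedwait` path: the stored clock is used as-is; only
        // CLOCK_REALTIME additionally requires that isolation is disabled.
        let clock = parse_clockid(CLOCK_MONOTONIC).unwrap();
        let needs_isolation_check = clock == TimeoutClock::RealTime;
        println!("{clock:?}, needs isolation check: {needs_isolation_check}");
    }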
- let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; let try_fn = this.read_pointer(try_fn)?; let data = this.read_immediate(data)?; let catch_fn = this.read_pointer(catch_fn)?; diff --git a/src/tools/miri/src/shims/wasi/foreign_items.rs b/src/tools/miri/src/shims/wasi/foreign_items.rs index 8d92d0f3381..bfcdbd8130d 100644 --- a/src/tools/miri/src/shims/wasi/foreign_items.rs +++ b/src/tools/miri/src/shims/wasi/foreign_items.rs @@ -23,12 +23,14 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Allocation "posix_memalign" => { - let [memptr, align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [memptr, align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let result = this.posix_memalign(memptr, align, size)?; this.write_scalar(result, dest)?; } "aligned_alloc" => { - let [align, size] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [align, size] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let res = this.aligned_alloc(align, size)?; this.write_pointer(res, dest)?; } diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index 959abc0baca..7b13f1d9080 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -157,42 +157,44 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match link_name.as_str() { // Environment related shims "GetEnvironmentVariableW" => { - let [name, buf, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [name, buf, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetEnvironmentVariableW(name, buf, size)?; this.write_scalar(result, dest)?; } "SetEnvironmentVariableW" => { - let [name, value] = this.check_shim(abi, sys_conv, link_name, args)?; + let [name, value] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.SetEnvironmentVariableW(name, value)?; this.write_scalar(result, dest)?; } "GetEnvironmentStringsW" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetEnvironmentStringsW()?; this.write_pointer(result, dest)?; } "FreeEnvironmentStringsW" => { - let [env_block] = this.check_shim(abi, sys_conv, link_name, args)?; + let [env_block] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.FreeEnvironmentStringsW(env_block)?; this.write_scalar(result, dest)?; } "GetCurrentDirectoryW" => { - let [size, buf] = this.check_shim(abi, sys_conv, link_name, args)?; + let [size, buf] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetCurrentDirectoryW(size, buf)?; this.write_scalar(result, dest)?; } "SetCurrentDirectoryW" => { - let [path] = this.check_shim(abi, sys_conv, link_name, args)?; + let [path] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.SetCurrentDirectoryW(path)?; this.write_scalar(result, dest)?; } "GetUserProfileDirectoryW" => { - let [token, buf, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [token, buf, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetUserProfileDirectoryW(token, buf, size)?; this.write_scalar(result, dest)?; } "GetCurrentProcessId" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = 
this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.GetCurrentProcessId()?; this.write_scalar(result, dest)?; } @@ -209,7 +211,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { n, byte_offset, key, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.NtWriteFile( handle, event, @@ -234,7 +236,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { n, byte_offset, key, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.NtReadFile( handle, event, @@ -250,7 +252,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetFullPathNameW" => { let [filename, size, buffer, filepart] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.check_no_isolation("`GetFullPathNameW`")?; let filename = this.read_pointer(filename)?; @@ -287,7 +289,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { creation_disposition, flags_and_attributes, template_file, - ] = this.check_shim(abi, sys_conv, link_name, args)?; + ] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.CreateFileW( file_name, desired_access, @@ -300,18 +302,18 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(handle.to_scalar(this), dest)?; } "GetFileInformationByHandle" => { - let [handle, info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, info] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.GetFileInformationByHandle(handle, info)?; this.write_scalar(res, dest)?; } "DeleteFileW" => { - let [file_name] = this.check_shim(abi, sys_conv, link_name, args)?; + let [file_name] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.DeleteFileW(file_name)?; this.write_scalar(res, dest)?; } "SetFilePointerEx" => { let [file, distance_to_move, new_file_pointer, move_method] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.SetFilePointerEx(file, distance_to_move, new_file_pointer, move_method)?; this.write_scalar(res, dest)?; @@ -319,7 +321,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Allocation "HeapAlloc" => { - let [handle, flags, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, flags, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; let flags = this.read_scalar(flags)?.to_u32()?; let size = this.read_target_usize(size)?; @@ -341,7 +344,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(ptr, dest)?; } "HeapFree" => { - let [handle, flags, ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, flags, ptr] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; this.read_scalar(flags)?.to_u32()?; let ptr = this.read_pointer(ptr)?; @@ -354,7 +358,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "HeapReAlloc" => { let [handle, flags, old_ptr, size] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(handle)?; this.read_scalar(flags)?.to_u32()?; let old_ptr = this.read_pointer(old_ptr)?; @@ -374,7 +378,7 @@ pub trait 
EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_pointer(new_ptr, dest)?; } "LocalFree" => { - let [ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; // "If the hMem parameter is NULL, LocalFree ignores the parameter and returns NULL." // (https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-localfree) @@ -386,17 +390,17 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // errno "SetLastError" => { - let [error] = this.check_shim(abi, sys_conv, link_name, args)?; + let [error] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let error = this.read_scalar(error)?; this.set_last_error(error)?; } "GetLastError" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let last_error = this.get_last_error()?; this.write_scalar(last_error, dest)?; } "RtlNtStatusToDosError" => { - let [status] = this.check_shim(abi, sys_conv, link_name, args)?; + let [status] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let status = this.read_scalar(status)?.to_u32()?; let err = match status { // STATUS_MEDIA_WRITE_PROTECTED => ERROR_WRITE_PROTECT @@ -418,7 +422,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Querying system information "GetSystemInfo" => { // Also called from `page_size` crate. - let [system_info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [system_info] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let system_info = this.deref_pointer_as(system_info, this.windows_ty_layout("SYSTEM_INFO"))?; // Initialize with `0`. @@ -441,19 +445,19 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // This just creates a key; Windows does not natively support TLS destructors. // Create key and return it. 
- let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = this.machine.tls.create_tls_key(None, dest.layout.size)?; this.write_scalar(Scalar::from_uint(key, dest.layout.size), dest)?; } "TlsGetValue" => { - let [key] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); let active_thread = this.active_thread(); let ptr = this.machine.tls.load_tls(key, active_thread, this)?; this.write_scalar(ptr, dest)?; } "TlsSetValue" => { - let [key, new_ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key, new_ptr] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); let active_thread = this.active_thread(); let new_data = this.read_scalar(new_ptr)?; @@ -463,7 +467,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_int(1, dest)?; } "TlsFree" => { - let [key] = this.check_shim(abi, sys_conv, link_name, args)?; + let [key] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let key = u128::from(this.read_scalar(key)?.to_u32()?); this.machine.tls.delete_tls_key(key)?; @@ -473,7 +477,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Access to command-line arguments "GetCommandLineW" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_pointer( this.machine.cmd_line.expect("machine must be initialized"), dest, @@ -483,29 +487,30 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Time related shims "GetSystemTimeAsFileTime" | "GetSystemTimePreciseAsFileTime" => { #[allow(non_snake_case)] - let [LPFILETIME] = this.check_shim(abi, sys_conv, link_name, args)?; + let [LPFILETIME] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.GetSystemTimeAsFileTime(link_name.as_str(), LPFILETIME)?; } "QueryPerformanceCounter" => { #[allow(non_snake_case)] - let [lpPerformanceCount] = this.check_shim(abi, sys_conv, link_name, args)?; + let [lpPerformanceCount] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.QueryPerformanceCounter(lpPerformanceCount)?; this.write_scalar(result, dest)?; } "QueryPerformanceFrequency" => { #[allow(non_snake_case)] - let [lpFrequency] = this.check_shim(abi, sys_conv, link_name, args)?; + let [lpFrequency] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.QueryPerformanceFrequency(lpFrequency)?; this.write_scalar(result, dest)?; } "Sleep" => { - let [timeout] = this.check_shim(abi, sys_conv, link_name, args)?; + let [timeout] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.Sleep(timeout)?; } "CreateWaitableTimerExW" => { let [attributes, name, flags, access] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_pointer(attributes)?; this.read_pointer(name)?; this.read_scalar(flags)?.to_u32()?; @@ -519,27 +524,28 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Synchronization primitives "InitOnceBeginInitialize" => { let [ptr, flags, pending, context] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.InitOnceBeginInitialize(ptr, flags, pending, context, dest)?; } 
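Nearly every arm in these foreign-item matches changes the same way: `check_shim` becomes `check_shim_sig_lenient`, while the `let [a, b, ...] = ...?` destructuring that pins the argument count stays untouched. A standalone sketch of why that destructuring works, using a hypothetical `check_args` helper rather than Miri's real signature-checking API:

    // Hypothetical helper: hand back a fixed-size view of the argument list so the
    // caller can destructure exactly the arity it expects, or fail with an error.
    fn check_args<'a, const N: usize>(
        link_name: &str,
        args: &'a [i64],
    ) -> Result<&'a [i64; N], String> {
        args.try_into().map_err(|_| {
            format!("{}: expected {} argument(s), got {}", link_name, N, args.len())
        })
    }

    fn main() -> Result<(), String> {
        let args = [7, 8, 9];
        // Mirrors `let [handle, flags, size] = this.check_shim_sig_lenient(...)?;`.
        let [handle, flags, size] = *check_args::<3>("HeapAlloc", &args)?;
        println!("handle={handle} flags={flags} size={size}");
        // A mismatched arity is caught before the shim body ever runs.
        assert!(check_args::<2>("HeapAlloc", &args).is_err());
        Ok(())
    }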
"InitOnceComplete" => { - let [ptr, flags, context] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, flags, context] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let result = this.InitOnceComplete(ptr, flags, context)?; this.write_scalar(result, dest)?; } "WaitOnAddress" => { let [ptr_op, compare_op, size_op, timeout_op] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WaitOnAddress(ptr_op, compare_op, size_op, timeout_op, dest)?; } "WakeByAddressSingle" => { - let [ptr_op] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr_op] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WakeByAddressSingle(ptr_op)?; } "WakeByAddressAll" => { - let [ptr_op] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr_op] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WakeByAddressAll(ptr_op)?; } @@ -547,7 +553,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Dynamic symbol loading "GetProcAddress" => { #[allow(non_snake_case)] - let [hModule, lpProcName] = this.check_shim(abi, sys_conv, link_name, args)?; + let [hModule, lpProcName] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(hModule)?; let name = this.read_c_str(this.read_pointer(lpProcName)?)?; if let Ok(name) = str::from_utf8(name) @@ -563,7 +570,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Threading "CreateThread" => { let [security, stacksize, start, arg, flags, thread] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let thread_id = this.CreateThread(security, stacksize, start, arg, flags, thread)?; @@ -571,12 +578,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Handle::Thread(thread_id).to_scalar(this), dest)?; } "WaitForSingleObject" => { - let [handle, timeout] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, timeout] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.WaitForSingleObject(handle, timeout, dest)?; } "GetCurrentProcess" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_scalar( Handle::Pseudo(PseudoHandle::CurrentProcess).to_scalar(this), @@ -584,7 +592,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "GetCurrentThread" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.write_scalar( Handle::Pseudo(PseudoHandle::CurrentThread).to_scalar(this), @@ -592,7 +600,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )?; } "SetThreadDescription" => { - let [handle, name] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, name] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "SetThreadDescription")?; let name = this.read_wide_str(this.read_pointer(name)?)?; @@ -607,7 +615,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u32(0), dest)?; } "GetThreadDescription" => { - let [handle, name_ptr] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, name_ptr] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "GetThreadDescription")?; 
let name_ptr = this.deref_pointer_as(name_ptr, this.machine.layouts.mut_raw_ptr)?; // the pointer where we should store the ptr to the name @@ -630,7 +639,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "GetThreadId" => { - let [handle] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let handle = this.read_handle(handle, "GetThreadId")?; let thread = match handle { Handle::Thread(thread) => thread, @@ -641,7 +650,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(Scalar::from_u32(tid), dest)?; } "GetCurrentThreadId" => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let thread = this.active_thread(); let tid = this.get_tid(thread); this.write_scalar(Scalar::from_u32(tid), dest)?; @@ -649,7 +658,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Miscellaneous "ExitProcess" => { - let [code] = this.check_shim(abi, sys_conv, link_name, args)?; + let [code] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Windows technically uses u32, but we unify everything to a Unix-style i32. let code = this.read_scalar(code)?.to_i32()?; throw_machine_stop!(TerminationInfo::Exit { code, leak_check: false }); @@ -657,7 +666,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "SystemFunction036" => { // used by getrandom 0.1 // This is really 'RtlGenRandom'. - let [ptr, len] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_scalar(len)?.to_u32()?; this.gen_random(ptr, len.into())?; @@ -665,7 +674,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "ProcessPrng" => { // used by `std` - let [ptr, len] = this.check_shim(abi, sys_conv, link_name, args)?; + let [ptr, len] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ptr = this.read_pointer(ptr)?; let len = this.read_target_usize(len)?; this.gen_random(ptr, len)?; @@ -674,7 +683,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "BCryptGenRandom" => { // used by getrandom 0.2 let [algorithm, ptr, len, flags] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let algorithm = this.read_scalar(algorithm)?; let algorithm = algorithm.to_target_usize(this)?; let ptr = this.read_pointer(ptr)?; @@ -708,7 +717,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetConsoleScreenBufferInfo" => { // `term` needs this, so we fake it. 
- let [console, buffer_info] = this.check_shim(abi, sys_conv, link_name, args)?; + let [console, buffer_info] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(console)?; // FIXME: this should use deref_pointer_as, but CONSOLE_SCREEN_BUFFER_INFO is not in std this.deref_pointer(buffer_info)?; @@ -717,13 +727,13 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_null(dest)?; } "GetStdHandle" => { - let [which] = this.check_shim(abi, sys_conv, link_name, args)?; + let [which] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.GetStdHandle(which)?; this.write_scalar(res, dest)?; } "DuplicateHandle" => { let [src_proc, src_handle, target_proc, target_handle, access, inherit, options] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let res = this.DuplicateHandle( src_proc, src_handle, @@ -736,14 +746,15 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_scalar(res, dest)?; } "CloseHandle" => { - let [handle] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let ret = this.CloseHandle(handle)?; this.write_scalar(ret, dest)?; } "GetModuleFileNameW" => { - let [handle, filename, size] = this.check_shim(abi, sys_conv, link_name, args)?; + let [handle, filename, size] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.check_no_isolation("`GetModuleFileNameW`")?; let handle = this.read_handle(handle, "GetModuleFileNameW")?; @@ -777,7 +788,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "FormatMessageW" => { let [flags, module, message_id, language_id, buffer, size, arguments] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; let flags = this.read_scalar(flags)?.to_u32()?; let _module = this.read_pointer(module)?; // seems to contain a module name @@ -812,26 +823,28 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Incomplete shims that we "stub out" just to get pre-main initialization code to work. // These shims are enabled only when the caller is in the standard library. "GetProcessHeap" if this.frame_in_std() => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Just fake a HANDLE // It's fine to not use the Handle type here because its a stub this.write_int(1, dest)?; } "GetModuleHandleA" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_lpModuleName] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_lpModuleName] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // We need to return something non-null here to make `compat_fn!` work. this.write_int(1, dest)?; } "SetConsoleTextAttribute" if this.frame_in_std() => { #[allow(non_snake_case)] let [_hConsoleOutput, _wAttribute] = - this.check_shim(abi, sys_conv, link_name, args)?; + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Pretend these does not exist / nothing happened, by returning zero. 
this.write_null(dest)?; } "GetConsoleMode" if this.frame_in_std() => { - let [console, mode] = this.check_shim(abi, sys_conv, link_name, args)?; + let [console, mode] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.read_target_isize(console)?; this.deref_pointer_as(mode, this.machine.layouts.u32)?; // Indicate an error. @@ -839,25 +852,27 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } "GetFileType" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_hFile] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_hFile] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Return unknown file type. this.write_null(dest)?; } "AddVectoredExceptionHandler" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_First, _Handler] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_First, _Handler] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Any non zero value works for the stdlib. This is just used for stack overflows anyway. this.write_int(1, dest)?; } "SetThreadStackGuarantee" if this.frame_in_std() => { #[allow(non_snake_case)] - let [_StackSizeInBytes] = this.check_shim(abi, sys_conv, link_name, args)?; + let [_StackSizeInBytes] = + this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; // Any non zero value works for the stdlib. This is just used for stack overflows anyway. this.write_int(1, dest)?; } // this is only callable from std because we know that std ignores the return value "SwitchToThread" if this.frame_in_std() => { - let [] = this.check_shim(abi, sys_conv, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, sys_conv, link_name, args)?; this.yield_active_thread(); @@ -876,7 +891,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { ); } // This function looks and behaves excatly like miri_start_unwind. - let [payload] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [payload] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); } diff --git a/src/tools/miri/src/shims/windows/fs.rs b/src/tools/miri/src/shims/windows/fs.rs index 72e016c12e9..e4ec1b0130c 100644 --- a/src/tools/miri/src/shims/windows/fs.rs +++ b/src/tools/miri/src/shims/windows/fs.rs @@ -462,6 +462,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let io_status_info = this.project_field_named(&io_status_block, "Information")?; + // It seems like short writes are not a thing on Windows, so we don't truncate `count` here. + // FIXME: if we are on a Unix host, short host writes are still visible to the program! + let finish = { let io_status = io_status.clone(); let io_status_info = io_status_info.clone(); @@ -491,7 +494,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }} ) }; - desc.write(this.machine.communicate(), buf, count.try_into().unwrap(), this, finish)?; // Return status is written to `dest` and `io_status_block` on callback completion. @@ -556,6 +558,16 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let io_status_info = this.project_field_named(&io_status_block, "Information")?; + let fd = match handle { + Handle::File(fd) => fd, + _ => this.invalid_handle("NtWriteFile")?, + }; + + let Some(desc) = this.machine.fds.get(fd) else { this.invalid_handle("NtReadFile")? }; + + // It seems like short reads are not a thing on Windows, so we don't truncate `count` here. 
+ // FIXME: if we are on a Unix host, short host reads are still visible to the program! + let finish = { let io_status = io_status.clone(); let io_status_info = io_status_info.clone(); @@ -585,14 +597,6 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }} ) }; - - let fd = match handle { - Handle::File(fd) => fd, - _ => this.invalid_handle("NtWriteFile")?, - }; - - let Some(desc) = this.machine.fds.get(fd) else { this.invalid_handle("NtReadFile")? }; - desc.read(this.machine.communicate(), buf, count.try_into().unwrap(), this, finish)?; // See NtWriteFile for commentary on this diff --git a/src/tools/miri/src/shims/x86/aesni.rs b/src/tools/miri/src/shims/x86/aesni.rs index 058ca24e730..fdd3e78c610 100644 --- a/src/tools/miri/src/shims/x86/aesni.rs +++ b/src/tools/miri/src/shims/x86/aesni.rs @@ -26,7 +26,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesdec_si128 "aesdec" | "aesdec.256" | "aesdec.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let key = aes::Block::from(key.to_le_bytes()); let mut state = aes::Block::from(state.to_le_bytes()); @@ -42,7 +43,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesdeclast_si128 "aesdeclast" | "aesdeclast.256" | "aesdeclast.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let mut state = aes::Block::from(state.to_le_bytes()); @@ -66,7 +68,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesenc_si128 "aesenc" | "aesenc.256" | "aesenc.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let key = aes::Block::from(key.to_le_bytes()); let mut state = aes::Block::from(state.to_le_bytes()); @@ -82,7 +85,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // `state` with the corresponding 128-bit key of `key`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_aesenclast_si128 "aesenclast" | "aesenclast.256" | "aesenclast.512" => { - let [state, key] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [state, key] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; aes_round(this, state, key, dest, |state, key| { let mut state = aes::Block::from(state.to_le_bytes()); // `aes::hazmat::cipher_round` does the following operations: @@ -102,7 +106,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm_aesimc_si128 function. 
// Performs the AES InvMixColumns operation on `op` "aesimc" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // Transmute to `u128` let op = op.transmute(this.machine.layouts.u128, this)?; let dest = dest.transmute(this.machine.layouts.u128, this)?; diff --git a/src/tools/miri/src/shims/x86/avx.rs b/src/tools/miri/src/shims/x86/avx.rs index 83d23d6ad36..269ce3b51b9 100644 --- a/src/tools/miri/src/shims/x86/avx.rs +++ b/src/tools/miri/src/shims/x86/avx.rs @@ -33,7 +33,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.ps.256" | "max.ps.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ps.256" => FloatBinOp::Min, @@ -45,7 +46,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement _mm256_min_pd and _mm256_max_pd functions. "min.pd.256" | "max.pd.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.pd.256" => FloatBinOp::Min, @@ -58,21 +60,23 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_round_ps function. // Rounds the elements of `op` according to `rounding`. "round.ps.256" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Single>(this, op, rounding, dest)?; } // Used to implement the _mm256_round_pd function. // Rounds the elements of `op` according to `rounding`. "round.pd.256" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Double>(this, op, rounding, dest)?; } // Used to implement _mm256_{rcp,rsqrt}_ps functions. // Performs the operations on all components of `op`. "rcp.ps.256" | "rsqrt.ps.256" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ps.256" => FloatUnaryOp::Rcp, @@ -84,7 +88,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm256_dp_ps function. "dp.ps.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; conditional_dot_product(this, left, right, imm, dest)?; } @@ -92,7 +97,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add/subtract adjacent floating point values // in `left` and `right`. 
"hadd.ps.256" | "hadd.pd.256" | "hsub.ps.256" | "hsub.pd.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "hadd.ps.256" | "hadd.pd.256" => mir::BinOp::Add, @@ -107,7 +113,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and `right`. For each component, returns 0 if false or u32::MAX // if true. "cmp.ps.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -119,7 +126,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and `right`. For each component, returns 0 if false or u64::MAX // if true. "cmp.pd.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -130,7 +138,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and _mm256_cvttpd_epi32 functions. // Converts packed f32/f64 to packed i32. "cvt.ps2dq.256" | "cvtt.ps2dq.256" | "cvt.pd2dq.256" | "cvtt.pd2dq.256" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let rnd = match unprefixed_name { // "current SSE rounding mode", assume nearest @@ -148,7 +156,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // sequence of 4-element arrays, and we shuffle each of these arrays, where // `control` determines which element of the current `data` array is written. "vpermilvar.ps" | "vpermilvar.ps.256" => { - let [data, control] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [data, control] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (data, data_len) = this.project_to_simd(data)?; let (control, control_len) = this.project_to_simd(control)?; @@ -181,7 +190,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // where `right` determines which element of the current `left` array is // written. "vpermilvar.pd" | "vpermilvar.pd.256" => { - let [data, control] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [data, control] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (data, data_len) = this.project_to_simd(data)?; let (control, control_len) = this.project_to_simd(control)?; @@ -213,7 +223,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // For each 128-bit element of `dest`, copies one from `left`, `right` or // zero, according to `imm`. "vperm2f128.ps.256" | "vperm2f128.pd.256" | "vperm2f128.si.256" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(dest.layout, left.layout); assert_eq!(dest.layout, right.layout); @@ -256,7 +267,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is loaded from `ptr.wrapping_add(i)`, otherwise zero is // loaded. 
"maskload.ps" | "maskload.pd" | "maskload.ps.256" | "maskload.pd.256" => { - let [ptr, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_load(this, ptr, mask, dest)?; } @@ -266,7 +277,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is stored into `ptr.wapping_add(i)`. // Unlike SSE2's _mm_maskmoveu_si128, these are not non-temporal stores. "maskstore.ps" | "maskstore.pd" | "maskstore.ps.256" | "maskstore.pd.256" => { - let [ptr, mask, value] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask, value] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_store(this, ptr, mask, value)?; } @@ -276,7 +288,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the data crosses a cache line, but for Miri this is just a regular // unaligned read. "ldu.dq.256" => { - let [src_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [src_ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let src_ptr = this.read_pointer(src_ptr)?; let dest = dest.force_mplace(this)?; @@ -288,7 +300,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Tests `op & mask == 0`, `op & mask == mask` or // `op & mask != 0 && op & mask != mask` "ptestz.256" | "ptestc.256" | "ptestnzc.256" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (all_zero, masked_set) = test_bits_masked(this, op, mask)?; let res = match unprefixed_name { @@ -311,7 +323,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "vtestz.pd.256" | "vtestc.pd.256" | "vtestnzc.pd.256" | "vtestz.pd" | "vtestc.pd" | "vtestnzc.pd" | "vtestz.ps.256" | "vtestc.ps.256" | "vtestnzc.ps.256" | "vtestz.ps" | "vtestc.ps" | "vtestnzc.ps" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (direct, negated) = test_high_bits_masked(this, op, mask)?; let res = match unprefixed_name { @@ -333,7 +345,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // compiler, making these functions no-ops. // The only thing that needs to be ensured is the correct calling convention. - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; } _ => return interp_ok(EmulateItemResult::NotSupported), } diff --git a/src/tools/miri/src/shims/x86/avx2.rs b/src/tools/miri/src/shims/x86/avx2.rs index 49d5977078b..ca80c0eba1e 100644 --- a/src/tools/miri/src/shims/x86/avx2.rs +++ b/src/tools/miri/src/shims/x86/avx2.rs @@ -28,7 +28,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_abs_epi{8,16,32} functions. // Calculates the absolute value of packed 8/16/32-bit integers. "pabs.b" | "pabs.w" | "pabs.d" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; int_abs(this, op, dest)?; } @@ -36,7 +36,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add / add with saturation / subtract adjacent 16/32-bit // integer values in `left` and `right`. 
"phadd.w" | "phadd.sw" | "phadd.d" | "phsub.w" | "phsub.sw" | "phsub.d" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (which, saturating) = match unprefixed_name { "phadd.w" | "phadd.d" => (mir::BinOp::Add, false), @@ -57,7 +58,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { | "gather.d.pd.256" | "gather.q.pd" | "gather.q.pd.256" | "gather.d.ps" | "gather.d.ps.256" | "gather.q.ps" | "gather.q.ps.256" => { let [src, slice, offsets, mask, scale] = - this.check_shim(abi, CanonAbi::C, link_name, args)?; + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(dest.layout, src.layout); @@ -114,7 +115,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // intermediate signed 32-bit integers. Horizontally add adjacent pairs of // intermediate 32-bit integers, and pack the results in `dest`. "pmadd.wd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -150,7 +152,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the saturating sum of the products with indices `2*i` and `2*i+1` // produces the output at index `i`. "pmadd.ub.sw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -184,7 +187,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is loaded from `ptr.wrapping_add(i)`, otherwise zero is // loaded. "maskload.d" | "maskload.q" | "maskload.d.256" | "maskload.q.256" => { - let [ptr, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_load(this, ptr, mask, dest)?; } @@ -194,7 +197,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is one, it is stored into `ptr.wapping_add(i)`. // Unlike SSE2's _mm_maskmoveu_si128, these are not non-temporal stores. "maskstore.d" | "maskstore.q" | "maskstore.d.256" | "maskstore.q.256" => { - let [ptr, mask, value] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [ptr, mask, value] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mask_store(this, ptr, mask, value)?; } @@ -205,7 +209,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // offsets specified in `imm`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_mpsadbw_epu8 "mpsadbw" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mpsadbw(this, left, right, imm, dest)?; } @@ -216,7 +221,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // 1 and then taking the bits `1..=16`. 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_mulhrs_epi16 "pmul.hr.sw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pmulhrsw(this, left, right, dest)?; } @@ -224,7 +230,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit integer vectors to a single 8-bit integer // vector with signed saturation. "packsswb" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packsswb(this, left, right, dest)?; } @@ -232,7 +239,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 32-bit integer vectors to a single 16-bit integer // vector with signed saturation. "packssdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packssdw(this, left, right, dest)?; } @@ -240,7 +248,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit signed integer vectors to a single 8-bit // unsigned integer vector with saturation. "packuswb" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packuswb(this, left, right, dest)?; } @@ -248,7 +257,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Concatenates two 32-bit signed integer vectors and converts // the result to a 16-bit unsigned integer vector with saturation. "packusdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packusdw(this, left, right, dest)?; } @@ -257,7 +267,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles `left` using the three low bits of each element of `right` // as indices. "permd" | "permps" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -277,7 +288,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_permute2x128_si256 function. // Shuffles 128-bit blocks of `a` and `b` using `imm` as pattern. "vperm2i128" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; assert_eq!(left.layout.size.bits(), 256); assert_eq!(right.layout.size.bits(), 256); @@ -314,7 +326,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // in `dest`. 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm256_sad_epu8 "psad.bw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -346,7 +359,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles bytes from `left` using `right` as pattern. // Each 128-bit block is shuffled independently. "pshuf.b" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -377,7 +391,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is writen to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. "psign.b" | "psign.w" | "psign.d" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; psign(this, left, right, dest)?; } @@ -391,7 +406,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is copied to remaining bits. "psll.w" | "psrl.w" | "psra.w" | "psll.d" | "psrl.d" | "psra.d" | "psll.q" | "psrl.q" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psll.w" | "psll.d" | "psll.q" => ShiftOp::Left, @@ -406,7 +422,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // (except _mm{,256}_srav_epi64, which are not available in AVX2). "psllv.d" | "psllv.d.256" | "psllv.q" | "psllv.q.256" | "psrlv.d" | "psrlv.d.256" | "psrlv.q" | "psrlv.q.256" | "psrav.d" | "psrav.d.256" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psllv.d" | "psllv.d.256" | "psllv.q" | "psllv.q.256" => ShiftOp::Left, diff --git a/src/tools/miri/src/shims/x86/bmi.rs b/src/tools/miri/src/shims/x86/bmi.rs index 80b1b2e16e6..140e31cc513 100644 --- a/src/tools/miri/src/shims/x86/bmi.rs +++ b/src/tools/miri/src/shims/x86/bmi.rs @@ -35,7 +35,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_scalar(left)?; let right = this.read_scalar(right)?; diff --git a/src/tools/miri/src/shims/x86/gfni.rs b/src/tools/miri/src/shims/x86/gfni.rs index f83ce560c84..9a98a80d6dc 100644 --- a/src/tools/miri/src/shims/x86/gfni.rs +++ b/src/tools/miri/src/shims/x86/gfni.rs @@ -31,14 +31,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // See `affine_transform` for details. 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8affine_ "vgf2p8affineqb.128" | "vgf2p8affineqb.256" | "vgf2p8affineqb.512" => { - let [left, right, imm8] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm8] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; affine_transform(this, left, right, imm8, dest, /* inverse */ false)?; } // Used to implement the `_mm{, 256, 512}_gf2p8affineinv_epi64_epi8` functions. // See `affine_transform` for details. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8affineinv "vgf2p8affineinvqb.128" | "vgf2p8affineinvqb.256" | "vgf2p8affineinvqb.512" => { - let [left, right, imm8] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm8] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; affine_transform(this, left, right, imm8, dest, /* inverse */ true)?; } // Used to implement the `_mm{, 256, 512}_gf2p8mul_epi8` functions. @@ -47,7 +49,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // polynomial representation with the reduction polynomial x^8 + x^4 + x^3 + x + 1. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=gf2p8mul "vgf2p8mulb.128" | "vgf2p8mulb.256" | "vgf2p8mulb.512" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; let (dest, dest_len) = this.project_to_simd(dest)?; diff --git a/src/tools/miri/src/shims/x86/mod.rs b/src/tools/miri/src/shims/x86/mod.rs index fbfe459711e..3324b7b024a 100644 --- a/src/tools/miri/src/shims/x86/mod.rs +++ b/src/tools/miri/src/shims/x86/mod.rs @@ -45,7 +45,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [cb_in, a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [cb_in, a, b] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let op = if unprefixed_name.starts_with("add") { mir::BinOp::AddWithOverflow } else { @@ -67,7 +68,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { if is_u64 && this.tcx.sess.target.arch != "x86_64" { return interp_ok(EmulateItemResult::NotSupported); } - let [c_in, a, b, out] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [c_in, a, b, out] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let out = this.deref_pointer_as( out, if is_u64 { this.machine.layouts.u64 } else { this.machine.layouts.u32 }, @@ -84,7 +86,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the instruction behaves like a no-op, so it is always safe to call the // intrinsic. "sse2.pause" => { - let [] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; // Only exhibit the spin-loop hint behavior when SSE2 is enabled. 
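// For reference, `vgf2p8mulb` multiplies in GF(2^8) with the reduction polynomial
// x^8 + x^4 + x^3 + x + 1 mentioned above. A byte-wise sketch using the usual
// shift-and-xor loop (illustrative name, not Miri's code):
fn gf2p8_mul(mut a: u8, mut b: u8) -> u8 {
    let mut acc = 0u8;
    while b != 0 {
        if b & 1 != 0 {
            acc ^= a;
        }
        let carry = a & 0x80 != 0;
        a <<= 1;
        if carry {
            a ^= 0x1B; // reduce modulo x^8 + x^4 + x^3 + x + 1
        }
        b >>= 1;
    }
    acc
}
// Sanity check in this field: gf2p8_mul(0x53, 0xCA) == 0x01.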
if this.tcx.sess.unstable_target_features.contains(&Symbol::intern("sse2")) { this.yield_active_thread(); @@ -103,7 +105,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { len = 8; } - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pclmulqdq(this, left, right, imm, dest, len)?; } diff --git a/src/tools/miri/src/shims/x86/sha.rs b/src/tools/miri/src/shims/x86/sha.rs index d37fad3e6c7..00fe58119e4 100644 --- a/src/tools/miri/src/shims/x86/sha.rs +++ b/src/tools/miri/src/shims/x86/sha.rs @@ -53,7 +53,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { match unprefixed_name { // Used to implement the _mm_sha256rnds2_epu32 function. "256rnds2" => { - let [a, b, k] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b, k] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; @@ -74,7 +74,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm_sha256msg1_epu32 function. "256msg1" => { - let [a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; @@ -92,7 +92,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } // Used to implement the _mm_sha256msg2_epu32 function. "256msg2" => { - let [a, b] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [a, b] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (a_reg, a_len) = this.project_to_simd(a)?; let (b_reg, b_len) = this.project_to_simd(b)?; diff --git a/src/tools/miri/src/shims/x86/sse.rs b/src/tools/miri/src/shims/x86/sse.rs index 1ec15d609c6..6d8def5b53f 100644 --- a/src/tools/miri/src/shims/x86/sse.rs +++ b/src/tools/miri/src/shims/x86/sse.rs @@ -34,7 +34,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Performs the operations on the first component of `left` and // `right` and copies the remaining components from `left`. "min.ss" | "max.ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ss" => FloatBinOp::Min, @@ -50,7 +51,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.ps" | "max.ps" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.ps" => FloatBinOp::Min, @@ -64,7 +66,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Performs the operations on the first component of `op` and // copies the remaining components from `op`. "rcp.ss" | "rsqrt.ss" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ss" => FloatUnaryOp::Rcp, @@ -77,7 +79,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement _mm_{sqrt,rcp,rsqrt}_ps functions. 
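// For reference, the "different semantics" mentioned for min.ss/max.ss above is
// that x86 MINSS/MAXSS simply return the second operand whenever the comparison
// fails (a NaN is involved, or both inputs are zeros). Scalar sketch, illustrative names:
fn x86_min_f32(a: f32, b: f32) -> f32 {
    if a < b { a } else { b } // NaN and -0.0/+0.0 pairs fall through to `b`
}
fn x86_max_f32(a: f32, b: f32) -> f32 {
    if a > b { a } else { b }
}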
// Performs the operations on all components of `op`. "rcp.ps" | "rsqrt.ps" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "rcp.ps" => FloatUnaryOp::Rcp, @@ -96,7 +98,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_ss are SSE functions // with hard-coded operations. "cmp.ss" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -112,7 +115,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_ps are SSE functions // with hard-coded operations. "cmp.ps" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -125,7 +129,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "comieq.ss" | "comilt.ss" | "comile.ss" | "comigt.ss" | "comige.ss" | "comineq.ss" | "ucomieq.ss" | "ucomilt.ss" | "ucomile.ss" | "ucomigt.ss" | "ucomige.ss" | "ucomineq.ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -153,7 +158,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cvtss_si64 and _mm_cvttss_si64 functions. // Converts the first component of `op` from f32 to i32/i64. "cvtss2si" | "cvttss2si" | "cvtss2si64" | "cvttss2si64" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, _) = this.project_to_simd(op)?; let op = this.read_immediate(&this.project_index(&op, 0)?)?; @@ -181,7 +186,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // are copied from `left`. // https://www.felixcloutier.com/x86/cvtsi2ss "cvtsi2ss" | "cvtsi642ss" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (dest, dest_len) = this.project_to_simd(dest)?; diff --git a/src/tools/miri/src/shims/x86/sse2.rs b/src/tools/miri/src/shims/x86/sse2.rs index d6052f83077..8f53adfb5ec 100644 --- a/src/tools/miri/src/shims/x86/sse2.rs +++ b/src/tools/miri/src/shims/x86/sse2.rs @@ -41,7 +41,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // intermediate signed 32-bit integers. Horizontally add adjacent pairs of // intermediate 32-bit integers, and pack the results in `dest`. 
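// For reference, the 3-bit immediate consumed by `FloatBinOp::cmp_from_imm` above
// selects one of the eight legacy SSE predicates. A per-lane sketch (true lanes
// become an all-ones mask); illustrative name, not Miri's code:
fn cmp_ps_lane(imm: u8, a: f32, b: f32) -> u32 {
    let ok = match imm & 0x7 {
        0 => a == b,                     // EQ (ordered)
        1 => a < b,                      // LT
        2 => a <= b,                     // LE
        3 => a.is_nan() || b.is_nan(),   // UNORD
        4 => a != b,                     // NEQ (also true if unordered)
        5 => !(a < b),                   // NLT (also true if unordered)
        6 => !(a <= b),                  // NLE (also true if unordered)
        _ => !a.is_nan() && !b.is_nan(), // ORD
    };
    if ok { u32::MAX } else { 0 }
}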
"pmadd.wd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -79,7 +80,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_sad_epu8 "psad.bw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -117,7 +119,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is copied to remaining bits. "psll.w" | "psrl.w" | "psra.w" | "psll.d" | "psrl.d" | "psra.d" | "psll.q" | "psrl.q" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "psll.w" | "psll.d" | "psll.q" => ShiftOp::Left, @@ -132,7 +135,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // and _mm_cvttpd_epi32 functions. // Converts packed f32/f64 to packed i32. "cvtps2dq" | "cvttps2dq" | "cvtpd2dq" | "cvttpd2dq" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op_len, _) = op.layout.ty.simd_size_and_type(*this.tcx); let (dest_len, _) = dest.layout.ty.simd_size_and_type(*this.tcx); @@ -169,7 +172,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit integer vectors to a single 8-bit integer // vector with signed saturation. "packsswb.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packsswb(this, left, right, dest)?; } @@ -177,7 +181,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 16-bit signed integer vectors to a single 8-bit // unsigned integer vector with saturation. "packuswb.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packuswb(this, left, right, dest)?; } @@ -185,7 +190,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts two 32-bit integer vectors to a single 16-bit integer // vector with signed saturation. "packssdw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packssdw(this, left, right, dest)?; } @@ -195,7 +201,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. "min.sd" | "max.sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.sd" => FloatBinOp::Min, @@ -211,7 +218,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // matches the IEEE min/max operations, while x86 has different // semantics. 
"min.pd" | "max.pd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "min.pd" => FloatBinOp::Min, @@ -230,7 +238,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_sd are SSE2 functions // with hard-coded operations. "cmp.sd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -246,7 +255,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cmp{eq,lt,le,gt,ge,neq,nlt,nle,ngt,nge,ord,unord}_pd are SSE2 functions // with hard-coded operations. "cmp.pd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = FloatBinOp::cmp_from_imm(this, this.read_scalar(imm)?.to_i8()?, link_name)?; @@ -259,7 +269,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "comieq.sd" | "comilt.sd" | "comile.sd" | "comigt.sd" | "comige.sd" | "comineq.sd" | "ucomieq.sd" | "ucomilt.sd" | "ucomile.sd" | "ucomigt.sd" | "ucomige.sd" | "ucomineq.sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -287,7 +298,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // _mm_cvtsd_si64 and _mm_cvttsd_si64 functions. // Converts the first component of `op` from f64 to i32/i64. "cvtsd2si" | "cvttsd2si" | "cvtsd2si64" | "cvttsd2si64" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, _) = this.project_to_simd(op)?; let op = this.read_immediate(&this.project_index(&op, 0)?)?; @@ -313,7 +324,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Converts the first f64/f32 from `right` to f32/f64 and copies // the remaining elements from `left` "cvtsd2ss" | "cvtss2sd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, _) = this.project_to_simd(right)?; diff --git a/src/tools/miri/src/shims/x86/sse3.rs b/src/tools/miri/src/shims/x86/sse3.rs index ebf3cb5c3ee..0fd8c3bc389 100644 --- a/src/tools/miri/src/shims/x86/sse3.rs +++ b/src/tools/miri/src/shims/x86/sse3.rs @@ -26,7 +26,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Horizontally add/subtract adjacent floating point values // in `left` and `right`. 
"hadd.ps" | "hadd.pd" | "hsub.ps" | "hsub.pd" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let which = match unprefixed_name { "hadd.ps" | "hadd.pd" => mir::BinOp::Add, @@ -42,7 +43,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // the data crosses a cache line, but for Miri this is just a regular // unaligned read. "ldu.dq" => { - let [src_ptr] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [src_ptr] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let src_ptr = this.read_pointer(src_ptr)?; let dest = dest.force_mplace(this)?; diff --git a/src/tools/miri/src/shims/x86/sse41.rs b/src/tools/miri/src/shims/x86/sse41.rs index 6797039cf56..7736b5e443d 100644 --- a/src/tools/miri/src/shims/x86/sse41.rs +++ b/src/tools/miri/src/shims/x86/sse41.rs @@ -28,7 +28,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // bits `4..=5` if `imm`, and `i`th bit specifies whether element // `i` is zeroed. "insertps" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -63,7 +64,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Concatenates two 32-bit signed integer vectors and converts // the result to a 16-bit unsigned integer vector with saturation. "packusdw" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; packusdw(this, left, right, dest)?; } @@ -73,7 +75,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // products, and conditionally stores the sum in `dest` using the low // 4 bits of `imm`. "dpps" | "dppd" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; conditional_dot_product(this, left, right, imm, dest)?; } @@ -81,14 +84,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // functions. Rounds the first element of `right` according to `rounding` // and copies the remaining elements from `left`. "round.ss" => { - let [left, right, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_first::<rustc_apfloat::ieee::Single>(this, left, right, rounding, dest)?; } // Used to implement the _mm_floor_ps, _mm_ceil_ps and _mm_round_ps // functions. Rounds the elements of `op` according to `rounding`. "round.ps" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Single>(this, op, rounding, dest)?; } @@ -96,14 +101,16 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // functions. Rounds the first element of `right` according to `rounding` // and copies the remaining elements from `left`. 
"round.sd" => { - let [left, right, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_first::<rustc_apfloat::ieee::Double>(this, left, right, rounding, dest)?; } // Used to implement the _mm_floor_pd, _mm_ceil_pd and _mm_round_pd // functions. Rounds the elements of `op` according to `rounding`. "round.pd" => { - let [op, rounding] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, rounding] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; round_all::<rustc_apfloat::ieee::Double>(this, op, rounding, dest)?; } @@ -111,7 +118,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Find the minimum unsinged 16-bit integer in `op` and // returns its value and position. "phminposuw" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (op, op_len) = this.project_to_simd(op)?; let (dest, dest_len) = this.project_to_simd(dest)?; @@ -145,7 +152,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // offsets specified in `imm`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_mpsadbw_epu8 "mpsadbw" => { - let [left, right, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; mpsadbw(this, left, right, imm, dest)?; } @@ -154,7 +162,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Tests `(op & mask) == 0`, `(op & mask) == mask` or // `(op & mask) != 0 && (op & mask) != mask` "ptestz" | "ptestc" | "ptestnzc" => { - let [op, mask] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op, mask] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (all_zero, masked_set) = test_bits_masked(this, op, mask)?; let res = match unprefixed_name { diff --git a/src/tools/miri/src/shims/x86/sse42.rs b/src/tools/miri/src/shims/x86/sse42.rs index 7e1e1482ef4..72c5039a12d 100644 --- a/src/tools/miri/src/shims/x86/sse42.rs +++ b/src/tools/miri/src/shims/x86/sse42.rs @@ -222,7 +222,8 @@ fn deconstruct_args<'tcx>( }; if is_explicit { - let [str1, len1, str2, len2, imm] = ecx.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, len1, str2, len2, imm] = + ecx.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = ecx.read_scalar(imm)?.to_u8()?; let default_len = default_len::<u32>(imm); @@ -235,7 +236,7 @@ fn deconstruct_args<'tcx>( interp_ok((str1, str2, Some((len1, len2)), imm)) } else { - let [str1, str2, imm] = ecx.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, str2, imm] = ecx.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = ecx.read_scalar(imm)?.to_u8()?; let array_layout = array_layout_fn(ecx, imm)?; @@ -385,7 +386,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // search for a null terminator (see `deconstruct_args` for more details). 
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#ig_expand=924,925 "pcmpistriz128" | "pcmpistris128" => { - let [str1, str2, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [str1, str2, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let imm = this.read_scalar(imm)?.to_u8()?; let str = if unprefixed_name == "pcmpistris128" { str1 } else { str2 }; @@ -405,7 +407,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // than 16 for byte-sized operands or 8 for word-sized operands. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#ig_expand=1046,1047 "pcmpestriz128" | "pcmpestris128" => { - let [_, len1, _, len2, imm] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [_, len1, _, len2, imm] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let len = if unprefixed_name == "pcmpestris128" { len1 } else { len2 }; let len = this.read_scalar(len)?.to_i32()?; let imm = this.read_scalar(imm)?.to_u8()?; @@ -432,7 +435,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { return interp_ok(EmulateItemResult::NotSupported); } - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let left = this.read_scalar(left)?; let right = this.read_scalar(right)?; diff --git a/src/tools/miri/src/shims/x86/ssse3.rs b/src/tools/miri/src/shims/x86/ssse3.rs index 310d6b8f765..52ad6bd4419 100644 --- a/src/tools/miri/src/shims/x86/ssse3.rs +++ b/src/tools/miri/src/shims/x86/ssse3.rs @@ -25,7 +25,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm_abs_epi{8,16,32} functions. // Calculates the absolute value of packed 8/16/32-bit integers. "pabs.b.128" | "pabs.w.128" | "pabs.d.128" => { - let [op] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; int_abs(this, op, dest)?; } @@ -33,7 +33,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Shuffles bytes from `left` using `right` as pattern. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_shuffle_epi8 "pshuf.b.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -62,7 +63,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // integer values in `left` and `right`. "phadd.w.128" | "phadd.sw.128" | "phadd.d.128" | "phsub.w.128" | "phsub.sw.128" | "phsub.d.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (which, saturating) = match unprefixed_name { "phadd.w.128" | "phadd.d.128" => (mir::BinOp::Add, false), @@ -81,7 +83,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // produces the output at index `i`. 
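// For reference, one output byte of `pshuf.b.128` as handled above: bit 7 of the
// selector zeroes the lane, otherwise its low 4 bits index into `left`.
// Illustrative sketch:
fn pshufb_lane(left: &[u8; 16], sel: u8) -> u8 {
    if sel & 0x80 != 0 { 0 } else { left[usize::from(sel & 0x0F)] }
}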
// https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_maddubs_epi16 "pmadd.ub.sw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; let (left, left_len) = this.project_to_simd(left)?; let (right, right_len) = this.project_to_simd(right)?; @@ -116,7 +119,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // 1 and then taking the bits `1..=16`. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_mulhrs_epi16 "pmul.hr.sw.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; pmulhrsw(this, left, right, dest)?; } @@ -126,7 +130,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // is writen to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. "psign.b.128" | "psign.w.128" | "psign.d.128" => { - let [left, right] = this.check_shim(abi, CanonAbi::C, link_name, args)?; + let [left, right] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; psign(this, left, right, dest)?; } diff --git a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs index 314ce90cfb5..f6ec5be61bb 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc-epoll-data-race.rs @@ -10,6 +10,9 @@ use std::convert::TryInto; use std::thread; use std::thread::spawn; +#[path = "../../utils/libc.rs"] +mod libc_utils; + #[track_caller] fn check_epoll_wait<const N: usize>(epfd: i32, expected_notifications: &[(u32, u64)]) { let epoll_event = libc::epoll_event { events: 0, u64: 0 }; @@ -69,12 +72,12 @@ fn main() { unsafe { VAL_ONE = 41 }; let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds_a[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds_a[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); unsafe { VAL_TWO = 51 }; - let res = unsafe { libc::write(fds_b[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds_b[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); thread::yield_now(); diff --git a/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs b/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs index 1dc334486c3..e2fd6463a11 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc-read-and-uninit-premature-eof.rs @@ -10,6 +10,9 @@ use std::mem::MaybeUninit; #[path = "../../utils/mod.rs"] mod utils; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { let path = utils::prepare_with_content("fail-libc-read-and-uninit-premature-eof.txt", &[1u8, 2, 3]); @@ -18,8 +21,9 @@ fn main() { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); let mut buf: MaybeUninit<[u8; 4]> = std::mem::MaybeUninit::uninit(); - // Read 4 bytes from a 3-byte file. - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4), 3); + // Read as much as we can from a 3-byte file. 
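// For reference, the tests below switch from bare libc::write to a write_all
// helper because a single write may legitimately be short. The body of
// tests/utils/libc.rs is not shown in this diff; a helper of that shape plausibly
// looks like this (an assumption, not the actual implementation):
unsafe fn write_all(fd: libc::c_int, buf: *const libc::c_void, count: usize) -> libc::ssize_t {
    let mut done = 0usize;
    while done < count {
        let res = libc::write(fd, (buf as *const u8).add(done) as *const libc::c_void, count - done);
        if res <= 0 {
            return res; // error, or no progress
        }
        done += res as usize;
    }
    done as libc::ssize_t
}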
+ let res = libc_utils::read_all(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4); + assert!(res == 3); buf.assume_init(); //~ERROR: encountered uninitialized memory, but expected an integer assert_eq!(libc::close(fd), 0); } diff --git a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs index f6f2e2b9312..054cb812d9e 100644 --- a/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs +++ b/src/tools/miri/tests/fail-dep/libc/libc_epoll_block_two_thread.rs @@ -75,9 +75,10 @@ fn main() { }); let thread3 = spawn(move || { + // Just a single write, so we only wake up one of them. let data = "abcde".as_bytes().as_ptr(); let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; - assert_eq!(res, 5); + assert!(res > 0 && res <= 5); }); thread1.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs index b3839859500..0fecfb8f663 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.rs @@ -4,6 +4,7 @@ // test_race depends on a deterministic schedule. //@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock +//@require-annotations-for-level: error use std::thread; @@ -22,24 +23,26 @@ fn main() { assert_eq!(res, 0); let thread1 = thread::spawn(move || { // Let this thread block on read. - let mut buf: [u8; 3] = [0; 3]; + let mut buf: [u8; 1] = [0; 1]; let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf, "abc".as_bytes()); + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(&buf, "a".as_bytes()); }); let thread2 = thread::spawn(move || { // Let this thread block on read. - let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - //~^ERROR: deadlocked - assert_eq!(res, 3); - assert_eq!(&buf, "abc".as_bytes()); + let mut buf: [u8; 1] = [0; 1]; + let res = unsafe { + libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + //~^ERROR: deadlock + }; + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(&buf, "a".as_bytes()); }); let thread3 = thread::spawn(move || { // Unblock thread1 by writing something. 
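// For reference, the matching read_all helper has to retry short reads but stop at
// end-of-file, which is why the test above still sees 3 when asking for 4 bytes from
// a 3-byte file. A plausible sketch (an assumption, not the actual utils/libc.rs code):
unsafe fn read_all(fd: libc::c_int, buf: *mut libc::c_void, count: usize) -> libc::ssize_t {
    let mut done = 0usize;
    while done < count {
        let res = libc::read(fd, (buf as *mut u8).add(done) as *mut libc::c_void, count - done);
        if res < 0 {
            return res; // error
        }
        if res == 0 {
            break; // EOF
        }
        done += res as usize;
    }
    done as libc::ssize_t
}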
- let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - assert_eq!(res, 3); + let data = "a".as_bytes(); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + assert_eq!(res, data.len().cast_signed()); }); thread1.join().unwrap(); thread2.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr index 9f19a60e6ae..99d242ec7da 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_read_twice.stderr @@ -23,8 +23,8 @@ error: the evaluated program deadlocked error: the evaluated program deadlocked --> tests/fail-dep/libc/socketpair_block_read_twice.rs:LL:CC | -LL | let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - | ^ this thread got stuck here +LL | libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + | ^ this thread got stuck here | = note: BACKTRACE on thread `unnamed-ID`: = note: inside closure at tests/fail-dep/libc/socketpair_block_read_twice.rs:LL:CC diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs index 7d84d87ebbb..048938c091e 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.rs @@ -4,16 +4,20 @@ // test_race depends on a deterministic schedule. //@compile-flags: -Zmiri-deterministic-concurrency //@error-in-other-file: deadlock +//@require-annotations-for-level: error use std::thread; +#[path = "../../utils/libc.rs"] +mod libc_utils; + // Test the behaviour of a thread being blocked on write, get unblocked, then blocked again. // The expected execution is // 1. Thread 1 blocks. // 2. Thread 2 blocks. // 3. Thread 3 unblocks both thread 1 and thread 2. -// 4. Thread 1 reads. +// 4. Thread 1 writes. // 5. Thread 2's `write` can never complete -> deadlocked. fn main() { let mut fds = [-1, -1]; @@ -21,27 +25,28 @@ fn main() { assert_eq!(res, 0); let arr1: [u8; 212992] = [1; 212992]; // Exhaust the space in the buffer so the subsequent write will block. - let res = unsafe { libc::write(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; + let res = + unsafe { libc_utils::write_all(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; assert_eq!(res, 212992); let thread1 = thread::spawn(move || { - let data = "abc".as_bytes().as_ptr(); + let data = "a".as_bytes(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - assert_eq!(res, 3); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + assert_eq!(res, data.len().cast_signed()); }); let thread2 = thread::spawn(move || { - let data = "abc".as_bytes().as_ptr(); + let data = "a".as_bytes(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - //~^ERROR: deadlocked - assert_eq!(res, 3); + let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + //~^ERROR: deadlock + assert_eq!(res, data.len().cast_signed()); }); let thread3 = thread::spawn(move || { // Unblock thread1 by freeing up some space. 
- let mut buf: [u8; 3] = [0; 3]; + let mut buf: [u8; 1] = [0; 1]; let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(buf, [1, 1, 1]); + assert_eq!(res, buf.len().cast_signed()); + assert_eq!(buf, [1]); }); thread1.join().unwrap(); thread2.join().unwrap(); diff --git a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr index b29cd70f35e..f766500d331 100644 --- a/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr +++ b/src/tools/miri/tests/fail-dep/libc/socketpair_block_write_twice.stderr @@ -23,8 +23,8 @@ error: the evaluated program deadlocked error: the evaluated program deadlocked --> tests/fail-dep/libc/socketpair_block_write_twice.rs:LL:CC | -LL | let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; - | ^ this thread got stuck here +LL | let res = unsafe { libc::write(fds[0], data.as_ptr() as *const libc::c_void, data.len()) }; + | ^ this thread got stuck here | = note: BACKTRACE on thread `unnamed-ID`: = note: inside closure at tests/fail-dep/libc/socketpair_block_write_twice.rs:LL:CC diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs index 4468eb299f3..26f2e73dd75 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs @@ -17,5 +17,5 @@ fn main() { // These two types have the same size but are still not compatible. let g = unsafe { std::mem::transmute::<fn(S), fn(A)>(f) }; - g(Default::default()) //~ ERROR: calling a function with argument of type S passing data of type [i32; 4] + g(Default::default()) //~ ERROR: type S passing argument of type [i32; 4] } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr index cabefa8bee9..f793abb0b62 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_array_vs_struct.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type S passing data of type [i32; 4] +error: Undefined Behavior: calling a function whose parameter #1 has type S passing argument of type [i32; 4] --> tests/fail/function_pointers/abi_mismatch_array_vs_struct.rs:LL:CC | LL | g(Default::default()) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs index a1fda329e8d..0cca4a13233 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn(f32), fn(i32)>(f) }; - g(42) //~ ERROR: calling a function with argument of type f32 passing data of type i32 + g(42) //~ ERROR: type f32 passing argument of type i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr index 52cc48d58ce..3651fc9b3f7 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr +++ 
b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_int_vs_float.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type f32 passing data of type i32 +error: Undefined Behavior: calling a function whose parameter #1 has type f32 passing argument of type i32 --> tests/fail/function_pointers/abi_mismatch_int_vs_float.rs:LL:CC | LL | g(42) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs index f0ea5ccfe0f..053a4a5f284 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn(*const [i32]), fn(*const i32)>(f) }; - g(&42 as *const i32) //~ ERROR: calling a function with argument of type *const [i32] passing data of type *const i32 + g(&42 as *const i32) //~ ERROR: type *const [i32] passing argument of type *const i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr index 2fbb0408c59..88345a0688c 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_raw_pointer.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type *const [i32] passing data of type *const i32 +error: Undefined Behavior: calling a function whose parameter #1 has type *const [i32] passing argument of type *const i32 --> tests/fail/function_pointers/abi_mismatch_raw_pointer.rs:LL:CC | LL | g(&42 as *const i32) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs index c5900489b4c..f3dffcc4e86 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.rs @@ -12,5 +12,5 @@ fn main() { let fnptr: fn(S2) = callee; let fnptr: fn(S1) = unsafe { std::mem::transmute(fnptr) }; fnptr(S1(NonZero::new(1).unwrap())); - //~^ ERROR: calling a function with argument of type S2 passing data of type S1 + //~^ ERROR: type S2 passing argument of type S1 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr index 2c1ac0ee702..47658395132 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_repr_C.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type S2 passing data of type S1 +error: Undefined Behavior: calling a function whose parameter #1 has type S2 passing argument of type S1 --> tests/fail/function_pointers/abi_mismatch_repr_C.rs:LL:CC | LL | fnptr(S1(NonZero::new(1).unwrap())); diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs index 0fdab49b94b..05b645cf75a 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_return_type.rs @@ -5,5 +5,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn() -> u32, fn()>(f) }; - g() //~ ERROR: calling a function with 
return type u32 passing return place of type () + g() //~ ERROR: type u32 passing return place of type () } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs index 20384f0965b..ca43c06008f 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.rs @@ -3,5 +3,5 @@ fn main() { let g = unsafe { std::mem::transmute::<fn((i32, i32)), fn(i32)>(f) }; - g(42) //~ ERROR: calling a function with argument of type (i32, i32) passing data of type i32 + g(42) //~ ERROR: type (i32, i32) passing argument of type i32 } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr index e45ad12ec05..2ed9ac2e6da 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_simple.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type (i32, i32) passing data of type i32 +error: Undefined Behavior: calling a function whose parameter #1 has type (i32, i32) passing argument of type i32 --> tests/fail/function_pointers/abi_mismatch_simple.rs:LL:CC | LL | g(42) diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs index 80f357b61ba..dedcaac6142 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.rs @@ -7,5 +7,5 @@ fn main() { // These two vector types have the same size but are still not compatible. let g = unsafe { std::mem::transmute::<fn(simd::u32x8), fn(simd::u64x4)>(f) }; - g(Default::default()) //~ ERROR: calling a function with argument of type std::simd::Simd<u32, 8> passing data of type std::simd::Simd<u64, 4> + g(Default::default()) //~ ERROR: type std::simd::Simd<u32, 8> passing argument of type std::simd::Simd<u64, 4> } diff --git a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr index bad2495cb39..b13e8d936db 100644 --- a/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr +++ b/src/tools/miri/tests/fail/function_pointers/abi_mismatch_vector.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type std::simd::Simd<u32, 8> passing data of type std::simd::Simd<u64, 4> +error: Undefined Behavior: calling a function whose parameter #1 has type std::simd::Simd<u32, 8> passing argument of type std::simd::Simd<u64, 4> --> tests/fail/function_pointers/abi_mismatch_vector.rs:LL:CC | LL | g(Default::default()) diff --git a/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs new file mode 100644 index 00000000000..1e10f682e71 --- /dev/null +++ b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.rs @@ -0,0 +1,39 @@ +unsafe extern "C" fn ctor() -> i32 { + //~^ERROR: calling a function with return type i32 passing return place of type () + 0 +} + +#[rustfmt::skip] +macro_rules! 
ctor { + ($ident:ident = $ctor:ident) => { + #[cfg_attr( + all(any( + target_os = "linux", + target_os = "android", + target_os = "dragonfly", + target_os = "freebsd", + target_os = "haiku", + target_os = "illumos", + target_os = "netbsd", + target_os = "openbsd", + target_os = "solaris", + target_os = "none", + target_family = "wasm", + )), + link_section = ".init_array" + )] + #[cfg_attr(windows, link_section = ".CRT$XCU")] + #[cfg_attr( + any(target_os = "macos", target_os = "ios"), + // We do not set the `mod_init_funcs` flag here since ctor/inventory also do not do + // that. See <https://github.com/rust-lang/miri/pull/4459#discussion_r2200115629>. + link_section = "__DATA,__mod_init_func" + )] + #[used] + static $ident: unsafe extern "C" fn() -> i32 = $ctor; + }; +} + +ctor! { CTOR = ctor } + +fn main() {} diff --git a/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr new file mode 100644 index 00000000000..664bfbd32db --- /dev/null +++ b/src/tools/miri/tests/fail/shims/ctor_wrong_ret_type.stderr @@ -0,0 +1,12 @@ +error: Undefined Behavior: calling a function with return type i32 passing return place of type () + | + = note: Undefined Behavior occurred here + = note: (no span available) + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = help: this means these two types are not *guaranteed* to be ABI-compatible across all targets + = help: if you think this code should be accepted anyway, please report an issue with Miri + = note: BACKTRACE: + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs b/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs index eb8de04dcc4..77699776aea 100644 --- a/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs +++ b/src/tools/miri/tests/fail/shims/input_arg_mismatch.rs @@ -16,6 +16,6 @@ fn main() { } as u32; let _ = unsafe { close(fd); - //~^ ERROR: calling a function with argument of type i32 passing data of type u32 + //~^ ERROR: type i32 passing argument of type u32 }; } diff --git a/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr b/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr index ce00b624a42..ec27fd5ebb8 100644 --- a/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr +++ b/src/tools/miri/tests/fail/shims/input_arg_mismatch.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 +error: Undefined Behavior: calling a function whose parameter #1 has type i32 passing argument of type u32 --> tests/fail/shims/input_arg_mismatch.rs:LL:CC | LL | close(fd); diff --git a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs index 6df132d3255..36bd1e99cfb 100644 --- a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs +++ b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.rs @@ -6,7 +6,7 @@ fn main() { // the error should point to `become g(x)`, // but tail calls mess up the backtrace it seems like... 
f(0); - //~^ error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 + //~^ error: type i32 passing argument of type u32 } fn f(x: u32) { diff --git a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr index fbb0d3d565d..cabea5df85d 100644 --- a/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr +++ b/src/tools/miri/tests/fail/tail_calls/signature-mismatch-arg.stderr @@ -1,4 +1,4 @@ -error: Undefined Behavior: calling a function with argument of type i32 passing data of type u32 +error: Undefined Behavior: calling a function whose parameter #1 has type i32 passing argument of type u32 --> tests/fail/tail_calls/signature-mismatch-arg.rs:LL:CC | LL | f(0); diff --git a/src/tools/miri/tests/genmc/pass/test_cxx_build.rs b/src/tools/miri/tests/genmc/pass/test_cxx_build.rs new file mode 100644 index 00000000000..f621bd9114f --- /dev/null +++ b/src/tools/miri/tests/genmc/pass/test_cxx_build.rs @@ -0,0 +1,8 @@ +//@compile-flags: -Zmiri-genmc + +#![no_main] + +#[unsafe(no_mangle)] +fn miri_start(_argc: isize, _argv: *const *const u8) -> isize { + 0 +} diff --git a/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr b/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr new file mode 100644 index 00000000000..4b7aa824bd1 --- /dev/null +++ b/src/tools/miri/tests/genmc/pass/test_cxx_build.stderr @@ -0,0 +1,5 @@ +warning: borrow tracking has been disabled, it is not (yet) supported in GenMC mode. +C++: GenMC handle created! +Miri: GenMC handle creation successful! +C++: GenMC handle destroyed! +Miri: Dropping GenMC handle successful! diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs index 54ebfa9d198..c97206487a1 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-blocking.rs @@ -6,6 +6,9 @@ use std::convert::TryInto; use std::thread; use std::thread::spawn; +#[path = "../../utils/libc.rs"] +mod libc_utils; + // This is a set of testcases for blocking epoll. fn main() { @@ -97,7 +100,7 @@ fn test_epoll_block_then_unblock() { let thread1 = spawn(move || { thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); check_epoll_wait::<1>(epfd, &[(expected_event, expected_value)], 10); @@ -130,7 +133,7 @@ fn test_notification_after_timeout() { // Trigger epoll notification after timeout. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Check the result of the notification. 
diff --git a/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs b/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs index dc3ab2828fa..7130790b86d 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-epoll-no-blocking.rs @@ -2,6 +2,9 @@ use std::convert::TryInto; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_epoll_socketpair(); test_epoll_socketpair_both_sides(); @@ -64,7 +67,7 @@ fn test_epoll_socketpair() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register fd[1] with EPOLLIN|EPOLLOUT|EPOLLET|EPOLLRDHUP @@ -85,7 +88,7 @@ fn test_epoll_socketpair() { // Write some more to fd[0]. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // This did not change the readiness of fd[1]. And yet, we're seeing the event reported @@ -153,7 +156,7 @@ fn test_epoll_ctl_del() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register fd[1] with EPOLLIN|EPOLLOUT|EPOLLET @@ -182,7 +185,7 @@ fn test_two_epoll_instance() { // Write to the socketpair. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Register one side of the socketpair with EPOLLIN | EPOLLOUT | EPOLLET. @@ -224,7 +227,7 @@ fn test_two_same_fd_in_same_epoll_instance() { // Write to the socketpair. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -243,7 +246,7 @@ fn test_epoll_eventfd() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Create an epoll instance. @@ -282,7 +285,7 @@ fn test_epoll_socketpair_both_sides() { // Write to fds[1]. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -297,7 +300,8 @@ fn test_epoll_socketpair_both_sides() { // Read from fds[0]. let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "abcde".as_bytes()); @@ -325,7 +329,7 @@ fn test_closed_fd() { // Write to the eventfd instance. 
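// For reference, the 8-byte writes in the eventfd tests of this file are not
// arbitrary: an eventfd transfers exactly one u64 per call, a write adds the value
// to the kernel counter, and a read returns the counter (resetting it to zero in
// the default, non-semaphore mode). Minimal Linux-only sketch, not from the tests:
unsafe fn eventfd_demo() {
    let fd = libc::eventfd(0, 0);
    assert!(fd >= 0);
    let val: u64 = 3;
    assert_eq!(libc::write(fd, &val as *const u64 as *const libc::c_void, 8), 8);
    let mut out: u64 = 0;
    assert_eq!(libc::read(fd, &mut out as *mut u64 as *mut libc::c_void, 8), 8);
    assert_eq!(out, 3); // counter is returned and reset
    libc::close(fd);
}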
let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Close the eventfd. @@ -371,7 +375,8 @@ fn test_not_fully_closed_fd() { // Write to the eventfd instance to produce notification. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(newfd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = + unsafe { libc_utils::write_all(newfd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Close the dupped fd. @@ -391,7 +396,7 @@ fn test_event_overwrite() { // Write to the eventfd instance. let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); // Create an epoll instance. @@ -445,7 +450,7 @@ fn test_socketpair_read() { // Write 5 bytes to fds[1]. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); //Two notification should be received. @@ -460,7 +465,8 @@ fn test_socketpair_read() { // Read 3 bytes from fds[0]. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 3); assert_eq!(buf, "abc".as_bytes()); @@ -478,7 +484,8 @@ fn test_socketpair_read() { // Read until the buffer is empty. let mut buf: [u8; 2] = [0; 2]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 2); assert_eq!(buf, "de".as_bytes()); @@ -510,8 +517,9 @@ fn test_no_notification_for_unregister_flag() { // Write to fd[1]. let data = "abcde".as_bytes().as_ptr(); - let res: i32 = - unsafe { libc::write(fds[1], data as *const libc::c_void, 5).try_into().unwrap() }; + let res: i32 = unsafe { + libc_utils::write_all(fds[1], data as *const libc::c_void, 5).try_into().unwrap() + }; assert_eq!(res, 5); // Check result from epoll_wait. Since we didn't register EPOLLIN flag, the notification won't @@ -546,7 +554,7 @@ fn test_socketpair_epollerr() { // Write to fd[0] let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); // Close fds[1]. @@ -717,6 +725,6 @@ fn test_issue_3858() { // Write to the eventfd instance. 
let sized_8_data: [u8; 8] = 1_u64.to_ne_bytes(); - let res = unsafe { libc::write(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; + let res = unsafe { libc_utils::write_all(fd, sized_8_data.as_ptr() as *const libc::c_void, 8) }; assert_eq!(res, 8); } diff --git a/src/tools/miri/tests/pass-dep/libc/libc-fs.rs b/src/tools/miri/tests/pass-dep/libc/libc-fs.rs index 0ff48c389e8..86cf2a041f0 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-fs.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-fs.rs @@ -14,6 +14,9 @@ use std::path::PathBuf; #[path = "../../utils/mod.rs"] mod utils; +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_dup(); test_dup_stdout_stderr(); @@ -74,8 +77,8 @@ fn test_dup_stdout_stderr() { unsafe { let new_stdout = libc::fcntl(1, libc::F_DUPFD, 0); let new_stderr = libc::fcntl(2, libc::F_DUPFD, 0); - libc::write(new_stdout, bytes.as_ptr() as *const libc::c_void, bytes.len()); - libc::write(new_stderr, bytes.as_ptr() as *const libc::c_void, bytes.len()); + libc_utils::write_all(new_stdout, bytes.as_ptr() as *const libc::c_void, bytes.len()); + libc_utils::write_all(new_stderr, bytes.as_ptr() as *const libc::c_void, bytes.len()); } } @@ -92,16 +95,24 @@ fn test_dup() { let new_fd2 = libc::dup2(fd, 8); let mut first_buf = [0u8; 4]; - libc::read(fd, first_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&first_buf, b"dup "); + let first_len = libc::read(fd, first_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(first_len > 0); + let first_len = first_len as usize; + assert_eq!(first_buf[..first_len], bytes[..first_len]); + let remaining_bytes = &bytes[first_len..]; let mut second_buf = [0u8; 4]; - libc::read(new_fd, second_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&second_buf, b"and "); + let second_len = libc::read(new_fd, second_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(second_len > 0); + let second_len = second_len as usize; + assert_eq!(second_buf[..second_len], remaining_bytes[..second_len]); + let remaining_bytes = &remaining_bytes[second_len..]; let mut third_buf = [0u8; 4]; - libc::read(new_fd2, third_buf.as_mut_ptr() as *mut libc::c_void, 4); - assert_eq!(&third_buf, b"dup2"); + let third_len = libc::read(new_fd2, third_buf.as_mut_ptr() as *mut libc::c_void, 4); + assert!(third_len > 0); + let third_len = third_len as usize; + assert_eq!(third_buf[..third_len], remaining_bytes[..third_len]); } } @@ -145,7 +156,7 @@ fn test_ftruncate<T: From<i32>>( let bytes = b"hello"; let path = utils::prepare("miri_test_libc_fs_ftruncate.txt"); let mut file = File::create(&path).unwrap(); - file.write(bytes).unwrap(); + file.write_all(bytes).unwrap(); file.sync_all().unwrap(); assert_eq!(file.metadata().unwrap().len(), 5); @@ -402,10 +413,10 @@ fn test_read_and_uninit() { unsafe { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); - let mut buf: MaybeUninit<[u8; 2]> = std::mem::MaybeUninit::uninit(); - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 2), 2); + let mut buf: MaybeUninit<u8> = std::mem::MaybeUninit::uninit(); + assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 1), 1); let buf = buf.assume_init(); - assert_eq!(buf, [1, 2]); + assert_eq!(buf, 1); assert_eq!(libc::close(fd), 0); } remove_file(&path).unwrap(); @@ -413,14 +424,22 @@ fn test_read_and_uninit() { { // We test that if we requested to read 4 bytes, but actually read 3 bytes, then // 3 bytes (not 4) will be overwritten, and remaining byte will be left as-is. 
- let path = utils::prepare_with_content("pass-libc-read-and-uninit-2.txt", &[1u8, 2, 3]); + let data = [1u8, 2, 3]; + let path = utils::prepare_with_content("pass-libc-read-and-uninit-2.txt", &data); let cpath = CString::new(path.clone().into_os_string().into_encoded_bytes()).unwrap(); unsafe { let fd = libc::open(cpath.as_ptr(), libc::O_RDONLY); assert_ne!(fd, -1); let mut buf = [42u8; 5]; - assert_eq!(libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4), 3); - assert_eq!(buf, [1, 2, 3, 42, 42]); + let res = libc::read(fd, buf.as_mut_ptr().cast::<std::ffi::c_void>(), 4); + assert!(res > 0 && res < 4); + for i in 0..buf.len() { + assert_eq!( + buf[i], + if i < res as usize { data[i] } else { 42 }, + "wrong result at pos {i}" + ); + } assert_eq!(libc::close(fd), 0); } remove_file(&path).unwrap(); diff --git a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs index bc755af864c..ffbcf633b98 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-pipe.rs @@ -2,6 +2,10 @@ // test_race depends on a deterministic schedule. //@compile-flags: -Zmiri-deterministic-concurrency use std::thread; + +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_pipe(); test_pipe_threaded(); @@ -26,21 +30,29 @@ fn test_pipe() { // Read size == data available in buffer. let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf3: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) + }; assert_eq!(res, 5); assert_eq!(buf3, "12345".as_bytes()); // Read size > data available in buffer. - let data = "123".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 3) }; + let data = "123".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf4: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[0], buf4.as_mut_ptr().cast(), buf4.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf4[0..3], "123".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf4[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. 
+ let res = unsafe { libc_utils::read_all(fds[0], buf4[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } } fn test_pipe_threaded() { @@ -51,7 +63,7 @@ fn test_pipe_threaded() { let thread1 = thread::spawn(move || { let mut buf: [u8; 5] = [0; 5]; let res: i64 = unsafe { - libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -60,7 +72,7 @@ fn test_pipe_threaded() { }); thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); thread1.join().unwrap(); @@ -68,11 +80,12 @@ fn test_pipe_threaded() { let thread2 = thread::spawn(move || { thread::yield_now(); let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "12345".as_bytes()); thread2.join().unwrap(); @@ -90,7 +103,7 @@ fn test_race() { // write() from the main thread will occur before the read() here // because preemption is disabled and the main thread yields after write(). let res: i32 = unsafe { - libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -101,7 +114,7 @@ fn test_race() { }); unsafe { VAL = 1 }; let data = "a".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 1) }; assert_eq!(res, 1); thread::yield_now(); thread1.join().unwrap(); @@ -186,11 +199,12 @@ fn test_pipe_fcntl_threaded() { // the socket is now "non-blocking", the shim needs to deal correctly // with threads that were blocked before the socket was made non-blocking. let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); // The `read` below will block. - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; thread1.join().unwrap(); assert_eq!(res, 5); } diff --git a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs index c36f6b11224..9c211ffbdbe 100644 --- a/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs +++ b/src/tools/miri/tests/pass-dep/libc/libc-socketpair.rs @@ -6,6 +6,10 @@ #![allow(static_mut_refs)] use std::thread; + +#[path = "../../utils/libc.rs"] +mod libc_utils; + fn main() { test_socketpair(); test_socketpair_threaded(); @@ -22,54 +26,71 @@ fn test_socketpair() { // Read size == data available in buffer. 
let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "abcde".as_bytes()); // Read size > data available in buffer. - let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let data = "abc".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[0], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf2: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[1], buf2.as_mut_ptr().cast(), buf2.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf2[0..3], "abc".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf2[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. + let res = unsafe { libc_utils::read_all(fds[1], buf2[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } // Test read and write from another direction. // Read size == data available in buffer. let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); let mut buf3: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[0], buf3.as_mut_ptr().cast(), buf3.len() as libc::size_t) + }; assert_eq!(res, 5); assert_eq!(buf3, "12345".as_bytes()); // Read size > data available in buffer. - let data = "123".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 3) }; + let data = "123".as_bytes(); + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); let mut buf4: [u8; 5] = [0; 5]; let res = unsafe { libc::read(fds[0], buf4.as_mut_ptr().cast(), buf4.len() as libc::size_t) }; - assert_eq!(res, 3); - assert_eq!(&buf4[0..3], "123".as_bytes()); + assert!(res > 0 && res <= 3); + let res = res as usize; + assert_eq!(buf4[..res], data[..res]); + if res < 3 { + // Drain the rest from the read end. + let res = unsafe { libc_utils::read_all(fds[0], buf4[res..].as_mut_ptr().cast(), 3 - res) }; + assert!(res > 0); + } // Test when happens when we close one end, with some data in the buffer. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data.as_ptr() as *const libc::c_void, 3) }; assert_eq!(res, 3); unsafe { libc::close(fds[0]) }; // Reading the other end should return that data, then EOF. let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 3); assert_eq!(&buf[0..3], "123".as_bytes()); - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 0); // 0-sized read: EOF. 
// Writing the other end should emit EPIPE. - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[1], data.as_ptr() as *const libc::c_void, 1) }; assert_eq!(res, -1); assert_eq!(std::io::Error::last_os_error().raw_os_error(), Some(libc::EPIPE)); } @@ -82,7 +103,7 @@ fn test_socketpair_threaded() { let thread1 = thread::spawn(move || { let mut buf: [u8; 5] = [0; 5]; let res: i64 = unsafe { - libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -91,7 +112,7 @@ fn test_socketpair_threaded() { }); thread::yield_now(); let data = "abcde".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 5) }; assert_eq!(res, 5); thread1.join().unwrap(); @@ -99,11 +120,12 @@ fn test_socketpair_threaded() { let thread2 = thread::spawn(move || { thread::yield_now(); let data = "12345".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[1], data as *const libc::c_void, 5) }; + let res = unsafe { libc_utils::write_all(fds[1], data as *const libc::c_void, 5) }; assert_eq!(res, 5); }); let mut buf: [u8; 5] = [0; 5]; - let res = unsafe { libc::read(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = + unsafe { libc_utils::read_all(fds[0], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; assert_eq!(res, 5); assert_eq!(buf, "12345".as_bytes()); thread2.join().unwrap(); @@ -119,7 +141,7 @@ fn test_race() { // write() from the main thread will occur before the read() here // because preemption is disabled and the main thread yields after write(). let res: i32 = unsafe { - libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) .try_into() .unwrap() }; @@ -130,7 +152,7 @@ fn test_race() { }); unsafe { VAL = 1 }; let data = "a".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 1) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 1) }; assert_eq!(res, 1); thread::yield_now(); thread1.join().unwrap(); @@ -144,14 +166,16 @@ fn test_blocking_read() { let thread1 = thread::spawn(move || { // Let this thread block on read. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + }; assert_eq!(res, 3); assert_eq!(&buf, "abc".as_bytes()); }); let thread2 = thread::spawn(move || { // Unblock thread1 by doing writing something. let data = "abc".as_bytes().as_ptr(); - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 3) }; assert_eq!(res, 3); }); thread1.join().unwrap(); @@ -165,18 +189,21 @@ fn test_blocking_write() { assert_eq!(res, 0); let arr1: [u8; 212992] = [1; 212992]; // Exhaust the space in the buffer so the subsequent write will block. 
- let res = unsafe { libc::write(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; + let res = + unsafe { libc_utils::write_all(fds[0], arr1.as_ptr() as *const libc::c_void, 212992) }; assert_eq!(res, 212992); let thread1 = thread::spawn(move || { let data = "abc".as_bytes().as_ptr(); // The write below will be blocked because the buffer is already full. - let res = unsafe { libc::write(fds[0], data as *const libc::c_void, 3) }; + let res = unsafe { libc_utils::write_all(fds[0], data as *const libc::c_void, 3) }; assert_eq!(res, 3); }); let thread2 = thread::spawn(move || { // Unblock thread1 by freeing up some space. let mut buf: [u8; 3] = [0; 3]; - let res = unsafe { libc::read(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) }; + let res = unsafe { + libc_utils::read_all(fds[1], buf.as_mut_ptr().cast(), buf.len() as libc::size_t) + }; assert_eq!(res, 3); assert_eq!(buf, [1, 1, 1]); }); diff --git a/src/tools/miri/tests/pass/const-addrs.rs b/src/tools/miri/tests/pass/const-addrs.rs index af68b28b2b8..0d1531c73cd 100644 --- a/src/tools/miri/tests/pass/const-addrs.rs +++ b/src/tools/miri/tests/pass/const-addrs.rs @@ -1,14 +1,10 @@ -// The const fn interpreter creates a new AllocId every time it evaluates any const. -// If we do that in Miri, repeatedly evaluating a const causes unbounded memory use -// we need to keep track of the base address for that AllocId, and the allocation is never -// deallocated. -// In Miri we explicitly store previously-assigned AllocIds for each const and ensure -// that we only hand out a finite number of AllocIds per const. -// MIR inlining will put every evaluation of the const we're repeatedly evaluating into the same -// stack frame, breaking this test. +// The interpreter used to create a new AllocId every time it evaluates any const. +// This caused unbounded memory use in Miri. +// This test verifies that we only create a bounded amount of addresses for any given const. +// In practice, the interpreter always returns the same address, but we *do not guarantee* that. //@compile-flags: -Zinline-mir=no -const EVALS: usize = 256; +const EVALS: usize = 64; use std::collections::HashSet; fn main() { @@ -16,10 +12,8 @@ fn main() { for _ in 0..EVALS { addrs.insert(const_addr()); } - // Check that the const allocation has multiple base addresses - assert!(addrs.len() > 1); - // But also that we get a limited number of unique base addresses - assert!(addrs.len() < EVALS); + // Check that we always return the same base address for the const allocation. + assert_eq!(addrs.len(), 1); // Check that within a call we always produce the same address let mut prev = 0; diff --git a/src/tools/miri/tests/pass/shims/ctor.rs b/src/tools/miri/tests/pass/shims/ctor.rs index b997d2386b8..a0fcdb1081e 100644 --- a/src/tools/miri/tests/pass/shims/ctor.rs +++ b/src/tools/miri/tests/pass/shims/ctor.rs @@ -2,13 +2,13 @@ use std::sync::atomic::{AtomicUsize, Ordering}; static COUNT: AtomicUsize = AtomicUsize::new(0); -unsafe extern "C" fn ctor() { - COUNT.fetch_add(1, Ordering::Relaxed); +unsafe extern "C" fn ctor<const N: usize>() { + COUNT.fetch_add(N, Ordering::Relaxed); } #[rustfmt::skip] macro_rules! ctor { - ($ident:ident = $ctor:ident) => { + ($ident:ident: $ty:ty = $ctor:expr) => { #[cfg_attr( all(any( target_os = "linux", @@ -33,14 +33,13 @@ macro_rules! ctor { link_section = "__DATA,__mod_init_func" )] #[used] - static $ident: unsafe extern "C" fn() = $ctor; + static $ident: $ty = $ctor; }; } -ctor! { CTOR1 = ctor } -ctor! { CTOR2 = ctor } -ctor! 
{ CTOR3 = ctor } +ctor! { CTOR1: unsafe extern "C" fn() = ctor::<1> } +ctor! { CTOR2: [unsafe extern "C" fn(); 2] = [ctor::<2>, ctor::<3>] } fn main() { - assert_eq!(COUNT.load(Ordering::Relaxed), 3, "ctors did not run"); + assert_eq!(COUNT.load(Ordering::Relaxed), 6, "ctors did not run"); } diff --git a/src/tools/miri/tests/pass/shims/fs.rs b/src/tools/miri/tests/pass/shims/fs.rs index 9d5725773e6..e7f11c54704 100644 --- a/src/tools/miri/tests/pass/shims/fs.rs +++ b/src/tools/miri/tests/pass/shims/fs.rs @@ -17,6 +17,10 @@ mod utils; fn main() { test_path_conversion(); test_file(); + // Partial reads/writes are apparently not a thing on Windows. + if cfg!(not(windows)) { + test_file_partial_reads_writes(); + } test_file_create_new(); test_metadata(); test_seek(); @@ -53,7 +57,7 @@ fn test_file() { file.write(&mut []).unwrap(); assert_eq!(file.metadata().unwrap().len(), 0); - file.write(bytes).unwrap(); + file.write_all(bytes).unwrap(); assert_eq!(file.metadata().unwrap().len(), bytes.len() as u64); // Test opening, reading and closing a file. let mut file = File::open(&path).unwrap(); @@ -66,10 +70,36 @@ fn test_file() { assert!(!file.is_terminal()); + // Writing to a file opened for reading should error (and not stop interpretation). std does not + // categorize the error so we don't check for details. + file.write(&[]).unwrap_err(); + // Removing file should succeed. remove_file(&path).unwrap(); } +fn test_file_partial_reads_writes() { + let path = utils::prepare_with_content("miri_test_fs_file.txt", b"abcdefg"); + + // Ensure we sometimes do incomplete writes. + let got_short_write = (0..16).any(|_| { + let _ = remove_file(&path); // FIXME(win, issue #4483): errors if the file already exists + let mut file = File::create(&path).unwrap(); + file.write(&[0; 4]).unwrap() != 4 + }); + assert!(got_short_write); + // Ensure we sometimes do incomplete reads. + let got_short_read = (0..16).any(|_| { + let mut file = File::open(&path).unwrap(); + let mut buf = [0; 4]; + file.read(&mut buf).unwrap() != 4 + }); + assert!(got_short_read); + + // Clean up + remove_file(&path).unwrap(); +} + fn test_file_clone() { let bytes = b"Hello, World!\n"; let path = utils::prepare_with_content("miri_test_fs_file_clone.txt", bytes); diff --git a/src/tools/miri/tests/pass/shims/pipe.rs b/src/tools/miri/tests/pass/shims/pipe.rs index c47feb8774a..4915e54c533 100644 --- a/src/tools/miri/tests/pass/shims/pipe.rs +++ b/src/tools/miri/tests/pass/shims/pipe.rs @@ -4,8 +4,8 @@ use std::io::{Read, Write, pipe}; fn main() { let (mut ping_rx, mut ping_tx) = pipe().unwrap(); - ping_tx.write(b"hello").unwrap(); + ping_tx.write_all(b"hello").unwrap(); let mut buf: [u8; 5] = [0; 5]; - ping_rx.read(&mut buf).unwrap(); + ping_rx.read_exact(&mut buf).unwrap(); assert_eq!(&buf, "hello".as_bytes()); } diff --git a/src/tools/miri/tests/ui.rs b/src/tools/miri/tests/ui.rs index cb915b11b67..73fbe2cc020 100644 --- a/src/tools/miri/tests/ui.rs +++ b/src/tools/miri/tests/ui.rs @@ -338,6 +338,20 @@ fn main() -> Result<()> { ui(Mode::Fail, "tests/native-lib/fail", &target, WithoutDependencies, tmpdir.path())?; } + // We only enable GenMC tests when the `genmc` feature is enabled, but also only on platforms we support: + // FIXME(genmc,macos): Add `target_os = "macos"` once `https://github.com/dtolnay/cxx/issues/1535` is fixed. + // FIXME(genmc,cross-platform): remove `host == target` check once cross-platform support with GenMC is possible. 
+ if cfg!(all( + feature = "genmc", + target_os = "linux", + target_pointer_width = "64", + target_endian = "little" + )) && host == target + { + ui(Mode::Pass, "tests/genmc/pass", &target, WithDependencies, tmpdir.path())?; + ui(Mode::Fail, "tests/genmc/fail", &target, WithDependencies, tmpdir.path())?; + } + Ok(()) } diff --git a/src/tools/miri/tests/utils/fs.rs b/src/tools/miri/tests/utils/fs.rs index 7340908626f..7d75b3fced3 100644 --- a/src/tools/miri/tests/utils/fs.rs +++ b/src/tools/miri/tests/utils/fs.rs @@ -1,6 +1,6 @@ use std::ffi::OsString; -use std::fs; use std::path::PathBuf; +use std::{fs, io}; use super::miri_extern; diff --git a/src/tools/miri/tests/utils/libc.rs b/src/tools/miri/tests/utils/libc.rs new file mode 100644 index 00000000000..1a3cd067c04 --- /dev/null +++ b/src/tools/miri/tests/utils/libc.rs @@ -0,0 +1,44 @@ +//! Utils that need libc. +#![allow(dead_code)] + +pub unsafe fn read_all( + fd: libc::c_int, + buf: *mut libc::c_void, + count: libc::size_t, +) -> libc::ssize_t { + assert!(count > 0); + let mut read_so_far = 0; + while read_so_far < count { + let res = libc::read(fd, buf.add(read_so_far), count - read_so_far); + if res < 0 { + return res; + } + if res == 0 { + // EOF + break; + } + read_so_far += res as libc::size_t; + } + return read_so_far as libc::ssize_t; +} + +pub unsafe fn write_all( + fd: libc::c_int, + buf: *const libc::c_void, + count: libc::size_t, +) -> libc::ssize_t { + assert!(count > 0); + let mut written_so_far = 0; + while written_so_far < count { + let res = libc::write(fd, buf.add(written_so_far), count - written_so_far); + if res < 0 { + return res; + } + if res == 0 { + // EOF? + break; + } + written_so_far += res as libc::size_t; + } + return written_so_far as libc::ssize_t; +} diff --git a/src/tools/miri/tests/x86_64-unknown-kernel.json b/src/tools/miri/tests/x86_64-unknown-kernel.json index 8da67d3a1c6..a5eaceb4f68 100644 --- a/src/tools/miri/tests/x86_64-unknown-kernel.json +++ b/src/tools/miri/tests/x86_64-unknown-kernel.json @@ -2,7 +2,7 @@ "llvm-target": "x86_64-unknown-none", "target-endian": "little", "target-pointer-width": "64", - "target-c-int-width": "32", + "target-c-int-width": 32, "data-layout": "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128", "arch": "x86_64", "os": "none", diff --git a/src/tools/miropt-test-tools/Cargo.toml b/src/tools/miropt-test-tools/Cargo.toml index 09b4c7d16dc..3eb5020968d 100644 --- a/src/tools/miropt-test-tools/Cargo.toml +++ b/src/tools/miropt-test-tools/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "miropt-test-tools" version = "0.1.0" -edition = "2021" +edition = "2024" [dependencies] diff --git a/src/tools/miropt-test-tools/src/lib.rs b/src/tools/miropt-test-tools/src/lib.rs index 41b53d2ad0e..10769c9c8ab 100644 --- a/src/tools/miropt-test-tools/src/lib.rs +++ b/src/tools/miropt-test-tools/src/lib.rs @@ -34,7 +34,7 @@ fn output_file_suffix(testfile: &Path, bit_width: u32, panic_strategy: PanicStra let mut suffix = String::new(); if each_bit_width { - suffix.push_str(&format!(".{}bit", bit_width)); + suffix.push_str(&format!(".{bit_width}bit")); } if each_panic_strategy { match panic_strategy { @@ -51,7 +51,7 @@ pub fn files_for_miropt_test( panic_strategy: PanicStrategy, ) -> MiroptTest { let mut out = Vec::new(); - let test_file_contents = fs::read_to_string(&testfile).unwrap(); + let test_file_contents = fs::read_to_string(testfile).unwrap(); let test_dir = testfile.parent().unwrap(); let test_crate = 
testfile.file_stem().unwrap().to_str().unwrap().replace('-', "_"); @@ -76,10 +76,10 @@ pub fn files_for_miropt_test( if test_name.ends_with(".diff") { let trimmed = test_name.trim_end_matches(".diff"); - passes.push(trimmed.split('.').last().unwrap().to_owned()); - let test_against = format!("{}.after.mir", trimmed); - from_file = format!("{}.before.mir", trimmed); - expected_file = format!("{}{}.diff", trimmed, suffix); + passes.push(trimmed.split('.').next_back().unwrap().to_owned()); + let test_against = format!("{trimmed}.after.mir"); + from_file = format!("{trimmed}.before.mir"); + expected_file = format!("{trimmed}{suffix}.diff"); assert!(test_names.next().is_none(), "two mir pass names specified for MIR diff"); to_file = Some(test_against); } else if let Some(first_pass) = test_names.next() { @@ -92,10 +92,9 @@ pub fn files_for_miropt_test( } assert!(test_names.next().is_none(), "three mir pass names specified for MIR diff"); - expected_file = - format!("{}{}.{}-{}.diff", test_name, suffix, first_pass, second_pass); - let second_file = format!("{}.{}.mir", test_name, second_pass); - from_file = format!("{}.{}.mir", test_name, first_pass); + expected_file = format!("{test_name}{suffix}.{first_pass}-{second_pass}.diff"); + let second_file = format!("{test_name}.{second_pass}.mir"); + from_file = format!("{test_name}.{first_pass}.mir"); to_file = Some(second_file); } else { // Allow-list for file extensions that can be produced by MIR dumps. @@ -112,7 +111,7 @@ pub fn files_for_miropt_test( ) } - expected_file = format!("{}{}.{}", test_name_wo_ext, suffix, test_name_ext); + expected_file = format!("{test_name_wo_ext}{suffix}.{test_name_ext}"); from_file = test_name.to_string(); assert!(test_names.next().is_none(), "two mir pass names specified for MIR dump"); to_file = None; @@ -123,7 +122,7 @@ pub fn files_for_miropt_test( ); }; if !expected_file.starts_with(&test_crate) { - expected_file = format!("{}.{}", test_crate, expected_file); + expected_file = format!("{test_crate}.{expected_file}"); } let expected_file = test_dir.join(expected_file); diff --git a/src/tools/opt-dist/src/tests.rs b/src/tools/opt-dist/src/tests.rs index c9a21fc6fb2..d5121b8c786 100644 --- a/src/tools/opt-dist/src/tests.rs +++ b/src/tools/opt-dist/src/tests.rs @@ -79,6 +79,7 @@ lld = false rustc = "{rustc}" cargo = "{cargo}" local-rebuild = true +compiletest-allow-stage0=true [target.{host_triple}] llvm-config = "{llvm_config}" @@ -117,7 +118,6 @@ llvm-config = "{llvm_config}" args.extend(["--skip", test_path]); } cmd(&args) - .env("COMPILETEST_FORCE_STAGE0", "1") // Also run dist-only tests .env("COMPILETEST_ENABLE_DIST_TESTS", "1") .run() diff --git a/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml new file mode 100644 index 00000000000..2a842f3b311 --- /dev/null +++ b/src/tools/rust-analyzer/.github/workflows/rustc-pull.yml @@ -0,0 +1,20 @@ +name: rustc-pull + +on: + workflow_dispatch: + schedule: + # Run at 04:00 UTC every Monday and Thursday + - cron: '0 4 * * 1,4' + +jobs: + pull: + if: github.repository == 'rust-lang/rust-analyzer' + uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main + with: + zulip-stream-id: 185405 + zulip-bot-email: "rust-analyzer-ci-bot@rust-lang.zulipchat.com" + pr-base-branch: master + branch-name: rustc-pull + secrets: + zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 
c471234bbe3..7d03300c221 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -396,15 +396,6 @@ dependencies = [ ] [[package]] -name = "directories" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] name = "dirs" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1268,7 +1259,7 @@ dependencies = [ "expect-test", "intern", "parser", - "ra-ap-rustc_lexer 0.122.0", + "ra-ap-rustc_lexer 0.123.0", "rustc-hash 2.1.1", "smallvec", "span", @@ -1504,7 +1495,7 @@ dependencies = [ "drop_bomb", "edition", "expect-test", - "ra-ap-rustc_lexer 0.122.0", + "ra-ap-rustc_lexer 0.123.0", "rustc-literal-escaper", "stdx", "tracing", @@ -1614,7 +1605,7 @@ dependencies = [ "object", "paths", "proc-macro-test", - "ra-ap-rustc_lexer 0.122.0", + "ra-ap-rustc_lexer 0.123.0", "span", "syntax-bridge", "tt", @@ -1688,6 +1679,7 @@ dependencies = [ "serde_json", "span", "stdx", + "temp-dir", "toolchain", "tracing", "triomphe", @@ -1756,9 +1748,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb01e1fec578003c85481c1cad4ff8cd8195b07c2dc85ae3f716108507ae15d5" +checksum = "f18c877575c259d127072e9bfc41d985202262fb4d6bfdae3d1252147c2562c2" dependencies = [ "bitflags 2.9.1", "ra-ap-rustc_hashes", @@ -1768,18 +1760,18 @@ dependencies = [ [[package]] name = "ra-ap-rustc_hashes" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0ec056e72a472ffef8761ce96ece6c626eb07368c09d0105b6df30d27d07673" +checksum = "2439ed1df3472443133b66949f81080dff88089b42f825761455463709ee1cad" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fcdd1001db0295e59052e9f53aeda588bbe81e362534f4687d41bd44777b5a7" +checksum = "57a24fe0be21be1f8ebc21dcb40129214fb4cefb0f2753f3d46b6dbe656a1a45" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1787,9 +1779,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728d64dd98e25530b32e3f7c7c1e844e52722b269360daa1cdeba9dff9727a26" +checksum = "844a27ddcad0116facae2df8e741fd788662cf93dc13029cd864f2b8013b81f9" dependencies = [ "proc-macro2", "quote", @@ -1809,9 +1801,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "415f0821f512608d825b3215489a6a6a2c18ed9f0045953d514e7ec23d4b90ab" +checksum = "2b734cfcb577d09877799a22742f1bd398be6c00bc428d9de56d48d11ece5771" dependencies = [ "memchr", "unicode-properties", @@ -1830,9 +1822,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.122.0" +version = "0.123.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4657fcfdfe06e2a02ec8180d4e7c95aecf4811ba50367e363d1a2300b7623284" +checksum = "75b0ee1f059b9dea0818c6c7267478926eee95ba4c7dcf89c8db32fa165d3904" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.1.1", @@ -2294,6 +2286,12 @@ dependencies = [ ] [[package]] +name = 
"temp-dir" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83176759e9416cf81ee66cb6508dbfe9c96f20b8b56265a39917551c23c70964" + +[[package]] name = "tenthash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2592,7 +2590,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "intern", - "ra-ap-rustc_lexer 0.122.0", + "ra-ap-rustc_lexer 0.123.0", "stdx", "text-size", ] @@ -3105,7 +3103,6 @@ name = "xtask" version = "0.1.0" dependencies = [ "anyhow", - "directories", "edition", "either", "flate2", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 700c116ec18..e7cf0212bf2 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -89,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.122", default-features = false } +ra-ap-rustc_lexer = { version = "0.123", default-features = false } ra-ap-rustc_parse_format = { version = "0.121", default-features = false } -ra-ap-rustc_index = { version = "0.122", default-features = false } -ra-ap-rustc_abi = { version = "0.122", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.122", default-features = false } +ra-ap-rustc_index = { version = "0.123", default-features = false } +ra-ap-rustc_abi = { version = "0.123", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.123", default-features = false } # local crates that aren't published to crates.io. These should not have versions. @@ -156,6 +156,7 @@ smallvec = { version = "1.15.1", features = [ "const_generics", ] } smol_str = "0.3.2" +temp-dir = "0.1.16" text-size = "1.1.1" tracing = "0.1.41" tracing-tree = "0.4.0" diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index 8c9393bcc93..0bf4fbdfbd6 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -30,6 +30,7 @@ pub type ProcMacroPaths = pub enum ProcMacroLoadingError { Disabled, FailedToBuild, + ExpectedProcMacroArtifact, MissingDylibPath, NotYetBuilt, NoProcMacros, @@ -39,7 +40,8 @@ impl ProcMacroLoadingError { pub fn is_hard_error(&self) -> bool { match self { ProcMacroLoadingError::Disabled | ProcMacroLoadingError::NotYetBuilt => false, - ProcMacroLoadingError::FailedToBuild + ProcMacroLoadingError::ExpectedProcMacroArtifact + | ProcMacroLoadingError::FailedToBuild | ProcMacroLoadingError::MissingDylibPath | ProcMacroLoadingError::NoProcMacros | ProcMacroLoadingError::ProcMacroSrvError(_) => true, @@ -51,10 +53,16 @@ impl Error for ProcMacroLoadingError {} impl fmt::Display for ProcMacroLoadingError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { + ProcMacroLoadingError::ExpectedProcMacroArtifact => { + write!(f, "proc-macro crate did not build proc-macro artifact") + } ProcMacroLoadingError::Disabled => write!(f, "proc-macro expansion is disabled"), ProcMacroLoadingError::FailedToBuild => write!(f, "proc-macro failed to build"), ProcMacroLoadingError::MissingDylibPath => { - write!(f, "proc-macro crate build data is missing a dylib path") + write!( + f, + "proc-macro crate built but the dylib path is missing, this indicates a problem with your build system." 
+ ) } ProcMacroLoadingError::NotYetBuilt => write!(f, "proc-macro not yet built"), ProcMacroLoadingError::NoProcMacros => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs index d3dfc05eb29..5695ab7ed00 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs @@ -16,7 +16,7 @@ use std::{ use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_expand::{ExpandError, InFile, MacroCallId, mod_path::ModPath, name::Name}; +use hir_expand::{InFile, MacroCallId, mod_path::ModPath, name::Name}; use la_arena::{Arena, ArenaMap}; use rustc_hash::FxHashMap; use smallvec::SmallVec; @@ -281,7 +281,6 @@ struct FormatTemplate { #[derive(Debug, Eq, PartialEq)] pub enum ExpressionStoreDiagnostics { InactiveCode { node: InFile<SyntaxNodePtr>, cfg: CfgExpr, opts: CfgOptions }, - MacroError { node: InFile<MacroCallPtr>, err: ExpandError }, UnresolvedMacroCall { node: InFile<MacroCallPtr>, path: ModPath }, UnreachableLabel { node: InFile<AstPtr<ast::Lifetime>>, name: Name }, AwaitOutsideOfAsync { node: InFile<AstPtr<ast::AwaitExpr>>, location: String }, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 4e877748ca2..abd1382801d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -960,38 +960,29 @@ impl ExprCollector<'_> { impl_trait_lower_fn: ImplTraitLowerFn<'_>, ) -> TypeBound { match node.kind() { - ast::TypeBoundKind::PathType(path_type) => { + ast::TypeBoundKind::PathType(binder, path_type) => { + let binder = match binder.and_then(|it| it.generic_param_list()) { + Some(gpl) => gpl + .lifetime_params() + .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<.text()))) + .collect(), + None => ThinVec::default(), + }; let m = match node.question_mark_token() { Some(_) => TraitBoundModifier::Maybe, None => TraitBoundModifier::None, }; self.lower_path_type(&path_type, impl_trait_lower_fn) .map(|p| { - TypeBound::Path(self.alloc_path(p, AstPtr::new(&path_type).upcast()), m) + let path = self.alloc_path(p, AstPtr::new(&path_type).upcast()); + if binder.is_empty() { + TypeBound::Path(path, m) + } else { + TypeBound::ForLifetime(binder, path) + } }) .unwrap_or(TypeBound::Error) } - ast::TypeBoundKind::ForType(for_type) => { - let lt_refs = match for_type.generic_param_list() { - Some(gpl) => gpl - .lifetime_params() - .flat_map(|lp| lp.lifetime().map(|lt| Name::new_lifetime(<.text()))) - .collect(), - None => ThinVec::default(), - }; - let path = for_type.ty().and_then(|ty| match &ty { - ast::Type::PathType(path_type) => { - self.lower_path_type(path_type, impl_trait_lower_fn).map(|p| (p, ty)) - } - _ => None, - }); - match path { - Some((p, ty)) => { - TypeBound::ForLifetime(lt_refs, self.alloc_path(p, AstPtr::new(&ty))) - } - None => TypeBound::Error, - } - } ast::TypeBoundKind::Use(gal) => TypeBound::Use( gal.use_bound_generic_args() .map(|p| match p { @@ -1981,13 +1972,7 @@ impl ExprCollector<'_> { return collector(self, None); } }; - if record_diagnostics { - if let Some(err) = res.err { - self.store - .diagnostics - .push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err }); - } - } + // No need to push macro and parsing errors as they'll be recreated from `macro_calls()`. 
match res.value { Some((mark, expansion)) => { @@ -1997,10 +1982,6 @@ impl ExprCollector<'_> { self.store.expansions.insert(macro_call_ptr, macro_file); } - if record_diagnostics { - // FIXME: Report parse errors here - } - let id = collector(self, expansion.map(|it| it.tree())); self.expander.exit(mark); id diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs index 02a1d274fb5..c570df42b2f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/generics.rs @@ -180,17 +180,18 @@ impl GenericParamsCollector { continue; }; - let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| { - // Higher-Ranked Trait Bounds - param_list - .lifetime_params() - .map(|lifetime_param| { - lifetime_param - .lifetime() - .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text())) - }) - .collect() - }); + let lifetimes: Option<Box<_>> = + pred.for_binder().and_then(|it| it.generic_param_list()).map(|param_list| { + // Higher-Ranked Trait Bounds + param_list + .lifetime_params() + .map(|lifetime_param| { + lifetime_param + .lifetime() + .map_or_else(Name::missing, |lt| Name::new_lifetime(<.text())) + }) + .collect() + }); for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) { self.lower_type_bound_as_predicate(ec, bound, lifetimes.as_deref(), target); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs index 19c7ce0ce04..55e738b58bd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/path.rs @@ -27,7 +27,7 @@ pub enum Path { } // This type is being used a lot, make sure it doesn't grow unintentionally. 
-#[cfg(target_arch = "x86_64")] +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] const _: () = { assert!(size_of::<Path>() == 24); assert!(size_of::<Option<Path>>() == 24); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index eacc3f3cedf..da0f058a9cb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -148,7 +148,7 @@ pub enum TypeRef { Error, } -#[cfg(target_arch = "x86_64")] +#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] const _: () = assert!(size_of::<TypeRef>() == 24); pub type TypeRefId = Idx<TypeRef>; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 5ae6bf6dffd..cc531f076dd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -175,8 +175,9 @@ impl ExprValidator { }); } - let receiver_ty = self.infer[*receiver].clone(); - checker.prev_receiver_ty = Some(receiver_ty); + if let Some(receiver_ty) = self.infer.type_of_expr_with_adjust(*receiver) { + checker.prev_receiver_ty = Some(receiver_ty.clone()); + } } } @@ -187,7 +188,9 @@ impl ExprValidator { arms: &[MatchArm], db: &dyn HirDatabase, ) { - let scrut_ty = &self.infer[scrutinee_expr]; + let Some(scrut_ty) = self.infer.type_of_expr_with_adjust(scrutinee_expr) else { + return; + }; if scrut_ty.contains_unknown() { return; } @@ -200,7 +203,7 @@ impl ExprValidator { // Note: Skipping the entire diagnostic rather than just not including a faulty match arm is // preferred to avoid the chance of false positives. for arm in arms { - let Some(pat_ty) = self.infer.type_of_pat.get(arm.pat) else { + let Some(pat_ty) = self.infer.type_of_pat_with_adjust(arm.pat) else { return; }; if pat_ty.contains_unknown() { @@ -328,7 +331,7 @@ impl ExprValidator { continue; } let Some(initializer) = initializer else { continue }; - let ty = &self.infer[initializer]; + let Some(ty) = self.infer.type_of_expr_with_adjust(initializer) else { continue }; if ty.contains_unknown() { continue; } @@ -433,44 +436,44 @@ impl ExprValidator { Statement::Expr { expr, .. } => Some(*expr), _ => None, }); - if let Some(last_then_expr) = last_then_expr { - let last_then_expr_ty = &self.infer[last_then_expr]; - if last_then_expr_ty.is_never() { - // Only look at sources if the then branch diverges and we have an else branch. - let source_map = db.body_with_source_map(self.owner).1; - let Ok(source_ptr) = source_map.expr_syntax(id) else { - return; - }; - let root = source_ptr.file_syntax(db); - let either::Left(ast::Expr::IfExpr(if_expr)) = - source_ptr.value.to_node(&root) - else { + if let Some(last_then_expr) = last_then_expr + && let Some(last_then_expr_ty) = + self.infer.type_of_expr_with_adjust(last_then_expr) + && last_then_expr_ty.is_never() + { + // Only look at sources if the then branch diverges and we have an else branch. 
+ let source_map = db.body_with_source_map(self.owner).1; + let Ok(source_ptr) = source_map.expr_syntax(id) else { + return; + }; + let root = source_ptr.file_syntax(db); + let either::Left(ast::Expr::IfExpr(if_expr)) = source_ptr.value.to_node(&root) + else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_expr_stmt_or_stmt_list = + parent.as_ref().is_some_and(|node| { + ast::ExprStmt::can_cast(node.kind()) + | ast::StmtList::can_cast(node.kind()) + }); + if has_parent_expr_stmt_or_stmt_list { + // Only emit diagnostic if parent or direct ancestor is either + // an expr stmt or a stmt list. + break; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Bail if parent is neither an if expr, an expr stmt nor a stmt list. return; }; - let mut top_if_expr = if_expr; - loop { - let parent = top_if_expr.syntax().parent(); - let has_parent_expr_stmt_or_stmt_list = - parent.as_ref().is_some_and(|node| { - ast::ExprStmt::can_cast(node.kind()) - | ast::StmtList::can_cast(node.kind()) - }); - if has_parent_expr_stmt_or_stmt_list { - // Only emit diagnostic if parent or direct ancestor is either - // an expr stmt or a stmt list. - break; - } - let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { - // Bail if parent is neither an if expr, an expr stmt nor a stmt list. - return; - }; - // Check parent if expr. - top_if_expr = parent_if_expr; - } - - self.diagnostics - .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) + // Check parent if expr. + top_if_expr = parent_if_expr; } + + self.diagnostics + .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index e880438e3a7..7c39afa0ef8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -561,6 +561,32 @@ impl InferenceResult { ExprOrPatId::PatId(id) => self.type_of_pat.get(id), } } + pub fn type_of_expr_with_adjust(&self, id: ExprId) -> Option<&Ty> { + match self.expr_adjustments.get(&id).and_then(|adjustments| { + adjustments + .iter() + .filter(|adj| { + // https://github.com/rust-lang/rust/blob/67819923ac8ea353aaa775303f4c3aacbf41d010/compiler/rustc_mir_build/src/thir/cx/expr.rs#L140 + !matches!( + adj, + Adjustment { + kind: Adjust::NeverToAny, + target, + } if target.is_never() + ) + }) + .next_back() + }) { + Some(adjustment) => Some(&adjustment.target), + None => self.type_of_expr.get(id), + } + } + pub fn type_of_pat_with_adjust(&self, id: PatId) -> Option<&Ty> { + match self.pat_adjustments.get(&id).and_then(|adjustments| adjustments.last()) { + adjusted @ Some(_) => adjusted, + None => self.type_of_pat.get(id), + } + } pub fn is_erroneous(&self) -> bool { self.has_errors && self.type_of_expr.iter().count() == 0 } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index 236f316366d..3f310c26ec1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -85,16 +85,6 @@ pub fn layout_of_adt_query( let d = db.const_eval_discriminant(e.enum_variants(db).variants[id.0].0).ok()?; Some((id, d)) }), - // FIXME: The current code for niche-filling relies on variant indices - // instead of actual discriminants, so enums with - // explicit discriminants (RFC #2363) 
would misbehave and we should disable - // niche optimization for them. - // The code that do it in rustc: - // repr.inhibit_enum_layout_opt() || def - // .variants() - // .iter_enumerated() - // .any(|(i, v)| v.discr != ty::VariantDiscr::Relative(i.as_u32())) - repr.inhibit_enum_layout_opt(), !matches!(def, AdtId::EnumId(..)) && variants .iter() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index f32b6af4d85..d61e7de6672 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -590,9 +590,14 @@ impl<'a> TyLoweringContext<'a> { .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); let pointee_sized = LangItem::PointeeSized .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); - if meta_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) { + let destruct = LangItem::Destruct + .resolve_trait(ctx.ty_ctx().db, ctx.ty_ctx().resolver.krate()); + let hir_trait_id = trait_ref.hir_trait_id(); + if meta_sized.is_some_and(|it| it == hir_trait_id) + || destruct.is_some_and(|it| it == hir_trait_id) + { // Ignore this bound - } else if pointee_sized.is_some_and(|it| it == trait_ref.hir_trait_id()) { + } else if pointee_sized.is_some_and(|it| it == hir_trait_id) { // Regard this as `?Sized` bound ctx.ty_ctx().unsized_types.insert(self_ty); } else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 238753e12e4..c4c17a93c9c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2349,3 +2349,37 @@ fn test() { "#]], ); } + +#[test] +fn rust_destruct_option_clone() { + check_types( + r#" +//- minicore: option, drop +fn test(o: &Option<i32>) { + o.my_clone(); + //^^^^^^^^^^^^ Option<i32> +} +pub trait MyClone: Sized { + fn my_clone(&self) -> Self; +} +impl<T> const MyClone for Option<T> +where + T: ~const MyClone + ~const Destruct, +{ + fn my_clone(&self) -> Self { + match self { + Some(x) => Some(x.my_clone()), + None => None, + } + } +} +impl const MyClone for i32 { + fn my_clone(&self) -> Self { + *self + } +} +#[lang = "destruct"] +pub trait Destruct {} +"#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 1b2b76999f7..4ddb04b24f7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -1922,10 +1922,6 @@ impl DefWithBody { Module { id: def_map.module_id(DefMap::ROOT) }.diagnostics(db, acc, style_lints); } - source_map - .macro_calls() - .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc)); - expr_store_diagnostics(db, acc, &source_map); let infer = db.infer(self.into()); @@ -2130,9 +2126,9 @@ impl DefWithBody { } } -fn expr_store_diagnostics( - db: &dyn HirDatabase, - acc: &mut Vec<AnyDiagnostic<'_>>, +fn expr_store_diagnostics<'db>( + db: &'db dyn HirDatabase, + acc: &mut Vec<AnyDiagnostic<'db>>, source_map: &ExpressionStoreSourceMap, ) { for diag in source_map.diagnostics() { @@ -2140,30 +2136,6 @@ fn expr_store_diagnostics( ExpressionStoreDiagnostics::InactiveCode { node, cfg, opts } => { InactiveCode { node: *node, cfg: cfg.clone(), opts: opts.clone() }.into() } - ExpressionStoreDiagnostics::MacroError { node, err } => { - let RenderedExpandError { message, error, kind } = err.render_to_string(db); - - let 
editioned_file_id = EditionedFileId::from_span(db, err.span().anchor.file_id); - let precise_location = if editioned_file_id == node.file_id { - Some( - err.span().range - + db.ast_id_map(editioned_file_id.into()) - .get_erased(err.span().anchor.ast_id) - .text_range() - .start(), - ) - } else { - None - }; - MacroError { - node: (node).map(|it| it.into()), - precise_location, - message, - error, - kind, - } - .into() - } ExpressionStoreDiagnostics::UnresolvedMacroCall { node, path } => UnresolvedMacroCall { macro_call: (*node).map(|ast_ptr| ast_ptr.into()), precise_location: None, @@ -2182,6 +2154,10 @@ fn expr_store_diagnostics( } }); } + + source_map + .macro_calls() + .for_each(|(_ast_id, call_id)| macro_call_diagnostics(db, call_id, acc)); } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Function { diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index ecc6e5f3d03..0b554a9d4e3 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -441,7 +441,7 @@ impl<'db> SourceAnalyzer<'db> { ) -> Option<GenericSubstitution<'db>> { let body = self.store()?; if let Expr::Field { expr: object_expr, name: _ } = body[field_expr] { - let (adt, subst) = type_of_expr_including_adjust(infer, object_expr)?.as_adt()?; + let (adt, subst) = infer.type_of_expr_with_adjust(object_expr)?.as_adt()?; return Some(GenericSubstitution::new( adt.into(), subst.clone(), @@ -1780,10 +1780,3 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H let ctx = span_map.span_at(name.value.text_range().start()).ctx; HygieneId::new(ctx.opaque_and_semitransparent(db)) } - -fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> { - match infer.expr_adjustment(id).and_then(|adjustments| adjustments.last()) { - Some(adjustment) => Some(&adjustment.target), - None => Some(&infer[id]), - } -} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs index 9f9d21923ff..ab183ac7089 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs @@ -2,6 +2,7 @@ use hir::HasSource; use syntax::{ Edition, ast::{self, AstNode, make}, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{ @@ -147,45 +148,78 @@ fn add_missing_impl_members_inner( let target = impl_def.syntax().text_range(); acc.add(AssistId::quick_fix(assist_id), label, target, |edit| { - let new_impl_def = edit.make_mut(impl_def.clone()); - let first_new_item = add_trait_assoc_items_to_impl( + let new_item = add_trait_assoc_items_to_impl( &ctx.sema, ctx.config, &missing_items, trait_, - &new_impl_def, + &impl_def, &target_scope, ); + let Some((first_new_item, other_items)) = new_item.split_first() else { + return; + }; + + let mut first_new_item = if let DefaultMethods::No = mode + && let ast::AssocItem::Fn(func) = &first_new_item + && let Some(body) = try_gen_trait_body( + ctx, + func, + trait_ref, + &impl_def, + target_scope.krate().edition(ctx.sema.db), + ) + && let Some(func_body) = func.body() + { + let mut func_editor = SyntaxEditor::new(first_new_item.syntax().clone_subtree()); + func_editor.replace(func_body.syntax(), body.syntax()); + 
ast::AssocItem::cast(func_editor.finish().new_root().clone()) + } else { + Some(first_new_item.clone()) + }; + + let new_assoc_items = first_new_item + .clone() + .into_iter() + .chain(other_items.iter().cloned()) + .map(either::Either::Right) + .collect::<Vec<_>>(); + + let mut editor = edit.make_editor(impl_def.syntax()); + if let Some(assoc_item_list) = impl_def.assoc_item_list() { + let items = new_assoc_items.into_iter().filter_map(either::Either::right).collect(); + assoc_item_list.add_items(&mut editor, items); + } else { + let assoc_item_list = make::assoc_item_list(Some(new_assoc_items)).clone_for_update(); + editor.insert_all( + Position::after(impl_def.syntax()), + vec![make::tokens::whitespace(" ").into(), assoc_item_list.syntax().clone().into()], + ); + first_new_item = assoc_item_list.assoc_items().next(); + } + if let Some(cap) = ctx.config.snippet_cap { let mut placeholder = None; if let DefaultMethods::No = mode { - if let ast::AssocItem::Fn(func) = &first_new_item { - if try_gen_trait_body( - ctx, - func, - trait_ref, - &impl_def, - target_scope.krate().edition(ctx.sema.db), - ) - .is_none() + if let Some(ast::AssocItem::Fn(func)) = &first_new_item { + if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) - { - if m.syntax().text() == "todo!()" { - placeholder = Some(m); - } - } + placeholder = Some(m); } } } if let Some(macro_call) = placeholder { - edit.add_placeholder_snippet(cap, macro_call); - } else { - edit.add_tabstop_before(cap, first_new_item); + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(macro_call.syntax(), placeholder); + } else if let Some(first_new_item) = first_new_item { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(first_new_item.syntax(), tabstop); }; }; + edit.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -195,7 +229,7 @@ fn try_gen_trait_body( trait_ref: hir::TraitRef<'_>, impl_def: &ast::Impl, edition: Edition, -) -> Option<()> { +) -> Option<ast::BlockExpr> { let trait_path = make::ext::ident_path( &trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(), ); @@ -322,7 +356,7 @@ impl Foo for S { } #[test] - fn test_impl_def_without_braces() { + fn test_impl_def_without_braces_macro() { check_assist( add_missing_impl_members, r#" @@ -341,6 +375,33 @@ impl Foo for S { } #[test] + fn test_impl_def_without_braces_tabstop_first_item() { + check_assist( + add_missing_impl_members, + r#" +trait Foo { + type Output; + fn foo(&self); +} +struct S; +impl Foo for S { $0 }"#, + r#" +trait Foo { + type Output; + fn foo(&self); +} +struct S; +impl Foo for S { + $0type Output; + + fn foo(&self) { + todo!() + } +}"#, + ); + } + + #[test] fn fill_in_type_params_1() { check_assist( add_missing_impl_members, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs index bcd06c1ef72..d7b7e8d9cad 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_then.rs @@ -228,8 +228,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_> closure_body, Some(ast::ElseBranch::Block(make.block_expr(None, Some(none_path)))), ) - .indent(mcall.indent_level()) - .clone_for_update(); + .indent(mcall.indent_level()); 
editor.replace(mcall.syntax().clone(), if_expr.syntax().clone()); editor.add_mappings(make.finish_with_mappings()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs index 71a61f2db00..2ea032fb62b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_to_guarded_return.rs @@ -13,7 +13,6 @@ use syntax::{ edit::{AstNodeEdit, IndentLevel}, make, }, - ted, }; use crate::{ @@ -117,7 +116,7 @@ fn if_expr_to_guarded_return( then_block.syntax().last_child_or_token().filter(|t| t.kind() == T!['}'])?; - let then_block_items = then_block.dedent(IndentLevel(1)).clone_for_update(); + let then_block_items = then_block.dedent(IndentLevel(1)); let end_of_then = then_block_items.syntax().last_child_or_token()?; let end_of_then = if end_of_then.prev_sibling_or_token().map(|n| n.kind()) == Some(WHITESPACE) { @@ -132,7 +131,6 @@ fn if_expr_to_guarded_return( "Convert to guarded return", target, |edit| { - let if_expr = edit.make_mut(if_expr); let if_indent_level = IndentLevel::from_node(if_expr.syntax()); let replacement = match if_let_pat { None => { @@ -143,7 +141,7 @@ fn if_expr_to_guarded_return( let cond = invert_boolean_expression_legacy(cond_expr); make::expr_if(cond, then_branch, None).indent(if_indent_level) }; - new_expr.syntax().clone_for_update() + new_expr.syntax().clone() } Some(pat) => { // If-let. @@ -154,7 +152,7 @@ fn if_expr_to_guarded_return( ast::make::tail_only_block_expr(early_expression), ); let let_else_stmt = let_else_stmt.indent(if_indent_level); - let_else_stmt.syntax().clone_for_update() + let_else_stmt.syntax().clone() } }; @@ -168,8 +166,9 @@ fn if_expr_to_guarded_return( .take_while(|i| *i != end_of_then), ) .collect(); - - ted::replace_with_many(if_expr.syntax(), then_statements) + let mut editor = edit.make_editor(if_expr.syntax()); + editor.replace_with_many(if_expr.syntax(), then_statements); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -214,7 +213,6 @@ fn let_stmt_to_guarded_return( "Convert to guarded return", target, |edit| { - let let_stmt = edit.make_mut(let_stmt); let let_indent_level = IndentLevel::from_node(let_stmt.syntax()); let replacement = { @@ -225,10 +223,11 @@ fn let_stmt_to_guarded_return( ast::make::tail_only_block_expr(early_expression), ); let let_else_stmt = let_else_stmt.indent(let_indent_level); - let_else_stmt.syntax().clone_for_update() + let_else_stmt.syntax().clone() }; - - ted::replace(let_stmt.syntax(), replacement) + let mut editor = edit.make_editor(let_stmt.syntax()); + editor.replace(let_stmt.syntax(), replacement); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs index 54699a9454f..cdc0e967101 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_expressions_from_format_string.rs @@ -8,8 +8,7 @@ use syntax::{ AstNode, AstToken, NodeOrToken, SyntaxKind::WHITESPACE, T, - ast::{self, make}, - ted, + ast::{self, make, syntax_factory::SyntaxFactory}, }; // Assist: extract_expressions_from_format_string @@ -58,8 +57,6 @@ pub(crate) fn 
extract_expressions_from_format_string( "Extract format expressions", tt.syntax().text_range(), |edit| { - let tt = edit.make_mut(tt); - // Extract existing arguments in macro let tokens = tt.token_trees_and_tokens().collect_vec(); @@ -131,8 +128,10 @@ pub(crate) fn extract_expressions_from_format_string( } // Insert new args - let new_tt = make::token_tree(tt_delimiter, new_tt_bits).clone_for_update(); - ted::replace(tt.syntax(), new_tt.syntax()); + let make = SyntaxFactory::with_mappings(); + let new_tt = make.token_tree(tt_delimiter, new_tt_bits); + let mut editor = edit.make_editor(tt.syntax()); + editor.replace(tt.syntax(), new_tt.syntax()); if let Some(cap) = ctx.config.snippet_cap { // Add placeholder snippets over placeholder args @@ -145,15 +144,19 @@ pub(crate) fn extract_expressions_from_format_string( }; if stdx::always!(placeholder.kind() == T![_]) { - edit.add_placeholder_snippet_token(cap, placeholder); + let annotation = edit.make_placeholder_snippet(cap); + editor.add_annotation(placeholder, annotation); } } // Add the final tabstop after the format literal if let Some(NodeOrToken::Token(literal)) = new_tt.token_trees_and_tokens().nth(1) { - edit.add_tabstop_after_token(cap, literal); + let annotation = edit.make_tabstop_after(cap); + editor.add_annotation(literal, annotation); } } + editor.add_mappings(make.finish_with_mappings()); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index b9c42285d25..9095b1825f5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -16,8 +16,9 @@ use syntax::{ SyntaxKind::*, SyntaxNode, T, ast::{ - self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, edit::IndentLevel, - edit_in_place::Indent, make, + self, AstNode, HasAttrs, HasGenericParams, HasName, HasVisibility, + edit::{AstNodeEdit, IndentLevel}, + make, }, match_ast, ted, }; @@ -110,20 +111,30 @@ pub(crate) fn extract_struct_from_enum_variant( let generics = generic_params.as_ref().map(|generics| generics.clone_for_update()); // resolve GenericArg in field_list to actual type - let field_list = field_list.clone_for_update(); - if let Some((target_scope, source_scope)) = + let field_list = if let Some((target_scope, source_scope)) = ctx.sema.scope(enum_ast.syntax()).zip(ctx.sema.scope(field_list.syntax())) { - PathTransform::generic_transformation(&target_scope, &source_scope) - .apply(field_list.syntax()); - } + let field_list = field_list.reset_indent(); + let field_list = + PathTransform::generic_transformation(&target_scope, &source_scope) + .apply(field_list.syntax()); + match_ast! 
{ + match field_list { + ast::RecordFieldList(field_list) => Either::Left(field_list), + ast::TupleFieldList(field_list) => Either::Right(field_list), + _ => unreachable!(), + } + } + } else { + field_list.clone_for_update() + }; let def = create_struct_def(variant_name.clone(), &variant, &field_list, generics, &enum_ast); let enum_ast = variant.parent_enum(); let indent = enum_ast.indent_level(); - def.reindent_to(indent); + let def = def.indent(indent); ted::insert_all( ted::Position::before(enum_ast.syntax()), @@ -279,7 +290,7 @@ fn create_struct_def( field_list.clone().into() } }; - field_list.reindent_to(IndentLevel::single()); + let field_list = field_list.indent(IndentLevel::single()); let strukt = make::struct_(enum_vis, name, generics, field_list).clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index 31e84e9adcf..db2d316d58e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -7,7 +7,9 @@ use syntax::{ NodeOrToken, SyntaxKind, SyntaxNode, T, algo::ancestors_at_offset, ast::{ - self, AstNode, edit::IndentLevel, edit_in_place::Indent, make, + self, AstNode, + edit::{AstNodeEdit, IndentLevel}, + make, syntax_factory::SyntaxFactory, }, syntax_editor::Position, @@ -253,12 +255,11 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`. editor.replace(expr_replace, name_expr.syntax()); make.block_expr([new_stmt], Some(to_wrap.clone())) - }; + } + // fixup indentation of block + .indent_with_mapping(indent_to, &make); editor.replace(to_wrap.syntax(), block.syntax()); - - // fixup indentation of block - block.indent(indent_to); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index ca66cb69dcc..60638980760 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -114,9 +114,13 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let source_scope = ctx.sema.scope(v.syntax()); let target_scope = ctx.sema.scope(strukt.syntax()); if let (Some(s), Some(t)) = (source_scope, target_scope) { - PathTransform::generic_transformation(&t, &s).apply(v.syntax()); + ast::Fn::cast( + PathTransform::generic_transformation(&t, &s).apply(v.syntax()), + ) + .unwrap_or(v) + } else { + v } - v } None => return, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs index 848c63810a4..e96250f3c50 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -255,7 +255,6 @@ fn generate_impl( delegee: &Delegee, edition: Edition, ) -> Option<ast::Impl> { - let delegate: ast::Impl; let db = ctx.db(); let ast_strukt = &strukt.strukt; let strukt_ty = make::ty_path(make::ext::ident_path(&strukt.name.to_string())); @@ -266,7 +265,7 @@ fn generate_impl( let bound_def = 
ctx.sema.source(delegee.to_owned())?.value; let bound_params = bound_def.generic_param_list(); - delegate = make::impl_trait( + let delegate = make::impl_trait( delegee.is_unsafe(db), bound_params.clone(), bound_params.map(|params| params.to_generic_args()), @@ -304,7 +303,7 @@ fn generate_impl( let target_scope = ctx.sema.scope(strukt.strukt.syntax())?; let source_scope = ctx.sema.scope(bound_def.syntax())?; let transform = PathTransform::generic_transformation(&target_scope, &source_scope); - transform.apply(delegate.syntax()); + ast::Impl::cast(transform.apply(delegate.syntax())) } Delegee::Impls(trait_, old_impl) => { let old_impl = ctx.sema.source(old_impl.to_owned())?.value; @@ -358,20 +357,28 @@ fn generate_impl( // 2.3) Instantiate generics with `transform_impl`, this step also // remove unused params. - let mut trait_gen_args = old_impl.trait_()?.generic_arg_list(); - if let Some(trait_args) = &mut trait_gen_args { - *trait_args = trait_args.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, trait_args.syntax())?; - } + let trait_gen_args = old_impl.trait_()?.generic_arg_list().and_then(|trait_args| { + let trait_args = &mut trait_args.clone_for_update(); + if let Some(new_args) = transform_impl( + ctx, + ast_strukt, + &old_impl, + &transform_args, + trait_args.clone_subtree(), + ) { + *trait_args = new_args.clone_subtree(); + Some(new_args) + } else { + None + } + }); let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args()); - let path_type = make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?; - + let path_type = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type)?; // 3) Generate delegate trait impl - delegate = make::impl_trait( + let delegate = make::impl_trait( trait_.is_unsafe(db), trait_gen_params, trait_gen_args, @@ -385,7 +392,6 @@ fn generate_impl( None, ) .clone_for_update(); - // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths let qualified_path_type = make::path_from_text(&format!("<{} as {}>", field_ty, delegate.trait_()?)); @@ -398,7 +404,7 @@ fn generate_impl( .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) { let item = item.clone_for_update(); - transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item.syntax())?; + let item = transform_impl(ctx, ast_strukt, &old_impl, &transform_args, item)?; let assoc = process_assoc_item(item, qualified_path_type.clone(), field_name)?; delegate_assoc_items.add_item(assoc); @@ -408,19 +414,18 @@ fn generate_impl( if let Some(wc) = delegate.where_clause() { remove_useless_where_clauses(&delegate.trait_()?, &delegate.self_ty()?, wc); } + Some(delegate) } } - - Some(delegate) } -fn transform_impl( +fn transform_impl<N: ast::AstNode>( ctx: &AssistContext<'_>, strukt: &ast::Struct, old_impl: &ast::Impl, args: &Option<GenericArgList>, - syntax: &syntax::SyntaxNode, -) -> Option<()> { + syntax: N, +) -> Option<N> { let source_scope = ctx.sema.scope(old_impl.self_ty()?.syntax())?; let target_scope = ctx.sema.scope(strukt.syntax())?; let hir_old_impl = ctx.sema.to_impl_def(old_impl)?; @@ -437,8 +442,7 @@ fn transform_impl( }, ); - transform.apply(syntax); - Some(()) + N::cast(transform.apply(syntax.syntax())) } fn remove_instantiated_params( @@ -570,9 +574,7 @@ where let scope = ctx.sema.scope(item.syntax())?; let transform = PathTransform::adt_transformation(&scope, &scope, hir_adt, args.clone()); - 
transform.apply(item.syntax()); - - Some(item) + N::cast(transform.apply(item.syntax())) } fn has_self_type(trait_: hir::Trait, ctx: &AssistContext<'_>) -> Option<()> { @@ -767,7 +769,7 @@ fn func_assoc_item( ) .clone_for_update(); - Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update())) + Some(AssocItem::Fn(func.indent(edit::IndentLevel(1)))) } fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> Option<AssocItem> { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index 78ae815dc87..3290a70e1c6 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -743,17 +743,30 @@ fn fn_generic_params( let where_preds: Vec<ast::WherePred> = where_preds.into_iter().map(|it| it.node.clone_for_update()).collect(); - // 4. Rewrite paths - if let Some(param) = generic_params.first() { - let source_scope = ctx.sema.scope(param.syntax())?; - let target_scope = ctx.sema.scope(&target.parent())?; - if source_scope.module() != target_scope.module() { + let (generic_params, where_preds): (Vec<ast::GenericParam>, Vec<ast::WherePred>) = + if let Some(param) = generic_params.first() + && let source_scope = ctx.sema.scope(param.syntax())? + && let target_scope = ctx.sema.scope(&target.parent())? + && source_scope.module() != target_scope.module() + { + // 4. Rewrite paths let transform = PathTransform::generic_transformation(&target_scope, &source_scope); let generic_params = generic_params.iter().map(|it| it.syntax()); let where_preds = where_preds.iter().map(|it| it.syntax()); - transform.apply_all(generic_params.chain(where_preds)); - } - } + transform + .apply_all(generic_params.chain(where_preds)) + .into_iter() + .filter_map(|it| { + if let Some(it) = ast::GenericParam::cast(it.clone()) { + Some(either::Either::Left(it)) + } else { + ast::WherePred::cast(it).map(either::Either::Right) + } + }) + .partition_map(|it| it) + } else { + (generic_params, where_preds) + }; let generic_param_list = make::generic_param_list(generic_params); let where_clause = diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 14601ca0207..31cadcf5ea8 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,12 +1,17 @@ use syntax::{ - ast::{self, AstNode, HasName, edit_in_place::Indent, make}, + ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make}, syntax_editor::{Position, SyntaxEditor}, }; -use crate::{AssistContext, AssistId, Assists, utils}; +use crate::{ + AssistContext, AssistId, Assists, + utils::{self, DefaultMethods, IgnoreAssocItems}, +}; -fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) { +fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) { let indent = nominal.indent_level(); + + impl_.indent(indent); editor.insert_all( Position::after(nominal.syntax()), vec![ @@ -120,6 +125,126 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> ) } +// Assist: generate_impl_trait +// +// Adds this trait impl for a type. 
+// +// ``` +// trait $0Foo { +// fn foo(&self) -> i32; +// } +// ``` +// -> +// ``` +// trait Foo { +// fn foo(&self) -> i32; +// } +// +// impl Foo for ${1:_} { +// fn foo(&self) -> i32 { +// $0todo!() +// } +// } +// ``` +pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let name = ctx.find_node_at_offset::<ast::Name>()?; + let trait_ = ast::Trait::cast(name.syntax().parent()?)?; + let target_scope = ctx.sema.scope(trait_.syntax())?; + let hir_trait = ctx.sema.to_def(&trait_)?; + + let target = trait_.syntax().text_range(); + acc.add( + AssistId::generate("generate_impl_trait"), + format!("Generate `{name}` impl for type"), + target, + |edit| { + let mut editor = edit.make_editor(trait_.syntax()); + + let holder_arg = ast::GenericArg::TypeArg(make::type_arg(make::ty_placeholder())); + let missing_items = utils::filter_assoc_items( + &ctx.sema, + &hir_trait.items(ctx.db()), + DefaultMethods::No, + IgnoreAssocItems::DocHiddenAttrPresent, + ); + + let trait_gen_args = trait_.generic_param_list().map(|list| { + make::generic_arg_list(list.generic_params().map(|_| holder_arg.clone())) + }); + + let make_impl_ = |body| { + make::impl_trait( + trait_.unsafe_token().is_some(), + None, + trait_gen_args.clone(), + None, + None, + false, + make::ty(&name.text()), + make::ty_placeholder(), + None, + None, + body, + ) + .clone_for_update() + }; + + let impl_ = if missing_items.is_empty() { + make_impl_(None) + } else { + let impl_ = make_impl_(None); + let assoc_items = utils::add_trait_assoc_items_to_impl( + &ctx.sema, + ctx.config, + &missing_items, + hir_trait, + &impl_, + &target_scope, + ); + let assoc_items = assoc_items.into_iter().map(either::Either::Right).collect(); + let assoc_item_list = make::assoc_item_list(Some(assoc_items)); + make_impl_(Some(assoc_item_list)) + }; + + if let Some(cap) = ctx.config.snippet_cap { + if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) { + for generic in generics.generic_args() { + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(generic.syntax(), placeholder); + } + } + + if let Some(ty) = impl_.self_ty() { + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(ty.syntax(), placeholder); + } + + if let Some(expr) = + impl_.assoc_item_list().and_then(|it| it.assoc_items().find_map(extract_expr)) + { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(expr.syntax(), tabstop); + } else if let Some(l_curly) = + impl_.assoc_item_list().and_then(|it| it.l_curly_token()) + { + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); + } + } + + insert_impl(&mut editor, &impl_, &trait_); + edit.add_file_edits(ctx.vfs_file_id(), editor); + }, + ) +} + +fn extract_expr(item: ast::AssocItem) -> Option<ast::Expr> { + let ast::AssocItem::Fn(f) = item else { + return None; + }; + f.body()?.tail_expr() +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_target}; @@ -492,4 +617,209 @@ mod tests { "#, ); } + + #[test] + fn test_add_impl_trait() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo { + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + "#, + r#" + trait Foo { + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + + impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_use_generic() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo<T> { + fn 
foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + "#, + r#" + trait Foo<T> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + + impl Foo<${1:_}> for ${2:_} { + fn foo(&self) -> _ { + $0todo!() + } + } + "#, + ); + check_assist( + generate_impl_trait, + r#" + trait $0Foo<T, U> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + "#, + r#" + trait Foo<T, U> { + fn foo(&self) -> T; + + fn bar(&self) -> T { + self.foo() + } + } + + impl Foo<${1:_}, ${2:_}> for ${3:_} { + fn foo(&self) -> _ { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_docs() { + check_assist( + generate_impl_trait, + r#" + /// foo + trait $0Foo { + /// foo method + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + "#, + r#" + /// foo + trait Foo { + /// foo method + fn foo(&self) -> i32; + + fn bar(&self) -> i32 { + self.foo() + } + } + + impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_assoc_types() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + "#, + r#" + trait Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + + impl Foo for ${1:_} { + type Output; + + fn foo(&self) -> Self::Output { + $0todo!() + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_indent() { + check_assist( + generate_impl_trait, + r#" + mod foo { + mod bar { + trait $0Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + } + } + "#, + r#" + mod foo { + mod bar { + trait Foo { + type Output; + + fn foo(&self) -> Self::Output; + } + + impl Foo for ${1:_} { + type Output; + + fn foo(&self) -> Self::Output { + $0todo!() + } + } + } + } + "#, + ); + } + + #[test] + fn test_add_impl_trait_empty() { + check_assist( + generate_impl_trait, + r#" + trait $0Foo {} + "#, + r#" + trait Foo {} + + impl Foo for ${1:_} {$0} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index dc26ec79a74..9c4bcdd4030 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -94,7 +94,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> })?; let _ = process_ref_mut(&fn_); - let assoc_list = make::assoc_item_list().clone_for_update(); + let assoc_list = make::assoc_item_list(None).clone_for_update(); ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax()); impl_def.get_or_create_assoc_item_list().add_item(syntax::ast::AssocItem::Fn(fn_)); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 51c2f65e025..5bda1226cda 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -4,12 +4,12 @@ use ide_db::{ }; use syntax::{ ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make}, - ted, + syntax_editor::Position, }; use crate::{ AssistContext, AssistId, Assists, - utils::{find_struct_impl, generate_impl}, + utils::{find_struct_impl, generate_impl_with_item}, }; // Assist: generate_new @@ -149,7 +149,53 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: 
&AssistContext<'_>) -> Option .clone_for_update(); fn_.indent(1.into()); - if let Some(cap) = ctx.config.snippet_cap { + let mut editor = builder.make_editor(strukt.syntax()); + + // Get the node for set annotation + let contain_fn = if let Some(impl_def) = impl_def { + fn_.indent(impl_def.indent_level()); + + if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token()) + { + editor.insert_all( + Position::after(l_curly), + vec![ + make::tokens::whitespace(&format!("\n{}", impl_def.indent_level() + 1)) + .into(), + fn_.syntax().clone().into(), + make::tokens::whitespace("\n").into(), + ], + ); + fn_.syntax().clone() + } else { + let items = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let list = make::assoc_item_list(Some(items)); + editor.insert(Position::after(impl_def.syntax()), list.syntax()); + list.syntax().clone() + } + } else { + // Generate a new impl to add the method to + let indent_level = strukt.indent_level(); + let body = vec![either::Either::Right(ast::AssocItem::Fn(fn_))]; + let list = make::assoc_item_list(Some(body)); + let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)); + + impl_def.indent(strukt.indent_level()); + + // Insert it after the adt + editor.insert_all( + Position::after(strukt.syntax()), + vec![ + make::tokens::whitespace(&format!("\n\n{indent_level}")).into(), + impl_def.syntax().clone().into(), + ], + ); + impl_def.syntax().clone() + }; + + if let Some(fn_) = contain_fn.descendants().find_map(ast::Fn::cast) + && let Some(cap) = ctx.config.snippet_cap + { match strukt.kind() { StructKind::Tuple(_) => { let struct_args = fn_ @@ -168,8 +214,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) { if let Some(fn_pat) = fn_param.pat() { let fn_pat = fn_pat.syntax().clone(); - builder - .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]); + let placeholder = builder.make_placeholder_snippet(cap); + editor.add_annotation_all(vec![struct_arg, fn_pat], placeholder) } } } @@ -179,36 +225,12 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option // Add a tabstop before the name if let Some(name) = fn_.name() { - builder.add_tabstop_before(cap, name); + let tabstop_before = builder.make_tabstop_before(cap); + editor.add_annotation(name.syntax(), tabstop_before); } } - // Get the mutable version of the impl to modify - let impl_def = if let Some(impl_def) = impl_def { - fn_.indent(impl_def.indent_level()); - builder.make_mut(impl_def) - } else { - // Generate a new impl to add the method to - let impl_def = generate_impl(&ast::Adt::Struct(strukt.clone())); - let indent_level = strukt.indent_level(); - fn_.indent(indent_level); - - // Insert it after the adt - let strukt = builder.make_mut(strukt.clone()); - - ted::insert_all_raw( - ted::Position::after(strukt.syntax()), - vec![ - make::tokens::whitespace(&format!("\n\n{indent_level}")).into(), - impl_def.syntax().clone().into(), - ], - ); - - impl_def - }; - - // Add the `new` method at the start of the impl - impl_def.get_or_create_assoc_item_list().add_item_at_start(fn_.into()); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 154b502e1bf..92a4bd35b3e 100644 --- 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -3,7 +3,7 @@ use ide_db::assists::AssistId; use syntax::{ AstNode, SyntaxKind, T, ast::{ - self, HasGenericParams, HasName, + self, HasGenericParams, HasName, HasVisibility, edit_in_place::{HasVisibilityEdit, Indent}, make, }, @@ -164,6 +164,12 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ /// `E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + if let Some(vis) = has_vis.visibility() + && let Some(token) = vis.syntax().next_sibling_or_token() + && token.kind() == SyntaxKind::WHITESPACE + { + ted::remove(token); + } has_vis.set_visibility(None); } } @@ -333,11 +339,11 @@ impl F$0oo { struct Foo; trait NewTrait { - fn a_func() -> Option<()>; + fn a_func() -> Option<()>; } impl NewTrait for Foo { - fn a_func() -> Option<()> { + fn a_func() -> Option<()> { Some(()) } }"#, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index b7b8bc604a5..1549b414dcc 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -537,8 +537,13 @@ fn inline( if let Some(generic_arg_list) = generic_arg_list.clone() { if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax())) { - PathTransform::function_call(target, source, function, generic_arg_list) - .apply(body.syntax()); + body.reindent_to(IndentLevel(0)); + if let Some(new_body) = ast::BlockExpr::cast( + PathTransform::function_call(target, source, function, generic_arg_list) + .apply(body.syntax()), + ) { + body = new_body; + } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 806c8fba9ea..45bb6ce9129 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -5,12 +5,12 @@ use syntax::{ SyntaxKind::WHITESPACE, T, ast::{self, AstNode, HasName, make}, - ted::{self, Position}, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{ AssistConfig, AssistId, - assist_context::{AssistContext, Assists, SourceChangeBuilder}, + assist_context::{AssistContext, Assists}, utils::{ DefaultMethods, IgnoreAssocItems, add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, generate_trait_impl, @@ -126,98 +126,56 @@ fn add_assist( let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`"); acc.add(AssistId::refactor("replace_derive_with_manual_impl"), label, target, |builder| { - let insert_after = ted::Position::after(builder.make_mut(adt.clone()).syntax()); + let insert_after = Position::after(adt.syntax()); let impl_is_unsafe = trait_.map(|s| s.is_unsafe(ctx.db())).unwrap_or(false); - let impl_def_with_items = impl_def_from_trait( + let impl_def = impl_def_from_trait( &ctx.sema, ctx.config, adt, &annotated_name, trait_, replace_trait_path, + impl_is_unsafe, ); - update_attribute(builder, old_derives, old_tree, old_trait_path, attr); - 
let trait_path = make::ty_path(replace_trait_path.clone()); + let mut editor = builder.make_editor(attr.syntax()); + update_attribute(&mut editor, old_derives, old_tree, old_trait_path, attr); - match (ctx.config.snippet_cap, impl_def_with_items) { - (None, None) => { - let impl_def = generate_trait_impl(adt, trait_path); - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } + let trait_path = make::ty_path(replace_trait_path.clone()); - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (None, Some((impl_def, _))) => { - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (Some(cap), None) => { - let impl_def = generate_trait_impl(adt, trait_path); - - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } + let (impl_def, first_assoc_item) = if let Some(impl_def) = impl_def { + ( + impl_def.clone(), + impl_def.assoc_item_list().and_then(|list| list.assoc_items().next()), + ) + } else { + (generate_trait_impl(impl_is_unsafe, adt, trait_path), None) + }; - if let Some(l_curly) = impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + if let Some(cap) = ctx.config.snippet_cap { + if let Some(first_assoc_item) = first_assoc_item { + if let ast::AssocItem::Fn(ref func) = first_assoc_item + && let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) + && m.syntax().text() == "todo!()" { - builder.add_tabstop_after_token(cap, l_curly); - } - - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - } - (Some(cap), Some((impl_def, first_assoc_item))) => { - let mut added_snippet = false; - - if impl_is_unsafe { - ted::insert( - Position::first_child_of(impl_def.syntax()), - make::token(T![unsafe]), - ); - } - - if let ast::AssocItem::Fn(ref func) = first_assoc_item { - if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast) { - if m.syntax().text() == "todo!()" { - // Make the `todo!()` a placeholder - builder.add_placeholder_snippet(cap, m); - added_snippet = true; - } - } - } - - if !added_snippet { + // Make the `todo!()` a placeholder + builder.add_placeholder_snippet(cap, m); + } else { // If we haven't already added a snippet, add a tabstop before the generated function builder.add_tabstop_before(cap, first_assoc_item); } - - ted::insert_all( - insert_after, - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); + } else if let Some(l_curly) = + impl_def.assoc_item_list().and_then(|it| it.l_curly_token()) + { + builder.add_tabstop_after_token(cap, l_curly); } - }; + } + + editor.insert_all( + insert_after, + vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], + ); + builder.add_file_edits(ctx.vfs_file_id(), editor); }) } @@ -228,7 +186,8 @@ fn impl_def_from_trait( annotated_name: &ast::Name, trait_: Option<hir::Trait>, trait_path: &ast::Path, -) -> Option<(ast::Impl, ast::AssocItem)> { + impl_is_unsafe: bool, +) -> Option<ast::Impl> { let trait_ = trait_?; let target_scope = sema.scope(annotated_name.syntax())?; @@ -245,21 +204,43 @@ fn impl_def_from_trait( if trait_items.is_empty() { return None; } - let impl_def = generate_trait_impl(adt, 
make::ty_path(trait_path.clone())); + let impl_def = generate_trait_impl(impl_is_unsafe, adt, make::ty_path(trait_path.clone())); - let first_assoc_item = + let assoc_items = add_trait_assoc_items_to_impl(sema, config, &trait_items, trait_, &impl_def, &target_scope); + let assoc_item_list = if let Some((first, other)) = + assoc_items.split_first().map(|(first, other)| (first.clone_subtree(), other)) + { + let first_item = if let ast::AssocItem::Fn(ref func) = first + && let Some(body) = gen_trait_fn_body(func, trait_path, adt, None) + && let Some(func_body) = func.body() + { + let mut editor = SyntaxEditor::new(first.syntax().clone()); + editor.replace(func_body.syntax(), body.syntax()); + ast::AssocItem::cast(editor.finish().new_root().clone()) + } else { + Some(first.clone()) + }; + let items = first_item + .into_iter() + .chain(other.iter().cloned()) + .map(either::Either::Right) + .collect(); + make::assoc_item_list(Some(items)) + } else { + make::assoc_item_list(None) + } + .clone_for_update(); - // Generate a default `impl` function body for the derived trait. - if let ast::AssocItem::Fn(ref func) = first_assoc_item { - let _ = gen_trait_fn_body(func, trait_path, adt, None); - }; - - Some((impl_def, first_assoc_item)) + let impl_def = impl_def.clone_subtree(); + let mut editor = SyntaxEditor::new(impl_def.syntax().clone()); + editor.replace(impl_def.assoc_item_list()?.syntax(), assoc_item_list.syntax()); + let impl_def = ast::Impl::cast(editor.finish().new_root().clone())?; + Some(impl_def) } fn update_attribute( - builder: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, old_derives: &[ast::Path], old_tree: &ast::TokenTree, old_trait_path: &ast::Path, @@ -272,8 +253,6 @@ fn update_attribute( let has_more_derives = !new_derives.is_empty(); if has_more_derives { - let old_tree = builder.make_mut(old_tree.clone()); - // Make the paths into flat lists of tokens in a vec let tt = new_derives.iter().map(|path| path.syntax().clone()).map(|node| { node.descendants_with_tokens() @@ -288,18 +267,17 @@ fn update_attribute( let tt = tt.collect::<Vec<_>>(); let new_tree = make::token_tree(T!['('], tt).clone_for_update(); - ted::replace(old_tree.syntax(), new_tree.syntax()); + editor.replace(old_tree.syntax(), new_tree.syntax()); } else { // Remove the attr and any trailing whitespace - let attr = builder.make_mut(attr.clone()); if let Some(line_break) = attr.syntax().next_sibling_or_token().filter(|t| t.kind() == WHITESPACE) { - ted::remove(line_break) + editor.delete(line_break) } - ted::remove(attr.syntax()) + editor.delete(attr.syntax()) } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index cde0d875e0d..4682c047323 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -302,6 +302,7 @@ mod handlers { generate_function::generate_function, generate_impl::generate_impl, generate_impl::generate_trait_impl, + generate_impl::generate_impl_trait, generate_is_empty_from_len::generate_is_empty_from_len, generate_mut_trait_impl::generate_mut_trait_impl, generate_new::generate_new, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index fc1c6928ff3..91348be97eb 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1881,6 +1881,29 @@ impl<T: Clone> 
Ctx<T> {$0} } #[test] +fn doctest_generate_impl_trait() { + check_doc_test( + "generate_impl_trait", + r#####" +trait $0Foo { + fn foo(&self) -> i32; +} +"#####, + r#####" +trait Foo { + fn foo(&self) -> i32; +} + +impl Foo for ${1:_} { + fn foo(&self) -> i32 { + $0todo!() + } +} +"#####, + ) +} + +#[test] fn doctest_generate_is_empty_from_len() { check_doc_test( "generate_is_empty_from_len", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index fbce1d31eae..15c7a6a3fc2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -23,10 +23,11 @@ use syntax::{ ast::{ self, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, edit::{AstNodeEdit, IndentLevel}, - edit_in_place::{AttrsOwnerEdit, Indent, Removable}, + edit_in_place::{AttrsOwnerEdit, Removable}, make, syntax_factory::SyntaxFactory, }, + syntax_editor::SyntaxEditor, ted, }; @@ -178,6 +179,7 @@ pub fn filter_assoc_items( /// [`filter_assoc_items()`]), clones each item for update and applies path transformation to it, /// then inserts into `impl_`. Returns the modified `impl_` and the first associated item that got /// inserted. +#[must_use] pub fn add_trait_assoc_items_to_impl( sema: &Semantics<'_, RootDatabase>, config: &AssistConfig, @@ -185,71 +187,66 @@ pub fn add_trait_assoc_items_to_impl( trait_: hir::Trait, impl_: &ast::Impl, target_scope: &hir::SemanticsScope<'_>, -) -> ast::AssocItem { +) -> Vec<ast::AssocItem> { let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1; - let items = original_items.iter().map(|InFile { file_id, value: original_item }| { - let cloned_item = { - if let Some(macro_file) = file_id.macro_file() { - let span_map = sema.db.expansion_span_map(macro_file); - let item_prettified = prettify_macro_expansion( - sema.db, - original_item.syntax().clone(), - &span_map, - target_scope.krate().into(), - ); - if let Some(formatted) = ast::AssocItem::cast(item_prettified) { - return formatted; - } else { - stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); + original_items + .iter() + .map(|InFile { file_id, value: original_item }| { + let mut cloned_item = { + if let Some(macro_file) = file_id.macro_file() { + let span_map = sema.db.expansion_span_map(macro_file); + let item_prettified = prettify_macro_expansion( + sema.db, + original_item.syntax().clone(), + &span_map, + target_scope.krate().into(), + ); + if let Some(formatted) = ast::AssocItem::cast(item_prettified) { + return formatted; + } else { + stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`"); + } } + original_item.clone_for_update() } - original_item.clone_for_update() - }; - - if let Some(source_scope) = sema.scope(original_item.syntax()) { - // FIXME: Paths in nested macros are not handled well. See - // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test. 
- let transform = - PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone()); - transform.apply(cloned_item.syntax()); - } - cloned_item.remove_attrs_and_docs(); - cloned_item.reindent_to(new_indent_level); - cloned_item - }); - - let assoc_item_list = impl_.get_or_create_assoc_item_list(); - - let mut first_item = None; - for item in items { - first_item.get_or_insert_with(|| item.clone()); - match &item { - ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { - let body = AstNodeEdit::indent( - &make::block_expr( - None, - Some(match config.expr_fill_default { - ExprFillDefaultMode::Todo => make::ext::expr_todo(), - ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), - ExprFillDefaultMode::Default => make::ext::expr_todo(), - }), - ), - new_indent_level, - ); - ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax()) + .reset_indent(); + + if let Some(source_scope) = sema.scope(original_item.syntax()) { + // FIXME: Paths in nested macros are not handled well. See + // `add_missing_impl_members::paths_in_nested_macro_should_get_transformed` test. + let transform = + PathTransform::trait_impl(target_scope, &source_scope, trait_, impl_.clone()); + cloned_item = ast::AssocItem::cast(transform.apply(cloned_item.syntax())).unwrap(); } - ast::AssocItem::TypeAlias(type_alias) => { - if let Some(type_bound_list) = type_alias.type_bound_list() { - type_bound_list.remove() + cloned_item.remove_attrs_and_docs(); + cloned_item + }) + .map(|item| { + match &item { + ast::AssocItem::Fn(fn_) if fn_.body().is_none() => { + let body = AstNodeEdit::indent( + &make::block_expr( + None, + Some(match config.expr_fill_default { + ExprFillDefaultMode::Todo => make::ext::expr_todo(), + ExprFillDefaultMode::Underscore => make::ext::expr_underscore(), + ExprFillDefaultMode::Default => make::ext::expr_todo(), + }), + ), + IndentLevel::single(), + ); + ted::replace(fn_.get_or_create_body().syntax(), body.syntax()); } + ast::AssocItem::TypeAlias(type_alias) => { + if let Some(type_bound_list) = type_alias.type_bound_list() { + type_bound_list.remove() + } + } + _ => {} } - _ => {} - } - - assoc_item_list.add_item(item) - } - - first_item.unwrap() + AstNodeEdit::indent(&item, new_indent_level) + }) + .collect() } pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize { @@ -334,7 +331,7 @@ fn invert_special_case(make: &SyntaxFactory, expr: &ast::Expr) -> Option<ast::Ex fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> { match expr { ast::Expr::BinExpr(bin) => { - let bin = bin.clone_for_update(); + let bin = bin.clone_subtree(); let op_token = bin.op_token()?; let rev_token = match op_token.kind() { T![==] => T![!=], @@ -350,8 +347,9 @@ fn invert_special_case_legacy(expr: &ast::Expr) -> Option<ast::Expr> { ); } }; - ted::replace(op_token, make::token(rev_token)); - Some(bin.into()) + let mut bin_editor = SyntaxEditor::new(bin.syntax().clone()); + bin_editor.replace(op_token, make::token(rev_token)); + ast::Expr::cast(bin_editor.finish().new_root().clone()) } ast::Expr::MethodCallExpr(mce) => { let receiver = mce.receiver()?; @@ -664,16 +662,23 @@ fn generate_impl_text_inner( /// Generates the corresponding `impl Type {}` including type and lifetime /// parameters. 
+pub(crate) fn generate_impl_with_item( + adt: &ast::Adt, + body: Option<ast::AssocItemList>, +) -> ast::Impl { + generate_impl_inner(false, adt, None, true, body) +} + pub(crate) fn generate_impl(adt: &ast::Adt) -> ast::Impl { - generate_impl_inner(adt, None, true) + generate_impl_inner(false, adt, None, true, None) } /// Generates the corresponding `impl <trait> for Type {}` including type /// and lifetime parameters, with `<trait>` appended to `impl`'s generic parameters' bounds. /// /// This is useful for traits like `PartialEq`, since `impl<T> PartialEq for U<T>` often requires `T: PartialEq`. -pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { - generate_impl_inner(adt, Some(trait_), true) +pub(crate) fn generate_trait_impl(is_unsafe: bool, adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { + generate_impl_inner(is_unsafe, adt, Some(trait_), true, None) } /// Generates the corresponding `impl <trait> for Type {}` including type @@ -681,13 +686,15 @@ pub(crate) fn generate_trait_impl(adt: &ast::Adt, trait_: ast::Type) -> ast::Imp /// /// This is useful for traits like `From<T>`, since `impl<T> From<T> for U<T>` doesn't require `T: From<T>`. pub(crate) fn generate_trait_impl_intransitive(adt: &ast::Adt, trait_: ast::Type) -> ast::Impl { - generate_impl_inner(adt, Some(trait_), false) + generate_impl_inner(false, adt, Some(trait_), false, None) } fn generate_impl_inner( + is_unsafe: bool, adt: &ast::Adt, trait_: Option<ast::Type>, trait_is_transitive: bool, + body: Option<ast::AssocItemList>, ) -> ast::Impl { // Ensure lifetime params are before type & const params let generic_params = adt.generic_param_list().map(|generic_params| { @@ -727,7 +734,7 @@ fn generate_impl_inner( let impl_ = match trait_ { Some(trait_) => make::impl_trait( - false, + is_unsafe, None, None, generic_params, @@ -737,9 +744,9 @@ fn generate_impl_inner( ty, None, adt.where_clause(), - None, + body, ), - None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), None), + None => make::impl_(generic_params, generic_args, ty, adt.where_clause(), body), } .clone_for_update(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs index c58bdd9e8ed..87e90e85193 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs @@ -1,10 +1,7 @@ //! This module contains functions to generate default trait impl function bodies where possible. use hir::TraitRef; -use syntax::{ - ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make}, - ted, -}; +use syntax::ast::{self, AstNode, BinaryOp, CmpOp, HasName, LogicOp, edit::AstNodeEdit, make}; /// Generate custom trait bodies without default implementation where possible. 
/// @@ -18,21 +15,33 @@ pub(crate) fn gen_trait_fn_body( trait_path: &ast::Path, adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>, -) -> Option<()> { +) -> Option<ast::BlockExpr> { + let _ = func.body()?; match trait_path.segment()?.name_ref()?.text().as_str() { - "Clone" => gen_clone_impl(adt, func), - "Debug" => gen_debug_impl(adt, func), - "Default" => gen_default_impl(adt, func), - "Hash" => gen_hash_impl(adt, func), - "PartialEq" => gen_partial_eq(adt, func, trait_ref), - "PartialOrd" => gen_partial_ord(adt, func, trait_ref), + "Clone" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "clone")); + gen_clone_impl(adt) + } + "Debug" => gen_debug_impl(adt), + "Default" => gen_default_impl(adt), + "Hash" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "hash")); + gen_hash_impl(adt) + } + "PartialEq" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "eq")); + gen_partial_eq(adt, trait_ref) + } + "PartialOrd" => { + stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp")); + gen_partial_ord(adt, trait_ref) + } _ => None, } } /// Generate a `Clone` impl based on the fields and members of the target type. -fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "clone")); +fn gen_clone_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_clone_call(target: ast::Expr) -> ast::Expr { let method = make::name_ref("clone"); make::expr_method_call(target, method, make::arg_list(None)).into() @@ -139,12 +148,11 @@ fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } }; let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } /// Generate a `Debug` impl based on the fields and members of the target type. -fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { +fn gen_debug_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { let annotated_name = adt.name()?; match adt { // `Debug` cannot be derived for unions, so no default impl can be provided. @@ -248,8 +256,7 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let body = make::block_expr(None, Some(match_expr.into())); let body = body.indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } ast::Adt::Struct(strukt) => { @@ -296,14 +303,13 @@ fn gen_debug_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { let method = make::name_ref("finish"); let expr = make::expr_method_call(expr, method, make::arg_list(None)).into(); let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } } } /// Generate a `Debug` impl based on the fields and members of the target type. 
-fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { +fn gen_default_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_default_call() -> Option<ast::Expr> { let fn_name = make::ext::path_from_idents(["Default", "default"])?; Some(make::expr_call(make::expr_path(fn_name), make::arg_list(None)).into()) @@ -342,15 +348,13 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { } }; let body = make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1)); - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } } } /// Generate a `Hash` impl based on the fields and members of the target type. -fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "hash")); +fn gen_hash_impl(adt: &ast::Adt) -> Option<ast::BlockExpr> { fn gen_hash_call(target: ast::Expr) -> ast::Stmt { let method = make::name_ref("hash"); let arg = make::expr_path(make::ext::ident_path("state")); @@ -400,13 +404,11 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> { }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } /// Generate a `PartialEq` impl based on the fields and members of the target type. -fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "eq")); +fn gen_partial_eq(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> { fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> { match expr { Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)), @@ -595,12 +597,10 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_> }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } -fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_>>) -> Option<()> { - stdx::always!(func.name().is_some_and(|name| name.text() == "partial_cmp")); +fn gen_partial_ord(adt: &ast::Adt, trait_ref: Option<TraitRef<'_>>) -> Option<ast::BlockExpr> { fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> { let mut arms = vec![]; @@ -686,8 +686,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef<'_ }, }; - ted::replace(func.body()?.syntax(), body.clone_for_update().syntax()); - Some(()) + Some(body) } fn make_discriminant() -> Option<ast::Expr> { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 975c2f02259..bcf8c0ec527 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -276,7 +276,7 @@ fn get_transformed_assoc_item( let assoc_item = assoc_item.clone_for_update(); // FIXME: Paths in nested macros are not handled well. See // `macro_generated_assoc_item2` test. - transform.apply(assoc_item.syntax()); + let assoc_item = ast::AssocItem::cast(transform.apply(assoc_item.syntax()))?; assoc_item.remove_attrs_and_docs(); Some(assoc_item) } @@ -301,7 +301,7 @@ fn get_transformed_fn( let fn_ = fn_.clone_for_update(); // FIXME: Paths in nested macros are not handled well. See // `macro_generated_assoc_item2` test. 
- transform.apply(fn_.syntax()); + let fn_ = ast::Fn::cast(transform.apply(fn_.syntax()))?; fn_.remove_attrs_and_docs(); match async_ { AsyncSugaring::Desugar => { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 0ab880bcfe7..b7432d89c7b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -12,15 +12,16 @@ use span::Edition; use syntax::{ NodeOrToken, SyntaxNode, ast::{self, AstNode, HasGenericArgs, make}, - ted, + syntax_editor::{self, SyntaxEditor}, }; -#[derive(Default)] +#[derive(Default, Debug)] struct AstSubsts { types_and_consts: Vec<TypeOrConst>, lifetimes: Vec<ast::LifetimeArg>, } +#[derive(Debug)] enum TypeOrConst { Either(ast::TypeArg), // indistinguishable type or const param Const(ast::ConstArg), @@ -128,15 +129,18 @@ impl<'a> PathTransform<'a> { } } - pub fn apply(&self, syntax: &SyntaxNode) { + #[must_use] + pub fn apply(&self, syntax: &SyntaxNode) -> SyntaxNode { self.build_ctx().apply(syntax) } - pub fn apply_all<'b>(&self, nodes: impl IntoIterator<Item = &'b SyntaxNode>) { + #[must_use] + pub fn apply_all<'b>( + &self, + nodes: impl IntoIterator<Item = &'b SyntaxNode>, + ) -> Vec<SyntaxNode> { let ctx = self.build_ctx(); - for node in nodes { - ctx.apply(node); - } + nodes.into_iter().map(|node| ctx.apply(&node.clone())).collect() } fn prettify_target_node(&self, node: SyntaxNode) -> SyntaxNode { @@ -236,7 +240,7 @@ impl<'a> PathTransform<'a> { Some((k.name(db).display(db, target_edition).to_string(), v.lifetime()?)) }) .collect(); - let ctx = Ctx { + let mut ctx = Ctx { type_substs, const_substs, lifetime_substs, @@ -272,42 +276,75 @@ fn preorder_rev(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> { } impl Ctx<'_> { - fn apply(&self, item: &SyntaxNode) { + fn apply(&self, item: &SyntaxNode) -> SyntaxNode { // `transform_path` may update a node's parent and that would break the // tree traversal. Thus all paths in the tree are collected into a vec // so that such operation is safe. - let paths = preorder_rev(item).filter_map(ast::Path::cast).collect::<Vec<_>>(); - for path in paths { - self.transform_path(path); - } - - preorder_rev(item).filter_map(ast::Lifetime::cast).for_each(|lifetime| { + let item = self.transform_path(item).clone_subtree(); + let mut editor = SyntaxEditor::new(item.clone()); + preorder_rev(&item).filter_map(ast::Lifetime::cast).for_each(|lifetime| { if let Some(subst) = self.lifetime_substs.get(&lifetime.syntax().text().to_string()) { - ted::replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax()); + editor + .replace(lifetime.syntax(), subst.clone_subtree().clone_for_update().syntax()); } }); + + editor.finish().new_root().clone() } - fn transform_default_values(&self, defaulted_params: Vec<DefaultedParam>) { + fn transform_default_values(&mut self, defaulted_params: Vec<DefaultedParam>) { // By now the default values are simply copied from where they are declared // and should be transformed. As any value is allowed to refer to previous // generic (both type and const) parameters, they should be all iterated left-to-right. 
for param in defaulted_params { - let value = match param { - Either::Left(k) => self.type_substs.get(&k).unwrap().syntax(), - Either::Right(k) => self.const_substs.get(&k).unwrap(), + let value = match ¶m { + Either::Left(k) => self.type_substs.get(k).unwrap().syntax(), + Either::Right(k) => self.const_substs.get(k).unwrap(), }; // `transform_path` may update a node's parent and that would break the // tree traversal. Thus all paths in the tree are collected into a vec // so that such operation is safe. - let paths = preorder_rev(value).filter_map(ast::Path::cast).collect::<Vec<_>>(); - for path in paths { - self.transform_path(path); + let new_value = self.transform_path(value); + match param { + Either::Left(k) => { + self.type_substs.insert(k, ast::Type::cast(new_value.clone()).unwrap()); + } + Either::Right(k) => { + self.const_substs.insert(k, new_value.clone()); + } } } } - fn transform_path(&self, path: ast::Path) -> Option<()> { + fn transform_path(&self, path: &SyntaxNode) -> SyntaxNode { + fn find_child_paths(root_path: &SyntaxNode) -> Vec<ast::Path> { + let mut result = Vec::new(); + for child in root_path.children() { + if let Some(child_path) = ast::Path::cast(child.clone()) { + result.push(child_path); + } else { + result.extend(find_child_paths(&child)); + } + } + result + } + let root_path = path.clone_subtree(); + let result = find_child_paths(&root_path); + let mut editor = SyntaxEditor::new(root_path.clone()); + for sub_path in result { + let new = self.transform_path(sub_path.syntax()); + editor.replace(sub_path.syntax(), new); + } + let update_sub_item = editor.finish().new_root().clone().clone_subtree(); + let item = find_child_paths(&update_sub_item); + let mut editor = SyntaxEditor::new(update_sub_item); + for sub_path in item { + self.transform_path_(&mut editor, &sub_path); + } + editor.finish().new_root().clone() + } + + fn transform_path_(&self, editor: &mut SyntaxEditor, path: &ast::Path) -> Option<()> { if path.qualifier().is_some() { return None; } @@ -319,8 +356,7 @@ impl Ctx<'_> { // don't try to qualify sole `self` either, they are usually locals, but are returned as modules due to namespace clashing return None; } - - let resolution = self.source_scope.speculative_resolve(&path)?; + let resolution = self.source_scope.speculative_resolve(path)?; match resolution { hir::PathResolution::TypeParam(tp) => { @@ -360,12 +396,12 @@ impl Ctx<'_> { let segment = make::path_segment_ty(subst.clone(), trait_ref); let qualified = make::path_from_segments(std::iter::once(segment), false); - ted::replace(path.syntax(), qualified.clone_for_update().syntax()); + editor.replace(path.syntax(), qualified.clone_for_update().syntax()); } else if let Some(path_ty) = ast::PathType::cast(parent) { let old = path_ty.syntax(); if old.parent().is_some() { - ted::replace(old, subst.clone_subtree().clone_for_update().syntax()); + editor.replace(old, subst.clone_subtree().clone_for_update().syntax()); } else { // Some `path_ty` has no parent, especially ones made for default value // of type parameters. 
@@ -377,13 +413,13 @@ impl Ctx<'_> { } let start = path_ty.syntax().first_child().map(NodeOrToken::Node)?; let end = path_ty.syntax().last_child().map(NodeOrToken::Node)?; - ted::replace_all( + editor.replace_all( start..=end, new.syntax().children().map(NodeOrToken::Node).collect::<Vec<_>>(), ); } } else { - ted::replace( + editor.replace( path.syntax(), subst.clone_subtree().clone_for_update().syntax(), ); @@ -409,17 +445,28 @@ impl Ctx<'_> { }; let found_path = self.target_module.find_path(self.source_scope.db, def, cfg)?; let res = mod_path_to_ast(&found_path, self.target_edition).clone_for_update(); + let mut res_editor = SyntaxEditor::new(res.syntax().clone_subtree()); if let Some(args) = path.segment().and_then(|it| it.generic_arg_list()) { if let Some(segment) = res.segment() { - let old = segment.get_or_create_generic_arg_list(); - ted::replace(old.syntax(), args.clone_subtree().syntax().clone_for_update()) + if let Some(old) = segment.generic_arg_list() { + res_editor.replace( + old.syntax(), + args.clone_subtree().syntax().clone_for_update(), + ) + } else { + res_editor.insert( + syntax_editor::Position::last_child_of(segment.syntax()), + args.clone_subtree().syntax().clone_for_update(), + ); + } } } - ted::replace(path.syntax(), res.syntax()) + let res = res_editor.finish().new_root().clone(); + editor.replace(path.syntax().clone(), res); } hir::PathResolution::ConstParam(cp) => { if let Some(subst) = self.const_substs.get(&cp) { - ted::replace(path.syntax(), subst.clone_subtree().clone_for_update()); + editor.replace(path.syntax(), subst.clone_subtree().clone_for_update()); } } hir::PathResolution::SelfType(imp) => { @@ -456,13 +503,13 @@ impl Ctx<'_> { mod_path_to_ast(&found_path, self.target_edition).qualifier() { let res = make::path_concat(qual, path_ty.path()?).clone_for_update(); - ted::replace(path.syntax(), res.syntax()); + editor.replace(path.syntax(), res.syntax()); return Some(()); } } } - ted::replace(path.syntax(), ast_ty.syntax()); + editor.replace(path.syntax(), ast_ty.syntax()); } hir::PathResolution::Local(_) | hir::PathResolution::Def(_) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs index f20b6dea122..e31367f3b14 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -131,4 +131,28 @@ fn foo(v: Enum<()>) { "#, ); } + + #[test] + fn regression_20259() { + check_diagnostics( + r#" +//- minicore: deref +use core::ops::Deref; + +struct Foo<T>(T); + +impl<T> Deref for Foo<T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +fn test(x: Foo<(i32, bool)>) { + let (_a, _b): &(i32, bool) = &x; +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index 698fd147789..1901bcc797e 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -73,11 +73,13 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { } if fn_node.body().is_some() { + // Get the actual start of the function (excluding doc comments) + let fn_start = fn_node + .fn_token() + .map(|token| token.text_range().start()) + .unwrap_or(node.text_range().start()); res.push(Fold { - range: TextRange::new( - node.text_range().start(), - 
node.text_range().end(), - ), + range: TextRange::new(fn_start, node.text_range().end()), kind: FoldKind::Function, }); continue; @@ -688,4 +690,21 @@ type Foo<T, U> = foo<fold arglist>< "#, ) } + + #[test] + fn test_fold_doc_comments_with_multiline_paramlist_function() { + check( + r#" +<fold comment>/// A very very very very very very very very very very very very very very very +/// very very very long description</fold> +<fold function>fn foo<fold arglist>( + very_long_parameter_name: u32, + another_very_long_parameter_name: u32, + third_very_long_param: u32, +)</fold> <fold block>{ + todo!() +}</fold></fold> +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs index 0069452e7b9..49fec0a793c 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/lifetime.rs @@ -77,17 +77,18 @@ pub(super) fn fn_ptr_hints( return None; } - let parent_for_type = func + let parent_for_binder = func .syntax() .ancestors() .skip(1) .take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE)) - .find_map(ast::ForType::cast); + .find_map(ast::ForType::cast) + .and_then(|it| it.for_binder()); let param_list = func.param_list()?; - let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list()); + let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list()); let ret_type = func.ret_type(); - let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token()); + let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token()); hints_( acc, ctx, @@ -143,15 +144,16 @@ pub(super) fn fn_path_hints( // FIXME: Support general path types let (param_list, ret_type) = func.path().as_ref().and_then(path_as_fn)?; - let parent_for_type = func + let parent_for_binder = func .syntax() .ancestors() .skip(1) .take_while(|it| matches!(it.kind(), SyntaxKind::PAREN_TYPE | SyntaxKind::FOR_TYPE)) - .find_map(ast::ForType::cast); + .find_map(ast::ForType::cast) + .and_then(|it| it.for_binder()); - let generic_param_list = parent_for_type.as_ref().and_then(|it| it.generic_param_list()); - let for_kw = parent_for_type.as_ref().and_then(|it| it.for_token()); + let generic_param_list = parent_for_binder.as_ref().and_then(|it| it.generic_param_list()); + let for_kw = parent_for_binder.as_ref().and_then(|it| it.for_token()); hints_( acc, ctx, diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs index fb84e8e6b47..a07c647c2cb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/rename.rs +++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs @@ -12,6 +12,7 @@ use ide_db::{ source_change::SourceChangeBuilder, }; use itertools::Itertools; +use std::fmt::Write; use stdx::{always, never}; use syntax::{AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize, ast}; @@ -459,35 +460,22 @@ fn rename_self_to_param( } fn text_edit_from_self_param(self_param: &ast::SelfParam, new_name: String) -> Option<TextEdit> { - fn target_type_name(impl_def: &ast::Impl) -> Option<String> { - if let Some(ast::Type::PathType(p)) = impl_def.self_ty() { - return Some(p.path()?.segment()?.name_ref()?.text().to_string()); - } - None - } + let mut replacement_text = new_name; + replacement_text.push_str(": "); - match self_param.syntax().ancestors().find_map(ast::Impl::cast) { - Some(impl_def) => { - let type_name = target_type_name(&impl_def)?; + if 
self_param.amp_token().is_some() { + replacement_text.push('&'); + } + if let Some(lifetime) = self_param.lifetime() { + write!(replacement_text, "{lifetime} ").unwrap(); + } + if self_param.amp_token().and(self_param.mut_token()).is_some() { + replacement_text.push_str("mut "); + } - let mut replacement_text = new_name; - replacement_text.push_str(": "); - match (self_param.amp_token(), self_param.mut_token()) { - (Some(_), None) => replacement_text.push('&'), - (Some(_), Some(_)) => replacement_text.push_str("&mut "), - (_, _) => (), - }; - replacement_text.push_str(type_name.as_str()); + replacement_text.push_str("Self"); - Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) - } - None => { - cov_mark::hit!(rename_self_outside_of_methods); - let mut replacement_text = new_name; - replacement_text.push_str(": _"); - Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) - } - } + Some(TextEdit::replace(self_param.syntax().text_range(), replacement_text)) } #[cfg(test)] @@ -2069,7 +2057,7 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: &mut Foo) -> i32 { + fn f(foo: &mut Self) -> i32 { foo.i } } @@ -2095,7 +2083,33 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: Foo) -> i32 { + fn f(foo: Self) -> i32 { + foo.i + } +} +"#, + ); + } + + #[test] + fn test_owned_self_to_parameter_with_lifetime() { + cov_mark::check!(rename_self_to_param); + check( + "foo", + r#" +struct Foo<'a> { i: &'a i32 } + +impl<'a> Foo<'a> { + fn f(&'a $0self) -> i32 { + self.i + } +} +"#, + r#" +struct Foo<'a> { i: &'a i32 } + +impl<'a> Foo<'a> { + fn f(foo: &'a Self) -> i32 { foo.i } } @@ -2105,7 +2119,6 @@ impl Foo { #[test] fn test_self_outside_of_methods() { - cov_mark::check!(rename_self_outside_of_methods); check( "foo", r#" @@ -2114,7 +2127,7 @@ fn f($0self) -> i32 { } "#, r#" -fn f(foo: _) -> i32 { +fn f(foo: Self) -> i32 { foo.i } "#, @@ -2159,7 +2172,7 @@ impl Foo { struct Foo { i: i32 } impl Foo { - fn f(foo: &Foo) -> i32 { + fn f(foo: &Self) -> i32 { let self_var = 1; foo.i } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs index 76656567e7f..ed8a91c39c0 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs @@ -572,9 +572,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker { // test closure_binder // fn main() { for<'a> || (); } if p.at(T![for]) { - let b = p.start(); types::for_binder(p); - b.complete(p, CLOSURE_BINDER); } // test const_closure // fn main() { let cl = const || _ = 0; } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index 55c5dc400b9..cb1b59f6497 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -13,7 +13,7 @@ pub(super) fn opt_generic_param_list(p: &mut Parser<'_>) { // test_err generic_param_list_recover // fn f<T: Clone,, U:, V>() {} -fn generic_param_list(p: &mut Parser<'_>) { +pub(super) fn generic_param_list(p: &mut Parser<'_>) { assert!(p.at(T![<])); let m = p.start(); delimited( @@ -147,7 +147,15 @@ fn type_bound(p: &mut Parser<'_>) -> bool { let has_paren = p.eat(T!['(']); match p.current() { LIFETIME_IDENT => lifetime(p), - T![for] => types::for_type(p, false), + // test for_binder_bound + // 
fn foo<T: for<'a> [const] async Trait>() {} + T![for] => { + types::for_binder(p); + if path_type_bound(p).is_err() { + m.abandon(p); + return false; + } + } // test precise_capturing // fn captures<'a: 'a, 'b: 'b, T>() -> impl Sized + use<'b, T, Self> {} @@ -180,44 +188,8 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); types::for_type(p, false) } - current => { - match current { - T![?] => p.bump_any(), - T![~] => { - p.bump_any(); - p.expect(T![const]); - } - T!['['] => { - p.bump_any(); - p.expect(T![const]); - p.expect(T![']']); - } - // test const_trait_bound - // const fn foo(_: impl const Trait) {} - T![const] => { - p.bump_any(); - } - // test async_trait_bound - // fn async_foo(_: impl async Fn(&i32)) {} - T![async] => { - p.bump_any(); - } - _ => (), - } - if paths::is_use_path_start(p) { - types::path_type_bounds(p, false); - // test_err type_bounds_macro_call_recovery - // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! + T!{}> {} - if p.at(T![!]) { - let m = p.start(); - p.bump(T![!]); - p.error("unexpected `!` in type path, macro calls are not allowed here"); - if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) { - items::token_tree(p); - } - m.complete(p, ERROR); - } - } else { + _ => { + if path_type_bound(p).is_err() { m.abandon(p); return false; } @@ -231,6 +203,43 @@ fn type_bound(p: &mut Parser<'_>) -> bool { true } +fn path_type_bound(p: &mut Parser<'_>) -> Result<(), ()> { + if p.eat(T![~]) { + p.expect(T![const]); + } else if p.eat(T!['[']) { + // test maybe_const_trait_bound + // const fn foo(_: impl [const] Trait) {} + p.expect(T![const]); + p.expect(T![']']); + } else { + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + p.eat(T![const]); + } + // test async_trait_bound + // fn async_foo(_: impl async Fn(&i32)) {} + p.eat(T![async]); + p.eat(T![?]); + + if paths::is_use_path_start(p) { + types::path_type_bounds(p, false); + // test_err type_bounds_macro_call_recovery + // fn foo<T: T![], T: T!, T: T!{}>() -> Box<T! 
+ T!{}> {} + if p.at(T![!]) { + let m = p.start(); + p.bump(T![!]); + p.error("unexpected `!` in type path, macro calls are not allowed here"); + if p.at_ts(TokenSet::new(&[T!['{'], T!['['], T!['(']])) { + items::token_tree(p); + } + m.complete(p, ERROR); + } + Ok(()) + } else { + Err(()) + } +} + // test where_clause // fn foo() // where diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs index 908440b5d05..a7e97c5f850 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/types.rs @@ -249,13 +249,14 @@ fn fn_ptr_type(p: &mut Parser<'_>) { } pub(super) fn for_binder(p: &mut Parser<'_>) { - assert!(p.at(T![for])); + let m = p.start(); p.bump(T![for]); if p.at(T![<]) { - generic_params::opt_generic_param_list(p); + generic_params::generic_param_list(p); } else { p.error("expected `<`"); } + m.complete(p, FOR_BINDER); } // test for_type diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index 12a13caa4d9..3a8041d2df9 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -185,7 +185,6 @@ pub enum SyntaxKind { BREAK_EXPR, CALL_EXPR, CAST_EXPR, - CLOSURE_BINDER, CLOSURE_EXPR, CONST, CONST_ARG, @@ -203,6 +202,7 @@ pub enum SyntaxKind { FN_PTR_TYPE, FORMAT_ARGS_ARG, FORMAT_ARGS_EXPR, + FOR_BINDER, FOR_EXPR, FOR_TYPE, GENERIC_ARG_LIST, @@ -358,7 +358,6 @@ impl SyntaxKind { | BREAK_EXPR | CALL_EXPR | CAST_EXPR - | CLOSURE_BINDER | CLOSURE_EXPR | CONST | CONST_ARG @@ -376,6 +375,7 @@ impl SyntaxKind { | FN_PTR_TYPE | FORMAT_ARGS_ARG | FORMAT_ARGS_EXPR + | FOR_BINDER | FOR_EXPR | FOR_TYPE | GENERIC_ARG_LIST diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index cef7b0ee239..c642e1a3354 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -253,6 +253,10 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/fn_pointer_unnamed_arg.rs"); } #[test] + fn for_binder_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/for_binder_bound.rs"); + } + #[test] fn for_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/for_expr.rs"); } #[test] fn for_range_from() { @@ -402,6 +406,10 @@ mod ok { #[test] fn match_guard() { run_and_expect_no_errors("test_data/parser/inline/ok/match_guard.rs"); } #[test] + fn maybe_const_trait_bound() { + run_and_expect_no_errors("test_data/parser/inline/ok/maybe_const_trait_bound.rs"); + } + #[test] fn metas() { run_and_expect_no_errors("test_data/parser/inline/ok/metas.rs"); } #[test] fn method_call_expr() { diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast index 025c12e4c2a..2fd172539e4 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0024_many_type_parens.rast @@ -37,7 +37,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND L_PAREN "(" - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -45,18 +45,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT 
"'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" R_PAREN ")" R_ANGLE ">" PARAM_LIST @@ -124,7 +124,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND L_PAREN "(" - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -132,18 +132,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Trait" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" R_PAREN ")" ERROR R_ANGLE ">" @@ -186,7 +186,7 @@ SOURCE_FILE TUPLE_EXPR L_PAREN "(" CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -243,13 +243,14 @@ SOURCE_FILE PAREN_TYPE L_PAREN "(" FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast index 674c8d536ca..3768a55d530 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0027_incomplete_where_for.rast @@ -12,13 +12,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE " " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE "\n" BLOCK_EXPR STMT_LIST diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast index cb4fb1642d9..9c4ee6f712a 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0043_unexpected_for_type.rast @@ -8,13 +8,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " REF_TYPE AMP "&" @@ -37,13 +38,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " TUPLE_TYPE L_PAREN "(" @@ -70,13 +72,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + 
GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " SLICE_TYPE L_BRACK "[" @@ -97,22 +100,24 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" @@ -164,31 +169,34 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + WHITESPACE " " + FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'c" + LIFETIME_IDENT "'b" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'c" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast index c04dbe1ea0a..c96ccf7c7f1 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/closure_binder.rast @@ -14,7 +14,7 @@ SOURCE_FILE WHITESPACE " " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast index dcc66dc1e2b..6578809cb0e 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/dyn_trait_type_weak.rast @@ -103,7 +103,7 @@ SOURCE_FILE WHITESPACE " " TYPE_BOUND_LIST TYPE_BOUND - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -111,12 +111,12 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'a" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Path" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Path" SEMICOLON ";" WHITESPACE "\n" TYPE_ALIAS diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast new file mode 100644 index 00000000000..17dbbf30a7b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rast @@ -0,0 +1,45 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + GENERIC_PARAM_LIST + L_ANGLE "<" + TYPE_PARAM + NAME + IDENT "T" + COLON ":" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" + WHITESPACE " " + L_BRACK "[" + CONST_KW "const" + R_BRACK "]" + 
WHITESPACE " " + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_ANGLE ">" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs new file mode 100644 index 00000000000..427cf558710 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_binder_bound.rs @@ -0,0 +1 @@ +fn foo<T: for<'a> [const] async Trait>() {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast index 7600457a9b8..58623058cae 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/for_type.rast @@ -8,13 +8,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" @@ -39,13 +40,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE UNSAFE_KW "unsafe" @@ -86,13 +88,14 @@ SOURCE_FILE EQ "=" WHITESPACE " " FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast index ea401d224e6..bf24a579124 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/lambda_expr.rast @@ -202,7 +202,7 @@ SOURCE_FILE WHITESPACE "\n " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -223,7 +223,7 @@ SOURCE_FILE WHITESPACE "\n " EXPR_STMT CLOSURE_EXPR - CLOSURE_BINDER + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast new file mode 100644 index 00000000000..8d12f814c2a --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rast @@ -0,0 +1,36 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + L_BRACK "[" + CONST_KW "const" + R_BRACK "]" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" 
+ WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs new file mode 100644 index 00000000000..e1da9206098 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/maybe_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl [const] Trait) {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast index 30a2842e538..6afa0613f39 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/no_dyn_trait_leading_for.rast @@ -11,13 +11,14 @@ SOURCE_FILE TYPE_BOUND_LIST TYPE_BOUND FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast index 56e2d1095d2..cb296153c8f 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/question_for_type_trait_bound.rast @@ -29,10 +29,11 @@ SOURCE_FILE TYPE_BOUND QUESTION "?" 
FOR_TYPE - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast index 0cc365efbe6..b10b953f2fb 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/where_pred_for.rast @@ -18,13 +18,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast index 86f6af97c73..dcaf58f7f98 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0032_where_for.rast @@ -36,7 +36,7 @@ SOURCE_FILE PLUS "+" WHITESPACE " " TYPE_BOUND - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" @@ -44,18 +44,18 @@ SOURCE_FILE LIFETIME LIFETIME_IDENT "'de" R_ANGLE ">" - WHITESPACE " " - PATH_TYPE - PATH - PATH_SEGMENT - NAME_REF - IDENT "Deserialize" - GENERIC_ARG_LIST - L_ANGLE "<" - LIFETIME_ARG - LIFETIME - LIFETIME_IDENT "'de" - R_ANGLE ">" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Deserialize" + GENERIC_ARG_LIST + L_ANGLE "<" + LIFETIME_ARG + LIFETIME + LIFETIME_IDENT "'de" + R_ANGLE ">" WHITESPACE " " PLUS "+" WHITESPACE " " diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast index 8bf1090f9cf..5cef4dff062 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/ok/0067_where_for_pred.rast @@ -18,13 +18,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH @@ -81,13 +82,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " REF_TYPE AMP "&" @@ -135,13 +137,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PAREN_TYPE L_PAREN "(" @@ -206,13 +209,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - 
R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " SLICE_TYPE L_BRACK "[" @@ -276,13 +280,14 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'a" + R_ANGLE ">" WHITESPACE " " PATH_TYPE PATH @@ -349,22 +354,24 @@ SOURCE_FILE WHERE_KW "where" WHITESPACE "\n " WHERE_PRED - FOR_KW "for" - GENERIC_PARAM_LIST - L_ANGLE "<" - LIFETIME_PARAM - LIFETIME - LIFETIME_IDENT "'a" - R_ANGLE ">" - WHITESPACE " " - FOR_TYPE + FOR_BINDER FOR_KW "for" GENERIC_PARAM_LIST L_ANGLE "<" LIFETIME_PARAM LIFETIME - LIFETIME_IDENT "'b" + LIFETIME_IDENT "'a" R_ANGLE ">" + WHITESPACE " " + FOR_TYPE + FOR_BINDER + FOR_KW "for" + GENERIC_PARAM_LIST + L_ANGLE "<" + LIFETIME_PARAM + LIFETIME + LIFETIME_IDENT "'b" + R_ANGLE ">" WHITESPACE " " FN_PTR_TYPE FN_KW "fn" diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 27fe9f79bbc..0dbb309a62a 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -20,6 +20,7 @@ semver.workspace = true serde_json.workspace = true serde.workspace = true serde_derive.workspace = true +temp-dir.workspace = true tracing.workspace = true triomphe.workspace = true la-arena.workspace = true diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs index 499caa622c4..5bea74bed7e 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_dependencies.rs @@ -16,11 +16,13 @@ use la_arena::ArenaMap; use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize as _; +use stdx::never; use toolchain::Tool; use crate::{ CargoConfig, CargoFeatures, CargoWorkspace, InvocationStrategy, ManifestPath, Package, Sysroot, - TargetKind, utf8_stdout, + TargetKind, cargo_config_file::make_lockfile_copy, + cargo_workspace::MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH, utf8_stdout, }; /// Output of the build script and proc-macro building steps for a workspace. @@ -30,6 +32,15 @@ pub struct WorkspaceBuildScripts { error: Option<String>, } +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub enum ProcMacroDylibPath { + Path(AbsPathBuf), + DylibNotFound, + NotProcMacro, + #[default] + NotBuilt, +} + /// Output of the build script and proc-macro building step for a concrete package. #[derive(Debug, Clone, Default, PartialEq, Eq)] pub(crate) struct BuildScriptOutput { @@ -43,7 +54,7 @@ pub(crate) struct BuildScriptOutput { /// Directory where a build script might place its output. pub(crate) out_dir: Option<AbsPathBuf>, /// Path to the proc-macro library file if this package exposes proc-macros. 
- pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>, + pub(crate) proc_macro_dylib_path: ProcMacroDylibPath, } impl BuildScriptOutput { @@ -51,7 +62,10 @@ impl BuildScriptOutput { self.cfgs.is_empty() && self.envs.is_empty() && self.out_dir.is_none() - && self.proc_macro_dylib_path.is_none() + && matches!( + self.proc_macro_dylib_path, + ProcMacroDylibPath::NotBuilt | ProcMacroDylibPath::NotProcMacro + ) } } @@ -67,7 +81,7 @@ impl WorkspaceBuildScripts { let current_dir = workspace.workspace_root(); let allowed_features = workspace.workspace_features(); - let cmd = Self::build_command( + let (_guard, cmd) = Self::build_command( config, &allowed_features, workspace.manifest_path(), @@ -88,7 +102,7 @@ impl WorkspaceBuildScripts { ) -> io::Result<Vec<WorkspaceBuildScripts>> { assert_eq!(config.invocation_strategy, InvocationStrategy::Once); - let cmd = Self::build_command( + let (_guard, cmd) = Self::build_command( config, &Default::default(), // This is not gonna be used anyways, so just construct a dummy here @@ -126,6 +140,8 @@ impl WorkspaceBuildScripts { |package, cb| { if let Some(&(package, workspace)) = by_id.get(package) { cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]); + } else { + never!("Received compiler message for unknown package: {}", package); } }, progress, @@ -140,12 +156,9 @@ impl WorkspaceBuildScripts { if tracing::enabled!(tracing::Level::INFO) { for (idx, workspace) in workspaces.iter().enumerate() { for package in workspace.packages() { - let package_build_data = &mut res[idx].outputs[package]; + let package_build_data: &mut BuildScriptOutput = &mut res[idx].outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - workspace[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,); } } } @@ -198,10 +211,33 @@ impl WorkspaceBuildScripts { let path = dir_entry.path(); let extension = path.extension()?; if extension == std::env::consts::DLL_EXTENSION { - let name = path.file_stem()?.to_str()?.split_once('-')?.0.to_owned(); - let path = AbsPathBuf::try_from(Utf8PathBuf::from_path_buf(path).ok()?) - .ok()?; - return Some((name, path)); + let name = path + .file_stem()? + .to_str()? + .split_once('-')? 
+ .0 + .trim_start_matches("lib") + .to_owned(); + let path = match Utf8PathBuf::from_path_buf(path) { + Ok(path) => path, + Err(path) => { + tracing::warn!( + "Proc-macro dylib path contains non-UTF8 characters: {:?}", + path.display() + ); + return None; + } + }; + return match AbsPathBuf::try_from(path) { + Ok(path) => Some((name, path)), + Err(path) => { + tracing::error!( + "proc-macro dylib path is not absolute: {:?}", + path + ); + None + } + }; } } None @@ -209,28 +245,24 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package - .targets - .iter() - .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) - { - if let Some((_, path)) = proc_macro_dylibs - .iter() - .find(|(name, _)| *name.trim_start_matches("lib") == package.name) - { - bs.outputs[p].proc_macro_dylib_path = Some(path.clone()); + bs.outputs[p].proc_macro_dylib_path = + if package.targets.iter().any(|&it| { + matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true }) + }) { + match proc_macro_dylibs.iter().find(|(name, _)| *name == package.name) { + Some((_, path)) => ProcMacroDylibPath::Path(path.clone()), + _ => ProcMacroDylibPath::DylibNotFound, + } + } else { + ProcMacroDylibPath::NotProcMacro } - } } if tracing::enabled!(tracing::Level::INFO) { for package in rustc.packages() { let package_build_data = &bs.outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - rustc[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", rustc[package].manifest,); } } } @@ -263,6 +295,12 @@ impl WorkspaceBuildScripts { |package, cb| { if let Some(&package) = by_id.get(package) { cb(&workspace[package].name, &mut outputs[package]); + } else { + never!( + "Received compiler message for unknown package: {}\n {}", + package, + by_id.keys().join(", ") + ); } }, progress, @@ -272,10 +310,7 @@ impl WorkspaceBuildScripts { for package in workspace.packages() { let package_build_data = &outputs[package]; if !package_build_data.is_empty() { - tracing::info!( - "{}: {package_build_data:?}", - workspace[package].manifest.parent(), - ); + tracing::info!("{}: {package_build_data:?}", workspace[package].manifest,); } } } @@ -348,15 +383,23 @@ impl WorkspaceBuildScripts { progress(format!( "building compile-time-deps: proc-macro {name} built" )); - if message.target.kind.contains(&cargo_metadata::TargetKind::ProcMacro) + if data.proc_macro_dylib_path == ProcMacroDylibPath::NotBuilt { + data.proc_macro_dylib_path = ProcMacroDylibPath::NotProcMacro; + } + if !matches!(data.proc_macro_dylib_path, ProcMacroDylibPath::Path(_)) + && message + .target + .kind + .contains(&cargo_metadata::TargetKind::ProcMacro) { - // Skip rmeta file - if let Some(filename) = - message.filenames.iter().find(|file| is_dylib(file)) - { - let filename = AbsPath::assert(filename); - data.proc_macro_dylib_path = Some(filename.to_owned()); - } + data.proc_macro_dylib_path = + match message.filenames.iter().find(|file| is_dylib(file)) { + Some(filename) => { + let filename = AbsPath::assert(filename); + ProcMacroDylibPath::Path(filename.to_owned()) + } + None => ProcMacroDylibPath::DylibNotFound, + }; } }); } @@ -393,14 +436,15 @@ impl WorkspaceBuildScripts { current_dir: &AbsPath, sysroot: &Sysroot, toolchain: Option<&semver::Version>, - ) -> io::Result<Command> { + ) -> io::Result<(Option<temp_dir::TempDir>, Command)> { match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { let mut cmd = 
toolchain::command(program, current_dir, &config.extra_env); cmd.args(args); - Ok(cmd) + Ok((None, cmd)) } _ => { + let mut requires_unstable_options = false; let mut cmd = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); @@ -416,7 +460,19 @@ impl WorkspaceBuildScripts { if let Some(target) = &config.target { cmd.args(["--target", target]); } - + let mut temp_dir_guard = None; + if toolchain + .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH) + { + let lockfile_path = + <_ as AsRef<Utf8Path>>::as_ref(manifest_path).with_extension("lock"); + if let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile_path) { + requires_unstable_options = true; + temp_dir_guard = Some(temp_dir); + cmd.arg("--lockfile-path"); + cmd.arg(target_lockfile.as_str()); + } + } match &config.features { CargoFeatures::All => { cmd.arg("--all-features"); @@ -438,6 +494,7 @@ impl WorkspaceBuildScripts { } if manifest_path.is_rust_manifest() { + requires_unstable_options = true; cmd.arg("-Zscript"); } @@ -457,8 +514,7 @@ impl WorkspaceBuildScripts { toolchain.is_some_and(|v| *v >= COMP_TIME_DEPS_MIN_TOOLCHAIN_VERSION); if cargo_comp_time_deps_available { - cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); - cmd.arg("-Zunstable-options"); + requires_unstable_options = true; cmd.arg("--compile-time-deps"); // we can pass this unconditionally, because we won't actually build the // binaries, and as such, this will succeed even on targets without libtest @@ -481,7 +537,11 @@ impl WorkspaceBuildScripts { cmd.env("RA_RUSTC_WRAPPER", "1"); } } - Ok(cmd) + if requires_unstable_options { + cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); + cmd.arg("-Zunstable-options"); + } + Ok((temp_dir_guard, cmd)) } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs index 7966f74df30..a1e7ed09232 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs @@ -1,4 +1,5 @@ //! 
Read `.cargo/config.toml` as a JSON object +use paths::{Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use toolchain::Tool; @@ -32,3 +33,24 @@ pub(crate) fn read( Some(json) } + +pub(crate) fn make_lockfile_copy( + lockfile_path: &Utf8Path, +) -> Option<(temp_dir::TempDir, Utf8PathBuf)> { + let temp_dir = temp_dir::TempDir::with_prefix("rust-analyzer").ok()?; + let target_lockfile = temp_dir.path().join("Cargo.lock").try_into().ok()?; + match std::fs::copy(lockfile_path, &target_lockfile) { + Ok(_) => { + tracing::debug!("Copied lock file from `{}` to `{}`", lockfile_path, target_lockfile); + Some((temp_dir, target_lockfile)) + } + // lockfile does not yet exist, so we can just create a new one in the temp dir + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Some((temp_dir, target_lockfile)), + Err(e) => { + tracing::warn!( + "Failed to copy lock file from `{lockfile_path}` to `{target_lockfile}`: {e}", + ); + None + } + } +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index daadcd9d79a..e613fd590c7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -15,16 +15,18 @@ use span::Edition; use stdx::process::spawn_with_streaming_output; use toolchain::Tool; +use crate::cargo_config_file::make_lockfile_copy; use crate::{CfgOverrides, InvocationStrategy}; use crate::{ManifestPath, Sysroot}; -const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = semver::Version { - major: 1, - minor: 82, - patch: 0, - pre: semver::Prerelease::EMPTY, - build: semver::BuildMetadata::EMPTY, -}; +pub(crate) const MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH: semver::Version = + semver::Version { + major: 1, + minor: 82, + patch: 0, + pre: semver::Prerelease::EMPTY, + build: semver::BuildMetadata::EMPTY, + }; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo /// workspace. It pretty closely mirrors `cargo metadata` output. 
@@ -245,7 +247,7 @@ pub enum TargetKind { } impl TargetKind { - fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind { + pub fn new(kinds: &[cargo_metadata::TargetKind]) -> TargetKind { for kind in kinds { return match kind { cargo_metadata::TargetKind::Bin => TargetKind::Bin, @@ -552,7 +554,10 @@ impl CargoWorkspace { pub(crate) struct FetchMetadata { command: cargo_metadata::MetadataCommand, + #[expect(dead_code)] + manifest_path: ManifestPath, lockfile_path: Option<Utf8PathBuf>, + #[expect(dead_code)] kind: &'static str, no_deps: bool, no_deps_result: anyhow::Result<cargo_metadata::Metadata>, @@ -596,25 +601,22 @@ impl FetchMetadata { } command.current_dir(current_dir); - let mut needs_nightly = false; let mut other_options = vec![]; // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually // the only relevant flags for metadata here are unstable ones, so we pass those along // but nothing else let mut extra_args = config.extra_args.iter(); while let Some(arg) = extra_args.next() { - if arg == "-Z" { - if let Some(arg) = extra_args.next() { - needs_nightly = true; - other_options.push("-Z".to_owned()); - other_options.push(arg.to_owned()); - } + if arg == "-Z" + && let Some(arg) = extra_args.next() + { + other_options.push("-Z".to_owned()); + other_options.push(arg.to_owned()); } } let mut lockfile_path = None; if cargo_toml.is_rust_manifest() { - needs_nightly = true; other_options.push("-Zscript".to_owned()); } else if config .toolchain_version @@ -632,10 +634,6 @@ impl FetchMetadata { command.other_options(other_options.clone()); - if needs_nightly { - command.env("RUSTC_BOOTSTRAP", "1"); - } - // Pre-fetch basic metadata using `--no-deps`, which: // - avoids fetching registries like crates.io, // - skips dependency resolution and does not modify lockfiles, @@ -655,7 +653,15 @@ impl FetchMetadata { } .with_context(|| format!("Failed to run `{cargo_command:?}`")); - Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options } + Self { + manifest_path: cargo_toml.clone(), + command, + lockfile_path, + kind: config.kind, + no_deps, + no_deps_result, + other_options, + } } pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> { @@ -672,40 +678,34 @@ impl FetchMetadata { locked: bool, progress: &dyn Fn(String), ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> { - let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } = - self; + _ = target_dir; + let Self { + mut command, + manifest_path: _, + lockfile_path, + kind: _, + no_deps, + no_deps_result, + mut other_options, + } = self; if no_deps { return no_deps_result.map(|m| (m, None)); } let mut using_lockfile_copy = false; - // The manifest is a rust file, so this means its a script manifest - if let Some(lockfile) = lockfile_path { - let target_lockfile = - target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock"); - match std::fs::copy(&lockfile, &target_lockfile) { - Ok(_) => { - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - // There exists no lockfile yet - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) => { - tracing::warn!( - "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}", - ); - } - } + let mut 
_temp_dir_guard; + if let Some(lockfile) = lockfile_path + && let Some((temp_dir, target_lockfile)) = make_lockfile_copy(&lockfile) + { + _temp_dir_guard = temp_dir; + other_options.push("--lockfile-path".to_owned()); + other_options.push(target_lockfile.to_string()); + using_lockfile_copy = true; } - if using_lockfile_copy { + if using_lockfile_copy || other_options.iter().any(|it| it.starts_with("-Z")) { + command.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); other_options.push("-Zunstable-options".to_owned()); - command.env("RUSTC_BOOTSTRAP", "1"); } // No need to lock it if we copied the lockfile, we won't modify the original after all/ // This way cargo cannot error out on us if the lockfile requires updating. @@ -714,13 +714,11 @@ impl FetchMetadata { } command.other_options(other_options); - // FIXME: Fetching metadata is a slow process, as it might require - // calling crates.io. We should be reporting progress here, but it's - // unclear whether cargo itself supports it. progress("cargo metadata: started".to_owned()); let res = (|| -> anyhow::Result<(_, _)> { let mut errored = false; + tracing::debug!("Running `{:?}`", command.cargo_command()); let output = spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| { errored = errored || line.starts_with("error") || line.starts_with("warning"); diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 3bf3d06e6b1..d39781b1506 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -59,7 +59,7 @@ use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; use rustc_hash::FxHashSet; pub use crate::{ - build_dependencies::WorkspaceBuildScripts, + build_dependencies::{ProcMacroDylibPath, WorkspaceBuildScripts}, cargo_workspace::{ CargoConfig, CargoFeatures, CargoMetadataConfig, CargoWorkspace, Package, PackageData, PackageDependency, RustLibSource, Target, TargetData, TargetKind, @@ -139,21 +139,22 @@ impl ProjectManifest { } fn find_in_parent_dirs(path: &AbsPath, target_file_name: &str) -> Option<ManifestPath> { - if path.file_name().unwrap_or_default() == target_file_name { - if let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) { - return Some(manifest); - } + if path.file_name().unwrap_or_default() == target_file_name + && let Ok(manifest) = ManifestPath::try_from(path.to_path_buf()) + { + return Some(manifest); } let mut curr = Some(path); while let Some(path) = curr { let candidate = path.join(target_file_name); - if fs::metadata(&candidate).is_ok() { - if let Ok(manifest) = ManifestPath::try_from(candidate) { - return Some(manifest); - } + if fs::metadata(&candidate).is_ok() + && let Ok(manifest) = ManifestPath::try_from(candidate) + { + return Some(manifest); } + curr = path.parent(); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 9781c46737d..c0a5009afba 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -143,12 +143,11 @@ impl Sysroot { Some(root) => { // special case rustc, we can look that up directly in the sysroot's bin folder // as it should never invoke another cargo binary - if let Tool::Rustc = tool { - if let Some(path) = + if let Tool::Rustc = tool + && let Some(path) = probe_for_binary(root.join("bin").join(Tool::Rustc.name()).into()) - { - 
return toolchain::command(path, current_dir, envs); - } + { + return toolchain::command(path, current_dir, envs); } let mut cmd = toolchain::command(tool.prefer_proxy(), current_dir, envs); @@ -291,29 +290,26 @@ impl Sysroot { pub fn set_workspace(&mut self, workspace: RustLibSrcWorkspace) { self.workspace = workspace; - if self.error.is_none() { - if let Some(src_root) = &self.rust_lib_src_root { - let has_core = match &self.workspace { - RustLibSrcWorkspace::Workspace(ws) => { - ws.packages().any(|p| ws[p].name == "core") - } - RustLibSrcWorkspace::Json(project_json) => project_json - .crates() - .filter_map(|(_, krate)| krate.display_name.clone()) - .any(|name| name.canonical_name().as_str() == "core"), - RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), - RustLibSrcWorkspace::Empty => true, + if self.error.is_none() + && let Some(src_root) = &self.rust_lib_src_root + { + let has_core = match &self.workspace { + RustLibSrcWorkspace::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), + RustLibSrcWorkspace::Json(project_json) => project_json + .crates() + .filter_map(|(_, krate)| krate.display_name.clone()) + .any(|name| name.canonical_name().as_str() == "core"), + RustLibSrcWorkspace::Stitched(stitched) => stitched.by_name("core").is_some(), + RustLibSrcWorkspace::Empty => true, + }; + if !has_core { + let var_note = if env::var_os("RUST_SRC_PATH").is_some() { + " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)" + } else { + ", try running `rustup component add rust-src` to possibly fix this" }; - if !has_core { - let var_note = if env::var_os("RUST_SRC_PATH").is_some() { - " (env var `RUST_SRC_PATH` is set and may be incorrect, try unsetting it)" - } else { - ", try running `rustup component add rust-src` to possibly fix this" - }; - self.error = Some(format!( - "sysroot at `{src_root}` is missing a `core` library{var_note}", - )); - } + self.error = + Some(format!("sysroot at `{src_root}` is missing a `core` library{var_note}",)); } } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs index 6e06e88bf7a..ab69c8e0e4a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs @@ -65,6 +65,7 @@ fn rustc_print_cfg( let (sysroot, current_dir) = match config { QueryConfig::Cargo(sysroot, cargo_toml, _) => { let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); + cmd.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly"); cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS); if let Some(target) = target { cmd.args(["--target", target]); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 677f29e3c60..5b36e10fd69 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -24,7 +24,7 @@ use crate::{ CargoConfig, CargoWorkspace, CfgOverrides, InvocationStrategy, ManifestPath, Package, ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, - build_dependencies::BuildScriptOutput, + build_dependencies::{BuildScriptOutput, ProcMacroDylibPath}, cargo_config_file, cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, 
RustLibSource}, env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, @@ -424,12 +424,12 @@ impl ProjectWorkspace { sysroot.set_workspace(loaded_sysroot); } - if !cargo.requires_rustc_private() { - if let Err(e) = &mut rustc { - // We don't need the rustc sources here, - // so just discard the error. - _ = e.take(); - } + if !cargo.requires_rustc_private() + && let Err(e) = &mut rustc + { + // We don't need the rustc sources here, + // so just discard the error. + _ = e.take(); } Ok(ProjectWorkspace { @@ -1163,17 +1163,15 @@ fn project_json_to_crate_graph( crate = display_name.as_ref().map(|name| name.canonical_name().as_str()), "added root to crate graph" ); - if *is_proc_macro { - if let Some(path) = proc_macro_dylib_path.clone() { - let node = Ok(( - display_name - .as_ref() - .map(|it| it.canonical_name().as_str().to_owned()) - .unwrap_or_else(|| format!("crate{}", idx.0)), - path, - )); - proc_macros.insert(crate_graph_crate_id, node); - } + if *is_proc_macro && let Some(path) = proc_macro_dylib_path.clone() { + let node = Ok(( + display_name + .as_ref() + .map(|it| it.canonical_name().as_str().to_owned()) + .unwrap_or_else(|| format!("crate{}", idx.0)), + path, + )); + proc_macros.insert(crate_graph_crate_id, node); } (idx, crate_graph_crate_id) }, @@ -1318,16 +1316,17 @@ fn cargo_to_crate_graph( public_deps.add_to_crate_graph(crate_graph, from); // Add dep edge of all targets to the package's lib target - if let Some((to, name)) = lib_tgt.clone() { - if to != from && kind != TargetKind::BuildScript { - // (build script can not depend on its library target) - - // For root projects with dashes in their name, - // cargo metadata does not do any normalization, - // so we do it ourselves currently - let name = CrateName::normalize_dashes(&name); - add_dep(crate_graph, from, name, to); - } + if let Some((to, name)) = lib_tgt.clone() + && to != from + && kind != TargetKind::BuildScript + { + // (build script can not depend on its library target) + + // For root projects with dashes in their name, + // cargo metadata does not do any normalization, + // so we do it ourselves currently + let name = CrateName::normalize_dashes(&name); + add_dep(crate_graph, from, name, to); } } } @@ -1638,9 +1637,19 @@ fn add_target_crate_root( let proc_macro = match build_data { Some((BuildScriptOutput { proc_macro_dylib_path, .. }, has_errors)) => { match proc_macro_dylib_path { - Some(path) => Ok((cargo_name.to_owned(), path.clone())), - None if has_errors => Err(ProcMacroLoadingError::FailedToBuild), - None => Err(ProcMacroLoadingError::MissingDylibPath), + ProcMacroDylibPath::Path(path) => Ok((cargo_name.to_owned(), path.clone())), + ProcMacroDylibPath::NotBuilt => Err(ProcMacroLoadingError::NotYetBuilt), + ProcMacroDylibPath::NotProcMacro | ProcMacroDylibPath::DylibNotFound + if has_errors => + { + Err(ProcMacroLoadingError::FailedToBuild) + } + ProcMacroDylibPath::NotProcMacro => { + Err(ProcMacroLoadingError::ExpectedProcMacroArtifact) + } + ProcMacroDylibPath::DylibNotFound => { + Err(ProcMacroLoadingError::MissingDylibPath) + } } } None => Err(ProcMacroLoadingError::NotYetBuilt), @@ -1905,7 +1914,8 @@ fn cargo_target_dir( meta.manifest_path(manifest); // `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve. 
// So we can use it to get `target_directory` before copying lockfiles - let mut other_options = vec!["--no-deps".to_owned()]; + meta.no_deps(); + let mut other_options = vec![]; if manifest.is_rust_manifest() { meta.env("RUSTC_BOOTSTRAP", "1"); other_options.push("-Zscript".to_owned()); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index fc89f486f84..4f75d14834c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -656,22 +656,26 @@ impl flags::AnalysisStats { let mut sw = self.stop_watch(); let mut all = 0; let mut fail = 0; - for &body in bodies { - if matches!(body, DefWithBody::Variant(_)) { + for &body_id in bodies { + if matches!(body_id, DefWithBody::Variant(_)) { + continue; + } + let module = body_id.module(db); + if !self.should_process(db, body_id, module) { continue; } + all += 1; - let Err(e) = db.mir_body(body.into()) else { + let Err(e) = db.mir_body(body_id.into()) else { continue; }; if verbosity.is_spammy() { - let full_name = body - .module(db) + let full_name = module .path_to_root(db) .into_iter() .rev() .filter_map(|it| it.name(db)) - .chain(Some(body.name(db).unwrap_or_else(Name::missing))) + .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) .map(|it| it.display(db, Edition::LATEST).to_string()) .join("::"); bar.println(format!("Mir body for {full_name} failed due {e:?}")); @@ -727,26 +731,9 @@ impl flags::AnalysisStats { let name = body_id.name(db).unwrap_or_else(Name::missing); let module = body_id.module(db); let display_target = module.krate().to_display_target(db); - let full_name = move || { - module - .krate() - .display_name(db) - .map(|it| it.canonical_name().as_str().to_owned()) - .into_iter() - .chain( - module - .path_to_root(db) - .into_iter() - .filter_map(|it| it.name(db)) - .rev() - .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db, Edition::LATEST).to_string()), - ) - .join("::") - }; if let Some(only_name) = self.only.as_deref() { if name.display(db, Edition::LATEST).to_string() != only_name - && full_name() != only_name + && full_name(db, body_id, module) != only_name { continue; } @@ -763,12 +750,17 @@ impl flags::AnalysisStats { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file.file_id(db)); let syntax_range = src.text_range(); - format!("processing: {} ({} {:?})", full_name(), path, syntax_range) + format!( + "processing: {} ({} {:?})", + full_name(db, body_id, module), + path, + syntax_range + ) } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } }; if verbosity.is_spammy() { @@ -781,9 +773,11 @@ impl flags::AnalysisStats { Ok(inference_result) => inference_result, Err(p) => { if let Some(s) = p.downcast_ref::<&str>() { - eprintln!("infer panicked for {}: {}", full_name(), s); + eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s); } else if let Some(s) = p.downcast_ref::<String>() { - eprintln!("infer panicked for {}: {}", full_name(), s); + eprintln!("infer panicked for {}: {}", full_name(db, body_id, module), s); + } else { + eprintln!("infer panicked for {}", full_name(db, body_id, module)); } panics += 1; bar.inc(1); @@ -890,7 +884,7 @@ impl 
flags::AnalysisStats { if verbosity.is_spammy() { bar.println(format!( "In {}: {} exprs, {} unknown, {} partial", - full_name(), + full_name(db, body_id, module), num_exprs - previous_exprs, num_exprs_unknown - previous_unknown, num_exprs_partially_unknown - previous_partially_unknown @@ -993,7 +987,7 @@ impl flags::AnalysisStats { if verbosity.is_spammy() { bar.println(format!( "In {}: {} pats, {} unknown, {} partial", - full_name(), + full_name(db, body_id, module), num_pats - previous_pats, num_pats_unknown - previous_unknown, num_pats_partially_unknown - previous_partially_unknown @@ -1049,34 +1043,8 @@ impl flags::AnalysisStats { bar.tick(); for &body_id in bodies { let module = body_id.module(db); - let full_name = move || { - module - .krate() - .display_name(db) - .map(|it| it.canonical_name().as_str().to_owned()) - .into_iter() - .chain( - module - .path_to_root(db) - .into_iter() - .filter_map(|it| it.name(db)) - .rev() - .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) - .map(|it| it.display(db, Edition::LATEST).to_string()), - ) - .join("::") - }; - if let Some(only_name) = self.only.as_deref() { - if body_id - .name(db) - .unwrap_or_else(Name::missing) - .display(db, Edition::LATEST) - .to_string() - != only_name - && full_name() != only_name - { - continue; - } + if !self.should_process(db, body_id, module) { + continue; } let msg = move || { if verbosity.is_verbose() { @@ -1090,12 +1058,17 @@ impl flags::AnalysisStats { let original_file = src.file_id.original_file(db); let path = vfs.file_path(original_file.file_id(db)); let syntax_range = src.text_range(); - format!("processing: {} ({} {:?})", full_name(), path, syntax_range) + format!( + "processing: {} ({} {:?})", + full_name(db, body_id, module), + path, + syntax_range + ) } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } } else { - format!("processing: {}", full_name()) + format!("processing: {}", full_name(db, body_id, module)) } }; if verbosity.is_spammy() { @@ -1205,11 +1178,42 @@ impl flags::AnalysisStats { eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len()); } + fn should_process(&self, db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> bool { + if let Some(only_name) = self.only.as_deref() { + let name = body_id.name(db).unwrap_or_else(Name::missing); + + if name.display(db, Edition::LATEST).to_string() != only_name + && full_name(db, body_id, module) != only_name + { + return false; + } + } + true + } + fn stop_watch(&self) -> StopWatch { StopWatch::start() } } +fn full_name(db: &RootDatabase, body_id: DefWithBody, module: hir::Module) -> String { + module + .krate() + .display_name(db) + .map(|it| it.canonical_name().as_str().to_owned()) + .into_iter() + .chain( + module + .path_to_root(db) + .into_iter() + .filter_map(|it| it.name(db)) + .rev() + .chain(Some(body_id.name(db).unwrap_or_else(Name::missing))) + .map(|it| it.display(db, Edition::LATEST).to_string()), + ) + .join("::") +} + fn location_csv_expr(db: &RootDatabase, vfs: &Vfs, sm: &BodySourceMap, expr_id: ExprId) -> String { let src = match sm.expr_syntax(expr_id) { Ok(s) => s, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 51d4c29aa74..9456fd8809b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -2162,6 +2162,7 @@ impl Config { extra_test_bin_args: 
self.runnables_extraTestBinaryArgs(source_root).clone(), extra_env: self.extra_env(source_root).clone(), target_dir: self.target_dir_from_config(source_root), + set_test: true, } } @@ -2219,6 +2220,7 @@ impl Config { extra_test_bin_args: self.runnables_extraTestBinaryArgs(source_root).clone(), extra_env: self.check_extra_env(source_root), target_dir: self.target_dir_from_config(source_root), + set_test: *self.cfg_setTest(source_root), }, ansi_color_output: self.color_diagnostic_output(), }, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs index 91d37bd7c9e..512ce0b9de3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs @@ -31,6 +31,7 @@ pub(crate) enum InvocationStrategy { pub(crate) struct CargoOptions { pub(crate) target_tuples: Vec<String>, pub(crate) all_targets: bool, + pub(crate) set_test: bool, pub(crate) no_default_features: bool, pub(crate) all_features: bool, pub(crate) features: Vec<String>, @@ -54,7 +55,13 @@ impl CargoOptions { cmd.args(["--target", target.as_str()]); } if self.all_targets { - cmd.arg("--all-targets"); + if self.set_test { + cmd.arg("--all-targets"); + } else { + // No --benches unfortunately, as this implies --tests (see https://github.com/rust-lang/cargo/issues/6454), + // and users setting `cfg.seTest = false` probably prefer disabling benches than enabling tests. + cmd.args(["--lib", "--bins", "--examples"]); + } } if self.all_features { cmd.arg("--all-features"); @@ -104,7 +111,18 @@ impl fmt::Display for FlycheckConfig { match self { FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"), FlycheckConfig::CustomCommand { command, args, .. } => { - write!(f, "{command} {}", args.join(" ")) + // Don't show `my_custom_check --foo $saved_file` literally to the user, as it + // looks like we've forgotten to substitute $saved_file. + // + // Instead, show `my_custom_check --foo ...`. The + // actual path is often too long to be worth showing + // in the IDE (e.g. in the VS Code status bar). + let display_args = args + .iter() + .map(|arg| if arg == SAVED_FILE_PLACEHOLDER { "..." } else { arg }) + .collect::<Vec<_>>(); + + write!(f, "{command} {}", display_args.join(" ")) } } } diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 4cbc88cfb5e..6d8a360d715 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -101,7 +101,7 @@ WhereClause = 'where' predicates:(WherePred (',' WherePred)* ','?) WherePred = - ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList? + ForBinder? (Lifetime | Type) ':' TypeBoundList? //*************************// @@ -534,10 +534,10 @@ FieldExpr = Attr* Expr '.' NameRef ClosureExpr = - Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType? + Attr* ForBinder? 'const'? 'static'? 'async'? 'gen'? 'move'? ParamList RetType? body:Expr -ClosureBinder = +ForBinder = 'for' GenericParamList IfExpr = @@ -658,7 +658,7 @@ FnPtrType = 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType? ForType = - 'for' GenericParamList Type + ForBinder Type ImplTraitType = 'impl' TypeBoundList @@ -671,7 +671,7 @@ TypeBoundList = TypeBound = Lifetime -| ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? Type +| ForBinder? ('~' 'const' | '[' 'const' ']' | 'const')? 'async'? '?'? 
Type | 'use' UseBoundGenericArgs UseBoundGenericArgs = diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs index d787fd076fc..a9aeeedb654 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -393,8 +393,7 @@ where let pred = predicates.next().unwrap(); let mut bounds = pred.type_bound_list().unwrap().bounds(); - assert!(pred.for_token().is_none()); - assert!(pred.generic_param_list().is_none()); + assert!(pred.for_binder().is_none()); assert_eq!("T", pred.ty().unwrap().syntax().text().to_string()); assert_bound("Clone", bounds.next()); assert_bound("Copy", bounds.next()); @@ -432,8 +431,10 @@ where let pred = predicates.next().unwrap(); let mut bounds = pred.type_bound_list().unwrap().bounds(); - assert!(pred.for_token().is_some()); - assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string()); + assert_eq!( + "<'a>", + pred.for_binder().unwrap().generic_param_list().unwrap().syntax().text().to_string() + ); assert_eq!("F", pred.ty().unwrap().syntax().text().to_string()); assert_bound("Fn(&'a str)", bounds.next()); } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs index 37cb4a434f3..d97fdec524f 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit.rs @@ -6,9 +6,12 @@ use std::{fmt, iter, ops}; use crate::{ AstToken, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken, ast::{self, AstNode, make}, + syntax_editor::{SyntaxEditor, SyntaxMappingBuilder}, ted, }; +use super::syntax_factory::SyntaxFactory; + #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct IndentLevel(pub u8); @@ -95,6 +98,24 @@ impl IndentLevel { } } + pub(super) fn clone_increase_indent(self, node: &SyntaxNode) -> SyntaxNode { + let node = node.clone_subtree(); + let mut editor = SyntaxEditor::new(node.clone()); + let tokens = node + .preorder_with_tokens() + .filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }) + .filter_map(ast::Whitespace::cast) + .filter(|ws| ws.text().contains('\n')); + for ws in tokens { + let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax())); + editor.replace(ws.syntax(), &new_ws); + } + editor.finish().new_root().clone() + } + pub(super) fn decrease_indent(self, node: &SyntaxNode) { let tokens = node.preorder_with_tokens().filter_map(|event| match event { rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), @@ -111,36 +132,54 @@ impl IndentLevel { } } } + + pub(super) fn clone_decrease_indent(self, node: &SyntaxNode) -> SyntaxNode { + let node = node.clone_subtree(); + let mut editor = SyntaxEditor::new(node.clone()); + let tokens = node + .preorder_with_tokens() + .filter_map(|event| match event { + rowan::WalkEvent::Leave(NodeOrToken::Token(it)) => Some(it), + _ => None, + }) + .filter_map(ast::Whitespace::cast) + .filter(|ws| ws.text().contains('\n')); + for ws in tokens { + let new_ws = + make::tokens::whitespace(&ws.syntax().text().replace(&format!("\n{self}"), "\n")); + editor.replace(ws.syntax(), &new_ws); + } + editor.finish().new_root().clone() + } } fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { iter::successors(Some(token), |token| token.prev_token()) } -/// Soft-deprecated in favor of mutable tree editing API `edit_in_place::Ident`. 
pub trait AstNodeEdit: AstNode + Clone + Sized { fn indent_level(&self) -> IndentLevel { IndentLevel::from_node(self.syntax()) } #[must_use] fn indent(&self, level: IndentLevel) -> Self { - fn indent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { - let res = node.clone_subtree().clone_for_update(); - level.increase_indent(&res); - res.clone_subtree() + Self::cast(level.clone_increase_indent(self.syntax())).unwrap() + } + #[must_use] + fn indent_with_mapping(&self, level: IndentLevel, make: &SyntaxFactory) -> Self { + let new_node = self.indent(level); + if let Some(mut mapping) = make.mappings() { + let mut builder = SyntaxMappingBuilder::new(new_node.syntax().clone()); + for (old, new) in self.syntax().children().zip(new_node.syntax().children()) { + builder.map_node(old, new); + } + builder.finish(&mut mapping); } - - Self::cast(indent_inner(self.syntax(), level)).unwrap() + new_node } #[must_use] fn dedent(&self, level: IndentLevel) -> Self { - fn dedent_inner(node: &SyntaxNode, level: IndentLevel) -> SyntaxNode { - let res = node.clone_subtree().clone_for_update(); - level.decrease_indent(&res); - res.clone_subtree() - } - - Self::cast(dedent_inner(self.syntax(), level)).unwrap() + Self::cast(level.clone_decrease_indent(self.syntax())).unwrap() } #[must_use] fn reset_indent(&self) -> Self { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index e902516471d..28b543ea706 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -644,7 +644,7 @@ impl Removable for ast::Use { impl ast::Impl { pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList { if self.assoc_item_list().is_none() { - let assoc_item_list = make::assoc_item_list().clone_for_update(); + let assoc_item_list = make::assoc_item_list(None).clone_for_update(); ted::append_child(self.syntax(), assoc_item_list.syntax()); } self.assoc_item_list().unwrap() diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 2b862465420..ceb2866ebcd 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -377,22 +377,13 @@ impl CastExpr { #[inline] pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) } } -pub struct ClosureBinder { - pub(crate) syntax: SyntaxNode, -} -impl ClosureBinder { - #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } -} pub struct ClosureExpr { pub(crate) syntax: SyntaxNode, } impl ast::HasAttrs for ClosureExpr {} impl ClosureExpr { #[inline] - pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) } #[inline] @@ -615,6 +606,15 @@ impl FnPtrType { #[inline] pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } } +pub struct ForBinder { + pub(crate) syntax: SyntaxNode, +} +impl ForBinder { + #[inline] + pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + #[inline] + 
pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } +} pub struct ForExpr { pub(crate) syntax: SyntaxNode, } @@ -632,11 +632,9 @@ pub struct ForType { } impl ForType { #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } pub struct FormatArgsArg { pub(crate) syntax: SyntaxNode, @@ -1766,6 +1764,8 @@ pub struct TypeBound { } impl TypeBound { #[inline] + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } + #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } @@ -1938,13 +1938,11 @@ pub struct WherePred { impl ast::HasTypeBounds for WherePred {} impl WherePred { #[inline] - pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) } + pub fn for_binder(&self) -> Option<ForBinder> { support::child(&self.syntax) } #[inline] pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) } #[inline] pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) } - #[inline] - pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) } } pub struct WhileExpr { pub(crate) syntax: SyntaxNode, @@ -3239,42 +3237,6 @@ impl fmt::Debug for CastExpr { f.debug_struct("CastExpr").field("syntax", &self.syntax).finish() } } -impl AstNode for ClosureBinder { - #[inline] - fn kind() -> SyntaxKind - where - Self: Sized, - { - CLOSURE_BINDER - } - #[inline] - fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER } - #[inline] - fn cast(syntax: SyntaxNode) -> Option<Self> { - if Self::can_cast(syntax.kind()) { - Some(Self { syntax }) - } else { - None - } - } - #[inline] - fn syntax(&self) -> &SyntaxNode { &self.syntax } -} -impl hash::Hash for ClosureBinder { - fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); } -} -impl Eq for ClosureBinder {} -impl PartialEq for ClosureBinder { - fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax } -} -impl Clone for ClosureBinder { - fn clone(&self) -> Self { Self { syntax: self.syntax.clone() } } -} -impl fmt::Debug for ClosureBinder { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("ClosureBinder").field("syntax", &self.syntax).finish() - } -} impl AstNode for ClosureExpr { #[inline] fn kind() -> SyntaxKind @@ -3815,6 +3777,42 @@ impl fmt::Debug for FnPtrType { f.debug_struct("FnPtrType").field("syntax", &self.syntax).finish() } } +impl AstNode for ForBinder { + #[inline] + fn kind() -> SyntaxKind + where + Self: Sized, + { + FOR_BINDER + } + #[inline] + fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_BINDER } + #[inline] + fn cast(syntax: SyntaxNode) -> Option<Self> { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + #[inline] + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} +impl hash::Hash for ForBinder { + fn hash<H: hash::Hasher>(&self, state: &mut H) { self.syntax.hash(state); } +} +impl Eq for ForBinder {} +impl PartialEq for ForBinder { + fn eq(&self, other: &Self) -> bool { self.syntax == other.syntax } +} +impl Clone for ForBinder { + fn clone(&self) -> Self { Self { syntax: 
self.syntax.clone() } } +} +impl fmt::Debug for ForBinder { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ForBinder").field("syntax", &self.syntax).finish() + } +} impl AstNode for ForExpr { #[inline] fn kind() -> SyntaxKind @@ -10146,11 +10144,6 @@ impl std::fmt::Display for CastExpr { std::fmt::Display::fmt(self.syntax(), f) } } -impl std::fmt::Display for ClosureBinder { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - std::fmt::Display::fmt(self.syntax(), f) - } -} impl std::fmt::Display for ClosureExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) @@ -10226,6 +10219,11 @@ impl std::fmt::Display for FnPtrType { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for ForBinder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for ForExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index d67f24fda96..2a7b51c3c24 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -229,8 +229,18 @@ pub fn ty_fn_ptr<I: Iterator<Item = Param>>( } } -pub fn assoc_item_list() -> ast::AssocItemList { - ast_from_text("impl C for D {}") +pub fn assoc_item_list( + body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, +) -> ast::AssocItemList { + let is_break_braces = body.is_some(); + let body_newline = if is_break_braces { "\n".to_owned() } else { String::new() }; + let body_indent = if is_break_braces { " ".to_owned() } else { String::new() }; + + let body = match body { + Some(bd) => bd.iter().map(|elem| elem.to_string()).join("\n\n "), + None => String::new(), + }; + ast_from_text(&format!("impl C for D {{{body_newline}{body_indent}{body}{body_newline}}}")) } fn merge_gen_params( @@ -273,7 +283,7 @@ pub fn impl_( generic_args: Option<ast::GenericArgList>, path_type: ast::Type, where_clause: Option<ast::WhereClause>, - body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, + body: Option<ast::AssocItemList>, ) -> ast::Impl { let gen_args = generic_args.map_or_else(String::new, |it| it.to_string()); @@ -281,20 +291,13 @@ pub fn impl_( let body_newline = if where_clause.is_some() && body.is_none() { "\n".to_owned() } else { String::new() }; - let where_clause = match where_clause { Some(pr) => format!("\n{pr}\n"), None => " ".to_owned(), }; - let body = match body { - Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), - None => String::new(), - }; - - ast_from_text(&format!( - "impl{gen_params} {path_type}{gen_args}{where_clause}{{{body_newline}{body}}}" - )) + let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); + ast_from_text(&format!("impl{gen_params} {path_type}{gen_args}{where_clause}{body}")) } pub fn impl_trait( @@ -308,7 +311,7 @@ pub fn impl_trait( ty: ast::Type, trait_where_clause: Option<ast::WhereClause>, ty_where_clause: Option<ast::WhereClause>, - body: Option<Vec<either::Either<ast::Attr, ast::AssocItem>>>, + body: Option<ast::AssocItemList>, ) -> ast::Impl { let is_unsafe = if is_unsafe { "unsafe " } else { "" }; @@ -330,13 +333,10 @@ pub fn impl_trait( let where_clause = merge_where_clause(ty_where_clause, trait_where_clause) .map_or_else(|| " 
".to_owned(), |wc| format!("\n{wc}\n")); - let body = match body { - Some(bd) => bd.iter().map(|elem| elem.to_string()).join(""), - None => String::new(), - }; + let body = body.map_or_else(|| format!("{{{body_newline}}}"), |it| it.to_string()); ast_from_text(&format!( - "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{{{body_newline}{body}}}" + "{is_unsafe}impl{gen_params} {is_negative}{path_type}{trait_gen_args} for {ty}{type_gen_args}{where_clause}{body}" )) } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs index f5530c5fffd..62a7d4df2cf 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/node_ext.rs @@ -805,9 +805,7 @@ impl ast::SelfParam { #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum TypeBoundKind { /// Trait - PathType(ast::PathType), - /// for<'a> ... - ForType(ast::ForType), + PathType(Option<ast::ForBinder>, ast::PathType), /// use Use(ast::UseBoundGenericArgs), /// 'a @@ -817,9 +815,7 @@ pub enum TypeBoundKind { impl ast::TypeBound { pub fn kind(&self) -> TypeBoundKind { if let Some(path_type) = support::children(self.syntax()).next() { - TypeBoundKind::PathType(path_type) - } else if let Some(for_type) = support::children(self.syntax()).next() { - TypeBoundKind::ForType(for_type) + TypeBoundKind::PathType(self.for_binder(), path_type) } else if let Some(args) = self.use_bound_generic_args() { TypeBoundKind::Use(args) } else if let Some(lifetime) = self.lifetime() { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs index 7142e4f6e1b..f3ae7544cc3 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs @@ -38,7 +38,7 @@ impl SyntaxFactory { self.mappings.as_ref().map(|mappings| mappings.take()).unwrap_or_default() } - fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> { + pub(crate) fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> { self.mappings.as_ref().map(|it| it.borrow_mut()) } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index 3fa584850f7..5107754b182 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -5,7 +5,7 @@ //! 
[`SyntaxEditor`]: https://github.com/dotnet/roslyn/blob/43b0b05cc4f492fd5de00f6f6717409091df8daa/src/Workspaces/Core/Portable/Editing/SyntaxEditor.cs use std::{ - fmt, + fmt, iter, num::NonZeroU32, ops::RangeInclusive, sync::atomic::{AtomicU32, Ordering}, @@ -41,6 +41,15 @@ impl SyntaxEditor { self.annotations.push((element.syntax_element(), annotation)) } + pub fn add_annotation_all( + &mut self, + elements: Vec<impl Element>, + annotation: SyntaxAnnotation, + ) { + self.annotations + .extend(elements.into_iter().map(|e| e.syntax_element()).zip(iter::repeat(annotation))); + } + pub fn merge(&mut self, mut other: SyntaxEditor) { debug_assert!( self.root == other.root || other.root.ancestors().any(|node| node == self.root), diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs index d66ea8aa28c..840e7697979 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/edits.rs @@ -92,6 +92,42 @@ fn get_or_insert_comma_after(editor: &mut SyntaxEditor, syntax: &SyntaxNode) -> } } +impl ast::AssocItemList { + /// Adds a new associated item after all of the existing associated items. + /// + /// Attention! This function does align the first line of `item` with respect to `self`, + /// but it does _not_ change indentation of other lines (if any). + pub fn add_items(&self, editor: &mut SyntaxEditor, items: Vec<ast::AssocItem>) { + let (indent, position, whitespace) = match self.assoc_items().last() { + Some(last_item) => ( + IndentLevel::from_node(last_item.syntax()), + Position::after(last_item.syntax()), + "\n\n", + ), + None => match self.l_curly_token() { + Some(l_curly) => { + normalize_ws_between_braces(editor, self.syntax()); + (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n") + } + None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"), + }, + }; + + let elements: Vec<SyntaxElement> = items + .into_iter() + .enumerate() + .flat_map(|(i, item)| { + let whitespace = if i != 0 { "\n\n" } else { whitespace }; + vec![ + make::tokens::whitespace(&format!("{whitespace}{indent}")).into(), + item.syntax().clone().into(), + ] + }) + .collect(); + editor.insert_all(position, elements); + } +} + impl ast::VariantList { pub fn add_variant(&self, editor: &mut SyntaxEditor, variant: &ast::Variant) { let make = SyntaxFactory::without_mappings(); diff --git a/src/tools/rust-analyzer/docs/book/src/contributing/README.md b/src/tools/rust-analyzer/docs/book/src/contributing/README.md index beb94cdfc41..57c7a9c5996 100644 --- a/src/tools/rust-analyzer/docs/book/src/contributing/README.md +++ b/src/tools/rust-analyzer/docs/book/src/contributing/README.md @@ -252,18 +252,8 @@ Release steps: 4. Commit & push the changelog. 5. Run `cargo xtask publish-release-notes <CHANGELOG>` -- this will convert the changelog entry in AsciiDoc to Markdown and update the body of GitHub Releases entry. 6. Tweet. -7. Make a new branch and run `cargo xtask rustc-pull`, open a PR, and merge it. - This will pull any changes from `rust-lang/rust` into `rust-analyzer`. -8. Switch to `master`, pull, then run `cargo xtask rustc-push --rust-path ../rust-rust-analyzer --rust-fork matklad/rust`. - Replace `matklad/rust` with your own fork of `rust-lang/rust`. - You can use the token to authenticate when you get prompted for a password, since `josh` will push over HTTPS, not SSH. 
- This will push the `rust-analyzer` changes to your fork. - You can then open a PR against `rust-lang/rust`. - -Note: besides the `rust-rust-analyzer` clone, the Josh cache (stored under `~/.cache/rust-analyzer-josh`) will contain a bare clone of `rust-lang/rust`. -This currently takes about 3.5 GB. - -This [HackMD](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg) has details about how `josh` syncs work. +7. Perform a subtree [pull](#performing-a-pull). +8. Perform a subtree [push](#performing-a-push). If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console. If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over. @@ -288,3 +278,43 @@ There are two sets of people with extra permissions: If you don't feel like reviewing for whatever reason, someone else will pick the review up (but please speak up if you don't feel like it)! * The [rust-lang](https://github.com/rust-lang) team [t-rust-analyzer-contributors]([https://github.com/orgs/rust-analyzer/teams/triage](https://github.com/rust-lang/team/blob/master/teams/rust-analyzer-contributors.toml)). This team has general triaging permissions allowing to label, close and re-open issues. + +## Synchronizing subtree changes +`rust-analyzer` is a [josh](https://josh-project.github.io/josh/intro.html) subtree of the [rust-lang/rust](https://github.com/rust-lang/rust) +repository. We use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync) tool to perform synchronization between these two +repositories. You can find documentation of the tool [here](https://github.com/rust-lang/josh-sync). + +You can install the synchronization tool using the following commands: +``` +cargo install --locked --git https://github.com/rust-lang/josh-sync +``` + +Both pulls (synchronizing changes from rust-lang/rust into rust-analyzer) and pushes (synchronizing +changes from rust-analyzer into rust-lang/rust) are performed from this repository. + +Usually we first perform a pull, wait for it to be merged, and then perform a push. + +### Performing a pull +1) Checkout a new branch that will be used to create a PR against rust-analyzer +2) Run the pull command + ``` + rustc-josh-sync pull + ``` +3) Push the branch to your fork of `rust-analyzer` and create a PR + - If you have the `gh` CLI installed, `rustc-josh-sync` can create the PR for you. + +### Performing a push + +Wait for the previous pull to be merged. + +1) Switch to `master` and pull +2) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account + ``` + rustc-josh-sync push <branch-name> <gh-username> + ``` + - The push will ask you to download a checkout of the `rust-lang/rust` repository. + - If you get prompted for a password, see [this](https://github.com/rust-lang/josh-sync?tab=readme-ov-file#git-peculiarities). +3) Create a PR from `<branch-name>` into `rust-lang/rust` + +> Besides the `rust` checkout, the Josh cache (stored under `~/.cache/rustc-josh`) will contain a bare clone of `rust-lang/rust`. This currently takes several GBs.
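To make the two subtree procedures documented above easier to follow end to end, here is a minimal sketch of one full sync cycle built from the commands in that section. It assumes `rustc-josh-sync` is already installed; the branch name `sync-from-rust` and the username `your-gh-username` are placeholders you would substitute with your own values.

```
# Pull: bring rust-lang/rust changes into rust-analyzer.
git switch -c sync-from-rust
rustc-josh-sync pull
git push -u origin sync-from-rust   # then open a PR against rust-analyzer

# Push: after the pull PR has merged, sync rust-analyzer changes back.
git switch master
git pull
rustc-josh-sync push sync-from-rust your-gh-username
# then open a PR from the `sync-from-rust` branch of your rust fork into rust-lang/rust
```

The exact branch handling may differ on your machine; the numbered steps above remain the authoritative reference.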
diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json index 57d67a69b2e..534c24be52e 100644 --- a/src/tools/rust-analyzer/editors/code/package-lock.json +++ b/src/tools/rust-analyzer/editors/code/package-lock.json @@ -3336,15 +3336,16 @@ } }, "node_modules/form-data": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.2.tgz", - "integrity": "sha512-hGfm/slu0ZabnNt4oaRZ6uREyfCj6P4fT/n6A1rGV+Z0VdGXjfOhVUpkn6qVQONHGIFwmveGXyDs75+nr6FM8w==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", "mime-types": "^2.1.12" }, "engines": { diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index d2dc740c09b..3b1b0768d3c 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -8,10 +8,9 @@ import type { Disposable } from "vscode"; export type RunnableEnvCfgItem = { mask?: string; - env: Record<string, string>; + env: { [key: string]: { toString(): string } | null }; platform?: string | string[]; }; -export type RunnableEnvCfg = Record<string, string> | RunnableEnvCfgItem[]; type ShowStatusBar = "always" | "never" | { documentSelector: vscode.DocumentSelector }; @@ -261,18 +260,13 @@ export class Config { return this.get<boolean | undefined>("testExplorer"); } - runnablesExtraEnv(label: string): Record<string, string> | undefined { - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const item = this.get<any>("runnables.extraEnv") ?? this.get<any>("runnableEnv"); - if (!item) return undefined; - // eslint-disable-next-line @typescript-eslint/no-explicit-any - const fixRecord = (r: Record<string, any>) => { - for (const key in r) { - if (typeof r[key] !== "string") { - r[key] = String(r[key]); - } - } - }; + runnablesExtraEnv(label: string): Env { + const serverEnv = this.serverExtraEnv; + let extraEnv = + this.get< + RunnableEnvCfgItem[] | { [key: string]: { toString(): string } | null } | null + >("runnables.extraEnv") ?? {}; + if (!extraEnv) return serverEnv; const platform = process.platform; const checkPlatform = (it: RunnableEnvCfgItem) => { @@ -283,19 +277,25 @@ export class Config { return true; }; - if (item instanceof Array) { + if (extraEnv instanceof Array) { const env = {}; - for (const it of item) { + for (const it of extraEnv) { const masked = !it.mask || new RegExp(it.mask).test(label); if (masked && checkPlatform(it)) { Object.assign(env, it.env); } } - fixRecord(env); - return env; + extraEnv = env; } - fixRecord(item); - return item; + const runnableExtraEnv = substituteVariablesInEnv( + Object.fromEntries( + Object.entries(extraEnv).map(([k, v]) => [ + k, + typeof v === "string" ? 
v : v?.toString(), + ]), + ), + ); + return { ...runnableExtraEnv, ...serverEnv }; } get restartServerOnConfigChange() { diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index adb75c23c70..24f8d908730 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -6,7 +6,14 @@ import type * as ra from "./lsp_ext"; import { Cargo } from "./toolchain"; import type { Ctx } from "./ctx"; import { createTaskFromRunnable, prepareEnv } from "./run"; -import { execute, isCargoRunnableArgs, unwrapUndefinable, log, normalizeDriveLetter } from "./util"; +import { + execute, + isCargoRunnableArgs, + unwrapUndefinable, + log, + normalizeDriveLetter, + Env, +} from "./util"; import type { Config } from "./config"; // Here we want to keep track on everything that's currently running @@ -206,10 +213,7 @@ type SourceFileMap = { destination: string; }; -async function discoverSourceFileMap( - env: Record<string, string>, - cwd: string, -): Promise<SourceFileMap | undefined> { +async function discoverSourceFileMap(env: Env, cwd: string): Promise<SourceFileMap | undefined> { const sysroot = env["RUSTC_TOOLCHAIN"]; if (sysroot) { // let's try to use the default toolchain @@ -232,7 +236,7 @@ type PropertyFetcher<Config, Input, Key extends keyof Config> = ( type DebugConfigProvider<Type extends string, DebugConfig extends BaseDebugConfig<Type>> = { executableProperty: keyof DebugConfig; - environmentProperty: PropertyFetcher<DebugConfig, Record<string, string>, keyof DebugConfig>; + environmentProperty: PropertyFetcher<DebugConfig, Env, keyof DebugConfig>; runnableArgsProperty: PropertyFetcher<DebugConfig, ra.CargoRunnableArgs, keyof DebugConfig>; sourceFileMapProperty?: keyof DebugConfig; type: Type; @@ -276,7 +280,7 @@ const knownEngines: { "environment", Object.entries(env).map((entry) => ({ name: entry[0], - value: entry[1], + value: entry[1] ?? 
"", })), ], runnableArgsProperty: (runnableArgs: ra.CargoRunnableArgs) => [ @@ -304,10 +308,7 @@ const knownEngines: { }, }; -async function getDebugExecutable( - runnableArgs: ra.CargoRunnableArgs, - env: Record<string, string>, -): Promise<string> { +async function getDebugExecutable(runnableArgs: ra.CargoRunnableArgs, env: Env): Promise<string> { const cargo = new Cargo(runnableArgs.workspaceRoot || ".", env); const executable = await cargo.executableFromArgs(runnableArgs); @@ -328,7 +329,7 @@ function getDebugConfig( runnable: ra.Runnable, runnableArgs: ra.CargoRunnableArgs, executable: string, - env: Record<string, string>, + env: Env, sourceFileMap?: Record<string, string>, ): vscode.DebugConfiguration { const { @@ -380,14 +381,14 @@ type CodeLldbDebugConfig = { args: string[]; sourceMap: Record<string, string> | undefined; sourceLanguages: ["rust"]; - env: Record<string, string>; + env: Env; } & BaseDebugConfig<"lldb">; type NativeDebugConfig = { target: string; // See https://github.com/WebFreak001/code-debug/issues/359 arguments: string; - env: Record<string, string>; + env: Env; valuesFormatting: "prettyPrinters"; } & BaseDebugConfig<"gdb">; diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 95166c427b2..87c1d529f7e 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -7,7 +7,7 @@ import type { CtxInit } from "./ctx"; import { makeDebugConfig } from "./debug"; import type { Config } from "./config"; import type { LanguageClient } from "vscode-languageclient/node"; -import { log, unwrapUndefinable, type RustEditor } from "./util"; +import { Env, log, unwrapUndefinable, type RustEditor } from "./util"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }, @@ -122,11 +122,8 @@ export class RunnableQuickPick implements vscode.QuickPickItem { } } -export function prepareBaseEnv( - inheritEnv: boolean, - base?: Record<string, string>, -): Record<string, string> { - const env: Record<string, string> = { RUST_BACKTRACE: "short" }; +export function prepareBaseEnv(inheritEnv: boolean, base?: Env): Env { + const env: Env = { RUST_BACKTRACE: "short" }; if (inheritEnv) { Object.assign(env, process.env); } @@ -136,11 +133,7 @@ export function prepareBaseEnv( return env; } -export function prepareEnv( - inheritEnv: boolean, - runnableEnv?: Record<string, string>, - runnableEnvCfg?: Record<string, string>, -): Record<string, string> { +export function prepareEnv(inheritEnv: boolean, runnableEnv?: Env, runnableEnvCfg?: Env): Env { const env = prepareBaseEnv(inheritEnv, runnableEnv); if (runnableEnvCfg) { diff --git a/src/tools/rust-analyzer/editors/code/src/tasks.ts b/src/tools/rust-analyzer/editors/code/src/tasks.ts index 730ec6d1e90..eb0748a704b 100644 --- a/src/tools/rust-analyzer/editors/code/src/tasks.ts +++ b/src/tools/rust-analyzer/editors/code/src/tasks.ts @@ -1,6 +1,7 @@ import * as vscode from "vscode"; import type { Config } from "./config"; import * as toolchain from "./toolchain"; +import { Env } from "./util"; // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and // our configuration should be compatible with it so use the same key. 
@@ -117,8 +118,8 @@ export async function buildRustTask( export async function targetToExecution( definition: TaskDefinition, options?: { - env?: { [key: string]: string }; cwd?: string; + env?: Env; }, cargo?: string, ): Promise<vscode.ProcessExecution | vscode.ShellExecution> { @@ -131,7 +132,12 @@ export async function targetToExecution( command = definition.command; args = definition.args || []; } - return new vscode.ProcessExecution(command, args, options); + return new vscode.ProcessExecution(command, args, { + cwd: options?.cwd, + env: Object.fromEntries( + Object.entries(options?.env ?? {}).map(([key, value]) => [key, value ?? ""]), + ), + }); } export function activateTaskProvider(config: Config): vscode.Disposable { diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index a859ce6ff00..06f75a8f8d6 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -3,7 +3,7 @@ import * as os from "os"; import * as path from "path"; import * as readline from "readline"; import * as vscode from "vscode"; -import { log, memoizeAsync, unwrapUndefinable } from "./util"; +import { Env, log, memoizeAsync, unwrapUndefinable } from "./util"; import type { CargoRunnableArgs } from "./lsp_ext"; interface CompilationArtifact { @@ -37,7 +37,7 @@ interface CompilerMessage { export class Cargo { constructor( readonly rootFolder: string, - readonly env: Record<string, string>, + readonly env: Env, ) {} // Made public for testing purposes @@ -156,7 +156,7 @@ export class Cargo { /** Mirrors `toolchain::cargo()` implementation */ // FIXME: The server should provide this -export function cargoPath(env?: Record<string, string>): Promise<string> { +export function cargoPath(env?: Env): Promise<string> { if (env?.["RUSTC_TOOLCHAIN"]) { return Promise.resolve("cargo"); } diff --git a/src/tools/rust-analyzer/josh-sync.toml b/src/tools/rust-analyzer/josh-sync.toml new file mode 100644 index 00000000000..51ff0d71e71 --- /dev/null +++ b/src/tools/rust-analyzer/josh-sync.toml @@ -0,0 +1,2 @@ +repo = "rust-analyzer" +filter = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer" diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index c2b1c151b83..2178caf6396 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -e05ab47e6c418fb2b9faa2eae9a7e70c65c98eaa +733dab558992d902d6d17576de1da768094e2cf3 diff --git a/src/tools/rust-analyzer/triagebot.toml b/src/tools/rust-analyzer/triagebot.toml index 2201b5a5e7c..27fdb672455 100644 --- a/src/tools/rust-analyzer/triagebot.toml +++ b/src/tools/rust-analyzer/triagebot.toml @@ -17,6 +17,7 @@ exclude_titles = [ # exclude syncs from subtree in rust-lang/rust "sync from downstream", "Sync from rust", "sync from rust", + "Rustc pull update", ] labels = ["has-merge-commits", "S-waiting-on-author"] @@ -27,3 +28,6 @@ labels = ["has-merge-commits", "S-waiting-on-author"] # Prevents mentions in commits to avoid users being spammed [no-mentions] + +# Automatically close and reopen PRs made by bots to run CI on them +[bot-pull-requests] diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml index 8cd5811c0a6..9d8a1956d0a 100644 --- a/src/tools/rust-analyzer/xtask/Cargo.toml +++ b/src/tools/rust-analyzer/xtask/Cargo.toml @@ -8,7 +8,6 @@ rust-version.workspace = true 
[dependencies] anyhow.workspace = true -directories = "6.0" flate2 = "1.1.2" write-json = "0.1.4" xshell.workspace = true diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index 2fd471b35c7..72f6215d4c3 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -59,20 +59,6 @@ xflags::xflags! { optional --dry-run } - cmd rustc-pull { - /// rustc commit to pull. - optional --commit refspec: String - } - - cmd rustc-push { - /// rust local path, e.g. `../rust-rust-analyzer`. - required --rust-path rust_path: String - /// rust fork name, e.g. `matklad/rust`. - required --rust-fork rust_fork: String - /// branch name. - optional --branch branch: String - } - cmd dist { /// Use mimalloc allocator for server optional --mimalloc @@ -121,8 +107,6 @@ pub enum XtaskCmd { Install(Install), FuzzTests(FuzzTests), Release(Release), - RustcPull(RustcPull), - RustcPush(RustcPush), Dist(Dist), PublishReleaseNotes(PublishReleaseNotes), Metrics(Metrics), @@ -152,18 +136,6 @@ pub struct Release { } #[derive(Debug)] -pub struct RustcPull { - pub commit: Option<String>, -} - -#[derive(Debug)] -pub struct RustcPush { - pub rust_path: String, - pub rust_fork: String, - pub branch: Option<String>, -} - -#[derive(Debug)] pub struct Dist { pub mimalloc: bool, pub jemalloc: bool, diff --git a/src/tools/rust-analyzer/xtask/src/main.rs b/src/tools/rust-analyzer/xtask/src/main.rs index aaa8d0e1d4d..c5ad49cdcea 100644 --- a/src/tools/rust-analyzer/xtask/src/main.rs +++ b/src/tools/rust-analyzer/xtask/src/main.rs @@ -42,8 +42,6 @@ fn main() -> anyhow::Result<()> { flags::XtaskCmd::Install(cmd) => cmd.run(sh), flags::XtaskCmd::FuzzTests(_) => run_fuzzer(sh), flags::XtaskCmd::Release(cmd) => cmd.run(sh), - flags::XtaskCmd::RustcPull(cmd) => cmd.run(sh), - flags::XtaskCmd::RustcPush(cmd) => cmd.run(sh), flags::XtaskCmd::Dist(cmd) => cmd.run(sh), flags::XtaskCmd::PublishReleaseNotes(cmd) => cmd.run(sh), flags::XtaskCmd::Metrics(cmd) => cmd.run(sh), diff --git a/src/tools/rust-analyzer/xtask/src/release.rs b/src/tools/rust-analyzer/xtask/src/release.rs index e41f4ceb435..d06a25c8929 100644 --- a/src/tools/rust-analyzer/xtask/src/release.rs +++ b/src/tools/rust-analyzer/xtask/src/release.rs @@ -1,12 +1,5 @@ mod changelog; -use std::process::{Command, Stdio}; -use std::thread; -use std::time::Duration; - -use anyhow::{Context as _, bail}; -use directories::ProjectDirs; -use stdx::JodChild; use xshell::{Shell, cmd}; use crate::{date_iso, flags, is_release_tag, project_root}; @@ -59,171 +52,3 @@ impl flags::Release { Ok(()) } } - -// git sync implementation adapted from https://github.com/rust-lang/miri/blob/62039ac/miri-script/src/commands.rs -impl flags::RustcPull { - pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { - sh.change_dir(project_root()); - let commit = self.commit.map(Result::Ok).unwrap_or_else(|| { - let rust_repo_head = - cmd!(sh, "git ls-remote https://github.com/rust-lang/rust/ HEAD").read()?; - rust_repo_head - .split_whitespace() - .next() - .map(|front| front.trim().to_owned()) - .ok_or_else(|| anyhow::format_err!("Could not obtain Rust repo HEAD from remote.")) - })?; - // Make sure the repo is clean. - if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() { - bail!("working directory must be clean before running `cargo xtask pull`"); - } - // This should not add any new root commits. So count those before and after merging. 
- let num_roots = || -> anyhow::Result<u32> { - Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count") - .read() - .context("failed to determine the number of root commits")? - .parse::<u32>()?) - }; - let num_roots_before = num_roots()?; - // Make sure josh is running. - let josh = start_josh()?; - - // Update rust-version file. As a separate commit, since making it part of - // the merge has confused the heck out of josh in the past. - // We pass `--no-verify` to avoid running any git hooks that might exist, - // in case they dirty the repository. - sh.write_file("rust-version", format!("{commit}\n"))?; - const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rust-lang/rust"; - cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}") - .run() - .context("FAILED to commit rust-version file, something went wrong")?; - - // Fetch given rustc commit. - cmd!(sh, "git fetch http://localhost:{JOSH_PORT}/rust-lang/rust.git@{commit}{JOSH_FILTER}.git") - .run() - .inspect_err(|_| { - // Try to un-do the previous `git commit`, to leave the repo in the state we found it it. - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - }) - .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; - - // Merge the fetched commit. - const MERGE_COMMIT_MESSAGE: &str = "Merge from rust-lang/rust"; - cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}") - .run() - .context("FAILED to merge new commits, something went wrong")?; - - // Check that the number of roots did not increase. - if num_roots()? != num_roots_before { - bail!("Josh created a new root commit. This is probably not the history you want."); - } - - drop(josh); - Ok(()) - } -} - -impl flags::RustcPush { - pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { - let branch = self.branch.as_deref().unwrap_or("sync-from-ra"); - let rust_path = self.rust_path; - let rust_fork = self.rust_fork; - - sh.change_dir(project_root()); - let base = sh.read_file("rust-version")?.trim().to_owned(); - // Make sure the repo is clean. - if !cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty() { - bail!("working directory must be clean before running `cargo xtask push`"); - } - // Make sure josh is running. - let josh = start_josh()?; - - // Find a repo we can do our preparation in. - sh.change_dir(rust_path); - - // Prepare the branch. Pushing works much better if we use as base exactly - // the commit that we pulled from last time, so we use the `rust-version` - // file to find out which commit that would be. - println!("Preparing {rust_fork} (base: {base})..."); - if cmd!(sh, "git fetch https://github.com/{rust_fork} {branch}") - .ignore_stderr() - .read() - .is_ok() - { - bail!( - "The branch `{branch}` seems to already exist in `https://github.com/{rust_fork}`. Please delete it and try again." - ); - } - cmd!(sh, "git fetch https://github.com/rust-lang/rust {base}").run()?; - cmd!(sh, "git push https://github.com/{rust_fork} {base}:refs/heads/{branch}") - .ignore_stdout() - .ignore_stderr() // silence the "create GitHub PR" message - .run()?; - println!(); - - // Do the actual push. 
- sh.change_dir(project_root()); - println!("Pushing rust-analyzer changes..."); - cmd!( - sh, - "git push http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git HEAD:{branch}" - ) - .run()?; - println!(); - - // Do a round-trip check to make sure the push worked as expected. - cmd!( - sh, - "git fetch http://localhost:{JOSH_PORT}/{rust_fork}.git{JOSH_FILTER}.git {branch}" - ) - .ignore_stderr() - .read()?; - let head = cmd!(sh, "git rev-parse HEAD").read()?; - let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; - if head != fetch_head { - bail!( - "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ - Expected {head}, got {fetch_head}." - ); - } - println!( - "Confirmed that the push round-trips back to rust-analyzer properly. Please create a rustc PR:" - ); - // https://github.com/github-linguist/linguist/compare/master...octocat:linguist:master - let fork_path = rust_fork.replace('/', ":"); - println!( - " https://github.com/rust-lang/rust/compare/{fork_path}:{branch}?quick_pull=1&title=Subtree+update+of+rust-analyzer&body=r?+@ghost" - ); - - drop(josh); - Ok(()) - } -} - -/// Used for rustc syncs. -const JOSH_FILTER: &str = ":rev(55d9a533b309119c8acd13061581b43ae8840823:prefix=src/tools/rust-analyzer):/src/tools/rust-analyzer"; -const JOSH_PORT: &str = "42042"; - -fn start_josh() -> anyhow::Result<impl Drop> { - // Determine cache directory. - let local_dir = { - let user_dirs = ProjectDirs::from("org", "rust-lang", "rust-analyzer-josh").unwrap(); - user_dirs.cache_dir().to_owned() - }; - - // Start josh, silencing its output. - let mut cmd = Command::new("josh-proxy"); - cmd.arg("--local").arg(local_dir); - cmd.arg("--remote").arg("https://github.com"); - cmd.arg("--port").arg(JOSH_PORT); - cmd.arg("--no-background"); - cmd.stdout(Stdio::null()); - cmd.stderr(Stdio::null()); - let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?; - // Give it some time so hopefully the port is open. (100ms was not enough.) 
- thread::sleep(Duration::from_millis(200)); - - Ok(JodChild(josh)) -} diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock index e363668d462..5f30c75732c 100644 --- a/src/tools/rustbook/Cargo.lock +++ b/src/tools/rustbook/Cargo.lock @@ -1343,9 +1343,9 @@ checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" [[package]] name = "redox_syscall" -version = "0.5.13" +version = "0.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" +checksum = "7251471db004e509f4e75a62cca9435365b5ec7bcdff530d612ac7c87c44a792" dependencies = [ "bitflags 2.9.1", ] diff --git a/src/tools/rustc-perf b/src/tools/rustc-perf -Subproject 6a70166b92a1b1560cb3cf056427b011b2a1f2b +Subproject dde879cf1087cb34a32287bd8ccc4d545bb9fee diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml index d995106ae02..c1f27de7ed4 100644 --- a/src/tools/tidy/Cargo.toml +++ b/src/tools/tidy/Cargo.toml @@ -6,7 +6,7 @@ autobins = false [dependencies] build_helper = { path = "../../build_helper" } -cargo_metadata = "0.19" +cargo_metadata = "0.21" regex = "1" miropt-test-tools = { path = "../miropt-test-tools" } walkdir = "2" diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index f43f5eae9a5..858b058cb7d 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -73,7 +73,6 @@ pub(crate) const WORKSPACES: &[(&str, ExceptionList, Option<(&[&str], &[&str])>, // tidy-alphabetical-start ("compiler/rustc_codegen_gcc", EXCEPTIONS_GCC, None, &[]), ("src/bootstrap", EXCEPTIONS_BOOTSTRAP, None, &[]), - ("src/ci/docker/host-x86_64/test-various/uefi_qemu_test", EXCEPTIONS_UEFI_QEMU_TEST, None, &[]), ("src/tools/cargo", EXCEPTIONS_CARGO, None, &["src/tools/cargo"]), //("src/tools/miri/test-cargo-miri", &[], None), // FIXME uncomment once all deps are vendored //("src/tools/miri/test_dependencies", &[], None), // FIXME uncomment once all deps are vendored @@ -81,6 +80,7 @@ pub(crate) const WORKSPACES: &[(&str, ExceptionList, Option<(&[&str], &[&str])>, ("src/tools/rustbook", EXCEPTIONS_RUSTBOOK, None, &["src/doc/book", "src/doc/reference"]), ("src/tools/rustc-perf", EXCEPTIONS_RUSTC_PERF, None, &["src/tools/rustc-perf"]), ("src/tools/test-float-parse", EXCEPTIONS, None, &[]), + ("tests/run-make/uefi-qemu/uefi_qemu_test", EXCEPTIONS_UEFI_QEMU_TEST, None, &[]), // tidy-alphabetical-end ]; @@ -135,6 +135,7 @@ const EXCEPTIONS_CARGO: ExceptionList = &[ ("libz-rs-sys", "Zlib"), ("normalize-line-endings", "Apache-2.0"), ("openssl", "Apache-2.0"), + ("ring", "Apache-2.0 AND ISC"), ("ryu", "Apache-2.0 OR BSL-1.0"), // BSL is not acceptble, but we use it under Apache-2.0 ("similar", "Apache-2.0"), ("sized-chunks", "MPL-2.0+"), @@ -166,7 +167,7 @@ const EXCEPTIONS_RUSTC_PERF: ExceptionList = &[ ("brotli-decompressor", "BSD-3-Clause/MIT"), ("encoding_rs", "(Apache-2.0 OR MIT) AND BSD-3-Clause"), ("inferno", "CDDL-1.0"), - ("ring", NON_STANDARD_LICENSE), // see EXCEPTIONS_NON_STANDARD_LICENSE_DEPS for more. + ("option-ext", "MPL-2.0"), ("ryu", "Apache-2.0 OR BSL-1.0"), ("snap", "BSD-3-Clause"), ("subtle", "BSD-3-Clause"), @@ -225,20 +226,6 @@ const EXCEPTIONS_UEFI_QEMU_TEST: ExceptionList = &[ ("r-efi", "MIT OR Apache-2.0 OR LGPL-2.1-or-later"), // LGPL is not acceptable, but we use it under MIT OR Apache-2.0 ]; -/// Placeholder for non-standard license file. 
-const NON_STANDARD_LICENSE: &str = "NON_STANDARD_LICENSE"; - -/// These dependencies have non-standard licenses but are genenrally permitted. -const EXCEPTIONS_NON_STANDARD_LICENSE_DEPS: &[&str] = &[ - // `ring` is included because it is an optional dependency of `hyper`, - // which is a training data in rustc-perf for optimized build. - // The license of it is generally `ISC AND MIT AND OpenSSL`, - // though the `package.license` field is not set. - // - // See https://github.com/briansmith/ring/issues/902 - "ring", -]; - const PERMITTED_DEPS_LOCATION: &str = concat!(file!(), ":", line!()); /// Crates rustc is allowed to depend on. Avoid adding to the list if possible. @@ -378,6 +365,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[ "serde", "serde_derive", "serde_json", + "serde_path_to_error", "sha1", "sha2", "sharded-slab", @@ -597,7 +585,7 @@ pub fn check(root: &Path, cargo: &Path, bless: bool, bad: &mut bool) { .other_options(vec!["--locked".to_owned()]); let metadata = t!(cmd.exec()); - check_license_exceptions(&metadata, exceptions, bad); + check_license_exceptions(&metadata, workspace, exceptions, bad); if let Some((crates, permitted_deps)) = permitted_deps { check_permitted_dependencies(&metadata, workspace, permitted_deps, crates, bad); } @@ -631,8 +619,8 @@ fn check_proc_macro_dep_list(root: &Path, cargo: &Path, bless: bool, bad: &mut b proc_macro_deps.retain(|pkg| !is_proc_macro_pkg(&metadata[pkg])); let proc_macro_deps: HashSet<_> = - proc_macro_deps.into_iter().map(|dep| metadata[dep].name.clone()).collect(); - let expected = proc_macro_deps::CRATES.iter().map(|s| s.to_string()).collect::<HashSet<_>>(); + proc_macro_deps.into_iter().map(|dep| metadata[dep].name.as_ref()).collect(); + let expected = proc_macro_deps::CRATES.iter().copied().collect::<HashSet<_>>(); let needs_blessing = proc_macro_deps.difference(&expected).next().is_some() || expected.difference(&proc_macro_deps).next().is_some(); @@ -716,7 +704,7 @@ fn check_runtime_license_exceptions(metadata: &Metadata, bad: &mut bool) { // See https://github.com/rust-lang/rust/issues/62620 for more. // In general, these should never be added and this exception // should not be taken as precedent for any new target. - if pkg.name == "fortanix-sgx-abi" && pkg.license.as_deref() == Some("MPL-2.0") { + if *pkg.name == "fortanix-sgx-abi" && pkg.license.as_deref() == Some("MPL-2.0") { continue; } @@ -728,36 +716,38 @@ fn check_runtime_license_exceptions(metadata: &Metadata, bad: &mut bool) { /// Check that all licenses of tool dependencies are in the valid list in `LICENSES`. /// /// Packages listed in `exceptions` are allowed for tools. -fn check_license_exceptions(metadata: &Metadata, exceptions: &[(&str, &str)], bad: &mut bool) { +fn check_license_exceptions( + metadata: &Metadata, + workspace: &str, + exceptions: &[(&str, &str)], + bad: &mut bool, +) { // Validate the EXCEPTIONS list hasn't changed. for (name, license) in exceptions { // Check that the package actually exists. - if !metadata.packages.iter().any(|p| p.name == *name) { + if !metadata.packages.iter().any(|p| *p.name == *name) { tidy_error!( bad, - "could not find exception package `{}`\n\ + "could not find exception package `{}` in workspace `{workspace}`\n\ Remove from EXCEPTIONS list if it is no longer used.", name ); } // Check that the license hasn't changed. 
- for pkg in metadata.packages.iter().filter(|p| p.name == *name) { + for pkg in metadata.packages.iter().filter(|p| *p.name == *name) { match &pkg.license { None => { - if *license == NON_STANDARD_LICENSE - && EXCEPTIONS_NON_STANDARD_LICENSE_DEPS.contains(&pkg.name.as_str()) - { - continue; - } tidy_error!( bad, - "dependency exception `{}` does not declare a license expression", + "dependency exception `{}` in workspace `{workspace}` does not declare a license expression", pkg.id ); } Some(pkg_license) => { if pkg_license.as_str() != *license { - println!("dependency exception `{name}` license has changed"); + println!( + "dependency exception `{name}` license in workspace `{workspace}` has changed" + ); println!(" previously `{license}` now `{pkg_license}`"); println!(" update EXCEPTIONS for the new license"); *bad = true; @@ -781,12 +771,21 @@ fn check_license_exceptions(metadata: &Metadata, exceptions: &[(&str, &str)], ba let license = match &pkg.license { Some(license) => license, None => { - tidy_error!(bad, "dependency `{}` does not define a license expression", pkg.id); + tidy_error!( + bad, + "dependency `{}` in workspace `{workspace}` does not define a license expression", + pkg.id + ); continue; } }; if !LICENSES.contains(&license.as_str()) { - tidy_error!(bad, "invalid license `{}` in `{}`", license, pkg.id); + tidy_error!( + bad, + "invalid license `{}` for package `{}` in workspace `{workspace}`", + license, + pkg.id + ); } } } @@ -816,9 +815,9 @@ fn check_permitted_dependencies( let Ok(version) = Version::parse(version) else { return false; }; - pkg.name == name && pkg.version == version + *pkg.name == name && pkg.version == version } else { - pkg.name == permitted + *pkg.name == permitted } } if !deps.iter().any(|dep_id| compare(pkg_from_id(metadata, dep_id), permitted)) { @@ -866,7 +865,7 @@ fn check_permitted_dependencies( /// Finds a package with the given name. fn pkg_from_name<'a>(metadata: &'a Metadata, name: &'static str) -> &'a Package { - let mut i = metadata.packages.iter().filter(|p| p.name == name); + let mut i = metadata.packages.iter().filter(|p| *p.name == name); let result = i.next().unwrap_or_else(|| panic!("could not find package `{name}` in package list")); assert!(i.next().is_none(), "more than one package found for `{name}`"); diff --git a/src/tools/tidy/src/ext_tool_checks.rs b/src/tools/tidy/src/extra_checks/mod.rs index 911d4daae5c..f90f716cd95 100644 --- a/src/tools/tidy/src/ext_tool_checks.rs +++ b/src/tools/tidy/src/extra_checks/mod.rs @@ -50,6 +50,7 @@ pub fn check( ci_info: &CiInfo, librustdoc_path: &Path, tools_path: &Path, + npm: &Path, bless: bool, extra_checks: Option<&str>, pos_args: &[String], @@ -61,6 +62,7 @@ pub fn check( ci_info, librustdoc_path, tools_path, + npm, bless, extra_checks, pos_args, @@ -75,6 +77,7 @@ fn check_impl( ci_info: &CiInfo, librustdoc_path: &Path, tools_path: &Path, + npm: &Path, bless: bool, extra_checks: Option<&str>, pos_args: &[String], @@ -83,7 +86,7 @@ fn check_impl( std::env::var("TIDY_PRINT_DIFF").is_ok_and(|v| v.eq_ignore_ascii_case("true") || v == "1"); // Split comma-separated args up - let lint_args = match extra_checks { + let mut lint_args = match extra_checks { Some(s) => s .strip_prefix("--extra-checks=") .unwrap() @@ -96,11 +99,7 @@ fn check_impl( }) .filter_map(|(res, src)| match res { Ok(arg) => { - if arg.is_inactive_auto(ci_info) { - None - } else { - Some(arg) - } + Some(arg) } Err(err) => { // only warn because before bad extra checks would be silently ignored. 
@@ -111,6 +110,11 @@ fn check_impl( .collect(), None => vec![], }; + if lint_args.iter().any(|ck| ck.auto) { + crate::files_modified_batch_filter(ci_info, &mut lint_args, |ck, path| { + ck.is_non_auto_or_matches(path) + }); + } macro_rules! extra_check { ($lang:ident, $kind:ident) => { @@ -293,7 +297,7 @@ fn check_impl( } if js_lint || js_typecheck { - rustdoc_js::npm_install(root_path, outdir)?; + rustdoc_js::npm_install(root_path, outdir, npm)?; } if js_lint { @@ -718,10 +722,10 @@ impl ExtraCheckArg { self.lang == lang && self.kind.map(|k| k == kind).unwrap_or(true) } - /// Returns `true` if this is an auto arg and the relevant files are not modified. - fn is_inactive_auto(&self, ci_info: &CiInfo) -> bool { + /// Returns `false` if this is an auto arg and the passed filename does not trigger the auto rule + fn is_non_auto_or_matches(&self, filepath: &str) -> bool { if !self.auto { - return false; + return true; } let ext = match self.lang { ExtraCheckLang::Py => ".py", @@ -729,12 +733,15 @@ impl ExtraCheckArg { ExtraCheckLang::Shell => ".sh", ExtraCheckLang::Js => ".js", ExtraCheckLang::Spellcheck => { - return !crate::files_modified(ci_info, |s| { - SPELLCHECK_DIRS.iter().any(|dir| Path::new(s).starts_with(dir)) - }); + for dir in SPELLCHECK_DIRS { + if Path::new(filepath).starts_with(dir) { + return true; + } + } + return false; } }; - !crate::files_modified(ci_info, |s| s.ends_with(ext)) + filepath.ends_with(ext) } fn has_supported_kind(&self) -> bool { diff --git a/src/tools/tidy/src/ext_tool_checks/rustdoc_js.rs b/src/tools/tidy/src/extra_checks/rustdoc_js.rs index c1a62cedd33..7708b128e23 100644 --- a/src/tools/tidy/src/ext_tool_checks/rustdoc_js.rs +++ b/src/tools/tidy/src/extra_checks/rustdoc_js.rs @@ -23,9 +23,8 @@ fn spawn_cmd(cmd: &mut Command) -> Result<Child, io::Error> { } /// install all js dependencies from package.json. 
-pub(super) fn npm_install(root_path: &Path, outdir: &Path) -> Result<(), super::Error> { - // FIXME(lolbinarycat): make this obey build.npm bootstrap option - npm::install(root_path, outdir, Path::new("npm"))?; +pub(super) fn npm_install(root_path: &Path, outdir: &Path, npm: &Path) -> Result<(), super::Error> { + npm::install(root_path, outdir, npm)?; Ok(()) } diff --git a/src/tools/tidy/src/issues.txt b/src/tools/tidy/src/issues.txt index 77414bec82d..ee06707415f 100644 --- a/src/tools/tidy/src/issues.txt +++ b/src/tools/tidy/src/issues.txt @@ -1364,1230 +1364,6 @@ ui/infinite/issue-41731-infinite-macro-println.rs ui/intrinsics/issue-28575.rs ui/intrinsics/issue-84297-reifying-copy.rs ui/invalid/issue-114435-layout-type-err.rs -ui/issues/auxiliary/issue-11224.rs -ui/issues/auxiliary/issue-11508.rs -ui/issues/auxiliary/issue-11529.rs -ui/issues/auxiliary/issue-11680.rs -ui/issues/auxiliary/issue-12612-1.rs -ui/issues/auxiliary/issue-12612-2.rs -ui/issues/auxiliary/issue-12660-aux.rs -ui/issues/auxiliary/issue-13507.rs -ui/issues/auxiliary/issue-13620-1.rs -ui/issues/auxiliary/issue-13620-2.rs -ui/issues/auxiliary/issue-14344-1.rs -ui/issues/auxiliary/issue-14344-2.rs -ui/issues/auxiliary/issue-14422.rs -ui/issues/auxiliary/issue-15562.rs -ui/issues/auxiliary/issue-16643.rs -ui/issues/auxiliary/issue-16725.rs -ui/issues/auxiliary/issue-17662.rs -ui/issues/auxiliary/issue-18501.rs -ui/issues/auxiliary/issue-18514.rs -ui/issues/auxiliary/issue-18711.rs -ui/issues/auxiliary/issue-18913-1.rs -ui/issues/auxiliary/issue-18913-2.rs -ui/issues/auxiliary/issue-19293.rs -ui/issues/auxiliary/issue-20389.rs -ui/issues/auxiliary/issue-21202.rs -ui/issues/auxiliary/issue-2170-lib.rs -ui/issues/auxiliary/issue-2316-a.rs -ui/issues/auxiliary/issue-2316-b.rs -ui/issues/auxiliary/issue-2380.rs -ui/issues/auxiliary/issue-2414-a.rs -ui/issues/auxiliary/issue-2414-b.rs -ui/issues/auxiliary/issue-2472-b.rs -ui/issues/auxiliary/issue-25185-1.rs -ui/issues/auxiliary/issue-25185-2.rs -ui/issues/auxiliary/issue-2526.rs -ui/issues/auxiliary/issue-25467.rs -ui/issues/auxiliary/issue-2631-a.rs -ui/issues/auxiliary/issue-2723-a.rs -ui/issues/auxiliary/issue-29265.rs -ui/issues/auxiliary/issue-29485.rs -ui/issues/auxiliary/issue-3012-1.rs -ui/issues/auxiliary/issue-30123-aux.rs -ui/issues/auxiliary/issue-3136-a.rs -ui/issues/auxiliary/issue-31702-1.rs -ui/issues/auxiliary/issue-31702-2.rs -ui/issues/auxiliary/issue-34796-aux.rs -ui/issues/auxiliary/issue-36954.rs -ui/issues/auxiliary/issue-38190.rs -ui/issues/auxiliary/issue-38226-aux.rs -ui/issues/auxiliary/issue-3979-traits.rs -ui/issues/auxiliary/issue-41053.rs -ui/issues/auxiliary/issue-41549.rs -ui/issues/auxiliary/issue-42007-s.rs -ui/issues/auxiliary/issue-4208-cc.rs -ui/issues/auxiliary/issue-4545.rs -ui/issues/auxiliary/issue-48984-aux.rs -ui/issues/auxiliary/issue-49544.rs -ui/issues/auxiliary/issue-51798.rs -ui/issues/auxiliary/issue-52489.rs -ui/issues/auxiliary/issue-5518.rs -ui/issues/auxiliary/issue-5521.rs -ui/issues/auxiliary/issue-56943.rs -ui/issues/auxiliary/issue-57271-lib.rs -ui/issues/auxiliary/issue-5844-aux.rs -ui/issues/auxiliary/issue-7178.rs -ui/issues/auxiliary/issue-73112.rs -ui/issues/auxiliary/issue-7899.rs -ui/issues/auxiliary/issue-8044.rs -ui/issues/auxiliary/issue-8259.rs -ui/issues/auxiliary/issue-8401.rs -ui/issues/auxiliary/issue-9123.rs -ui/issues/auxiliary/issue-9155.rs -ui/issues/auxiliary/issue-9188.rs -ui/issues/auxiliary/issue-9906.rs -ui/issues/auxiliary/issue-9968.rs -ui/issues/issue-10228.rs -ui/issues/issue-10291.rs 
-ui/issues/issue-102964.rs -ui/issues/issue-10396.rs -ui/issues/issue-10412.rs -ui/issues/issue-10436.rs -ui/issues/issue-10456.rs -ui/issues/issue-10465.rs -ui/issues/issue-10545.rs -ui/issues/issue-10638.rs -ui/issues/issue-10656.rs -ui/issues/issue-106755.rs -ui/issues/issue-10683.rs -ui/issues/issue-10718.rs -ui/issues/issue-10734.rs -ui/issues/issue-10764.rs -ui/issues/issue-10767.rs -ui/issues/issue-10802.rs -ui/issues/issue-10806.rs -ui/issues/issue-10853.rs -ui/issues/issue-10877.rs -ui/issues/issue-10902.rs -ui/issues/issue-11004.rs -ui/issues/issue-11047.rs -ui/issues/issue-11085.rs -ui/issues/issue-11192.rs -ui/issues/issue-11205.rs -ui/issues/issue-11224.rs -ui/issues/issue-11267.rs -ui/issues/issue-11374.rs -ui/issues/issue-11382.rs -ui/issues/issue-11384.rs -ui/issues/issue-11508.rs -ui/issues/issue-11529.rs -ui/issues/issue-11552.rs -ui/issues/issue-11592.rs -ui/issues/issue-11677.rs -ui/issues/issue-11680.rs -ui/issues/issue-11681.rs -ui/issues/issue-11709.rs -ui/issues/issue-11740.rs -ui/issues/issue-11771.rs -ui/issues/issue-11820.rs -ui/issues/issue-11844.rs -ui/issues/issue-11869.rs -ui/issues/issue-11958.rs -ui/issues/issue-12033.rs -ui/issues/issue-12041.rs -ui/issues/issue-12127.rs -ui/issues/issue-12285.rs -ui/issues/issue-12567.rs -ui/issues/issue-12612.rs -ui/issues/issue-12660.rs -ui/issues/issue-12677.rs -ui/issues/issue-12729.rs -ui/issues/issue-12744.rs -ui/issues/issue-12860.rs -ui/issues/issue-12863.rs -ui/issues/issue-12909.rs -ui/issues/issue-12920.rs -ui/issues/issue-13027.rs -ui/issues/issue-13058.rs -ui/issues/issue-13105.rs -ui/issues/issue-13167.rs -ui/issues/issue-13202.rs -ui/issues/issue-13204.rs -ui/issues/issue-13214.rs -ui/issues/issue-13259-windows-tcb-trash.rs -ui/issues/issue-13264.rs -ui/issues/issue-13323.rs -ui/issues/issue-13359.rs -ui/issues/issue-13405.rs -ui/issues/issue-13407.rs -ui/issues/issue-13434.rs -ui/issues/issue-13446.rs -ui/issues/issue-13466.rs -ui/issues/issue-13482-2.rs -ui/issues/issue-13482.rs -ui/issues/issue-13497-2.rs -ui/issues/issue-13497.rs -ui/issues/issue-13507-2.rs -ui/issues/issue-13620.rs -ui/issues/issue-13665.rs -ui/issues/issue-13703.rs -ui/issues/issue-13763.rs -ui/issues/issue-13775.rs -ui/issues/issue-13808.rs -ui/issues/issue-13847.rs -ui/issues/issue-13867.rs -ui/issues/issue-14082.rs -ui/issues/issue-14091-2.rs -ui/issues/issue-14091.rs -ui/issues/issue-14092.rs -ui/issues/issue-14229.rs -ui/issues/issue-14254.rs -ui/issues/issue-14285.rs -ui/issues/issue-14308.rs -ui/issues/issue-14330.rs -ui/issues/issue-14344.rs -ui/issues/issue-14366.rs -ui/issues/issue-14382.rs -ui/issues/issue-14393.rs -ui/issues/issue-14399.rs -ui/issues/issue-14422.rs -ui/issues/issue-14541.rs -ui/issues/issue-14721.rs -ui/issues/issue-14821.rs -ui/issues/issue-14845.rs -ui/issues/issue-14853.rs -ui/issues/issue-14865.rs -ui/issues/issue-14875.rs -ui/issues/issue-14901.rs -ui/issues/issue-14915.rs -ui/issues/issue-14919.rs -ui/issues/issue-14959.rs -ui/issues/issue-15034.rs -ui/issues/issue-15043.rs -ui/issues/issue-15063.rs -ui/issues/issue-15094.rs -ui/issues/issue-15104.rs -ui/issues/issue-15129-rpass.rs -ui/issues/issue-15167.rs -ui/issues/issue-15189.rs -ui/issues/issue-15207.rs -ui/issues/issue-15260.rs -ui/issues/issue-15381.rs -ui/issues/issue-15444.rs -ui/issues/issue-15523-big.rs -ui/issues/issue-15523.rs -ui/issues/issue-15562.rs -ui/issues/issue-15571.rs -ui/issues/issue-15673.rs -ui/issues/issue-15734.rs -ui/issues/issue-15735.rs -ui/issues/issue-15756.rs -ui/issues/issue-15763.rs -ui/issues/issue-15774.rs 
-ui/issues/issue-15783.rs -ui/issues/issue-15793.rs -ui/issues/issue-15858.rs -ui/issues/issue-15896.rs -ui/issues/issue-15965.rs -ui/issues/issue-16048.rs -ui/issues/issue-16149.rs -ui/issues/issue-16151.rs -ui/issues/issue-16256.rs -ui/issues/issue-16278.rs -ui/issues/issue-16401.rs -ui/issues/issue-16441.rs -ui/issues/issue-16452.rs -ui/issues/issue-16492.rs -ui/issues/issue-16530.rs -ui/issues/issue-16560.rs -ui/issues/issue-16562.rs -ui/issues/issue-16596.rs -ui/issues/issue-16643.rs -ui/issues/issue-16648.rs -ui/issues/issue-16668.rs -ui/issues/issue-16671.rs -ui/issues/issue-16683.rs -ui/issues/issue-16725.rs -ui/issues/issue-16739.rs -ui/issues/issue-16745.rs -ui/issues/issue-16774.rs -ui/issues/issue-16783.rs -ui/issues/issue-16819.rs -ui/issues/issue-16922-rpass.rs -ui/issues/issue-16966.rs -ui/issues/issue-16994.rs -ui/issues/issue-17001.rs -ui/issues/issue-17033.rs -ui/issues/issue-17068.rs -ui/issues/issue-17121.rs -ui/issues/issue-17216.rs -ui/issues/issue-17252.rs -ui/issues/issue-17322.rs -ui/issues/issue-17336.rs -ui/issues/issue-17337.rs -ui/issues/issue-17351.rs -ui/issues/issue-17361.rs -ui/issues/issue-17373.rs -ui/issues/issue-17385.rs -ui/issues/issue-17405.rs -ui/issues/issue-17441.rs -ui/issues/issue-17450.rs -ui/issues/issue-17503.rs -ui/issues/issue-17546.rs -ui/issues/issue-17551.rs -ui/issues/issue-17651.rs -ui/issues/issue-17662.rs -ui/issues/issue-17732.rs -ui/issues/issue-17734.rs -ui/issues/issue-17740.rs -ui/issues/issue-17746.rs -ui/issues/issue-17758.rs -ui/issues/issue-17771.rs -ui/issues/issue-17800.rs -ui/issues/issue-17816.rs -ui/issues/issue-17877.rs -ui/issues/issue-17897.rs -ui/issues/issue-17904-2.rs -ui/issues/issue-17904.rs -ui/issues/issue-17905-2.rs -ui/issues/issue-17905.rs -ui/issues/issue-17933.rs -ui/issues/issue-17954.rs -ui/issues/issue-17959.rs -ui/issues/issue-17994.rs -ui/issues/issue-17999.rs -ui/issues/issue-18058.rs -ui/issues/issue-18088.rs -ui/issues/issue-18107.rs -ui/issues/issue-18110.rs -ui/issues/issue-18119.rs -ui/issues/issue-18159.rs -ui/issues/issue-18173.rs -ui/issues/issue-18183.rs -ui/issues/issue-18188.rs -ui/issues/issue-18232.rs -ui/issues/issue-18352.rs -ui/issues/issue-18353.rs -ui/issues/issue-18389.rs -ui/issues/issue-18423.rs -ui/issues/issue-18446-2.rs -ui/issues/issue-18446.rs -ui/issues/issue-18464.rs -ui/issues/issue-18501.rs -ui/issues/issue-18514.rs -ui/issues/issue-18532.rs -ui/issues/issue-18539.rs -ui/issues/issue-18566.rs -ui/issues/issue-18611.rs -ui/issues/issue-18685.rs -ui/issues/issue-18711.rs -ui/issues/issue-18767.rs -ui/issues/issue-18783.rs -ui/issues/issue-18809.rs -ui/issues/issue-18845.rs -ui/issues/issue-18859.rs -ui/issues/issue-18906.rs -ui/issues/issue-18913.rs -ui/issues/issue-18919.rs -ui/issues/issue-18952.rs -ui/issues/issue-18959.rs -ui/issues/issue-18988.rs -ui/issues/issue-19001.rs -ui/issues/issue-19037.rs -ui/issues/issue-19086.rs -ui/issues/issue-19097.rs -ui/issues/issue-19098.rs -ui/issues/issue-19100.rs -ui/issues/issue-19127.rs -ui/issues/issue-19135.rs -ui/issues/issue-19293.rs -ui/issues/issue-19367.rs -ui/issues/issue-19380.rs -ui/issues/issue-19398.rs -ui/issues/issue-19404.rs -ui/issues/issue-19479.rs -ui/issues/issue-19482.rs -ui/issues/issue-19499.rs -ui/issues/issue-19601.rs -ui/issues/issue-19631.rs -ui/issues/issue-19632.rs -ui/issues/issue-19692.rs -ui/issues/issue-19734.rs -ui/issues/issue-19811-escape-unicode.rs -ui/issues/issue-19850.rs -ui/issues/issue-19922.rs -ui/issues/issue-19982.rs -ui/issues/issue-19991.rs -ui/issues/issue-20009.rs 
-ui/issues/issue-20055-box-trait.rs -ui/issues/issue-20055-box-unsized-array.rs -ui/issues/issue-20162.rs -ui/issues/issue-20174.rs -ui/issues/issue-20186.rs -ui/issues/issue-20225.rs -ui/issues/issue-20261.rs -ui/issues/issue-20313-rpass.rs -ui/issues/issue-20313.rs -ui/issues/issue-20389.rs -ui/issues/issue-20396.rs -ui/issues/issue-20413.rs -ui/issues/issue-20414.rs -ui/issues/issue-20427.rs -ui/issues/issue-20433.rs -ui/issues/issue-20454.rs -ui/issues/issue-20544.rs -ui/issues/issue-20575.rs -ui/issues/issue-20644.rs -ui/issues/issue-20676.rs -ui/issues/issue-20714.rs -ui/issues/issue-2074.rs -ui/issues/issue-20772.rs -ui/issues/issue-20797.rs -ui/issues/issue-20803.rs -ui/issues/issue-20831-debruijn.rs -ui/issues/issue-20847.rs -ui/issues/issue-20939.rs -ui/issues/issue-20953.rs -ui/issues/issue-20971.rs -ui/issues/issue-21033.rs -ui/issues/issue-21140.rs -ui/issues/issue-21160.rs -ui/issues/issue-21174-2.rs -ui/issues/issue-21174.rs -ui/issues/issue-21177.rs -ui/issues/issue-21202.rs -ui/issues/issue-21245.rs -ui/issues/issue-21291.rs -ui/issues/issue-21306.rs -ui/issues/issue-21332.rs -ui/issues/issue-21361.rs -ui/issues/issue-21384.rs -ui/issues/issue-21400.rs -ui/issues/issue-21402.rs -ui/issues/issue-21449.rs -ui/issues/issue-2150.rs -ui/issues/issue-2151.rs -ui/issues/issue-21546.rs -ui/issues/issue-21554.rs -ui/issues/issue-21600.rs -ui/issues/issue-21622.rs -ui/issues/issue-21634.rs -ui/issues/issue-21655.rs -ui/issues/issue-2170-exe.rs -ui/issues/issue-21701.rs -ui/issues/issue-21763.rs -ui/issues/issue-21891.rs -ui/issues/issue-2190-1.rs -ui/issues/issue-21909.rs -ui/issues/issue-21922.rs -ui/issues/issue-21946.rs -ui/issues/issue-21950.rs -ui/issues/issue-21974.rs -ui/issues/issue-22008.rs -ui/issues/issue-22034.rs -ui/issues/issue-22036.rs -ui/issues/issue-2214.rs -ui/issues/issue-22258.rs -ui/issues/issue-22289.rs -ui/issues/issue-22312.rs -ui/issues/issue-22346.rs -ui/issues/issue-22356.rs -ui/issues/issue-22370.rs -ui/issues/issue-22403.rs -ui/issues/issue-22426.rs -ui/issues/issue-22434.rs -ui/issues/issue-22468.rs -ui/issues/issue-22471.rs -ui/issues/issue-22577.rs -ui/issues/issue-22599.rs -ui/issues/issue-22603.rs -ui/issues/issue-22629.rs -ui/issues/issue-22638.rs -ui/issues/issue-22644.rs -ui/issues/issue-22673.rs -ui/issues/issue-22684.rs -ui/issues/issue-22706.rs -ui/issues/issue-22777.rs -ui/issues/issue-22781.rs -ui/issues/issue-22789.rs -ui/issues/issue-2281-part1.rs -ui/issues/issue-22814.rs -ui/issues/issue-2284.rs -ui/issues/issue-22872.rs -ui/issues/issue-22874.rs -ui/issues/issue-2288.rs -ui/issues/issue-22886.rs -ui/issues/issue-22894.rs -ui/issues/issue-22992-2.rs -ui/issues/issue-22992.rs -ui/issues/issue-23024.rs -ui/issues/issue-23036.rs -ui/issues/issue-23041.rs -ui/issues/issue-23046.rs -ui/issues/issue-23073.rs -ui/issues/issue-2311-2.rs -ui/issues/issue-2311.rs -ui/issues/issue-2312.rs -ui/issues/issue-2316-c.rs -ui/issues/issue-23173.rs -ui/issues/issue-23189.rs -ui/issues/issue-23217.rs -ui/issues/issue-23253.rs -ui/issues/issue-23261.rs -ui/issues/issue-23281.rs -ui/issues/issue-23311.rs -ui/issues/issue-23336.rs -ui/issues/issue-23354-2.rs -ui/issues/issue-23354.rs -ui/issues/issue-23406.rs -ui/issues/issue-23433.rs -ui/issues/issue-23442.rs -ui/issues/issue-23477.rs -ui/issues/issue-23485.rs -ui/issues/issue-23491.rs -ui/issues/issue-23543.rs -ui/issues/issue-23544.rs -ui/issues/issue-23550.rs -ui/issues/issue-23589.rs -ui/issues/issue-23699.rs -ui/issues/issue-2380-b.rs -ui/issues/issue-2383.rs -ui/issues/issue-23891.rs 
-ui/issues/issue-23898.rs -ui/issues/issue-23958.rs -ui/issues/issue-23966.rs -ui/issues/issue-23992.rs -ui/issues/issue-24013.rs -ui/issues/issue-24036.rs -ui/issues/issue-24086.rs -ui/issues/issue-2414-c.rs -ui/issues/issue-24161.rs -ui/issues/issue-24227.rs -ui/issues/issue-2428.rs -ui/issues/issue-24308.rs -ui/issues/issue-24322.rs -ui/issues/issue-24352.rs -ui/issues/issue-24353.rs -ui/issues/issue-24357.rs -ui/issues/issue-24363.rs -ui/issues/issue-24365.rs -ui/issues/issue-24389.rs -ui/issues/issue-24424.rs -ui/issues/issue-24434.rs -ui/issues/issue-2445-b.rs -ui/issues/issue-2445.rs -ui/issues/issue-24533.rs -ui/issues/issue-24589.rs -ui/issues/issue-2463.rs -ui/issues/issue-24682.rs -ui/issues/issue-24687-embed-debuginfo/auxiliary/issue-24687-lib.rs -ui/issues/issue-24687-embed-debuginfo/auxiliary/issue-24687-mbcs-in-comments.rs -ui/issues/issue-2470-bounds-check-overflow.rs -ui/issues/issue-2472.rs -ui/issues/issue-24779.rs -ui/issues/issue-24819.rs -ui/issues/issue-2487-a.rs -ui/issues/issue-24945-repeat-dash-opts.rs -ui/issues/issue-24947.rs -ui/issues/issue-24954.rs -ui/issues/issue-2502.rs -ui/issues/issue-25076.rs -ui/issues/issue-25089.rs -ui/issues/issue-25145.rs -ui/issues/issue-25180.rs -ui/issues/issue-25185.rs -ui/issues/issue-2526-a.rs -ui/issues/issue-25279.rs -ui/issues/issue-25343.rs -ui/issues/issue-25368.rs -ui/issues/issue-25386.rs -ui/issues/issue-25394.rs -ui/issues/issue-25467.rs -ui/issues/issue-25497.rs -ui/issues/issue-2550.rs -ui/issues/issue-25515.rs -ui/issues/issue-25549-multiple-drop.rs -ui/issues/issue-25579.rs -ui/issues/issue-25679.rs -ui/issues/issue-25693.rs -ui/issues/issue-25746-bool-transmute.rs -ui/issues/issue-25757.rs -ui/issues/issue-25810.rs -ui/issues/issue-2590.rs -ui/issues/issue-25901.rs -ui/issues/issue-26056.rs -ui/issues/issue-26093.rs -ui/issues/issue-26095.rs -ui/issues/issue-26127.rs -ui/issues/issue-26186.rs -ui/issues/issue-26205.rs -ui/issues/issue-26217.rs -ui/issues/issue-26237.rs -ui/issues/issue-2631-b.rs -ui/issues/issue-2642.rs -ui/issues/issue-26468.rs -ui/issues/issue-26472.rs -ui/issues/issue-26484.rs -ui/issues/issue-26614.rs -ui/issues/issue-26619.rs -ui/issues/issue-26641.rs -ui/issues/issue-26646.rs -ui/issues/issue-26655.rs -ui/issues/issue-26709.rs -ui/issues/issue-26802.rs -ui/issues/issue-26805.rs -ui/issues/issue-26812.rs -ui/issues/issue-26948.rs -ui/issues/issue-26997.rs -ui/issues/issue-27008.rs -ui/issues/issue-27033.rs -ui/issues/issue-27042.rs -ui/issues/issue-27054-primitive-binary-ops.rs -ui/issues/issue-27078.rs -ui/issues/issue-2708.rs -ui/issues/issue-27105.rs -ui/issues/issue-2723-b.rs -ui/issues/issue-27240.rs -ui/issues/issue-27268.rs -ui/issues/issue-27281.rs -ui/issues/issue-27340.rs -ui/issues/issue-27401-dropflag-reinit.rs -ui/issues/issue-27433.rs -ui/issues/issue-27592.rs -ui/issues/issue-2761.rs -ui/issues/issue-27639.rs -ui/issues/issue-27697.rs -ui/issues/issue-27815.rs -ui/issues/issue-27842.rs -ui/issues/issue-27889.rs -ui/issues/issue-27942.rs -ui/issues/issue-27949.rs -ui/issues/issue-27997.rs -ui/issues/issue-28105.rs -ui/issues/issue-28109.rs -ui/issues/issue-28181.rs -ui/issues/issue-28279.rs -ui/issues/issue-28344.rs -ui/issues/issue-28433.rs -ui/issues/issue-28472.rs -ui/issues/issue-2848.rs -ui/issues/issue-2849.rs -ui/issues/issue-28498-must-work-ex1.rs -ui/issues/issue-28498-must-work-ex2.rs -ui/issues/issue-28498-ugeh-ex1.rs -ui/issues/issue-28550.rs -ui/issues/issue-28561.rs -ui/issues/issue-28568.rs -ui/issues/issue-28586.rs -ui/issues/issue-28600.rs 
-ui/issues/issue-28625.rs -ui/issues/issue-28776.rs -ui/issues/issue-28828.rs -ui/issues/issue-28839.rs -ui/issues/issue-28936.rs -ui/issues/issue-2895.rs -ui/issues/issue-28971.rs -ui/issues/issue-28983.rs -ui/issues/issue-28999.rs -ui/issues/issue-29030.rs -ui/issues/issue-29037.rs -ui/issues/issue-2904.rs -ui/issues/issue-29048.rs -ui/issues/issue-29053.rs -ui/issues/issue-29071-2.rs -ui/issues/issue-29071.rs -ui/issues/issue-29092.rs -ui/issues/issue-29147-rpass.rs -ui/issues/issue-29147.rs -ui/issues/issue-29265.rs -ui/issues/issue-29276.rs -ui/issues/issue-2935.rs -ui/issues/issue-29466.rs -ui/issues/issue-29485.rs -ui/issues/issue-2951.rs -ui/issues/issue-29516.rs -ui/issues/issue-29522.rs -ui/issues/issue-29540.rs -ui/issues/issue-29663.rs -ui/issues/issue-29668.rs -ui/issues/issue-29710.rs -ui/issues/issue-29723.rs -ui/issues/issue-29740.rs -ui/issues/issue-29743.rs -ui/issues/issue-29821.rs -ui/issues/issue-29857.rs -ui/issues/issue-29861.rs -ui/issues/issue-2989.rs -ui/issues/issue-29948.rs -ui/issues/issue-2995.rs -ui/issues/issue-30018-panic.rs -ui/issues/issue-30081.rs -ui/issues/issue-3012-2.rs -ui/issues/issue-30123.rs -ui/issues/issue-3021-b.rs -ui/issues/issue-3021-d.rs -ui/issues/issue-30236.rs -ui/issues/issue-30255.rs -ui/issues/issue-3026.rs -ui/issues/issue-3029.rs -ui/issues/issue-3037.rs -ui/issues/issue-30371.rs -ui/issues/issue-3038.rs -ui/issues/issue-30380.rs -ui/issues/issue-3052.rs -ui/issues/issue-30530.rs -ui/issues/issue-30589.rs -ui/issues/issue-30615.rs -ui/issues/issue-30756.rs -ui/issues/issue-30891.rs -ui/issues/issue-3091.rs -ui/issues/issue-31011.rs -ui/issues/issue-3109.rs -ui/issues/issue-3121.rs -ui/issues/issue-31267-additional.rs -ui/issues/issue-31267.rs -ui/issues/issue-31299.rs -ui/issues/issue-3136-b.rs -ui/issues/issue-3149.rs -ui/issues/issue-31511.rs -ui/issues/issue-3154.rs -ui/issues/issue-31702.rs -ui/issues/issue-31769.rs -ui/issues/issue-31776.rs -ui/issues/issue-31910.rs -ui/issues/issue-32004.rs -ui/issues/issue-32008.rs -ui/issues/issue-32086.rs -ui/issues/issue-3220.rs -ui/issues/issue-32292.rs -ui/issues/issue-32324.rs -ui/issues/issue-32326.rs -ui/issues/issue-32377.rs -ui/issues/issue-32389.rs -ui/issues/issue-32518.rs -ui/issues/issue-32655.rs -ui/issues/issue-32782.rs -ui/issues/issue-32797.rs -ui/issues/issue-32805.rs -ui/issues/issue-3290.rs -ui/issues/issue-32995-2.rs -ui/issues/issue-32995.rs -ui/issues/issue-33202.rs -ui/issues/issue-33241.rs -ui/issues/issue-33287.rs -ui/issues/issue-33293.rs -ui/issues/issue-33387.rs -ui/issues/issue-3344.rs -ui/issues/issue-33461.rs -ui/issues/issue-33504.rs -ui/issues/issue-33525.rs -ui/issues/issue-33571.rs -ui/issues/issue-33687.rs -ui/issues/issue-33770.rs -ui/issues/issue-3389.rs -ui/issues/issue-33941.rs -ui/issues/issue-34047.rs -ui/issues/issue-34074.rs -ui/issues/issue-34209.rs -ui/issues/issue-34229.rs -ui/issues/issue-3424.rs -ui/issues/issue-3429.rs -ui/issues/issue-34334.rs -ui/issues/issue-34349.rs -ui/issues/issue-34373.rs -ui/issues/issue-34418.rs -ui/issues/issue-34427.rs -ui/issues/issue-3447.rs -ui/issues/issue-34503.rs -ui/issues/issue-34569.rs -ui/issues/issue-34571.rs -ui/issues/issue-34751.rs -ui/issues/issue-3477.rs -ui/issues/issue-34780.rs -ui/issues/issue-34796.rs -ui/issues/issue-34839.rs -ui/issues/issue-3500.rs -ui/issues/issue-35139.rs -ui/issues/issue-3521-2.rs -ui/issues/issue-35241.rs -ui/issues/issue-35423.rs -ui/issues/issue-3556.rs -ui/issues/issue-35570.rs -ui/issues/issue-3559.rs -ui/issues/issue-35600.rs -ui/issues/issue-3574.rs 
-ui/issues/issue-35815.rs -ui/issues/issue-35976.rs -ui/issues/issue-35988.rs -ui/issues/issue-36023.rs -ui/issues/issue-36036-associated-type-layout.rs -ui/issues/issue-36075.rs -ui/issues/issue-3609.rs -ui/issues/issue-36116.rs -ui/issues/issue-36260.rs -ui/issues/issue-36278-prefix-nesting.rs -ui/issues/issue-36299.rs -ui/issues/issue-36379.rs -ui/issues/issue-36400.rs -ui/issues/issue-36474.rs -ui/issues/issue-3656.rs -ui/issues/issue-3668-non-constant-value-in-constant/issue-3668-2.rs -ui/issues/issue-3668-non-constant-value-in-constant/issue-3668.rs -ui/issues/issue-36744-bitcast-args-if-needed.rs -ui/issues/issue-36786-resolve-call.rs -ui/issues/issue-3680.rs -ui/issues/issue-36816.rs -ui/issues/issue-36836.rs -ui/issues/issue-36839.rs -ui/issues/issue-36856.rs -ui/issues/issue-36936.rs -ui/issues/issue-36954.rs -ui/issues/issue-3702-2.rs -ui/issues/issue-3702.rs -ui/issues/issue-37051.rs -ui/issues/issue-37109.rs -ui/issues/issue-37131.rs -ui/issues/issue-37311-type-length-limit/issue-37311.rs -ui/issues/issue-37510.rs -ui/issues/issue-3753.rs -ui/issues/issue-37534.rs -ui/issues/issue-37576.rs -ui/issues/issue-3763.rs -ui/issues/issue-37665.rs -ui/issues/issue-37686.rs -ui/issues/issue-37725.rs -ui/issues/issue-37733.rs -ui/issues/issue-3779.rs -ui/issues/issue-37884.rs -ui/issues/issue-38160.rs -ui/issues/issue-38190.rs -ui/issues/issue-38226.rs -ui/issues/issue-38381.rs -ui/issues/issue-38412.rs -ui/issues/issue-38437.rs -ui/issues/issue-38458.rs -ui/issues/issue-3847.rs -ui/issues/issue-38556.rs -ui/issues/issue-38727.rs -ui/issues/issue-3874.rs -ui/issues/issue-38763.rs -ui/issues/issue-38857.rs -ui/issues/issue-38875/auxiliary/issue-38875-b.rs -ui/issues/issue-38875/issue-38875.rs -ui/issues/issue-3888-2.rs -ui/issues/issue-38919.rs -ui/issues/issue-38942.rs -ui/issues/issue-3895.rs -ui/issues/issue-38954.rs -ui/issues/issue-38987.rs -ui/issues/issue-39089.rs -ui/issues/issue-39211.rs -ui/issues/issue-39367.rs -ui/issues/issue-39548.rs -ui/issues/issue-39687.rs -ui/issues/issue-39709.rs -ui/issues/issue-3979-2.rs -ui/issues/issue-3979-xcrate.rs -ui/issues/issue-3979.rs -ui/issues/issue-39808.rs -ui/issues/issue-39827.rs -ui/issues/issue-39848.rs -ui/issues/issue-3991.rs -ui/issues/issue-3993.rs -ui/issues/issue-39970.rs -ui/issues/issue-39984.rs -ui/issues/issue-40000.rs -ui/issues/issue-4025.rs -ui/issues/issue-40288-2.rs -ui/issues/issue-40288.rs -ui/issues/issue-40951.rs -ui/issues/issue-41053.rs -ui/issues/issue-41229-ref-str.rs -ui/issues/issue-41298.rs -ui/issues/issue-41479.rs -ui/issues/issue-41498.rs -ui/issues/issue-41549.rs -ui/issues/issue-41604.rs -ui/issues/issue-41652/auxiliary/issue-41652-b.rs -ui/issues/issue-41652/issue-41652.rs -ui/issues/issue-41677.rs -ui/issues/issue-41696.rs -ui/issues/issue-41726.rs -ui/issues/issue-41742.rs -ui/issues/issue-41744.rs -ui/issues/issue-41849-variance-req.rs -ui/issues/issue-41880.rs -ui/issues/issue-41888.rs -ui/issues/issue-41936-variance-coerce-unsized-cycle.rs -ui/issues/issue-41974.rs -ui/issues/issue-41998.rs -ui/issues/issue-42007.rs -ui/issues/issue-4208.rs -ui/issues/issue-42106.rs -ui/issues/issue-42148.rs -ui/issues/issue-42210.rs -ui/issues/issue-4228.rs -ui/issues/issue-42312.rs -ui/issues/issue-42453.rs -ui/issues/issue-42467.rs -ui/issues/issue-4252.rs -ui/issues/issue-42552.rs -ui/issues/issue-4265.rs -ui/issues/issue-42755.rs -ui/issues/issue-42796.rs -ui/issues/issue-42880.rs -ui/issues/issue-42956.rs -ui/issues/issue-43057.rs -ui/issues/issue-43205.rs -ui/issues/issue-43250.rs -ui/issues/issue-43291.rs 
-ui/issues/issue-4333.rs -ui/issues/issue-4335.rs -ui/issues/issue-43355.rs -ui/issues/issue-43357.rs -ui/issues/issue-43420-no-over-suggest.rs -ui/issues/issue-43424.rs -ui/issues/issue-43431.rs -ui/issues/issue-43483.rs -ui/issues/issue-43692.rs -ui/issues/issue-43806.rs -ui/issues/issue-43853.rs -ui/issues/issue-4387.rs -ui/issues/issue-43910.rs -ui/issues/issue-43923.rs -ui/issues/issue-43988.rs -ui/issues/issue-44023.rs -ui/issues/issue-44056.rs -ui/issues/issue-44078.rs -ui/issues/issue-44216-add-instant.rs -ui/issues/issue-44216-add-system-time.rs -ui/issues/issue-44216-sub-instant.rs -ui/issues/issue-44216-sub-system-time.rs -ui/issues/issue-44239.rs -ui/issues/issue-44247.rs -ui/issues/issue-44405.rs -ui/issues/issue-4464.rs -ui/issues/issue-44730.rs -ui/issues/issue-44851.rs -ui/issues/issue-4517.rs -ui/issues/issue-4541.rs -ui/issues/issue-4542.rs -ui/issues/issue-45425.rs -ui/issues/issue-4545.rs -ui/issues/issue-45510.rs -ui/issues/issue-45562.rs -ui/issues/issue-45697-1.rs -ui/issues/issue-45697.rs -ui/issues/issue-45730.rs -ui/issues/issue-45731.rs -ui/issues/issue-45801.rs -ui/issues/issue-45965.rs -ui/issues/issue-46069.rs -ui/issues/issue-46101.rs -ui/issues/issue-46302.rs -ui/issues/issue-46311.rs -ui/issues/issue-46332.rs -ui/issues/issue-46471-1.rs -ui/issues/issue-46472.rs -ui/issues/issue-46604.rs -ui/issues/issue-46756-consider-borrowing-cast-or-binexpr.rs -ui/issues/issue-46771.rs -ui/issues/issue-46855.rs -ui/issues/issue-46964.rs -ui/issues/issue-46983.rs -ui/issues/issue-47073-zero-padded-tuple-struct-indices.rs -ui/issues/issue-47094.rs -ui/issues/issue-47184.rs -ui/issues/issue-47309.rs -ui/issues/issue-4734.rs -ui/issues/issue-4735.rs -ui/issues/issue-4736.rs -ui/issues/issue-47364.rs -ui/issues/issue-47377.rs -ui/issues/issue-47380.rs -ui/issues/issue-47486.rs -ui/issues/issue-4759-1.rs -ui/issues/issue-4759.rs -ui/issues/issue-47638.rs -ui/issues/issue-47673.rs -ui/issues/issue-47703-1.rs -ui/issues/issue-47703-tuple.rs -ui/issues/issue-47703.rs -ui/issues/issue-47715.rs -ui/issues/issue-47722.rs -ui/issues/issue-48006.rs -ui/issues/issue-48131.rs -ui/issues/issue-48132.rs -ui/issues/issue-48159.rs -ui/issues/issue-48276.rs -ui/issues/issue-4830.rs -ui/issues/issue-48364.rs -ui/issues/issue-48728.rs -ui/issues/issue-4875.rs -ui/issues/issue-48984.rs -ui/issues/issue-49298.rs -ui/issues/issue-4935.rs -ui/issues/issue-49544.rs -ui/issues/issue-49632.rs -ui/issues/issue-4968.rs -ui/issues/issue-4972.rs -ui/issues/issue-49824.rs -ui/issues/issue-49854.rs -ui/issues/issue-49919.rs -ui/issues/issue-49934-errors.rs -ui/issues/issue-49934.rs -ui/issues/issue-49955.rs -ui/issues/issue-49973.rs -ui/issues/issue-50187.rs -ui/issues/issue-50411.rs -ui/issues/issue-50415.rs -ui/issues/issue-50442.rs -ui/issues/issue-50471.rs -ui/issues/issue-50518.rs -ui/issues/issue-50581.rs -ui/issues/issue-50582.rs -ui/issues/issue-50585.rs -ui/issues/issue-50600.rs -ui/issues/issue-50618.rs -ui/issues/issue-5062.rs -ui/issues/issue-5067.rs -ui/issues/issue-50688.rs -ui/issues/issue-50714.rs -ui/issues/issue-50761.rs -ui/issues/issue-50781.rs -ui/issues/issue-50802.rs -ui/issues/issue-50811.rs -ui/issues/issue-5100.rs -ui/issues/issue-51022.rs -ui/issues/issue-51044.rs -ui/issues/issue-51102.rs -ui/issues/issue-51116.rs -ui/issues/issue-51154.rs -ui/issues/issue-51515.rs -ui/issues/issue-51632-try-desugar-incompatible-types.rs -ui/issues/issue-51655.rs -ui/issues/issue-51714.rs -ui/issues/issue-51798.rs -ui/issues/issue-51874.rs -ui/issues/issue-51907.rs -ui/issues/issue-5192.rs 
-ui/issues/issue-51947.rs -ui/issues/issue-52049.rs -ui/issues/issue-52126-assign-op-invariance.rs -ui/issues/issue-52262.rs -ui/issues/issue-52489.rs -ui/issues/issue-52533.rs -ui/issues/issue-52717.rs -ui/issues/issue-5280.rs -ui/issues/issue-5315.rs -ui/issues/issue-5321-immediates-with-bare-self.rs -ui/issues/issue-53251.rs -ui/issues/issue-53275.rs -ui/issues/issue-53300.rs -ui/issues/issue-53333.rs -ui/issues/issue-53348.rs -ui/issues/issue-53419.rs -ui/issues/issue-53568.rs -ui/issues/issue-5358-1.rs -ui/issues/issue-53728.rs -ui/issues/issue-53843.rs -ui/issues/issue-54044.rs -ui/issues/issue-54062.rs -ui/issues/issue-54094.rs -ui/issues/issue-5439.rs -ui/issues/issue-54410.rs -ui/issues/issue-54462-mutable-noalias-correctness.rs -ui/issues/issue-54477-reduced-2.rs -ui/issues/issue-54696.rs -ui/issues/issue-5518.rs -ui/issues/issue-5521.rs -ui/issues/issue-55376.rs -ui/issues/issue-55380.rs -ui/issues/issue-5550.rs -ui/issues/issue-5554.rs -ui/issues/issue-55587.rs -ui/issues/issue-5572.rs -ui/issues/issue-55731.rs -ui/issues/issue-56128.rs -ui/issues/issue-56175.rs -ui/issues/issue-56199.rs -ui/issues/issue-56229.rs -ui/issues/issue-56237.rs -ui/issues/issue-5666.rs -ui/issues/issue-56806.rs -ui/issues/issue-56835.rs -ui/issues/issue-56870.rs -ui/issues/issue-5688.rs -ui/issues/issue-56943.rs -ui/issues/issue-5708.rs -ui/issues/issue-57156.rs -ui/issues/issue-57162.rs -ui/issues/issue-5718.rs -ui/issues/issue-57198-pass.rs -ui/issues/issue-57271.rs -ui/issues/issue-57399-self-return-impl-trait.rs -ui/issues/issue-5741.rs -ui/issues/issue-5754.rs -ui/issues/issue-57741-dereference-boxed-value/issue-57741-1.rs -ui/issues/issue-57741-dereference-boxed-value/issue-57741.rs -ui/issues/issue-57781.rs -ui/issues/issue-57924.rs -ui/issues/issue-58212.rs -ui/issues/issue-58375-monomorphize-default-impls.rs -ui/issues/issue-5844.rs -ui/issues/issue-58463.rs -ui/issues/issue-58712.rs -ui/issues/issue-58734.rs -ui/issues/issue-5883.rs -ui/issues/issue-5884.rs -ui/issues/issue-58857.rs -ui/issues/issue-5900.rs -ui/issues/issue-59020.rs -ui/issues/issue-5917.rs -ui/issues/issue-59326.rs -ui/issues/issue-59488.rs -ui/issues/issue-59494.rs -ui/issues/issue-5950.rs -ui/issues/issue-59756.rs -ui/issues/issue-5988.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997-enum.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997-struct.rs -ui/issues/issue-5997-outer-generic-parameter/issue-5997.rs -ui/issues/issue-60218.rs -ui/issues/issue-60622.rs -ui/issues/issue-60989.rs -ui/issues/issue-61106.rs -ui/issues/issue-61108.rs -ui/issues/issue-6117.rs -ui/issues/issue-6130.rs -ui/issues/issue-61475.rs -ui/issues/issue-6153.rs -ui/issues/issue-61623.rs -ui/issues/issue-61894.rs -ui/issues/issue-62480.rs -ui/issues/issue-6318.rs -ui/issues/issue-6344-let.rs -ui/issues/issue-6344-match.rs -ui/issues/issue-63983.rs -ui/issues/issue-64430.rs -ui/issues/issue-64559.rs -ui/issues/issue-64593.rs -ui/issues/issue-64792-bad-unicode-ctor.rs -ui/issues/issue-65131.rs -ui/issues/issue-65230.rs -ui/issues/issue-65462.rs -ui/issues/issue-6557.rs -ui/issues/issue-66308.rs -ui/issues/issue-66353.rs -ui/issues/issue-66667-function-cmp-cycle.rs -ui/issues/issue-66702-break-outside-loop-val.rs -ui/issues/issue-66706.rs -ui/issues/issue-66923-show-error-for-correct-call.rs -ui/issues/issue-67039-unsound-pin-partialeq.rs -ui/issues/issue-6738.rs -ui/issues/issue-67535.rs -ui/issues/issue-67552.rs -ui/issues/issue-68010-large-zst-consts.rs -ui/issues/issue-68696-catch-during-unwind.rs -ui/issues/issue-6892.rs 
-ui/issues/issue-68951.rs -ui/issues/issue-6898.rs -ui/issues/issue-69130.rs -ui/issues/issue-6919.rs -ui/issues/issue-69306.rs -ui/issues/issue-6936.rs -ui/issues/issue-69455.rs -ui/issues/issue-69602-type-err-during-codegen-ice.rs -ui/issues/issue-69683.rs -ui/issues/issue-7012.rs -ui/issues/issue-70381.rs -ui/issues/issue-7044.rs -ui/issues/issue-7061.rs -ui/issues/issue-70673.rs -ui/issues/issue-70724-add_type_neq_err_label-unwrap.rs -ui/issues/issue-70746.rs -ui/issues/issue-7092.rs -ui/issues/issue-71406.rs -ui/issues/issue-7178.rs -ui/issues/issue-72002.rs -ui/issues/issue-72076.rs -ui/issues/issue-72278.rs -ui/issues/issue-7246.rs -ui/issues/issue-7268.rs -ui/issues/issue-72839-error-overflow.rs -ui/issues/issue-72933-match-stack-overflow.rs -ui/issues/issue-73112.rs -ui/issues/issue-73229.rs -ui/issues/issue-7344.rs -ui/issues/issue-7364.rs -ui/issues/issue-74082.rs -ui/issues/issue-74564-if-expr-stack-overflow.rs -ui/issues/issue-7519-match-unit-in-arg.rs -ui/issues/issue-75283.rs -ui/issues/issue-7563.rs -ui/issues/issue-75704.rs -ui/issues/issue-7575.rs -ui/issues/issue-76042.rs -ui/issues/issue-76077-inaccesible-private-fields/issue-76077-1.rs -ui/issues/issue-76077-inaccesible-private-fields/issue-76077.rs -ui/issues/issue-76191.rs -ui/issues/issue-7660.rs -ui/issues/issue-7663.rs -ui/issues/issue-7673-cast-generically-implemented-trait.rs -ui/issues/issue-77218/issue-77218-2.rs -ui/issues/issue-77218/issue-77218.rs -ui/issues/issue-7784.rs -ui/issues/issue-77919.rs -ui/issues/issue-78192.rs -ui/issues/issue-78622.rs -ui/issues/issue-7867.rs -ui/issues/issue-78957.rs -ui/issues/issue-7899.rs -ui/issues/issue-7911.rs -ui/issues/issue-7970a.rs -ui/issues/issue-8044.rs -ui/issues/issue-80607.rs -ui/issues/issue-81584.rs -ui/issues/issue-8171-default-method-self-inherit-builtin-trait.rs -ui/issues/issue-81918.rs -ui/issues/issue-8248.rs -ui/issues/issue-8249.rs -ui/issues/issue-8259.rs -ui/issues/issue-83048.rs -ui/issues/issue-8391.rs -ui/issues/issue-8398.rs -ui/issues/issue-8401.rs -ui/issues/issue-8498.rs -ui/issues/issue-8506.rs -ui/issues/issue-8521.rs -ui/issues/issue-85461.rs -ui/issues/issue-8578.rs -ui/issues/issue-86756.rs -ui/issues/issue-87199.rs -ui/issues/issue-8727.rs -ui/issues/issue-87490.rs -ui/issues/issue-8761.rs -ui/issues/issue-8767.rs -ui/issues/issue-87707.rs -ui/issues/issue-8783.rs -ui/issues/issue-88150.rs -ui/issues/issue-8860.rs -ui/issues/issue-8898.rs -ui/issues/issue-9047.rs -ui/issues/issue-9110.rs -ui/issues/issue-9123.rs -ui/issues/issue-9129.rs -ui/issues/issue-91489.rs -ui/issues/issue-9155.rs -ui/issues/issue-9188.rs -ui/issues/issue-9243.rs -ui/issues/issue-9249.rs -ui/issues/issue-9259.rs -ui/issues/issue-92741.rs -ui/issues/issue-9446.rs -ui/issues/issue-9725.rs -ui/issues/issue-9737.rs -ui/issues/issue-9814.rs -ui/issues/issue-98299.rs -ui/issues/issue-9837.rs -ui/issues/issue-9906.rs -ui/issues/issue-9918.rs -ui/issues/issue-9942.rs -ui/issues/issue-9951.rs -ui/issues/issue-9968.rs -ui/issues/issue-99838.rs ui/iterators/issue-28098.rs ui/iterators/issue-58952-filter-type-length.rs ui/lang-items/issue-83471.rs diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index 5f6796a9150..4ea9d051ddb 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -125,48 +125,69 @@ pub fn git_diff<S: AsRef<OsStr>>(base_commit: &str, extra_arg: S) -> Option<Stri Some(String::from_utf8_lossy(&output.stdout).into()) } -/// Returns true if any modified file matches the predicate, if we are in CI, or if unable to list modified 
files. -pub fn files_modified(ci_info: &CiInfo, pred: impl Fn(&str) -> bool) -> bool { +/// Similar to `files_modified`, but only involves a single call to `git`. +/// +/// Removes all elements from `items` that do not cause any match when `pred` is called with the list of modified files. +/// +/// If in CI, no elements will be removed. +pub fn files_modified_batch_filter<T>( + ci_info: &CiInfo, + items: &mut Vec<T>, + pred: impl Fn(&T, &str) -> bool, +) { if CiEnv::is_ci() { // assume everything is modified on CI because we really don't want false positives there. - return true; + return; } let Some(base_commit) = &ci_info.base_commit else { eprintln!("No base commit, assuming all files are modified"); - return true; + return; }; match crate::git_diff(base_commit, "--name-status") { Some(output) => { - let modified_files = output.lines().filter_map(|ln| { - let (status, name) = ln - .trim_end() - .split_once('\t') - .expect("bad format from `git diff --name-status`"); - if status == "M" { Some(name) } else { None } - }); - for modified_file in modified_files { - if pred(modified_file) { - return true; + let modified_files: Vec<_> = output + .lines() + .filter_map(|ln| { + let (status, name) = ln + .trim_end() + .split_once('\t') + .expect("bad format from `git diff --name-status`"); + if status == "M" { Some(name) } else { None } + }) + .collect(); + items.retain(|item| { + for modified_file in &modified_files { + if pred(item, modified_file) { + // at least one predicate matches, keep this item. + return true; + } } - } - false + // no predicates matched, remove this item. + false + }); } None => { eprintln!("warning: failed to run `git diff` to check for changes"); eprintln!("warning: assuming all files are modified"); - true } } } +/// Returns true if any modified file matches the predicate, if we are in CI, or if unable to list modified files. +pub fn files_modified(ci_info: &CiInfo, pred: impl Fn(&str) -> bool) -> bool { + let mut v = vec![()]; + files_modified_batch_filter(ci_info, &mut v, |_, p| pred(p)); + !v.is_empty() +} + pub mod alphabetical; pub mod bins; pub mod debug_artifacts; pub mod deps; pub mod edition; pub mod error_codes; -pub mod ext_tool_checks; pub mod extdeps; +pub mod extra_checks; pub mod features; pub mod filenames; pub mod fluent_alphabetical; diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index 13b20f33bd0..cd2567ddb64 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -29,6 +29,7 @@ fn main() { let concurrency: NonZeroUsize = FromStr::from_str(&env::args().nth(4).expect("need concurrency")) .expect("concurrency must be a number"); + let npm: PathBuf = env::args_os().nth(5).expect("need name/path of npm command").into(); let root_manifest = root_path.join("Cargo.toml"); let src_path = root_path.join("src"); @@ -127,9 +128,9 @@ fn main() { check!(pal, &library_path); // Checks that need to be done for both the compiler and std libraries. 
- check!(unit_tests, &src_path); - check!(unit_tests, &compiler_path); - check!(unit_tests, &library_path); + check!(unit_tests, &src_path, false); + check!(unit_tests, &compiler_path, false); + check!(unit_tests, &library_path, true); if bins::check_filesystem_support(&[&root_path], &output_directory) { check!(bins, &root_path); @@ -176,12 +177,13 @@ fn main() { check!(unstable_book, &src_path, collected); check!( - ext_tool_checks, + extra_checks, &root_path, &output_directory, &ci_info, &librustdoc_path, &tools_path, + &npm, bless, extra_checks, pos_args diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs index 35ed61eacc7..fca097c091b 100644 --- a/src/tools/tidy/src/style.rs +++ b/src/tools/tidy/src/style.rs @@ -519,8 +519,11 @@ pub fn check(path: &Path, bad: &mut bool) { .any(|directive| matches!(directive, Directive::Ignore(_))); let has_alphabetical_directive = line.contains("tidy-alphabetical-start") || line.contains("tidy-alphabetical-end"); - let has_recognized_directive = - has_recognized_ignore_directive || has_alphabetical_directive; + let has_other_tidy_ignore_directive = + line.contains("ignore-tidy-target-specific-tests"); + let has_recognized_directive = has_recognized_ignore_directive + || has_alphabetical_directive + || has_other_tidy_ignore_directive; if contains_potential_directive && (!has_recognized_directive) { err("Unrecognized tidy directive") } diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs index f4a6783abb6..b2d5f259eb2 100644 --- a/src/tools/tidy/src/target_specific_tests.rs +++ b/src/tools/tidy/src/target_specific_tests.rs @@ -12,12 +12,16 @@ const COMPILE_FLAGS_HEADER: &str = "compile-flags:"; #[derive(Default, Debug)] struct RevisionInfo<'a> { - target_arch: Option<&'a str>, + target_arch: Option<Option<&'a str>>, llvm_components: Option<Vec<&'a str>>, } pub fn check(tests_path: &Path, bad: &mut bool) { crate::walk::walk(tests_path, |path, _is_dir| filter_not_rust(path), &mut |entry, content| { + if content.contains("// ignore-tidy-target-specific-tests") { + return; + } + let file = entry.path().display(); let mut header_map = BTreeMap::new(); iter_header(content, &mut |HeaderLine { revision, directive, .. 
}| { @@ -34,10 +38,11 @@ pub fn check(tests_path: &Path, bad: &mut bool) { && let Some((_, v)) = compile_flags.split_once("--target") { let v = v.trim_start_matches([' ', '=']); - let v = if v == "{{target}}" { Some((v, v)) } else { v.split_once("-") }; - if let Some((arch, _)) = v { - let info = header_map.entry(revision).or_insert(RevisionInfo::default()); - info.target_arch.replace(arch); + let info = header_map.entry(revision).or_insert(RevisionInfo::default()); + if v.starts_with("{{") { + info.target_arch.replace(None); + } else if let Some((arch, _)) = v.split_once("-") { + info.target_arch.replace(Some(arch)); } else { eprintln!("{file}: seems to have a malformed --target value"); *bad = true; @@ -54,9 +59,11 @@ pub fn check(tests_path: &Path, bad: &mut bool) { let rev = rev.unwrap_or("[unspecified]"); match (target_arch, llvm_components) { (None, None) => {} - (Some(_), None) => { + (Some(target_arch), None) => { + let llvm_component = + target_arch.map_or_else(|| "<arch>".to_string(), arch_to_llvm_component); eprintln!( - "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER}` as it has `--target` set" + "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER} {llvm_component}` as it has `--target` set" ); *bad = true; } @@ -66,11 +73,45 @@ pub fn check(tests_path: &Path, bad: &mut bool) { ); *bad = true; } - (Some(_), Some(_)) => { - // FIXME: check specified components against the target architectures we - // gathered. + (Some(target_arch), Some(llvm_components)) => { + if let Some(target_arch) = target_arch { + let llvm_component = arch_to_llvm_component(target_arch); + if !llvm_components.contains(&llvm_component.as_str()) { + eprintln!( + "{file}: revision {rev} should specify `{LLVM_COMPONENTS_HEADER} {llvm_component}` as it has `--target` set" + ); + *bad = true; + } + } } } } }); } + +fn arch_to_llvm_component(arch: &str) -> String { + // NOTE: This is an *approximate* mapping of Rust's `--target` architecture to LLVM component + // names. It is not intended to be an authoritative source, but rather a best-effort that's good + // enough for the purpose of this tidy check. + match arch { + "amdgcn" => "amdgpu".into(), + "aarch64_be" | "arm64_32" | "arm64e" | "arm64ec" => "aarch64".into(), + "i386" | "i586" | "i686" | "x86" | "x86_64" | "x86_64h" => "x86".into(), + "loongarch32" | "loongarch64" => "loongarch".into(), + "nvptx64" => "nvptx".into(), + "s390x" => "systemz".into(), + "sparc64" | "sparcv9" => "sparc".into(), + "wasm32" | "wasm32v1" | "wasm64" => "webassembly".into(), + _ if arch.starts_with("armeb") + || arch.starts_with("armv") + || arch.starts_with("thumbv") => + { + "arm".into() + } + _ if arch.starts_with("bpfe") => "bpf".into(), + _ if arch.starts_with("mips") => "mips".into(), + _ if arch.starts_with("powerpc") => "powerpc".into(), + _ if arch.starts_with("riscv") => "riscv".into(), + _ => arch.to_ascii_lowercase(), + } +} diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index b9d22ece597..4d195b3952e 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -1,24 +1,12 @@ //! Tidy check to ensure below in UI test directories: -//! - the number of entries in each directory must be less than `ENTRY_LIMIT` //! 
- there are no stray `.stderr` files -use std::collections::{BTreeSet, HashMap}; +use std::collections::BTreeSet; use std::ffi::OsStr; use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -use ignore::Walk; - -// FIXME: GitHub's UI truncates file lists that exceed 1000 entries, so these -// should all be 1000 or lower. Limits significantly smaller than 1000 are also -// desirable, because large numbers of files are unwieldy in general. See issue -// #73494. -const ENTRY_LIMIT: u32 = 901; -// FIXME: The following limits should be reduced eventually. - -const ISSUES_ENTRY_LIMIT: u32 = 1616; - const EXPECTED_TEST_FILE_EXTENSIONS: &[&str] = &[ "rs", // test source files "stderr", // expected stderr file, corresponds to a rs file @@ -54,42 +42,6 @@ const EXTENSION_EXCEPTION_PATHS: &[&str] = &[ "tests/ui/std/windows-bat-args3.bat", // tests escaping arguments through batch files ]; -fn check_entries(tests_path: &Path, bad: &mut bool) { - let mut directories: HashMap<PathBuf, u32> = HashMap::new(); - - for entry in Walk::new(tests_path.join("ui")).flatten() { - let parent = entry.path().parent().unwrap().to_path_buf(); - *directories.entry(parent).or_default() += 1; - } - - let (mut max, mut max_issues) = (0, 0); - for (dir_path, count) in directories { - let is_issues_dir = tests_path.join("ui/issues") == dir_path; - let (limit, maxcnt) = if is_issues_dir { - (ISSUES_ENTRY_LIMIT, &mut max_issues) - } else { - (ENTRY_LIMIT, &mut max) - }; - *maxcnt = (*maxcnt).max(count); - if count > limit { - tidy_error!( - bad, - "following path contains more than {} entries, \ - you should move the test to some relevant subdirectory (current: {}): {}", - limit, - count, - dir_path.display() - ); - } - } - if ISSUES_ENTRY_LIMIT > max_issues { - tidy_error!( - bad, - "`ISSUES_ENTRY_LIMIT` is too high (is {ISSUES_ENTRY_LIMIT}, should be {max_issues})" - ); - } -} - pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { let issues_txt_header = r#"============================================================ ⚠️⚠️⚠️NOTHING SHOULD EVER BE ADDED TO THIS LIST⚠️⚠️⚠️ @@ -97,7 +49,6 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { "#; let path = &root_path.join("tests"); - check_entries(path, bad); // the list of files in ui tests that are allowed to start with `issue-XXXX` // BTreeSet because we would like a stable ordering so --bless works @@ -179,7 +130,9 @@ pub fn check(root_path: &Path, bless: bool, bad: &mut bool) { .unwrap() .replace(std::path::MAIN_SEPARATOR_STR, "/"); - if !remaining_issue_names.remove(stripped_path.as_str()) { + if !remaining_issue_names.remove(stripped_path.as_str()) + && !stripped_path.starts_with("ui/issues/") + { tidy_error!( bad, "file `tests/{stripped_path}` must begin with a descriptive name, consider `{{reason}}-issue-{issue_n}.rs`", diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs index df9146b5147..3d14a467319 100644 --- a/src/tools/tidy/src/unit_tests.rs +++ b/src/tools/tidy/src/unit_tests.rs @@ -1,44 +1,60 @@ //! Tidy check to ensure `#[test]` and `#[bench]` are not used directly inside -//! `core` or `alloc`. +//! of the standard library. //! //! `core` and `alloc` cannot be tested directly due to duplicating lang items. //! All tests and benchmarks must be written externally in //! `{coretests,alloctests}/{tests,benches}`. //! -//! Outside of `core` and `alloc`, tests and benchmarks should be outlined into -//! separate files named `tests.rs` or `benches.rs`, or directories named +//! 
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index df9146b5147..3d14a467319 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -1,44 +1,60 @@
 //! Tidy check to ensure `#[test]` and `#[bench]` are not used directly inside
-//! `core` or `alloc`.
+//! of the standard library.
 //!
 //! `core` and `alloc` cannot be tested directly due to duplicating lang items.
 //! All tests and benchmarks must be written externally in
 //! `{coretests,alloctests}/{tests,benches}`.
 //!
-//! Outside of `core` and `alloc`, tests and benchmarks should be outlined into
-//! separate files named `tests.rs` or `benches.rs`, or directories named
+//! Outside of the standard library, tests and benchmarks should be outlined
+//! into separate files named `tests.rs` or `benches.rs`, or directories named
 //! `tests` or `benches` unconfigured during normal build.

 use std::path::Path;

 use crate::walk::{filter_dirs, walk};

-pub fn check(root_path: &Path, bad: &mut bool) {
-    let core = root_path.join("core");
-    let core_copy = core.clone();
-    let is_core = move |path: &Path| path.starts_with(&core);
-    let alloc = root_path.join("alloc");
-    let alloc_copy = alloc.clone();
-    let is_alloc = move |path: &Path| path.starts_with(&alloc);
-
+pub fn check(root_path: &Path, stdlib: bool, bad: &mut bool) {
    let skip = move |path: &Path, is_dir| {
        let file_name = path.file_name().unwrap_or_default();
+
+        // Skip excluded directories and non-rust files
        if is_dir {
-            filter_dirs(path)
-                || path.ends_with("src/doc")
-                || (file_name == "tests" || file_name == "benches")
-                    && !is_core(path)
-                    && !is_alloc(path)
+            if filter_dirs(path) || path.ends_with("src/doc") {
+                return true;
+            }
        } else {
            let extension = path.extension().unwrap_or_default();
-            extension != "rs"
-                || (file_name == "tests.rs" || file_name == "benches.rs")
-                    && !is_core(path)
-                    && !is_alloc(path)
-                // Tests which use non-public internals and, as such, need to
-                // have the types in the same crate as the tests themselves. See
-                // the comment in alloctests/lib.rs.
-                || path.ends_with("library/alloc/src/collections/btree/borrow/tests.rs")
+            if extension != "rs" {
+                return true;
+            }
+        }
+
+        // Tests in a separate package are always allowed
+        if is_dir && file_name != "tests" && file_name.as_encoded_bytes().ends_with(b"tests") {
+            return true;
+        }
+
+        if !stdlib {
+            // Outside of the standard library tests may also be in separate files in the same crate
+            if is_dir {
+                if file_name == "tests" || file_name == "benches" {
+                    return true;
+                }
+            } else {
+                if file_name == "tests.rs" || file_name == "benches.rs" {
+                    return true;
+                }
+            }
+        }
+
+        if is_dir {
+            // FIXME remove those exceptions once no longer necessary
+            file_name == "std_detect" || file_name == "std" || file_name == "test"
+        } else {
+            // Tests which use non-public internals and, as such, need to
+            // have the types in the same crate as the tests themselves. See
+            // the comment in alloctests/lib.rs.
+            path.ends_with("library/alloc/src/collections/btree/borrow/tests.rs")
                || path.ends_with("library/alloc/src/collections/btree/map/tests.rs")
                || path.ends_with("library/alloc/src/collections/btree/node/tests.rs")
                || path.ends_with("library/alloc/src/collections/btree/set/tests.rs")
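
Aside (not part of the patch): the "separate package" rule introduced above boils down to a directory-name check; a minimal stand-alone sketch, with `is_separate_test_package` as a hypothetical name:

    use std::ffi::OsStr;

    // A directory counts as a separate test package when its name ends in "tests"
    // but is not literally "tests" (e.g. `coretests`, `alloctests`).
    fn is_separate_test_package(dir_name: &OsStr) -> bool {
        dir_name != "tests" && dir_name.as_encoded_bytes().ends_with(b"tests")
    }

    fn main() {
        assert!(is_separate_test_package(OsStr::new("coretests")));
        assert!(is_separate_test_package(OsStr::new("alloctests")));
        assert!(!is_separate_test_package(OsStr::new("tests")));
        assert!(!is_separate_test_package(OsStr::new("core")));
    }
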
@@ -50,22 +66,29 @@ pub fn check(root_path: &Path, bad: &mut bool) {
    walk(root_path, skip, &mut |entry, contents| {
        let path = entry.path();
-        let is_core = path.starts_with(&core_copy);
-        let is_alloc = path.starts_with(&alloc_copy);
+        let package = path
+            .strip_prefix(root_path)
+            .unwrap()
+            .components()
+            .next()
+            .unwrap()
+            .as_os_str()
+            .to_str()
+            .unwrap();

        for (i, line) in contents.lines().enumerate() {
            let line = line.trim();
            let is_test = || line.contains("#[test]") && !line.contains("`#[test]");
            let is_bench = || line.contains("#[bench]") && !line.contains("`#[bench]");
-            let manual_skip = line.contains("//tidy:skip");
-            if !line.starts_with("//") && (is_test() || is_bench()) && !manual_skip {
-                let explanation = if is_core {
-                    "`core` unit tests and benchmarks must be placed into `coretests`"
-                } else if is_alloc {
-                    "`alloc` unit tests and benchmarks must be placed into `alloctests`"
+            if !line.starts_with("//") && (is_test() || is_bench()) {
+                let explanation = if stdlib {
+                    format!(
+                        "`{package}` unit tests and benchmarks must be placed into `{package}tests`"
+                    )
                } else {
                    "unit tests and benchmarks must be placed into \
                    separate files or directories named \
                    `tests.rs`, `benches.rs`, `tests` or `benches`"
+                        .to_owned()
                };
                let name = if is_test() { "test" } else { "bench" };
                tidy_error!(
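
Aside (not part of the patch): the `package` value computed in the hunk above is just the first path component relative to the walked root; `package_of` below is a hypothetical mirror of that chain, assuming the root is the `library/` directory when the check runs with `stdlib` set.

    use std::path::Path;

    // Hypothetical helper mirroring the chained calls above; the root is assumed
    // to be the `library/` directory when `stdlib` is true.
    fn package_of<'a>(root: &Path, path: &'a Path) -> &'a str {
        path.strip_prefix(root).unwrap().components().next().unwrap().as_os_str().to_str().unwrap()
    }

    fn main() {
        let pkg = package_of(Path::new("library"), Path::new("library/core/src/num/mod.rs"));
        assert_eq!(pkg, "core");
        // The resulting tidy error then points at the external test crate:
        println!("`{pkg}` unit tests and benchmarks must be placed into `{pkg}tests`");
    }
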
