author     bors <bors@rust-lang.org>  2023-03-05 13:46:04 +0000
committer  bors <bors@rust-lang.org>  2023-03-05 13:46:04 +0000
commit     04e957578c77c95a0505f5818d0068b459f673e3 (patch)
tree       8174baf83834823508359f8f1a75cdfbcd339ecf
parent     0d439f81815b7b2a13b6a30b9263f9a60d46a4d4 (diff)
parent     52196169fed915c9fac8fbaab5234ee65d5a1c21 (diff)
Auto merge of #108771 - matthiaskrgr:rollup-whlvo2g, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #106440 (Ignore files in .gitignore in tidy)
 - #108613 (Remove `llvm.skip-rebuild` option)
 - #108616 (Sync codegen defaults with compiler defaults and add a ping message so they stay in sync)
 - #108618 (Rename `src/etc/vscode_settings.json` to `rust_analyzer_settings.json`)
 - #108626 (rustdoc-json: switch from HashMap to FxHashMap to fix non-determinism)
 - #108744 (Don't ICE when encountering bound var in builtin copy/clone bounds)
 - #108749 (Clean up rustdoc-js tester.js file)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
-rw-r--r--  .gitignore | 2
-rw-r--r--  Cargo.lock | 2
-rw-r--r--  compiler/rustc_trait_selection/src/traits/select/mod.rs | 11
-rw-r--r--  config.toml.example | 6
-rw-r--r--  src/bootstrap/config.rs | 9
-rw-r--r--  src/bootstrap/defaults/config.codegen.toml | 2
-rw-r--r--  src/bootstrap/flags.rs | 13
-rw-r--r--  src/bootstrap/native.rs | 9
-rw-r--r--  src/bootstrap/setup.rs | 10
-rw-r--r--  src/bootstrap/setup/tests.rs | 6
-rw-r--r--  src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md | 1
-rw-r--r--  src/etc/rust_analyzer_settings.json (renamed from src/etc/vscode_settings.json) | 0
-rw-r--r--  src/rustdoc-json-types/Cargo.toml | 1
-rw-r--r--  src/rustdoc-json-types/lib.rs | 17
-rw-r--r--  src/tools/jsondoclint/Cargo.toml | 1
-rw-r--r--  src/tools/jsondoclint/src/validator/tests.rs | 29
-rw-r--r--  src/tools/replace-version-placeholder/src/main.rs | 2
-rw-r--r--  src/tools/rustdoc-js/tester.js | 113
-rw-r--r--  src/tools/tidy/src/alphabetical.rs | 2
-rw-r--r--  src/tools/tidy/src/bins.rs | 74
-rw-r--r--  src/tools/tidy/src/debug_artifacts.rs | 2
-rw-r--r--  src/tools/tidy/src/edition.rs | 37
-rw-r--r--  src/tools/tidy/src/error_codes.rs | 4
-rw-r--r--  src/tools/tidy/src/features.rs | 6
-rw-r--r--  src/tools/tidy/src/main.rs | 2
-rw-r--r--  src/tools/tidy/src/pal.rs | 2
-rw-r--r--  src/tools/tidy/src/rustdoc_gui_tests.rs | 2
-rw-r--r--  src/tools/tidy/src/style.rs | 2
-rw-r--r--  src/tools/tidy/src/target_specific_tests.rs | 2
-rw-r--r--  src/tools/tidy/src/ui_tests.rs | 2
-rw-r--r--  src/tools/tidy/src/unit_tests.rs | 19
-rw-r--r--  src/tools/tidy/src/walk.rs | 39
-rw-r--r--  tests/run-make/rustdoc-verify-output-files/Makefile | 14
-rw-r--r--  tests/ui/traits/non_lifetime_binders/bad-copy-cond.rs | 9
-rw-r--r--  tests/ui/traits/non_lifetime_binders/bad-copy-cond.stderr | 24
-rw-r--r--  triagebot.toml | 5
36 files changed, 231 insertions(+), 250 deletions(-)
diff --git a/.gitignore b/.gitignore
index e5ca3e50313..ce797a7a837 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,7 +41,7 @@ no_llvm_build
 /inst/
 /llvm/
 /mingw-build/
-/build/
+build/
 /build-rust-analyzer/
 /dist/
 /unicode-downloads
diff --git a/Cargo.lock b/Cargo.lock
index 5530541cdd0..74bb3c8e6c2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2287,6 +2287,7 @@ dependencies = [
  "anyhow",
  "clap 4.1.4",
  "fs-err",
+ "rustc-hash",
  "rustdoc-json-types",
  "serde",
  "serde_json",
@@ -4850,6 +4851,7 @@ dependencies = [
 name = "rustdoc-json-types"
 version = "0.1.0"
 dependencies = [
+ "rustc-hash",
  "serde",
  "serde_json",
 ]
diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs
index 4377de15829..cd3f3c114ba 100644
--- a/compiler/rustc_trait_selection/src/traits/select/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -2149,7 +2149,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => None,
             ty::Infer(ty::TyVar(_)) => Ambiguous,
 
-            // We can make this an ICE if/once we actually instantiate the trait obligation.
+            // We can make this an ICE if/once we actually instantiate the trait obligation eagerly.
             ty::Bound(..) => None,
 
             ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
@@ -2257,7 +2257,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 }
             }
 
-            ty::Adt(..) | ty::Alias(..) | ty::Param(..) => {
+            ty::Adt(..) | ty::Alias(..) | ty::Param(..) | ty::Placeholder(..) => {
                 // Fallback to whatever user-defined impls exist in this case.
                 None
             }
@@ -2269,9 +2269,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 Ambiguous
             }
 
-            ty::Placeholder(..)
-            | ty::Bound(..)
-            | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
+            // We can make this an ICE if/once we actually instantiate the trait obligation eagerly.
+            ty::Bound(..) => None,
+
+            ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                 bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty);
             }
         }
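
The new `ty::Bound(..)` arms above are exercised by the UI test added at the end of this diff (`tests/ui/traits/non_lifetime_binders/bad-copy-cond.rs`); for quick reference, the reproduction is essentially:

```rust
// Minimal reproduction (mirrors the new UI test): a non-lifetime binder in a
// `where` clause used to hit the `bug!` arm when assembling builtin `Copy`
// candidates; with this change it reports a normal trait-bound error instead.
#![feature(non_lifetime_binders)]
//~^ WARN the feature `non_lifetime_binders` is incomplete

fn foo() where for<T> T: Copy {}

fn main() {
    foo(); //~ ERROR the trait bound `T: Copy` is not satisfied
}
```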
diff --git a/config.toml.example b/config.toml.example
index 3aaeb09d4c0..267aa8cb548 100644
--- a/config.toml.example
+++ b/config.toml.example
@@ -46,12 +46,6 @@ changelog-seen = 2
 # Defaults to "if-available" when `channel = "dev"` and "false" otherwise.
 #download-ci-llvm = "if-available"
 
-# Indicates whether LLVM rebuild should be skipped when running bootstrap. If
-# this is `false` then the compiler's LLVM will be rebuilt whenever the built
-# version doesn't have the correct hash. If it is `true` then LLVM will never
-# be rebuilt. The default value is `false`.
-#skip-rebuild = false
-
 # Indicates whether the LLVM build is a Release or Debug build
 #optimize = true
 
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 05e74254949..bf1aff7b72f 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -112,7 +112,6 @@ pub struct Config {
     pub backtrace_on_ice: bool,
 
     // llvm codegen options
-    pub llvm_skip_rebuild: bool,
     pub llvm_assertions: bool,
     pub llvm_tests: bool,
     pub llvm_plugins: bool,
@@ -666,7 +665,6 @@ define_config! {
 define_config! {
     /// TOML representation of how the LLVM build is configured.
     struct Llvm {
-        skip_rebuild: Option<bool> = "skip-rebuild",
         optimize: Option<bool> = "optimize",
         thin_lto: Option<bool> = "thin-lto",
         release_debuginfo: Option<bool> = "release-debuginfo",
@@ -1060,11 +1058,6 @@ impl Config {
             config.mandir = install.mandir.map(PathBuf::from);
         }
 
-        // We want the llvm-skip-rebuild flag to take precedence over the
-        // skip-rebuild config.toml option so we store it separately
-        // so that we can infer the right value
-        let mut llvm_skip_rebuild = flags.llvm_skip_rebuild;
-
         // Store off these values as options because if they're not provided
         // we'll infer default values for them later
         let mut llvm_assertions = None;
@@ -1170,7 +1163,6 @@ impl Config {
             llvm_assertions = llvm.assertions;
             llvm_tests = llvm.tests;
             llvm_plugins = llvm.plugins;
-            llvm_skip_rebuild = llvm_skip_rebuild.or(llvm.skip_rebuild);
             set(&mut config.llvm_optimize, llvm.optimize);
             set(&mut config.llvm_thin_lto, llvm.thin_lto);
             set(&mut config.llvm_release_debuginfo, llvm.release_debuginfo);
@@ -1324,7 +1316,6 @@ impl Config {
         // Now that we've reached the end of our configuration, infer the
         // default values for all options that we haven't otherwise stored yet.
 
-        config.llvm_skip_rebuild = llvm_skip_rebuild.unwrap_or(false);
         config.llvm_assertions = llvm_assertions.unwrap_or(false);
         config.llvm_tests = llvm_tests.unwrap_or(false);
         config.llvm_plugins = llvm_plugins.unwrap_or(false);
diff --git a/src/bootstrap/defaults/config.codegen.toml b/src/bootstrap/defaults/config.codegen.toml
index 088cbd1057e..eb2afa555f1 100644
--- a/src/bootstrap/defaults/config.codegen.toml
+++ b/src/bootstrap/defaults/config.codegen.toml
@@ -17,3 +17,5 @@ debug-logging = true
 incremental = true
 # Print backtrace on internal compiler errors during bootstrap
 backtrace-on-ice = true
+# Make the compiler and standard library faster to build, at the expense of a ~20% runtime slowdown.
+lto = "off"
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 9d1504c34e8..2b0b772a618 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -67,8 +67,6 @@ pub struct Flags {
     // true => deny, false => warn
     pub deny_warnings: Option<bool>,
 
-    pub llvm_skip_rebuild: Option<bool>,
-
     pub rust_profile_use: Option<String>,
     pub rust_profile_generate: Option<String>,
 
@@ -251,14 +249,6 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`",
         opts.optopt("", "color", "whether to use color in cargo and rustc output", "STYLE");
         opts.optopt(
             "",
-            "llvm-skip-rebuild",
-            "whether rebuilding llvm should be skipped \
-             a VALUE of TRUE indicates that llvm will not be rebuilt \
-             VALUE overrides the skip-rebuild option in config.toml.",
-            "VALUE",
-        );
-        opts.optopt(
-            "",
             "rust-profile-generate",
             "generate PGO profile with rustc build",
             "PROFILE",
@@ -714,9 +704,6 @@ Arguments:
                 .collect::<Vec<_>>(),
             include_default_paths: matches.opt_present("include-default-paths"),
             deny_warnings: parse_deny_warnings(&matches),
-            llvm_skip_rebuild: matches.opt_str("llvm-skip-rebuild").map(|s| s.to_lowercase()).map(
-                |s| s.parse::<bool>().expect("`llvm-skip-rebuild` should be either true or false"),
-            ),
             color: matches
                 .opt_get_default("color", Color::Auto)
                 .expect("`color` should be `always`, `never`, or `auto`"),
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 8c2bece1e62..8453a25f38a 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -108,15 +108,6 @@ pub fn prebuilt_llvm_config(
     let stamp = out_dir.join("llvm-finished-building");
     let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());
 
-    if builder.config.llvm_skip_rebuild && stamp.path.exists() {
-        builder.info(
-            "Warning: \
-                Using a potentially stale build of LLVM; \
-                This may not behave well.",
-        );
-        return Ok(res);
-    }
-
     if stamp.is_done() {
         if stamp.hash.is_none() {
             builder.info(
diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs
index 4480bce99d7..09f26862b4a 100644
--- a/src/bootstrap/setup.rs
+++ b/src/bootstrap/setup.rs
@@ -24,7 +24,7 @@ pub enum Profile {
     None,
 }
 
-/// A list of historical hashes of `src/etc/vscode_settings.json`.
+/// A list of historical hashes of `src/etc/rust_analyzer_settings.json`.
 /// New entries should be appended whenever this is updated so we can detect
 /// outdated vs. user-modified settings files.
 static SETTINGS_HASHES: &[&str] = &[
@@ -32,7 +32,7 @@ static SETTINGS_HASHES: &[&str] = &[
     "56e7bf011c71c5d81e0bf42e84938111847a810eee69d906bba494ea90b51922",
     "af1b5efe196aed007577899db9dae15d6dbc923d6fa42fa0934e68617ba9bbe0",
 ];
-static VSCODE_SETTINGS: &str = include_str!("../etc/vscode_settings.json");
+static RUST_ANALYZER_SETTINGS: &str = include_str!("../etc/rust_analyzer_settings.json");
 
 impl Profile {
     fn include_path(&self, src_path: &Path) -> PathBuf {
@@ -489,7 +489,7 @@ undesirable, simply delete the `pre-push` file from .git/hooks."
     Ok(())
 }
 
-/// Sets up or displays `src/etc/vscode_settings.json`
+/// Sets up or displays `src/etc/rust_analyzer_settings.json`
 #[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
 pub struct Vscode;
 
@@ -580,10 +580,10 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> {
             }
             _ => "Created",
         };
-        fs::write(&vscode_settings, &VSCODE_SETTINGS)?;
+        fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?;
         println!("{verb} `.vscode/settings.json`");
     } else {
-        println!("\n{VSCODE_SETTINGS}");
+        println!("\n{RUST_ANALYZER_SETTINGS}");
     }
     Ok(())
 }
diff --git a/src/bootstrap/setup/tests.rs b/src/bootstrap/setup/tests.rs
index dcf9d18e632..0fe6e4a4644 100644
--- a/src/bootstrap/setup/tests.rs
+++ b/src/bootstrap/setup/tests.rs
@@ -1,14 +1,14 @@
-use super::{SETTINGS_HASHES, VSCODE_SETTINGS};
+use super::{RUST_ANALYZER_SETTINGS, SETTINGS_HASHES};
 use sha2::Digest;
 
 #[test]
 fn check_matching_settings_hash() {
     let mut hasher = sha2::Sha256::new();
-    hasher.update(&VSCODE_SETTINGS);
+    hasher.update(&RUST_ANALYZER_SETTINGS);
     let hash = hex::encode(hasher.finalize().as_slice());
     assert_eq!(
         &hash,
         SETTINGS_HASHES.last().unwrap(),
-        "Update `SETTINGS_HASHES` with the new hash of `src/etc/vscode_settings.json`"
+        "Update `SETTINGS_HASHES` with the new hash of `src/etc/rust_analyzer_settings.json`"
     );
 }
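
Whenever `src/etc/rust_analyzer_settings.json` is edited, a new hash has to be appended to `SETTINGS_HASHES` so `x setup` can tell outdated files from user-modified ones. A standalone sketch of computing that hash the same way the test does (assuming the `sha2` and `hex` crates; the path is relative to the repository root):

```rust
use sha2::Digest;

// Compute the hex-encoded SHA-256 of the settings file, matching how
// `check_matching_settings_hash` derives the value compared against
// `SETTINGS_HASHES.last()`.
fn settings_hash(contents: &[u8]) -> String {
    let mut hasher = sha2::Sha256::new();
    hasher.update(contents);
    hex::encode(hasher.finalize().as_slice())
}

fn main() {
    let contents = std::fs::read("src/etc/rust_analyzer_settings.json")
        .expect("settings file should exist");
    println!("{}", settings_hash(&contents));
}
```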
diff --git a/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md b/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
index 432e0cfc960..32d3440f1dc 100644
--- a/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
+++ b/src/doc/rustc/src/platform-support/armeb-unknown-linux-gnueabi.md
@@ -26,7 +26,6 @@ Therefore, you can build Rust with support for the target by adding it to the ta
 ```toml
 [llvm]
 download-ci-llvm = false
-skip-rebuild = true
 optimize = true
 ninja = true
 targets = "ARM;X86"
diff --git a/src/etc/vscode_settings.json b/src/etc/rust_analyzer_settings.json
index dd01bfaa725..dd01bfaa725 100644
--- a/src/etc/vscode_settings.json
+++ b/src/etc/rust_analyzer_settings.json
diff --git a/src/rustdoc-json-types/Cargo.toml b/src/rustdoc-json-types/Cargo.toml
index d60699efd36..d63caa7ad70 100644
--- a/src/rustdoc-json-types/Cargo.toml
+++ b/src/rustdoc-json-types/Cargo.toml
@@ -8,6 +8,7 @@ path = "lib.rs"
 
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
+rustc-hash = "1.1.0"
 
 [dev-dependencies]
 serde_json = "1.0"
diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs
index 387d5787dfc..4c210291b11 100644
--- a/src/rustdoc-json-types/lib.rs
+++ b/src/rustdoc-json-types/lib.rs
@@ -3,10 +3,9 @@
 //! These types are the public API exposed through the `--output-format json` flag. The [`Crate`]
 //! struct is the root of the JSON blob and all other items are contained within.
 
-use std::collections::HashMap;
-use std::path::PathBuf;
-
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
 
 /// rustdoc format-version.
 pub const FORMAT_VERSION: u32 = 24;
@@ -24,11 +23,11 @@ pub struct Crate {
     pub includes_private: bool,
     /// A collection of all items in the local crate as well as some external traits and their
     /// items that are referenced locally.
-    pub index: HashMap<Id, Item>,
+    pub index: FxHashMap<Id, Item>,
     /// Maps IDs to fully qualified paths and other info helpful for generating links.
-    pub paths: HashMap<Id, ItemSummary>,
+    pub paths: FxHashMap<Id, ItemSummary>,
     /// Maps `crate_id` of items to a crate name and html_root_url if it exists.
-    pub external_crates: HashMap<u32, ExternalCrate>,
+    pub external_crates: FxHashMap<u32, ExternalCrate>,
     /// A single version number to be used in the future when making backwards incompatible changes
     /// to the JSON output.
     pub format_version: u32,
@@ -54,8 +53,8 @@ pub struct ItemSummary {
     ///
     /// Note that items can appear in multiple paths, and the one chosen is implementation
     /// defined. Currently, this is the full path to where the item was defined. Eg
-    /// [`String`] is currently `["alloc", "string", "String"]` and [`HashMap`] is
-    /// `["std", "collections", "hash", "map", "HashMap"]`, but this is subject to change.
+    /// [`String`] is currently `["alloc", "string", "String"]` and [`HashMap`][`std::collections::HashMap`]
+    /// is `["std", "collections", "hash", "map", "HashMap"]`, but this is subject to change.
     pub path: Vec<String>,
     /// Whether this item is a struct, trait, macro, etc.
     pub kind: ItemKind,
@@ -80,7 +79,7 @@ pub struct Item {
     /// Some("") if there is some documentation but it is empty (EG `#[doc = ""]`).
     pub docs: Option<String>,
     /// This mapping resolves [intra-doc links](https://github.com/rust-lang/rfcs/blob/master/text/1946-intra-rustdoc-links.md) from the docstring to their IDs
-    pub links: HashMap<String, Id>,
+    pub links: FxHashMap<String, Id>,
     /// Stringified versions of the attributes on this item (e.g. `"#[inline]"`)
     pub attrs: Vec<String>,
     pub deprecation: Option<Deprecation>,
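
`FxHashMap` from `rustc-hash` is the standard `HashMap` with a fixed (non-randomized) hasher, so iteration order no longer changes from run to run, which is what the rustdoc-json non-determinism fix relies on. A minimal sketch of how consumers construct it; note `FxHashMap::default()` rather than `HashMap::new()`, the same substitution the jsondoclint tests below make:

```rust
use rustc_hash::FxHashMap;

fn main() {
    // `FxHashMap<K, V>` is a type alias for `HashMap<K, V, _>` with the Fx
    // hasher, so it cannot be built with `HashMap::new()`; use `default()`
    // or `from_iter` instead.
    let mut links: FxHashMap<String, u32> = FxHashMap::default();
    links.insert("Not Found".to_owned(), 1);

    let index: FxHashMap<u32, &str> = FxHashMap::from_iter([(0, "root")]);
    assert_eq!(index[&0], "root");
}
```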
diff --git a/src/tools/jsondoclint/Cargo.toml b/src/tools/jsondoclint/Cargo.toml
index 8990310a4f4..1318a1f4476 100644
--- a/src/tools/jsondoclint/Cargo.toml
+++ b/src/tools/jsondoclint/Cargo.toml
@@ -9,6 +9,7 @@ edition = "2021"
 anyhow = "1.0.62"
 clap = { version = "4.0.15", features = ["derive"] }
 fs-err = "2.8.1"
+rustc-hash = "1.1.0"
 rustdoc-json-types = { version = "0.1.0", path = "../../rustdoc-json-types" }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0.85"
diff --git a/src/tools/jsondoclint/src/validator/tests.rs b/src/tools/jsondoclint/src/validator/tests.rs
index 1ef41ff123a..95a56a9dfac 100644
--- a/src/tools/jsondoclint/src/validator/tests.rs
+++ b/src/tools/jsondoclint/src/validator/tests.rs
@@ -1,5 +1,4 @@
-use std::collections::HashMap;
-
+use rustc_hash::FxHashMap;
 use rustdoc_json_types::{Crate, Item, ItemKind, ItemSummary, Visibility, FORMAT_VERSION};
 
 use crate::json_find::SelectorPart;
@@ -27,7 +26,7 @@ fn errors_on_missing_links() {
         root: id("0"),
         crate_version: None,
         includes_private: false,
-        index: HashMap::from_iter([(
+        index: FxHashMap::from_iter([(
             id("0"),
             Item {
                 name: Some("root".to_owned()),
@@ -36,7 +35,7 @@ fn errors_on_missing_links() {
                 span: None,
                 visibility: Visibility::Public,
                 docs: None,
-                links: HashMap::from_iter([("Not Found".to_owned(), id("1"))]),
+                links: FxHashMap::from_iter([("Not Found".to_owned(), id("1"))]),
                 attrs: vec![],
                 deprecation: None,
                 inner: ItemEnum::Module(Module {
@@ -46,8 +45,8 @@ fn errors_on_missing_links() {
                 }),
             },
         )]),
-        paths: HashMap::new(),
-        external_crates: HashMap::new(),
+        paths: FxHashMap::default(),
+        external_crates: FxHashMap::default(),
         format_version: rustdoc_json_types::FORMAT_VERSION,
     };
 
@@ -73,7 +72,7 @@ fn errors_on_local_in_paths_and_not_index() {
         root: id("0:0:1572"),
         crate_version: None,
         includes_private: false,
-        index: HashMap::from_iter([
+        index: FxHashMap::from_iter([
             (
                 id("0:0:1572"),
                 Item {
@@ -83,7 +82,7 @@ fn errors_on_local_in_paths_and_not_index() {
                     span: None,
                     visibility: Visibility::Public,
                     docs: None,
-                    links: HashMap::from_iter([(("prim@i32".to_owned(), id("0:1:1571")))]),
+                    links: FxHashMap::from_iter([(("prim@i32".to_owned(), id("0:1:1571")))]),
                     attrs: Vec::new(),
                     deprecation: None,
                     inner: ItemEnum::Module(Module {
@@ -102,14 +101,14 @@ fn errors_on_local_in_paths_and_not_index() {
                     span: None,
                     visibility: Visibility::Public,
                     docs: None,
-                    links: HashMap::default(),
+                    links: FxHashMap::default(),
                     attrs: Vec::new(),
                     deprecation: None,
                     inner: ItemEnum::Primitive(Primitive { name: "i32".to_owned(), impls: vec![] }),
                 },
             ),
         ]),
-        paths: HashMap::from_iter([(
+        paths: FxHashMap::from_iter([(
             id("0:1:1571"),
             ItemSummary {
                 crate_id: 0,
@@ -117,7 +116,7 @@ fn errors_on_local_in_paths_and_not_index() {
                 kind: ItemKind::Primitive,
             },
         )]),
-        external_crates: HashMap::default(),
+        external_crates: FxHashMap::default(),
         format_version: rustdoc_json_types::FORMAT_VERSION,
     };
 
@@ -137,7 +136,7 @@ fn checks_local_crate_id_is_correct() {
         root: id("root"),
         crate_version: None,
         includes_private: false,
-        index: HashMap::from_iter([(
+        index: FxHashMap::from_iter([(
             id("root"),
             Item {
                 id: id("root"),
@@ -146,7 +145,7 @@ fn checks_local_crate_id_is_correct() {
                 span: None,
                 visibility: Visibility::Public,
                 docs: None,
-                links: HashMap::default(),
+                links: FxHashMap::default(),
                 attrs: Vec::new(),
                 deprecation: None,
                 inner: ItemEnum::Module(Module {
@@ -156,8 +155,8 @@ fn checks_local_crate_id_is_correct() {
                 }),
             },
         )]),
-        paths: HashMap::default(),
-        external_crates: HashMap::default(),
+        paths: FxHashMap::default(),
+        external_crates: FxHashMap::default(),
         format_version: FORMAT_VERSION,
     };
     check(&krate, &[]);
diff --git a/src/tools/replace-version-placeholder/src/main.rs b/src/tools/replace-version-placeholder/src/main.rs
index 33b35d05415..864e68de55d 100644
--- a/src/tools/replace-version-placeholder/src/main.rs
+++ b/src/tools/replace-version-placeholder/src/main.rs
@@ -10,7 +10,7 @@ fn main() {
     let version_str = version_str.trim();
     walk::walk(
         &root_path,
-        &mut |path| {
+        |path| {
             walk::filter_dirs(path)
                 // We exempt these as they require the placeholder
                 // for their operation
diff --git a/src/tools/rustdoc-js/tester.js b/src/tools/rustdoc-js/tester.js
index ea5780f66d7..e617ceba3b9 100644
--- a/src/tools/rustdoc-js/tester.js
+++ b/src/tools/rustdoc-js/tester.js
@@ -2,8 +2,8 @@ const fs = require('fs');
 const path = require('path');
 
 function loadContent(content) {
-    var Module = module.constructor;
-    var m = new Module();
+    const Module = module.constructor;
+    const m = new Module();
     m._compile(content, "tmp.js");
     m.exports.ignore_order = content.indexOf("\n// ignore-order\n") !== -1 ||
         content.startsWith("// ignore-order\n");
@@ -26,8 +26,8 @@ function contentToDiffLine(key, value) {
 // the diff between the two items.
 function betterLookingDiff(entry, data) {
     let output = ' {\n';
-    let spaces = '     ';
-    for (let key in entry) {
+    const spaces = '     ';
+    for (const key in entry) {
         if (!entry.hasOwnProperty(key)) {
             continue;
         }
@@ -35,7 +35,7 @@ function betterLookingDiff(entry, data) {
             output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
             continue;
         }
-        let value = data[key];
+        const value = data[key];
         if (value !== entry[key]) {
             output += '-' + spaces + contentToDiffLine(key, entry[key]) + '\n';
             output += '+' + spaces + contentToDiffLine(key, value) + '\n';
@@ -47,19 +47,19 @@ function betterLookingDiff(entry, data) {
 }
 
 function lookForEntry(entry, data) {
-    for (var i = 0; i < data.length; ++i) {
-        var allGood = true;
-        for (var key in entry) {
+    return data.findIndex(data_entry => {
+        let allGood = true;
+        for (const key in entry) {
             if (!entry.hasOwnProperty(key)) {
                 continue;
             }
-            var value = data[i][key];
+            let value = data_entry[key];
             // To make our life easier, if there is a "parent" type, we add it to the path.
-            if (key === 'path' && data[i]['parent'] !== undefined) {
+            if (key === 'path' && data_entry['parent'] !== undefined) {
                 if (value.length > 0) {
-                    value += '::' + data[i]['parent']['name'];
+                    value += '::' + data_entry['parent']['name'];
                 } else {
-                    value = data[i]['parent']['name'];
+                    value = data_entry['parent']['name'];
                 }
             }
             if (value !== entry[key]) {
@@ -67,11 +67,8 @@ function lookForEntry(entry, data) {
                 break;
             }
         }
-        if (allGood === true) {
-            return i;
-        }
-    }
-    return null;
+        return allGood === true;
+    });
 }
 
 // This function checks if `expected` has all the required fields needed for the checks.
@@ -97,13 +94,12 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
     } else {
         fieldsToCheck = [];
     }
-    for (var i = 0; i < fieldsToCheck.length; ++i) {
-        const field = fieldsToCheck[i];
+    for (const field of fieldsToCheck) {
         if (!expected.hasOwnProperty(field)) {
             let text = `${queryName}==> Mandatory key \`${field}\` is not present`;
             if (fullPath.length > 0) {
                 text += ` in field \`${fullPath}\``;
-                if (position != null) {
+                if (position !== null) {
                     text += ` (position ${position})`;
                 }
             }
@@ -114,7 +110,8 @@ function checkNeededFields(fullPath, expected, error_text, queryName, position)
 
 function valueCheck(fullPath, expected, result, error_text, queryName) {
     if (Array.isArray(expected)) {
-        for (var i = 0; i < expected.length; ++i) {
+        let i;
+        for (i = 0; i < expected.length; ++i) {
             checkNeededFields(fullPath, expected[i], error_text, queryName, i);
             if (i >= result.length) {
                 error_text.push(`${queryName}==> EXPECTED has extra value in array from field ` +
@@ -154,8 +151,8 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
             valueCheck(obj_path, expected[key], result_v, error_text, queryName);
         }
     } else {
-        expectedValue = JSON.stringify(expected);
-        resultValue = JSON.stringify(result);
+        const expectedValue = JSON.stringify(expected);
+        const resultValue = JSON.stringify(result);
         if (expectedValue != resultValue) {
             error_text.push(`${queryName}==> Different values for field \`${fullPath}\`:\n` +
                 `EXPECTED: \`${expectedValue}\`\nRESULT:   \`${resultValue}\``);
@@ -164,7 +161,7 @@ function valueCheck(fullPath, expected, result, error_text, queryName) {
 }
 
 function runParser(query, expected, parseQuery, queryName) {
-    var error_text = [];
+    const error_text = [];
     checkNeededFields("", expected, error_text, queryName, null);
     if (error_text.length === 0) {
         valueCheck('', expected, parseQuery(query), error_text, queryName);
@@ -176,10 +173,10 @@ function runSearch(query, expected, doSearch, loadedFile, queryName) {
     const ignore_order = loadedFile.ignore_order;
     const exact_check = loadedFile.exact_check;
 
-    var results = doSearch(query, loadedFile.FILTER_CRATE);
-    var error_text = [];
+    const results = doSearch(query, loadedFile.FILTER_CRATE);
+    const error_text = [];
 
-    for (var key in expected) {
+    for (const key in expected) {
         if (!expected.hasOwnProperty(key)) {
             continue;
         }
@@ -187,37 +184,37 @@ function runSearch(query, expected, doSearch, loadedFile, queryName) {
             error_text.push('==> Unknown key "' + key + '"');
             break;
         }
-        var entry = expected[key];
+        const entry = expected[key];
 
         if (exact_check == true && entry.length !== results[key].length) {
             error_text.push(queryName + "==> Expected exactly " + entry.length +
                             " results but found " + results[key].length + " in '" + key + "'");
         }
 
-        var prev_pos = -1;
-        for (var i = 0; i < entry.length; ++i) {
-            var entry_pos = lookForEntry(entry[i], results[key]);
-            if (entry_pos === null) {
+        let prev_pos = -1;
+        entry.forEach((elem, index) => {
+            const entry_pos = lookForEntry(elem, results[key]);
+            if (entry_pos === -1) {
                 error_text.push(queryName + "==> Result not found in '" + key + "': '" +
-                                JSON.stringify(entry[i]) + "'");
+                                JSON.stringify(elem) + "'");
                 // By default, we just compare the two first items.
                 let item_to_diff = 0;
-                if ((ignore_order === false || exact_check === true) && i < results[key].length) {
-                    item_to_diff = i;
+                if ((!ignore_order || exact_check) && index < results[key].length) {
+                    item_to_diff = index;
                 }
                 error_text.push("Diff of first error:\n" +
-                    betterLookingDiff(entry[i], results[key][item_to_diff]));
+                    betterLookingDiff(elem, results[key][item_to_diff]));
             } else if (exact_check === true && prev_pos + 1 !== entry_pos) {
                 error_text.push(queryName + "==> Exact check failed at position " + (prev_pos + 1) +
-                                ": expected '" + JSON.stringify(entry[i]) + "' but found '" +
-                                JSON.stringify(results[key][i]) + "'");
+                                ": expected '" + JSON.stringify(elem) + "' but found '" +
+                                JSON.stringify(results[key][index]) + "'");
             } else if (ignore_order === false && entry_pos < prev_pos) {
-                error_text.push(queryName + "==> '" + JSON.stringify(entry[i]) + "' was supposed " +
+                error_text.push(queryName + "==> '" + JSON.stringify(elem) + "' was supposed " +
                                 "to be before '" + JSON.stringify(results[key][entry_pos]) + "'");
             } else {
                 prev_pos = entry_pos;
             }
-        }
+        });
     }
     return error_text;
 }
@@ -252,15 +249,15 @@ function runCheck(loadedFile, key, callback) {
             console.log(`==> QUERY variable should have the same length as ${key}`);
             return 1;
         }
-        for (var i = 0; i < query.length; ++i) {
-            var error_text = callback(query[i], expected[i], "[ query `" + query[i] + "`]");
+        for (let i = 0; i < query.length; ++i) {
+            const error_text = callback(query[i], expected[i], "[ query `" + query[i] + "`]");
             if (checkResult(error_text, loadedFile, false) !== 0) {
                 return 1;
             }
         }
         console.log("OK");
     } else {
-        var error_text = callback(query, expected, "");
+        const error_text = callback(query, expected, "");
         if (checkResult(error_text, loadedFile, true) !== 0) {
             return 1;
         }
@@ -269,9 +266,9 @@ function runCheck(loadedFile, key, callback) {
 }
 
 function runChecks(testFile, doSearch, parseQuery) {
-    var checkExpected = false;
-    var checkParsed = false;
-    var testFileContent = readFile(testFile) + 'exports.QUERY = QUERY;';
+    let checkExpected = false;
+    let checkParsed = false;
+    let testFileContent = readFile(testFile) + 'exports.QUERY = QUERY;';
 
     if (testFileContent.indexOf("FILTER_CRATE") !== -1) {
         testFileContent += "exports.FILTER_CRATE = FILTER_CRATE;";
@@ -294,7 +291,7 @@ function runChecks(testFile, doSearch, parseQuery) {
     }
 
     const loadedFile = loadContent(testFileContent);
-    var res = 0;
+    let res = 0;
 
     if (checkExpected) {
         res += runCheck(loadedFile, "EXPECTED", (query, expected, text) => {
@@ -323,8 +320,7 @@ function loadSearchJS(doc_folder, resource_suffix) {
     const searchIndex = require(searchIndexJs);
 
     const staticFiles = path.join(doc_folder, "static.files");
-    const searchJs = fs.readdirSync(staticFiles).find(
-        f => f.match(/search.*\.js$/));
+    const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
     const searchModule = require(path.join(staticFiles, searchJs));
     const searchWords = searchModule.initSearch(searchIndex.searchIndex);
 
@@ -334,7 +330,7 @@ function loadSearchJS(doc_folder, resource_suffix) {
                 filterCrate, currentCrate);
         },
         parseQuery: searchModule.parseQuery,
-    }
+    };
 }
 
 function showHelp() {
@@ -349,14 +345,14 @@ function showHelp() {
 }
 
 function parseOptions(args) {
-    var opts = {
+    const opts = {
         "crate_name": "",
         "resource_suffix": "",
         "doc_folder": "",
         "test_folder": "",
         "test_file": [],
     };
-    var correspondences = {
+    const correspondences = {
         "--resource-suffix": "resource_suffix",
         "--doc-folder": "doc_folder",
         "--test-folder": "test_folder",
@@ -364,7 +360,7 @@ function parseOptions(args) {
         "--crate-name": "crate_name",
     };
 
-    for (var i = 0; i < args.length; ++i) {
+    for (let i = 0; i < args.length; ++i) {
         if (correspondences.hasOwnProperty(args[i])) {
             i += 1;
             if (i >= args.length) {
@@ -398,17 +394,18 @@ function parseOptions(args) {
 }
 
 function main(argv) {
-    var opts = parseOptions(argv.slice(2));
+    const opts = parseOptions(argv.slice(2));
     if (opts === null) {
         return 1;
     }
 
-    let parseAndSearch = loadSearchJS(
+    const parseAndSearch = loadSearchJS(
         opts["doc_folder"],
-        opts["resource_suffix"]);
-    var errors = 0;
+        opts["resource_suffix"]
+    );
+    let errors = 0;
 
-    let doSearch = function (queryStr, filterCrate) {
+    const doSearch = function (queryStr, filterCrate) {
         return parseAndSearch.doSearch(queryStr, filterCrate, opts["crate_name"]);
     };
 
diff --git a/src/tools/tidy/src/alphabetical.rs b/src/tools/tidy/src/alphabetical.rs
index f913f6cde38..9bfee1efc0b 100644
--- a/src/tools/tidy/src/alphabetical.rs
+++ b/src/tools/tidy/src/alphabetical.rs
@@ -95,7 +95,7 @@ fn check_section<'a>(
 }
 
 pub fn check(path: &Path, bad: &mut bool) {
-    walk(path, &mut filter_dirs, &mut |entry, contents| {
+    walk(path, filter_dirs, &mut |entry, contents| {
         let file = &entry.path().display();
 
         let mut lines = contents.lines().enumerate().peekable();
diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs
index b898f20a5d0..2d6abe59343 100644
--- a/src/tools/tidy/src/bins.rs
+++ b/src/tools/tidy/src/bins.rs
@@ -101,54 +101,38 @@ mod os_impl {
 
         const ALLOWED: &[&str] = &["configure", "x"];
 
-        walk_no_read(
-            path,
-            &mut |path| {
-                filter_dirs(path)
-                    || path.ends_with("src/etc")
-                    // This is a list of directories that we almost certainly
-                    // don't need to walk. A future PR will likely want to
-                    // remove these in favor of crate::walk_no_read using git
-                    // ls-files to discover the paths we should check, which
-                    // would naturally ignore all of these directories. It's
-                    // also likely faster than walking the directory tree
-                    // directly (since git is just reading from a couple files
-                    // to produce the results).
-                    || path.ends_with("target")
-                    || path.ends_with("build")
-                    || path.ends_with(".git")
-            },
-            &mut |entry| {
-                let file = entry.path();
-                let extension = file.extension();
-                let scripts = ["py", "sh", "ps1"];
-                if scripts.into_iter().any(|e| extension == Some(OsStr::new(e))) {
-                    return;
-                }
+        // FIXME: we don't need to look at all binaries, only files that have been modified in this branch
+        // (e.g. using `git ls-files`).
+        walk_no_read(path, |path| filter_dirs(path) || path.ends_with("src/etc"), &mut |entry| {
+            let file = entry.path();
+            let extension = file.extension();
+            let scripts = ["py", "sh", "ps1"];
+            if scripts.into_iter().any(|e| extension == Some(OsStr::new(e))) {
+                return;
+            }
 
-                if t!(is_executable(&file), file) {
-                    let rel_path = file.strip_prefix(path).unwrap();
-                    let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/");
+            if t!(is_executable(&file), file) {
+                let rel_path = file.strip_prefix(path).unwrap();
+                let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/");
 
-                    if ALLOWED.contains(&git_friendly_path.as_str()) {
-                        return;
-                    }
+                if ALLOWED.contains(&git_friendly_path.as_str()) {
+                    return;
+                }
 
-                    let output = Command::new("git")
-                        .arg("ls-files")
-                        .arg(&git_friendly_path)
-                        .current_dir(path)
-                        .stderr(Stdio::null())
-                        .output()
-                        .unwrap_or_else(|e| {
-                            panic!("could not run git ls-files: {e}");
-                        });
-                    let path_bytes = rel_path.as_os_str().as_bytes();
-                    if output.status.success() && output.stdout.starts_with(path_bytes) {
-                        tidy_error!(bad, "binary checked into source: {}", file.display());
-                    }
+                let output = Command::new("git")
+                    .arg("ls-files")
+                    .arg(&git_friendly_path)
+                    .current_dir(path)
+                    .stderr(Stdio::null())
+                    .output()
+                    .unwrap_or_else(|e| {
+                        panic!("could not run git ls-files: {e}");
+                    });
+                let path_bytes = rel_path.as_os_str().as_bytes();
+                if output.status.success() && output.stdout.starts_with(path_bytes) {
+                    tidy_error!(bad, "binary checked into source: {}", file.display());
                 }
-            },
-        )
+            }
+        })
     }
 }
diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs
index 0dd9c1e160c..2241375eaae 100644
--- a/src/tools/tidy/src/debug_artifacts.rs
+++ b/src/tools/tidy/src/debug_artifacts.rs
@@ -6,7 +6,7 @@ use std::path::Path;
 const GRAPHVIZ_POSTFLOW_MSG: &str = "`borrowck_graphviz_postflow` attribute in test";
 
 pub fn check(test_dir: &Path, bad: &mut bool) {
-    walk(test_dir, &mut filter_dirs, &mut |entry, contents| {
+    walk(test_dir, filter_dirs, &mut |entry, contents| {
         let filename = entry.path();
         let is_rust = filename.extension().map_or(false, |ext| ext == "rs");
         if !is_rust {
diff --git a/src/tools/tidy/src/edition.rs b/src/tools/tidy/src/edition.rs
index 8172e3d292b..67d9c30a04f 100644
--- a/src/tools/tidy/src/edition.rs
+++ b/src/tools/tidy/src/edition.rs
@@ -9,27 +9,20 @@ fn is_edition_2021(mut line: &str) -> bool {
 }
 
 pub fn check(path: &Path, bad: &mut bool) {
-    walk(
-        path,
-        &mut |path| {
-            filter_dirs(path)
-                || (path.ends_with("tests") && path.join("COMPILER_TESTS.md").exists())
-        },
-        &mut |entry, contents| {
-            let file = entry.path();
-            let filename = file.file_name().unwrap();
-            if filename != "Cargo.toml" {
-                return;
-            }
+    walk(path, |path| filter_dirs(path), &mut |entry, contents| {
+        let file = entry.path();
+        let filename = file.file_name().unwrap();
+        if filename != "Cargo.toml" {
+            return;
+        }
 
-            let is_2021 = contents.lines().any(is_edition_2021);
-            if !is_2021 {
-                tidy_error!(
-                    bad,
-                    "{} doesn't have `edition = \"2021\"` on a separate line",
-                    file.display()
-                );
-            }
-        },
-    );
+        let is_2021 = contents.lines().any(is_edition_2021);
+        if !is_2021 {
+            tidy_error!(
+                bad,
+                "{} doesn't have `edition = \"2021\"` on a separate line",
+                file.display()
+            );
+        }
+    });
 }
diff --git a/src/tools/tidy/src/error_codes.rs b/src/tools/tidy/src/error_codes.rs
index c60caa0d49c..d90ad5abbf9 100644
--- a/src/tools/tidy/src/error_codes.rs
+++ b/src/tools/tidy/src/error_codes.rs
@@ -127,7 +127,7 @@ fn check_error_codes_docs(
 
     let mut no_longer_emitted_codes = Vec::new();
 
-    walk(&docs_path, &mut |_| false, &mut |entry, contents| {
+    walk(&docs_path, |_| false, &mut |entry, contents| {
         let path = entry.path();
 
         // Error if the file isn't markdown.
@@ -319,7 +319,7 @@ fn check_error_codes_used(
 
     let mut found_codes = Vec::new();
 
-    walk_many(search_paths, &mut filter_dirs, &mut |entry, contents| {
+    walk_many(search_paths, filter_dirs, &mut |entry, contents| {
         let path = entry.path();
 
         // Return early if we aren't looking at a source file.
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index af92e6eb863..f0f13628dc7 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -101,7 +101,7 @@ pub fn check(
             &tests_path.join("rustdoc-ui"),
             &tests_path.join("rustdoc"),
         ],
-        &mut filter_dirs,
+        filter_dirs,
         &mut |entry, contents| {
             let file = entry.path();
             let filename = file.file_name().unwrap().to_string_lossy();
@@ -477,11 +477,11 @@ fn get_and_check_lib_features(
 
 fn map_lib_features(
     base_src_path: &Path,
-    mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize),
+    mf: &mut (dyn Send + Sync + FnMut(Result<(&str, Feature), &str>, &Path, usize)),
 ) {
     walk(
         base_src_path,
-        &mut |path| filter_dirs(path) || path.ends_with("tests"),
+        |path| filter_dirs(path) || path.ends_with("tests"),
         &mut |entry, contents| {
             let file = entry.path();
             let filename = file.file_name().unwrap().to_string_lossy();
diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs
index 505f9d724c8..d98758ace4f 100644
--- a/src/tools/tidy/src/main.rs
+++ b/src/tools/tidy/src/main.rs
@@ -91,7 +91,6 @@ fn main() {
 
         // Checks that need to be done for both the compiler and std libraries.
         check!(unit_tests, &src_path);
-        check!(unit_tests, &tests_path);
         check!(unit_tests, &compiler_path);
         check!(unit_tests, &library_path);
 
@@ -107,7 +106,6 @@ fn main() {
         check!(edition, &src_path);
         check!(edition, &compiler_path);
         check!(edition, &library_path);
-        check!(edition, &tests_path);
 
         check!(alphabetical, &src_path);
         check!(alphabetical, &tests_path);
diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs
index f4592fdcff9..33938ac9a0a 100644
--- a/src/tools/tidy/src/pal.rs
+++ b/src/tools/tidy/src/pal.rs
@@ -68,7 +68,7 @@ pub fn check(path: &Path, bad: &mut bool) {
     // Sanity check that the complex parsing here works.
     let mut saw_target_arch = false;
     let mut saw_cfg_bang = false;
-    walk(path, &mut filter_dirs, &mut |entry, contents| {
+    walk(path, filter_dirs, &mut |entry, contents| {
         let file = entry.path();
         let filestr = file.to_string_lossy().replace("\\", "/");
         if !filestr.ends_with(".rs") {
diff --git a/src/tools/tidy/src/rustdoc_gui_tests.rs b/src/tools/tidy/src/rustdoc_gui_tests.rs
index feb513df34b..d7db5c02297 100644
--- a/src/tools/tidy/src/rustdoc_gui_tests.rs
+++ b/src/tools/tidy/src/rustdoc_gui_tests.rs
@@ -5,7 +5,7 @@ use std::path::Path;
 pub fn check(path: &Path, bad: &mut bool) {
     crate::walk::walk(
         &path.join("rustdoc-gui"),
-        &mut |p| {
+        |p| {
             // If there is no extension, it's very likely a folder and we want to go into it.
             p.extension().map(|e| e != "goml").unwrap_or(false)
         },
diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs
index 6a0855405ec..9ecb30529cc 100644
--- a/src/tools/tidy/src/style.rs
+++ b/src/tools/tidy/src/style.rs
@@ -235,7 +235,7 @@ pub fn check(path: &Path, bad: &mut bool) {
         .chain(PROBLEMATIC_CONSTS.iter().map(|v| format!("{:X}", v)))
         .collect();
     let problematic_regex = RegexSet::new(problematic_consts_strings.as_slice()).unwrap();
-    walk(path, &mut skip, &mut |entry, contents| {
+    walk(path, skip, &mut |entry, contents| {
         let file = entry.path();
         let filename = file.file_name().unwrap().to_string_lossy();
         let extensions =
diff --git a/src/tools/tidy/src/target_specific_tests.rs b/src/tools/tidy/src/target_specific_tests.rs
index d7a157672cf..f41fa4fcce1 100644
--- a/src/tools/tidy/src/target_specific_tests.rs
+++ b/src/tools/tidy/src/target_specific_tests.rs
@@ -37,7 +37,7 @@ struct RevisionInfo<'a> {
 pub fn check(path: &Path, bad: &mut bool) {
     crate::walk::walk(
         path,
-        &mut |path| path.extension().map(|p| p == "rs") == Some(false),
+        |path| path.extension().map(|p| p == "rs") == Some(false),
         &mut |entry, content| {
             let file = entry.path().display();
             let mut header_map = BTreeMap::new();
diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs
index 409f7563184..15c36923e88 100644
--- a/src/tools/tidy/src/ui_tests.rs
+++ b/src/tools/tidy/src/ui_tests.rs
@@ -54,7 +54,7 @@ fn check_entries(path: &Path, bad: &mut bool) {
 pub fn check(path: &Path, bad: &mut bool) {
     check_entries(&path, bad);
     for path in &[&path.join("ui"), &path.join("ui-fulldeps")] {
-        crate::walk::walk_no_read(path, &mut |_| false, &mut |entry| {
+        crate::walk::walk_no_read(path, |_| false, &mut |entry| {
             let file_path = entry.path();
             if let Some(ext) = file_path.extension() {
                 if ext == "stderr" || ext == "stdout" {
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index 27f36c85561..3da200a8a93 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -11,18 +11,19 @@ use crate::walk::{filter_dirs, walk};
 use std::path::Path;
 
 pub fn check(root_path: &Path, bad: &mut bool) {
-    let core = &root_path.join("core");
-    let core_tests = &core.join("tests");
-    let core_benches = &core.join("benches");
-    let is_core = |path: &Path| {
-        path.starts_with(core) && !(path.starts_with(core_tests) || path.starts_with(core_benches))
+    let core = root_path.join("core");
+    let core_copy = core.clone();
+    let core_tests = core.join("tests");
+    let core_benches = core.join("benches");
+    let is_core = move |path: &Path| {
+        path.starts_with(&core)
+            && !(path.starts_with(&core_tests) || path.starts_with(&core_benches))
     };
 
-    let mut skip = |path: &Path| {
+    let skip = move |path: &Path| {
         let file_name = path.file_name().unwrap_or_default();
         if path.is_dir() {
             filter_dirs(path)
-                || path.ends_with("tests")
                 || path.ends_with("src/doc")
                 || (file_name == "tests" || file_name == "benches") && !is_core(path)
         } else {
@@ -35,9 +36,9 @@ pub fn check(root_path: &Path, bad: &mut bool) {
         }
     };
 
-    walk(root_path, &mut skip, &mut |entry, contents| {
+    walk(root_path, skip, &mut |entry, contents| {
         let path = entry.path();
-        let is_core = path.starts_with(core);
+        let is_core = path.starts_with(&core_copy);
         for (i, line) in contents.lines().enumerate() {
             let line = line.trim();
             let is_test = || line.contains("#[test]") && !line.contains("`#[test]");
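
The closures here become `move` closures over cloned `PathBuf`s because the reworked `walk` (next file) takes the skip callback by value with a `'static` bound, so it can no longer borrow locals such as `&core`. The capture pattern in isolation, as a small sketch:

```rust
use std::path::{Path, PathBuf};

fn main() {
    let core: PathBuf = PathBuf::from("library/core");
    // Clone the path and move the clone into the closure so the closure owns
    // everything it captures and satisfies a `'static` bound.
    let core_for_skip = core.clone();
    let skip = move |path: &Path| path.starts_with(&core_for_skip);

    // The original `core` is still usable because only the clone was moved.
    assert!(skip(&core.join("src/lib.rs")));
}
```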
diff --git a/src/tools/tidy/src/walk.rs b/src/tools/tidy/src/walk.rs
index 4cfb70fa31c..94152e75168 100644
--- a/src/tools/tidy/src/walk.rs
+++ b/src/tools/tidy/src/walk.rs
@@ -1,9 +1,8 @@
-use std::fs::File;
-use std::io::Read;
-use walkdir::{DirEntry, WalkDir};
+use ignore::DirEntry;
 
-use std::path::Path;
+use std::{fs::File, io::Read, path::Path};
 
+/// The default directory filter.
 pub fn filter_dirs(path: &Path) -> bool {
     let skip = [
         "tidy-test-file",
@@ -36,34 +35,42 @@ pub fn filter_dirs(path: &Path) -> bool {
 
 pub fn walk_many(
     paths: &[&Path],
-    skip: &mut dyn FnMut(&Path) -> bool,
+    skip: impl Clone + Send + Sync + 'static + Fn(&Path) -> bool,
     f: &mut dyn FnMut(&DirEntry, &str),
 ) {
     for path in paths {
-        walk(path, skip, f);
+        walk(path, skip.clone(), f);
     }
 }
 
-pub fn walk(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry, &str)) {
-    let mut contents = String::new();
+pub fn walk(
+    path: &Path,
+    skip: impl Send + Sync + 'static + Fn(&Path) -> bool,
+    f: &mut dyn FnMut(&DirEntry, &str),
+) {
+    let mut contents = Vec::new();
     walk_no_read(path, skip, &mut |entry| {
         contents.clear();
-        if t!(File::open(entry.path()), entry.path()).read_to_string(&mut contents).is_err() {
-            contents.clear();
-        }
-        f(&entry, &contents);
+        let mut file = t!(File::open(entry.path()), entry.path());
+        t!(file.read_to_end(&mut contents), entry.path());
+        let contents_str = match std::str::from_utf8(&contents) {
+            Ok(s) => s,
+            Err(_) => return, // skip this file
+        };
+        f(&entry, &contents_str);
     });
 }
 
 pub(crate) fn walk_no_read(
     path: &Path,
-    skip: &mut dyn FnMut(&Path) -> bool,
+    skip: impl Send + Sync + 'static + Fn(&Path) -> bool,
     f: &mut dyn FnMut(&DirEntry),
 ) {
-    let walker = WalkDir::new(path).into_iter().filter_entry(|e| !skip(e.path()));
-    for entry in walker {
+    let mut walker = ignore::WalkBuilder::new(path);
+    let walker = walker.filter_entry(move |e| !skip(e.path()));
+    for entry in walker.build() {
         if let Ok(entry) = entry {
-            if entry.file_type().is_dir() {
+            if entry.file_type().map_or(true, |kind| kind.is_dir() || kind.is_symlink()) {
                 continue;
             }
             f(&entry);
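
The walker is now backed by the `ignore` crate, so paths excluded by `.gitignore` (for example the newly un-anchored `build/` entry) never reach the callbacks, which is the mechanism behind PR #106440; callers pass the skip filter as an owned `Fn` closure instead of `&mut dyn FnMut`. A standalone sketch of roughly what `walk_no_read` now does (assuming the `ignore` crate):

```rust
use std::path::Path;

fn main() {
    // WalkBuilder honors .gitignore by default; filter_entry applies the
    // extra skip on top, and non-file entries (dirs, symlinks) are skipped.
    let skip = |p: &Path| p.ends_with("src/etc");
    let mut builder = ignore::WalkBuilder::new(".");
    builder.filter_entry(move |e| !skip(e.path()));
    for entry in builder.build().flatten() {
        if entry.file_type().map_or(true, |t| t.is_dir() || t.is_symlink()) {
            continue;
        }
        println!("{}", entry.path().display());
    }
}
```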
diff --git a/tests/run-make/rustdoc-verify-output-files/Makefile b/tests/run-make/rustdoc-verify-output-files/Makefile
index bfabbbc6586..0666122e8ab 100644
--- a/tests/run-make/rustdoc-verify-output-files/Makefile
+++ b/tests/run-make/rustdoc-verify-output-files/Makefile
@@ -22,15 +22,11 @@ all:
 	# Check if expected json file is generated
 	[ -e $(OUTPUT_DIR)/foobar.json ]
 
-	# TODO
-	# We should re-generate json doc once again and compare the diff with previously
-	# generated one. Because layout of json docs changes in each compilation, we can't
-	# do that currently.
-	#
-	# See https://github.com/rust-lang/rust/issues/103785#issuecomment-1307425590 for details.
+	# Copy first json output to check if it's exactly same after second compilation
+	cp -R $(OUTPUT_DIR)/foobar.json $(TMP_OUTPUT_DIR)/foobar.json
 
-	# remove generated json doc
-	rm $(OUTPUT_DIR)/foobar.json
+	# Generate json doc on the same output
+	$(RUSTDOC) src/lib.rs --crate-name foobar --crate-type lib --out-dir $(OUTPUT_DIR) -Z unstable-options --output-format json
 
-	# Check if json doc compilation broke any of the html files generated previously
+	# Check if all docs(including both json and html formats) are still the same after multiple compilations
 	$(DIFF) -r -q $(OUTPUT_DIR) $(TMP_OUTPUT_DIR)
diff --git a/tests/ui/traits/non_lifetime_binders/bad-copy-cond.rs b/tests/ui/traits/non_lifetime_binders/bad-copy-cond.rs
new file mode 100644
index 00000000000..506cad25f63
--- /dev/null
+++ b/tests/ui/traits/non_lifetime_binders/bad-copy-cond.rs
@@ -0,0 +1,9 @@
+#![feature(non_lifetime_binders)]
+//~^ WARN the feature `non_lifetime_binders` is incomplete
+
+fn foo() where for<T> T: Copy {}
+
+fn main() {
+    foo();
+    //~^ ERROR the trait bound `T: Copy` is not satisfied
+}
diff --git a/tests/ui/traits/non_lifetime_binders/bad-copy-cond.stderr b/tests/ui/traits/non_lifetime_binders/bad-copy-cond.stderr
new file mode 100644
index 00000000000..07e02d47f27
--- /dev/null
+++ b/tests/ui/traits/non_lifetime_binders/bad-copy-cond.stderr
@@ -0,0 +1,24 @@
+warning: the feature `non_lifetime_binders` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/bad-copy-cond.rs:1:12
+   |
+LL | #![feature(non_lifetime_binders)]
+   |            ^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #108185 <https://github.com/rust-lang/rust/issues/108185> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+error[E0277]: the trait bound `T: Copy` is not satisfied
+  --> $DIR/bad-copy-cond.rs:7:5
+   |
+LL |     foo();
+   |     ^^^ the trait `Copy` is not implemented for `T`
+   |
+note: required by a bound in `foo`
+  --> $DIR/bad-copy-cond.rs:4:26
+   |
+LL | fn foo() where for<T> T: Copy {}
+   |                          ^^^^ required by this bound in `foo`
+
+error: aborting due to previous error; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/triagebot.toml b/triagebot.toml
index f7607d522c1..403a7087ee1 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -471,6 +471,11 @@ If this was intentional then you can ignore this comment.
 [mentions."src/tools/x"]
 message = "`src/tools/x` was changed. Bump version of Cargo.toml in `src/tools/x` so tidy will suggest installing the new version."
 
+[mentions."src/bootstrap/defaults/config.compiler.toml"]
+message = "This PR changes src/bootstrap/defaults/config.compiler.toml. If appropriate, please also update `config.codegen.toml` so the defaults are in sync."
+[mentions."src/bootstrap/defaults/config.codegen.toml"]
+message = "This PR changes src/bootstrap/defaults/config.codegen.toml. If appropriate, please also update `config.compiler.toml` so the defaults are in sync."
+
 [assign]
 warn_non_default_branch = true
 contributing_url = "https://rustc-dev-guide.rust-lang.org/contributing.html"