about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Lukas Wirth <lukastw97@gmail.com>  2024-02-01 16:16:38 +0100
committer Lukas Wirth <lukastw97@gmail.com>  2024-02-01 17:57:27 +0100
commit    9e8a0fae0cea88a4a64bfb9b9dd1fe00f37c3f7e (patch)
tree      d14bfee5773f20be5006c3036bdaeb5fb98e2808
parent    850ba2fb6395cb39edef263256f7ba334f9142dc (diff)
download  rust-9e8a0fae0cea88a4a64bfb9b9dd1fe00f37c3f7e.tar.gz
          rust-9e8a0fae0cea88a4a64bfb9b9dd1fe00f37c3f7e.zip
Lint debug prints and disallowed types with clippy
-rw-r--r--.github/workflows/ci.yaml2
-rw-r--r--Cargo.lock1
-rw-r--r--Cargo.toml7
-rw-r--r--clippy.toml5
-rw-r--r--crates/flycheck/src/lib.rs7
-rw-r--r--crates/hir-def/src/body/lower.rs5
-rw-r--r--crates/hir-def/src/item_scope.rs8
-rw-r--r--crates/hir-ty/src/layout/tests.rs5
-rw-r--r--crates/hir-ty/src/tests.rs9
-rw-r--r--crates/hir-ty/src/traits.rs2
-rw-r--r--crates/hir/src/diagnostics.rs4
-rw-r--r--crates/ide-assists/src/handlers/desugar_doc_comment.rs4
-rw-r--r--crates/ide-assists/src/handlers/extract_module.rs14
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_methods.rs6
-rw-r--r--crates/ide-assists/src/handlers/generate_delegate_trait.rs4
-rw-r--r--crates/ide-assists/src/handlers/inline_type_alias.rs10
-rw-r--r--crates/ide-assists/src/handlers/merge_match_arms.rs11
-rw-r--r--crates/ide-assists/src/handlers/remove_unused_imports.rs6
-rw-r--r--crates/ide-assists/src/handlers/unwrap_result_return_type.rs4
-rw-r--r--crates/ide-assists/src/utils/suggest_name.rs6
-rw-r--r--crates/ide-db/src/tests/sourcegen_lints.rs1
-rw-r--r--crates/ide-diagnostics/src/lib.rs10
-rw-r--r--crates/ide-diagnostics/src/tests.rs1
-rw-r--r--crates/ide-ssr/src/matching.rs2
-rw-r--r--crates/ide-ssr/src/tests.rs1
-rw-r--r--crates/ide/src/doc_links/tests.rs3
-rw-r--r--crates/ide/src/references.rs4
-rw-r--r--crates/ide/src/static_index.rs15
-rw-r--r--crates/limit/src/lib.rs13
-rw-r--r--crates/mbe/src/syntax_bridge/tests.rs5
-rw-r--r--crates/parser/src/tests/sourcegen_inline_tests.rs1
-rw-r--r--crates/proc-macro-api/Cargo.toml3
-rw-r--r--crates/proc-macro-api/src/msg/flat.rs9
-rw-r--r--crates/proc-macro-srv-cli/src/main.rs2
-rw-r--r--crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs1
-rw-r--r--crates/profile/src/lib.rs3
-rw-r--r--crates/profile/src/stop_watch.rs3
-rw-r--r--crates/project-model/src/cargo_workspace.rs2
-rw-r--r--crates/project-model/src/sysroot.rs2
-rw-r--r--crates/rust-analyzer/src/bin/main.rs2
-rw-r--r--crates/rust-analyzer/src/cli.rs2
-rw-r--r--crates/rust-analyzer/src/cli/lsif.rs20
-rw-r--r--crates/rust-analyzer/src/cli/rustc_tests.rs15
-rw-r--r--crates/rust-analyzer/src/cli/scip.rs14
-rw-r--r--crates/rust-analyzer/src/diagnostics/to_proto.rs4
-rw-r--r--crates/rust-analyzer/src/lsp/ext.rs15
-rw-r--r--crates/rust-analyzer/src/tracing/hprof.rs1
-rw-r--r--crates/rust-analyzer/tests/slow-tests/main.rs1
-rw-r--r--crates/rust-analyzer/tests/slow-tests/tidy.rs71
-rw-r--r--crates/sourcegen/src/lib.rs1
-rw-r--r--crates/stdx/Cargo.toml2
-rw-r--r--crates/stdx/src/anymap.rs5
-rw-r--r--crates/stdx/src/lib.rs2
-rw-r--r--crates/stdx/src/panic_context.rs3
-rw-r--r--crates/stdx/src/rand.rs5
-rw-r--r--crates/syntax/src/fuzz.rs1
-rw-r--r--crates/syntax/src/tests/sourcegen_ast.rs8
-rw-r--r--crates/test-utils/src/lib.rs3
-rw-r--r--docs/dev/lsp-extensions.md2
-rw-r--r--lib/la-arena/src/lib.rs10
-rw-r--r--lib/la-arena/src/map.rs6
-rw-r--r--lib/lsp-server/examples/goto_def.rs3
-rw-r--r--lib/lsp-server/src/lib.rs1
-rw-r--r--xtask/src/main.rs1
64 files changed, 170 insertions, 229 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b5c5ff04738..014dcbcc2a6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -105,7 +105,7 @@ jobs:
 
       - name: clippy
         if: matrix.os == 'ubuntu-latest'
-        run: cargo clippy --all-targets
+        run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr
 
   # Weird targets to catch non-portable code
   rust-cross:
diff --git a/Cargo.lock b/Cargo.lock
index 1b5efb4bb81..dff65e21fd6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1283,6 +1283,7 @@ dependencies = [
  "object 0.32.0",
  "paths",
  "profile",
+ "rustc-hash",
  "serde",
  "serde_json",
  "snap",
diff --git a/Cargo.toml b/Cargo.toml
index 5a748648116..2f00e95d7ec 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -164,6 +164,8 @@ len_without_is_empty = "allow"
 enum_variant_names = "allow"
 # Builder pattern disagrees
 new_ret_no_self = "allow"
+# Has a bunch of false positives
+useless_asref = "allow"
 
 ## Following lints should be tackled at some point
 borrowed_box = "allow"
@@ -178,9 +180,12 @@ type_complexity = "allow"
 wrong_self_convention = "allow"
 
 ## warn at following lints
+# CI raises these to deny
 dbg_macro = "warn"
 todo = "warn"
-unimplemented = "allow"
+print_stdout = "warn"
+print_stderr = "warn"
+
 rc_buffer = "warn"
 # FIXME enable this, we use this pattern a lot so its annoying work ...
 # str_to_string = "warn"
diff --git a/clippy.toml b/clippy.toml
new file mode 100644
index 00000000000..8032c775ab0
--- /dev/null
+++ b/clippy.toml
@@ -0,0 +1,5 @@
+disallowed-types = [
+    { path = "std::collections::HashMap", reason = "use FxHashMap" },
+    { path = "std::collections::HashSet", reason = "use FxHashSet" },
+    { path = "std::collections::hash_map::RandomState", reason = "use BuildHasherDefault<FxHasher>"}
+]
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 22603842a1b..ef1404487e6 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -493,9 +493,7 @@ impl CargoActor {
                     // Skip certain kinds of messages to only spend time on what's useful
                     JsonMessage::Cargo(message) => match message {
                         cargo_metadata::Message::CompilerArtifact(artifact) if !artifact.fresh => {
-                            self.sender
-                                .send(CargoMessage::CompilerArtifact(Box::new(artifact)))
-                                .unwrap();
+                            self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
                         }
                         cargo_metadata::Message::CompilerMessage(msg) => {
                             self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
@@ -539,8 +537,9 @@ impl CargoActor {
     }
 }
 
+#[allow(clippy::large_enum_variant)]
 enum CargoMessage {
-    CompilerArtifact(Box<cargo_metadata::Artifact>),
+    CompilerArtifact(cargo_metadata::Artifact),
     Diagnostic(Diagnostic),
 }
 
diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs
index 492ea6d5c59..29ac666277d 100644
--- a/crates/hir-def/src/body/lower.rs
+++ b/crates/hir-def/src/body/lower.rs
@@ -1980,10 +1980,7 @@ fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)>
     let ast_lit = lit.literal()?;
     let mut hir_lit: Literal = ast_lit.kind().into();
     if lit.minus_token().is_some() {
-        let Some(h) = hir_lit.negate() else {
-            return None;
-        };
-        hir_lit = h;
+        hir_lit = hir_lit.negate()?;
     }
     Some((hir_lit, ast_lit))
 }
diff --git a/crates/hir-def/src/item_scope.rs b/crates/hir-def/src/item_scope.rs
index 6237ea7353f..60e61dcdf40 100644
--- a/crates/hir-def/src/item_scope.rs
+++ b/crates/hir-def/src/item_scope.rs
@@ -222,17 +222,15 @@ impl ItemScope {
         self.declarations.iter().copied()
     }
 
-    pub fn extern_crate_decls(
-        &self,
-    ) -> impl Iterator<Item = ExternCrateId> + ExactSizeIterator + '_ {
+    pub fn extern_crate_decls(&self) -> impl ExactSizeIterator<Item = ExternCrateId> + '_ {
         self.extern_crate_decls.iter().copied()
     }
 
-    pub fn use_decls(&self) -> impl Iterator<Item = UseId> + ExactSizeIterator + '_ {
+    pub fn use_decls(&self) -> impl ExactSizeIterator<Item = UseId> + '_ {
         self.use_decls.iter().copied()
     }
 
-    pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
+    pub fn impls(&self) -> impl ExactSizeIterator<Item = ImplId> + '_ {
         self.impls.iter().copied()
     }
 
diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs
index 1f2ea753c1b..ba3dfe8100d 100644
--- a/crates/hir-ty/src/layout/tests.rs
+++ b/crates/hir-ty/src/layout/tests.rs
@@ -1,8 +1,7 @@
-use std::collections::HashMap;
-
 use chalk_ir::{AdtId, TyKind};
 use either::Either;
 use hir_def::db::DefDatabase;
+use rustc_hash::FxHashMap;
 use test_fixture::WithFixture;
 use triomphe::Arc;
 
@@ -16,7 +15,7 @@ use crate::{
 mod closure;
 
 fn current_machine_data_layout() -> String {
-    project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
+    project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap()
 }
 
 fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutError> {
diff --git a/crates/hir-ty/src/tests.rs b/crates/hir-ty/src/tests.rs
index 9804910c878..03e593d9d17 100644
--- a/crates/hir-ty/src/tests.rs
+++ b/crates/hir-ty/src/tests.rs
@@ -10,7 +10,7 @@ mod regression;
 mod simple;
 mod traits;
 
-use std::{collections::HashMap, env};
+use std::env;
 
 use base_db::{FileRange, SourceDatabaseExt};
 use expect_test::Expect;
@@ -25,6 +25,7 @@ use hir_def::{
 };
 use hir_expand::{db::ExpandDatabase, InFile};
 use once_cell::race::OnceBool;
+use rustc_hash::FxHashMap;
 use stdx::format_to;
 use syntax::{
     ast::{self, AstNode, HasName},
@@ -90,9 +91,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
     let (db, files) = TestDB::with_many_files(ra_fixture);
 
     let mut had_annotations = false;
-    let mut mismatches = HashMap::new();
-    let mut types = HashMap::new();
-    let mut adjustments = HashMap::<_, Vec<_>>::new();
+    let mut mismatches = FxHashMap::default();
+    let mut types = FxHashMap::default();
+    let mut adjustments = FxHashMap::<_, Vec<_>>::default();
     for (file_id, annotations) in db.extract_annotations() {
         for (range, expected) in annotations {
             let file_range = FileRange { file_id, range };
diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs
index 3a1a4e63ea1..5303182d8ce 100644
--- a/crates/hir-ty/src/traits.rs
+++ b/crates/hir-ty/src/traits.rs
@@ -187,7 +187,7 @@ struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, Ch
 
 impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> {
     fn drop(&mut self) {
-        eprintln!("chalk program:\n{}", self.0);
+        tracing::info!("chalk program:\n{}", self.0);
     }
 }
 
diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs
index 2d8f1dbad51..b161265cd95 100644
--- a/crates/hir/src/diagnostics.rs
+++ b/crates/hir/src/diagnostics.rs
@@ -546,9 +546,7 @@ impl AnyDiagnostic {
                             source_map.pat_syntax(pat).expect("unexpected synthetic");
 
                         // cast from Either<Pat, SelfParam> -> Either<_, Pat>
-                        let Some(ptr) = AstPtr::try_from_raw(value.syntax_node_ptr()) else {
-                            return None;
-                        };
+                        let ptr = AstPtr::try_from_raw(value.syntax_node_ptr())?;
                         InFile { file_id, value: ptr }
                     }
                 };
diff --git a/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/crates/ide-assists/src/handlers/desugar_doc_comment.rs
index c859e98524e..d2649280467 100644
--- a/crates/ide-assists/src/handlers/desugar_doc_comment.rs
+++ b/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -27,9 +27,7 @@ use crate::{
 pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let comment = ctx.find_token_at_offset::<ast::Comment>()?;
     // Only allow doc comments
-    let Some(placement) = comment.kind().doc else {
-        return None;
-    };
+    let placement = comment.kind().doc?;
 
     // Only allow comments which are alone on their line
     if let Some(prev) = comment.syntax().prev_token() {
diff --git a/crates/ide-assists/src/handlers/extract_module.rs b/crates/ide-assists/src/handlers/extract_module.rs
index 30c3983dc41..af834c8a53d 100644
--- a/crates/ide-assists/src/handlers/extract_module.rs
+++ b/crates/ide-assists/src/handlers/extract_module.rs
@@ -1,7 +1,4 @@
-use std::{
-    collections::{HashMap, HashSet},
-    iter,
-};
+use std::iter;
 
 use hir::{HasSource, HirFileIdExt, ModuleSource};
 use ide_db::{
@@ -9,6 +6,7 @@ use ide_db::{
     base_db::FileId,
     defs::{Definition, NameClass, NameRefClass},
     search::{FileReference, SearchScope},
+    FxHashMap, FxHashSet,
 };
 use itertools::Itertools;
 use smallvec::SmallVec;
@@ -235,9 +233,9 @@ impl Module {
     fn get_usages_and_record_fields(
         &self,
         ctx: &AssistContext<'_>,
-    ) -> (HashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
+    ) -> (FxHashMap<FileId, Vec<(TextRange, String)>>, Vec<SyntaxNode>) {
         let mut adt_fields = Vec::new();
-        let mut refs: HashMap<FileId, Vec<(TextRange, String)>> = HashMap::new();
+        let mut refs: FxHashMap<FileId, Vec<(TextRange, String)>> = FxHashMap::default();
 
         //Here impl is not included as each item inside impl will be tied to the parent of
         //implementing block(a struct, enum, etc), if the parent is in selected module, it will
@@ -320,7 +318,7 @@ impl Module {
         &self,
         ctx: &AssistContext<'_>,
         node_def: Definition,
-        refs_in_files: &mut HashMap<FileId, Vec<(TextRange, String)>>,
+        refs_in_files: &mut FxHashMap<FileId, Vec<(TextRange, String)>>,
     ) {
         for (file_id, references) in node_def.usages(&ctx.sema).all() {
             let source_file = ctx.sema.parse(file_id);
@@ -400,7 +398,7 @@ impl Module {
         ctx: &AssistContext<'_>,
     ) -> Vec<TextRange> {
         let mut import_paths_to_be_removed: Vec<TextRange> = vec![];
-        let mut node_set: HashSet<String> = HashSet::new();
+        let mut node_set: FxHashSet<String> = FxHashSet::default();
 
         for item in self.body_items.clone() {
             for x in item.syntax().descendants() {
diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index d59bd71d312..1f92c39ad40 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -1,7 +1,5 @@
-use std::collections::HashSet;
-
 use hir::{self, HasCrate, HasVisibility};
-use ide_db::path_transform::PathTransform;
+use ide_db::{path_transform::PathTransform, FxHashSet};
 use syntax::{
     ast::{
         self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _,
@@ -71,7 +69,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
 
     let sema_field_ty = ctx.sema.resolve_type(&field_ty)?;
     let mut methods = vec![];
-    let mut seen_names = HashSet::new();
+    let mut seen_names = FxHashSet::default();
 
     for ty in sema_field_ty.autoderef(ctx.db()) {
         let krate = ty.krate(ctx.db());
diff --git a/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
index 3964b14f470..bc66f6cead4 100644
--- a/crates/ide-assists/src/handlers/generate_delegate_trait.rs
+++ b/crates/ide-assists/src/handlers/generate_delegate_trait.rs
@@ -502,9 +502,7 @@ fn generate_args_for_impl(
     trait_params: &Option<GenericParamList>,
     old_trait_args: &FxHashSet<String>,
 ) -> Option<ast::GenericArgList> {
-    let Some(old_impl_args) = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args()) else {
-        return None;
-    };
+    let old_impl_args = old_impl_gpl.map(|gpl| gpl.to_generic_args().generic_args())?;
     // Create pairs of the args of `self_ty` and corresponding `field_ty` to
     // form the substitution list
     let mut arg_substs = FxHashMap::default();
diff --git a/crates/ide-assists/src/handlers/inline_type_alias.rs b/crates/ide-assists/src/handlers/inline_type_alias.rs
index 5982e9d61db..e2f3d9edcd1 100644
--- a/crates/ide-assists/src/handlers/inline_type_alias.rs
+++ b/crates/ide-assists/src/handlers/inline_type_alias.rs
@@ -3,12 +3,12 @@
 // - Remove unused aliases if there are no longer any users, see inline_call.rs.
 
 use hir::{HasSource, PathResolution};
+use ide_db::FxHashMap;
 use ide_db::{
     defs::Definition, imports::insert_use::ast_to_remove_for_path_in_use_stmt,
     search::FileReference,
 };
 use itertools::Itertools;
-use std::collections::HashMap;
 use syntax::{
     ast::{self, make, HasGenericParams, HasName},
     ted, AstNode, NodeOrToken, SyntaxNode,
@@ -189,14 +189,14 @@ fn inline(alias_def: &ast::TypeAlias, alias_instance: &ast::PathType) -> Option<
     Some(repl)
 }
 
-struct LifetimeMap(HashMap<String, ast::Lifetime>);
+struct LifetimeMap(FxHashMap<String, ast::Lifetime>);
 
 impl LifetimeMap {
     fn new(
         instance_args: &Option<ast::GenericArgList>,
         alias_generics: &ast::GenericParamList,
     ) -> Option<Self> {
-        let mut inner = HashMap::new();
+        let mut inner = FxHashMap::default();
 
         let wildcard_lifetime = make::lifetime("'_");
         let lifetimes = alias_generics
@@ -231,14 +231,14 @@ impl LifetimeMap {
     }
 }
 
-struct ConstAndTypeMap(HashMap<String, SyntaxNode>);
+struct ConstAndTypeMap(FxHashMap<String, SyntaxNode>);
 
 impl ConstAndTypeMap {
     fn new(
         instance_args: &Option<ast::GenericArgList>,
         alias_generics: &ast::GenericParamList,
     ) -> Option<Self> {
-        let mut inner = HashMap::new();
+        let mut inner = FxHashMap::default();
         let instance_generics = generic_args_to_const_and_type_generics(instance_args);
         let alias_generics = generic_param_list_to_const_and_type_generics(alias_generics);
 
diff --git a/crates/ide-assists/src/handlers/merge_match_arms.rs b/crates/ide-assists/src/handlers/merge_match_arms.rs
index aae9f20d4ea..4608e9494bc 100644
--- a/crates/ide-assists/src/handlers/merge_match_arms.rs
+++ b/crates/ide-assists/src/handlers/merge_match_arms.rs
@@ -1,5 +1,6 @@
 use hir::Type;
-use std::{collections::HashMap, iter::successors};
+use ide_db::FxHashMap;
+use std::iter::successors;
 use syntax::{
     algo::neighbor,
     ast::{self, AstNode, HasName},
@@ -95,7 +96,7 @@ fn contains_placeholder(a: &ast::MatchArm) -> bool {
 }
 
 fn are_same_types(
-    current_arm_types: &HashMap<String, Option<Type>>,
+    current_arm_types: &FxHashMap<String, Option<Type>>,
     arm: &ast::MatchArm,
     ctx: &AssistContext<'_>,
 ) -> bool {
@@ -114,11 +115,11 @@ fn are_same_types(
 fn get_arm_types(
     context: &AssistContext<'_>,
     arm: &ast::MatchArm,
-) -> HashMap<String, Option<Type>> {
-    let mut mapping: HashMap<String, Option<Type>> = HashMap::new();
+) -> FxHashMap<String, Option<Type>> {
+    let mut mapping: FxHashMap<String, Option<Type>> = FxHashMap::default();
 
     fn recurse(
-        map: &mut HashMap<String, Option<Type>>,
+        map: &mut FxHashMap<String, Option<Type>>,
         ctx: &AssistContext<'_>,
         pat: &Option<ast::Pat>,
     ) {
diff --git a/crates/ide-assists/src/handlers/remove_unused_imports.rs b/crates/ide-assists/src/handlers/remove_unused_imports.rs
index 35bf84c4349..d67b259d2f5 100644
--- a/crates/ide-assists/src/handlers/remove_unused_imports.rs
+++ b/crates/ide-assists/src/handlers/remove_unused_imports.rs
@@ -1,11 +1,11 @@
-use std::collections::{hash_map::Entry, HashMap};
+use std::collections::hash_map::Entry;
 
 use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
 use ide_db::{
     base_db::FileRange,
     defs::Definition,
     search::{FileReference, ReferenceCategory, SearchScope},
-    RootDatabase,
+    FxHashMap, RootDatabase,
 };
 use syntax::{ast, AstNode};
 use text_edit::TextRange;
@@ -44,7 +44,7 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
     let uses = uses_up.chain(uses_down).collect::<Vec<_>>();
 
     // Maps use nodes to the scope that we should search through to find
-    let mut search_scopes = HashMap::<Module, Vec<SearchScope>>::new();
+    let mut search_scopes = FxHashMap::<Module, Vec<SearchScope>>::default();
 
     // iterator over all unused use trees
     let mut unused = uses
diff --git a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
index 03e6dfebebf..8a9e669630b 100644
--- a/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
+++ b/crates/ide-assists/src/handlers/unwrap_result_return_type.rs
@@ -47,9 +47,7 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<'
         return None;
     }
 
-    let Some(ok_type) = unwrap_result_type(type_ref) else {
-        return None;
-    };
+    let ok_type = unwrap_result_type(type_ref)?;
 
     acc.add(
         AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite),
diff --git a/crates/ide-assists/src/utils/suggest_name.rs b/crates/ide-assists/src/utils/suggest_name.rs
index 78dee24a6d3..74377f8ec61 100644
--- a/crates/ide-assists/src/utils/suggest_name.rs
+++ b/crates/ide-assists/src/utils/suggest_name.rs
@@ -1,9 +1,7 @@
 //! This module contains functions to suggest names for expressions, functions and other items
 
-use std::collections::HashSet;
-
 use hir::Semantics;
-use ide_db::RootDatabase;
+use ide_db::{FxHashSet, RootDatabase};
 use itertools::Itertools;
 use stdx::to_lower_snake_case;
 use syntax::{
@@ -78,7 +76,7 @@ pub(crate) fn for_unique_generic_name(
             ast::GenericParam::TypeParam(t) => t.name().unwrap().to_string(),
             p => p.to_string(),
         })
-        .collect::<HashSet<_>>();
+        .collect::<FxHashSet<_>>();
     let mut name = name.to_string();
     let base_len = name.len();
     let mut count = 0;
diff --git a/crates/ide-db/src/tests/sourcegen_lints.rs b/crates/ide-db/src/tests/sourcegen_lints.rs
index c8cf87d3c20..a165470b57f 100644
--- a/crates/ide-db/src/tests/sourcegen_lints.rs
+++ b/crates/ide-db/src/tests/sourcegen_lints.rs
@@ -241,6 +241,7 @@ fn unescape(s: &str) -> String {
     s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
 }
 
+#[allow(clippy::print_stderr)]
 fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
     let file_content = std::fs::read_to_string(path).unwrap();
     let mut clippy_lints: Vec<ClippyLint> = Vec::new();
diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs
index 5ad7069e317..ad5e66c5ccd 100644
--- a/crates/ide-diagnostics/src/lib.rs
+++ b/crates/ide-diagnostics/src/lib.rs
@@ -73,8 +73,6 @@ mod handlers {
 #[cfg(test)]
 mod tests;
 
-use std::collections::HashMap;
-
 use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
@@ -413,18 +411,18 @@ pub fn diagnostics(
 
 // `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros
 
-static RUSTC_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+static RUSTC_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> =
     Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], ""));
 
-static CLIPPY_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> =
+static CLIPPY_LINT_GROUPS_DICT: Lazy<FxHashMap<&str, Vec<&str>>> =
     Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::"));
 
 fn build_group_dict(
     lint_group: &'static [LintGroup],
     all_groups: &'static [&'static str],
     prefix: &'static str,
-) -> HashMap<&'static str, Vec<&'static str>> {
-    let mut r: HashMap<&str, Vec<&str>> = HashMap::new();
+) -> FxHashMap<&'static str, Vec<&'static str>> {
+    let mut r: FxHashMap<&str, Vec<&str>> = FxHashMap::default();
     for g in lint_group {
         for child in g.children {
             r.entry(child.strip_prefix(prefix).unwrap())
diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs
index f394a491b51..792d4a371ee 100644
--- a/crates/ide-diagnostics/src/tests.rs
+++ b/crates/ide-diagnostics/src/tests.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::print_stderr)]
 #[cfg(not(feature = "in-rust-tree"))]
 mod sourcegen;
 
diff --git a/crates/ide-ssr/src/matching.rs b/crates/ide-ssr/src/matching.rs
index 060897a6852..81f00d51a34 100644
--- a/crates/ide-ssr/src/matching.rs
+++ b/crates/ide-ssr/src/matching.rs
@@ -706,7 +706,7 @@ where
 // we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
 // of code that can make the decision to not match.
 thread_local! {
-    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
+    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = const { Cell::new(false) };
 }
 
 fn recording_match_fail_reasons() -> bool {
diff --git a/crates/ide-ssr/src/tests.rs b/crates/ide-ssr/src/tests.rs
index 7c7d146cb4a..e608b0a7c42 100644
--- a/crates/ide-ssr/src/tests.rs
+++ b/crates/ide-ssr/src/tests.rs
@@ -113,6 +113,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
     expected.assert_eq(&actual);
 }
 
+#[allow(clippy::print_stdout)]
 fn print_match_debug_info(match_finder: &MatchFinder<'_>, file_id: FileId, snippet: &str) {
     let debug_info = match_finder.debug_where_text_equal(file_id, snippet);
     println!(
diff --git a/crates/ide/src/doc_links/tests.rs b/crates/ide/src/doc_links/tests.rs
index 3bb0fc60641..60e8d29a716 100644
--- a/crates/ide/src/doc_links/tests.rs
+++ b/crates/ide/src/doc_links/tests.rs
@@ -29,9 +29,6 @@ fn check_external_docs(
     let web_url = links.web_url;
     let local_url = links.local_url;
 
-    println!("web_url: {:?}", web_url);
-    println!("local_url: {:?}", local_url);
-
     match (expect_web_url, web_url) {
         (Some(expect), Some(url)) => expect.assert_eq(&url),
         (None, None) => (),
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index bdda25a111f..dcdc6118a34 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -9,8 +9,6 @@
 //! at the index that the match starts at and its tree parent is
 //! resolved to the search element definition, we get a reference.
 
-use std::collections::HashMap;
-
 use hir::{DescendPreference, PathResolution, Semantics};
 use ide_db::{
     base_db::FileId,
@@ -79,7 +77,7 @@ pub(crate) fn find_all_refs(
                             .collect(),
                     )
                 })
-                .collect::<HashMap<_, Vec<_>, _>>();
+                .collect::<IntMap<_, Vec<_>>>();
             let declaration = match def {
                 Definition::Module(module) => {
                     Some(NavigationTarget::from_module_to_decl(sema.db, module))
diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs
index 5b7094e6bcc..dee5afbf8d9 100644
--- a/crates/ide/src/static_index.rs
+++ b/crates/ide/src/static_index.rs
@@ -1,14 +1,12 @@
 //! This module provides `StaticIndex` which is used for powering
 //! read-only code browsers and emitting LSIF
 
-use std::collections::HashMap;
-
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
-use ide_db::helpers::get_definition;
 use ide_db::{
     base_db::{FileId, FileRange, SourceDatabaseExt},
     defs::Definition,
-    FxHashSet, RootDatabase,
+    helpers::get_definition,
+    FxHashMap, FxHashSet, RootDatabase,
 };
 use syntax::{AstNode, SyntaxKind::*, TextRange, T};
 
@@ -31,7 +29,7 @@ pub struct StaticIndex<'a> {
     pub tokens: TokenStore,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
-    def_map: HashMap<Definition, TokenId>,
+    def_map: FxHashMap<Definition, TokenId>,
 }
 
 #[derive(Debug)]
@@ -232,14 +230,13 @@ impl StaticIndex<'_> {
 #[cfg(test)]
 mod tests {
     use crate::{fixture, StaticIndex};
-    use ide_db::base_db::FileRange;
-    use std::collections::HashSet;
+    use ide_db::{base_db::FileRange, FxHashSet};
     use syntax::TextSize;
 
     fn check_all_ranges(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
+        let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for f in s.files {
             for (range, _) in f.tokens {
                 let it = FileRange { file_id: f.file_id, range };
@@ -258,7 +255,7 @@ mod tests {
     fn check_definitions(ra_fixture: &str) {
         let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
         let s = StaticIndex::compute(&analysis);
-        let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect();
+        let mut range_set: FxHashSet<_> = ranges.iter().map(|it| it.0).collect();
         for (_, t) in s.tokens.iter() {
             if let Some(t) = t.definition {
                 if t.range.start() == TextSize::from(0) {
diff --git a/crates/limit/src/lib.rs b/crates/limit/src/lib.rs
index 7f4b00df0ba..27471db6a34 100644
--- a/crates/limit/src/lib.rs
+++ b/crates/limit/src/lib.rs
@@ -55,13 +55,12 @@ impl Limit {
                 if other <= old_max || old_max == 0 {
                     break;
                 }
-                if self
-                    .max
-                    .compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed)
-                    .is_ok()
-                {
-                    eprintln!("new max: {other}");
-                }
+                _ = self.max.compare_exchange_weak(
+                    old_max,
+                    other,
+                    Ordering::Relaxed,
+                    Ordering::Relaxed,
+                );
             }
 
             Ok(())
diff --git a/crates/mbe/src/syntax_bridge/tests.rs b/crates/mbe/src/syntax_bridge/tests.rs
index e5569138dbf..11d1a728799 100644
--- a/crates/mbe/src/syntax_bridge/tests.rs
+++ b/crates/mbe/src/syntax_bridge/tests.rs
@@ -1,5 +1,4 @@
-use std::collections::HashMap;
-
+use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode};
 use test_utils::extract_annotations;
 use tt::{
@@ -12,7 +11,7 @@ use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMM
 fn check_punct_spacing(fixture: &str) {
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
     let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
-    let mut annotations: HashMap<_, _> = extract_annotations(fixture)
+    let mut annotations: FxHashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {
             let spacing = match annotation.as_str() {
diff --git a/crates/parser/src/tests/sourcegen_inline_tests.rs b/crates/parser/src/tests/sourcegen_inline_tests.rs
index bd9e188e4d8..c02fb02c9da 100644
--- a/crates/parser/src/tests/sourcegen_inline_tests.rs
+++ b/crates/parser/src/tests/sourcegen_inline_tests.rs
@@ -1,5 +1,6 @@
 //! This module greps parser's code for specially formatted comments and turns
 //! them into tests.
+#![allow(clippy::disallowed_types, clippy::print_stdout)]
 
 use std::{
     collections::HashMap,
diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml
index 49a0979f4f5..cf01b94c0a2 100644
--- a/crates/proc-macro-api/Cargo.toml
+++ b/crates/proc-macro-api/Cargo.toml
@@ -23,6 +23,7 @@ serde.workspace = true
 serde_json = { workspace = true, features = ["unbounded_depth"] }
 tracing.workspace = true
 triomphe.workspace = true
+rustc-hash.workspace = true
 memmap2 = "0.5.4"
 snap = "1.1.0"
 indexmap = "2.1.0"
@@ -40,4 +41,4 @@ base-db.workspace = true
 la-arena.workspace = true
 
 [lints]
-workspace = true
\ No newline at end of file
+workspace = true
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index 8dfaba52625..ee7afbdd92f 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -35,10 +35,11 @@
 //! as we don't have bincode in Cargo.toml yet, lets stick with serde_json for
 //! the time being.
 
-use std::collections::{HashMap, VecDeque};
+use std::collections::VecDeque;
 
 use indexmap::IndexSet;
 use la_arena::RawIdx;
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
 use text_size::TextRange;
@@ -129,7 +130,7 @@ impl FlatTree {
         span_data_table: &mut SpanDataIndexMap,
     ) -> FlatTree {
         let mut w = Writer {
-            string_table: HashMap::new(),
+            string_table: FxHashMap::default(),
             work: VecDeque::new(),
             span_data_table,
 
@@ -158,7 +159,7 @@ impl FlatTree {
 
     pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree {
         let mut w = Writer {
-            string_table: HashMap::new(),
+            string_table: FxHashMap::default(),
             work: VecDeque::new(),
             span_data_table: &mut (),
 
@@ -340,7 +341,7 @@ impl InternableSpan for Span {
 
 struct Writer<'a, 'span, S: InternableSpan> {
     work: VecDeque<(usize, &'a tt::Subtree<S>)>,
-    string_table: HashMap<&'a str, u32>,
+    string_table: FxHashMap<&'a str, u32>,
     span_data_table: &'span mut S::Table,
 
     subtree: Vec<SubtreeRepr>,
diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs
index a36200cdb4c..df0ae3171f5 100644
--- a/crates/proc-macro-srv-cli/src/main.rs
+++ b/crates/proc-macro-srv-cli/src/main.rs
@@ -1,6 +1,8 @@
 //! A standalone binary for `proc-macro-srv`.
 //! Driver for proc macro server
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+#![allow(clippy::print_stderr)]
+
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;
 
diff --git a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
index d9018b1b87d..5f8530d08c4 100644
--- a/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-srv/proc-macro-test/imp/src/lib.rs
@@ -2,6 +2,7 @@
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
 #![feature(proc_macro_span, proc_macro_def_site)]
+#![allow(clippy::all)]
 
 use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
 
diff --git a/crates/profile/src/lib.rs b/crates/profile/src/lib.rs
index 38c5b3fc9c7..36399815606 100644
--- a/crates/profile/src/lib.rs
+++ b/crates/profile/src/lib.rs
@@ -21,7 +21,7 @@ pub use countme;
 /// almost zero.
 pub use countme::Count;
 
-thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
+thread_local!(static IN_SCOPE: RefCell<bool> = const { RefCell::new(false) });
 
 /// Allows to check if the current code is within some dynamic scope, can be
 /// useful during debugging to figure out why a function is called.
@@ -88,6 +88,7 @@ pub fn cpu_span() -> CpuSpan {
     }
 
     #[cfg(not(feature = "cpu_profiler"))]
+    #[allow(clippy::print_stderr)]
     {
         eprintln!(
             r#"cpu profiling is disabled, uncomment `default = [ "cpu_profiler" ]` in Cargo.toml to enable."#
diff --git a/crates/profile/src/stop_watch.rs b/crates/profile/src/stop_watch.rs
index 814a0257402..990b59cad42 100644
--- a/crates/profile/src/stop_watch.rs
+++ b/crates/profile/src/stop_watch.rs
@@ -1,4 +1,7 @@
 //! Like `std::time::Instant`, but also measures memory & CPU cycles.
+
+#![allow(clippy::print_stderr)]
+
 use std::{
     fmt,
     time::{Duration, Instant},
diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs
index 361f8721a4e..5926e5a5f73 100644
--- a/crates/project-model/src/cargo_workspace.rs
+++ b/crates/project-model/src/cargo_workspace.rs
@@ -399,7 +399,7 @@ impl CargoWorkspace {
         CargoWorkspace { packages, targets, workspace_root, target_directory }
     }
 
-    pub fn packages(&self) -> impl Iterator<Item = Package> + ExactSizeIterator + '_ {
+    pub fn packages(&self) -> impl ExactSizeIterator<Item = Package> + '_ {
         self.packages.iter().map(|(id, _pkg)| id)
     }
 
diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs
index c24c0196dd9..9e19a525838 100644
--- a/crates/project-model/src/sysroot.rs
+++ b/crates/project-model/src/sysroot.rs
@@ -57,7 +57,7 @@ impl Stitched {
         self.by_name("proc_macro")
     }
 
-    pub(crate) fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ {
+    pub(crate) fn crates(&self) -> impl ExactSizeIterator<Item = SysrootCrate> + '_ {
         self.crates.iter().map(|(id, _data)| id)
     }
 
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 66b680571a9..72dc67b48a5 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -3,7 +3,9 @@
 //! Based on cli flags, either spawns an LSP server, or runs a batch analysis
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stdout, clippy::print_stderr)]
 #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
+
 #[cfg(feature = "in-rust-tree")]
 extern crate rustc_driver as _;
 
diff --git a/crates/rust-analyzer/src/cli.rs b/crates/rust-analyzer/src/cli.rs
index 00670f2cb4c..0bd6677b662 100644
--- a/crates/rust-analyzer/src/cli.rs
+++ b/crates/rust-analyzer/src/cli.rs
@@ -1,5 +1,7 @@
 //! Various batch processing tasks, intended primarily for debugging.
 
+#![allow(clippy::print_stdout, clippy::print_stderr)]
+
 mod analysis_stats;
 mod diagnostics;
 pub mod flags;
diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs
index 64f965e22ac..1b6187f8df5 100644
--- a/crates/rust-analyzer/src/cli/lsif.rs
+++ b/crates/rust-analyzer/src/cli/lsif.rs
@@ -1,6 +1,5 @@
 //! LSIF (language server index format) generator
 
-use std::collections::HashMap;
 use std::env;
 use std::time::Instant;
 
@@ -16,6 +15,7 @@ use ide_db::{
 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
 use lsp_types::{self, lsif};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource};
+use rustc_hash::FxHashMap;
 use vfs::{AbsPathBuf, Vfs};
 
 use crate::{
@@ -35,10 +35,10 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> {
 
 struct LsifManager<'a> {
     count: i32,
-    token_map: HashMap<TokenId, Id>,
-    range_map: HashMap<FileRange, Id>,
-    file_map: HashMap<FileId, Id>,
-    package_map: HashMap<PackageInformation, Id>,
+    token_map: FxHashMap<TokenId, Id>,
+    range_map: FxHashMap<FileRange, Id>,
+    file_map: FxHashMap<FileId, Id>,
+    package_map: FxHashMap<PackageInformation, Id>,
     analysis: &'a Analysis,
     db: &'a RootDatabase,
     vfs: &'a Vfs,
@@ -57,10 +57,10 @@ impl LsifManager<'_> {
     fn new<'a>(analysis: &'a Analysis, db: &'a RootDatabase, vfs: &'a Vfs) -> LsifManager<'a> {
         LsifManager {
             count: 0,
-            token_map: HashMap::default(),
-            range_map: HashMap::default(),
-            file_map: HashMap::default(),
-            package_map: HashMap::default(),
+            token_map: FxHashMap::default(),
+            range_map: FxHashMap::default(),
+            file_map: FxHashMap::default(),
+            package_map: FxHashMap::default(),
             analysis,
             db,
             vfs,
@@ -215,7 +215,7 @@ impl LsifManager<'_> {
                 out_v: result_set_id.into(),
             }));
             let mut edges = token.references.iter().fold(
-                HashMap::<_, Vec<lsp_types::NumberOrString>>::new(),
+                FxHashMap::<_, Vec<lsp_types::NumberOrString>>::default(),
                 |mut edges, it| {
                     let entry = edges.entry((it.range.file_id, it.is_definition)).or_default();
                     entry.push((*self.range_map.get(&it.range).unwrap()).into());
diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs
index be7e434acac..64ea246a458 100644
--- a/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -1,8 +1,6 @@
 //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter.
 
-use std::{
-    cell::RefCell, collections::HashMap, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf,
-};
+use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf};
 
 use hir::{Change, Crate};
 use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig};
@@ -10,6 +8,7 @@ use profile::StopWatch;
 use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot};
 
 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
+use rustc_hash::FxHashMap;
 use triomphe::Arc;
 use vfs::{AbsPathBuf, FileId};
 use walkdir::WalkDir;
@@ -27,7 +26,7 @@ struct Tester {
 
 fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
     thread_local! {
-        static LEAK_STORE: RefCell<HashMap<String, DiagnosticCode>> = RefCell::new(HashMap::new());
+        static LEAK_STORE: RefCell<FxHashMap<String, DiagnosticCode>> = RefCell::new(FxHashMap::default());
     }
     LEAK_STORE.with_borrow_mut(|s| match s.get(code) {
         Some(c) => *c,
@@ -39,9 +38,9 @@ fn string_to_diagnostic_code_leaky(code: &str) -> DiagnosticCode {
     })
 }
 
-fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> HashMap<DiagnosticCode, usize> {
+fn detect_errors_from_rustc_stderr_file(p: PathBuf) -> FxHashMap<DiagnosticCode, usize> {
     let text = read_to_string(p).unwrap();
-    let mut result = HashMap::new();
+    let mut result = FxHashMap::default();
     {
         let mut text = &*text;
         while let Some(p) = text.find("error[E") {
@@ -106,7 +105,7 @@ impl Tester {
         let expected = if stderr_path.exists() {
             detect_errors_from_rustc_stderr_file(stderr_path)
         } else {
-            HashMap::new()
+            FxHashMap::default()
         };
         let text = read_to_string(&p).unwrap();
         let mut change = Change::new();
@@ -125,7 +124,7 @@ impl Tester {
         self.host.apply_change(change);
         let diagnostic_config = DiagnosticsConfig::test_sample();
 
-        let mut actual = HashMap::new();
+        let mut actual = FxHashMap::default();
         let panicked = match std::panic::catch_unwind(|| {
             self.host
                 .analysis()
diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs
index 81622a4617a..1b0cfa6a5dc 100644
--- a/crates/rust-analyzer/src/cli/scip.rs
+++ b/crates/rust-analyzer/src/cli/scip.rs
@@ -1,10 +1,6 @@
 //! SCIP generator
 
-use std::{
-    collections::{HashMap, HashSet},
-    path::PathBuf,
-    time::Instant,
-};
+use std::{path::PathBuf, time::Instant};
 
 use ide::{
     LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile,
@@ -12,6 +8,7 @@ use ide::{
 };
 use ide_db::LineIndexDatabase;
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
+use rustc_hash::{FxHashMap, FxHashSet};
 use scip::types as scip_types;
 
 use crate::{
@@ -76,9 +73,10 @@ impl flags::Scip {
         };
         let mut documents = Vec::new();
 
-        let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
-        let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
-        let mut tokens_to_enclosing_symbol: HashMap<TokenId, Option<String>> = HashMap::new();
+        let mut symbols_emitted: FxHashSet<TokenId> = FxHashSet::default();
+        let mut tokens_to_symbol: FxHashMap<TokenId, String> = FxHashMap::default();
+        let mut tokens_to_enclosing_symbol: FxHashMap<TokenId, Option<String>> =
+            FxHashMap::default();
 
         for StaticIndexedFile { file_id, tokens, .. } in si.files {
             let mut local_count = 0;
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs
index f8bc66ff8e7..f79ae793c9a 100644
--- a/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -1,9 +1,9 @@
 //! This module provides the functionality needed to convert diagnostics from
 //! `cargo check` json format to the LSP diagnostic format.
-use std::collections::HashMap;
 
 use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
 use itertools::Itertools;
+use rustc_hash::FxHashMap;
 use stdx::format_to;
 use vfs::{AbsPath, AbsPathBuf};
 
@@ -186,7 +186,7 @@ fn map_rust_child_diagnostic(
         return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
     }
 
-    let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
+    let mut edit_map: FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = FxHashMap::default();
     let mut suggested_replacements = Vec::new();
     let mut is_preferred = true;
     for &span in &spans {
diff --git a/crates/rust-analyzer/src/lsp/ext.rs b/crates/rust-analyzer/src/lsp/ext.rs
index 35c8fad3741..e23bb8e046b 100644
--- a/crates/rust-analyzer/src/lsp/ext.rs
+++ b/crates/rust-analyzer/src/lsp/ext.rs
@@ -1,6 +1,8 @@
 //! rust-analyzer extensions to the LSP.
 
-use std::{collections::HashMap, path::PathBuf};
+#![allow(clippy::disallowed_types)]
+
+use std::path::PathBuf;
 
 use ide_db::line_index::WideEncoding;
 use lsp_types::request::Request;
@@ -9,6 +11,7 @@ use lsp_types::{
     PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
 };
 use lsp_types::{PositionEncodingKind, Url};
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 
 use crate::line_index::PositionEncoding;
@@ -448,12 +451,16 @@ pub struct CodeActionData {
 #[serde(rename_all = "camelCase")]
 pub struct SnippetWorkspaceEdit {
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub changes: Option<HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
+    pub changes: Option<FxHashMap<lsp_types::Url, Vec<lsp_types::TextEdit>>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub document_changes: Option<Vec<SnippetDocumentChangeOperation>>,
     #[serde(skip_serializing_if = "Option::is_none")]
-    pub change_annotations:
-        Option<HashMap<lsp_types::ChangeAnnotationIdentifier, lsp_types::ChangeAnnotation>>,
+    pub change_annotations: Option<
+        std::collections::HashMap<
+            lsp_types::ChangeAnnotationIdentifier,
+            lsp_types::ChangeAnnotation,
+        >,
+    >,
 }
 
 #[derive(Debug, Eq, PartialEq, Clone, Deserialize, Serialize)]
diff --git a/crates/rust-analyzer/src/tracing/hprof.rs b/crates/rust-analyzer/src/tracing/hprof.rs
index c99b551df85..90649873297 100644
--- a/crates/rust-analyzer/src/tracing/hprof.rs
+++ b/crates/rust-analyzer/src/tracing/hprof.rs
@@ -179,6 +179,7 @@ impl Node {
         self.go(0, filter)
     }
 
+    #[allow(clippy::print_stderr)]
     fn go(&self, level: usize, filter: &WriteFilter) {
         if self.duration > filter.longer_than && level < filter.depth {
             let duration = ms(self.duration);
diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs
index 19890110d53..7a2b7497e03 100644
--- a/crates/rust-analyzer/tests/slow-tests/main.rs
+++ b/crates/rust-analyzer/tests/slow-tests/main.rs
@@ -9,6 +9,7 @@
 //! be sure without a real client anyway.
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::disallowed_types)]
 
 #[cfg(not(feature = "in-rust-tree"))]
 mod sourcegen;
diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs
index d3146ab7671..740626dfe38 100644
--- a/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -1,3 +1,4 @@
+#![allow(clippy::disallowed_types, clippy::print_stderr)]
 use std::{
     collections::HashSet,
     path::{Path, PathBuf},
@@ -78,8 +79,6 @@ fn files_are_tidy() {
         match extension {
             "rs" => {
                 let text = sh.read_file(&path).unwrap();
-                check_todo(&path, &text);
-                check_dbg(&path, &text);
                 check_test_attrs(&path, &text);
                 check_trailing_ws(&path, &text);
                 tidy_docs.visit(&path, &text);
@@ -205,74 +204,6 @@ Zlib OR Apache-2.0 OR MIT
     assert_eq!(licenses, expected);
 }
 
-fn check_todo(path: &Path, text: &str) {
-    let need_todo = &[
-        // This file itself obviously needs to use todo (<- like this!).
-        "tests/tidy.rs",
-        // Some of our assists generate `todo!()`.
-        "handlers/add_turbo_fish.rs",
-        "handlers/generate_function.rs",
-        "handlers/add_missing_match_arms.rs",
-        "handlers/replace_derive_with_manual_impl.rs",
-        // To support generating `todo!()` in assists, we have `expr_todo()` in
-        // `ast::make`.
-        "ast/make.rs",
-        // The documentation in string literals may contain anything for its own purposes
-        "ide-db/src/generated/lints.rs",
-        "ide-assists/src/utils/gen_trait_fn_body.rs",
-        "ide-assists/src/tests/generated.rs",
-        // The tests for missing fields
-        "ide-diagnostics/src/handlers/missing_fields.rs",
-    ];
-    if need_todo.iter().any(|p| path.ends_with(p)) {
-        return;
-    }
-    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
-        // Generated by an assist
-        if text.contains("${0:todo!()}") {
-            return;
-        }
-
-        panic!(
-            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
-             use FIXME instead\n\
-             {}\n",
-            path.display(),
-        )
-    }
-}
-
-fn check_dbg(path: &Path, text: &str) {
-    let need_dbg = &[
-        // This file itself obviously needs to use dbg.
-        "slow-tests/tidy.rs",
-        // Assists to remove `dbg!()`
-        "handlers/remove_dbg.rs",
-        // We have .dbg postfix
-        "ide-completion/src/completions/postfix.rs",
-        "ide-completion/src/completions/keyword.rs",
-        "ide-completion/src/tests/expression.rs",
-        "ide-completion/src/tests/proc_macros.rs",
-        // The documentation in string literals may contain anything for its own purposes
-        "ide-completion/src/lib.rs",
-        "ide-db/src/generated/lints.rs",
-        // test for doc test for remove_dbg
-        "src/tests/generated.rs",
-        // `expect!` string can contain `dbg!` (due to .dbg postfix)
-        "ide-completion/src/tests/special.rs",
-    ];
-    if need_dbg.iter().any(|p| path.ends_with(p)) {
-        return;
-    }
-    if text.contains("dbg!") {
-        panic!(
-            "\ndbg! macros should not be committed to the master branch,\n\
-             {}\n",
-            path.display(),
-        )
-    }
-}
-
 fn check_test_attrs(path: &Path, text: &str) {
     let ignore_rule =
         "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
diff --git a/crates/sourcegen/src/lib.rs b/crates/sourcegen/src/lib.rs
index 18fa77fd974..ac3aa31b57a 100644
--- a/crates/sourcegen/src/lib.rs
+++ b/crates/sourcegen/src/lib.rs
@@ -167,6 +167,7 @@ pub fn add_preamble(generator: &'static str, mut text: String) -> String {
 
 /// Checks that the `file` has the specified `contents`. If that is not the
 /// case, updates the file and then fails the test.
+#[allow(clippy::print_stderr)]
 pub fn ensure_file_contents(file: &Path, contents: &str) {
     if let Ok(old_contents) = fs::read_to_string(file) {
         if normalize_newlines(&old_contents) == normalize_newlines(contents) {
diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml
index 2e3f9113b06..6cca1163353 100644
--- a/crates/stdx/Cargo.toml
+++ b/crates/stdx/Cargo.toml
@@ -29,4 +29,4 @@ winapi = { version = "0.3.9", features = ["winerror"] }
 # default = [ "backtrace" ]
 
 [lints]
-workspace = true
\ No newline at end of file
+workspace = true
diff --git a/crates/stdx/src/anymap.rs b/crates/stdx/src/anymap.rs
index 9990f8b0860..899cd8ac6bb 100644
--- a/crates/stdx/src/anymap.rs
+++ b/crates/stdx/src/anymap.rs
@@ -54,12 +54,13 @@ use core::any::{Any, TypeId};
 use core::hash::BuildHasherDefault;
 use core::marker::PhantomData;
 
-use ::std::collections::hash_map::{self, HashMap};
+use ::std::collections::hash_map;
 
 /// Raw access to the underlying `HashMap`.
 ///
 /// This alias is provided for convenience because of the ugly third generic parameter.
-pub type RawMap<A> = HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
+#[allow(clippy::disallowed_types)] // Uses a custom hasher
+pub type RawMap<A> = hash_map::HashMap<TypeId, Box<A>, BuildHasherDefault<TypeIdHasher>>;
 
 /// A collection containing zero or one values for any given type and allowing convenient,
 /// type-safe access to those values.
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs
index 07b78272281..9a9ebae74e8 100644
--- a/crates/stdx/src/lib.rs
+++ b/crates/stdx/src/lib.rs
@@ -23,12 +23,14 @@ pub fn is_ci() -> bool {
 }
 
 #[must_use]
+#[allow(clippy::print_stderr)]
 pub fn timeit(label: &'static str) -> impl Drop {
     let start = Instant::now();
     defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
 }
 
 /// Prints backtrace to stderr, useful for debugging.
+#[allow(clippy::print_stderr)]
 pub fn print_backtrace() {
     #[cfg(feature = "backtrace")]
     eprintln!("{:?}", backtrace::Backtrace::new());
diff --git a/crates/stdx/src/panic_context.rs b/crates/stdx/src/panic_context.rs
index c3e8813b0e8..cf3d85b4da3 100644
--- a/crates/stdx/src/panic_context.rs
+++ b/crates/stdx/src/panic_context.rs
@@ -18,6 +18,7 @@ pub struct PanicContext {
 }
 
 impl PanicContext {
+    #[allow(clippy::print_stderr)]
     fn init() {
         let default_hook = panic::take_hook();
         let hook = move |panic_info: &panic::PanicInfo<'_>| {
@@ -43,7 +44,7 @@ impl Drop for PanicContext {
 
 fn with_ctx(f: impl FnOnce(&mut Vec<String>)) {
     thread_local! {
-        static CTX: RefCell<Vec<String>> = RefCell::new(Vec::new());
+        static CTX: RefCell<Vec<String>> = const { RefCell::new(Vec::new()) };
     }
     CTX.with(|ctx| f(&mut ctx.borrow_mut()));
 }
diff --git a/crates/stdx/src/rand.rs b/crates/stdx/src/rand.rs
index 64aa57eae09..115a073dab3 100644
--- a/crates/stdx/src/rand.rs
+++ b/crates/stdx/src/rand.rs
@@ -14,8 +14,7 @@ pub fn shuffle<T>(slice: &mut [T], mut rand_index: impl FnMut(usize) -> usize) {
 }
 
 pub fn seed() -> u64 {
-    use std::collections::hash_map::RandomState;
     use std::hash::{BuildHasher, Hasher};
-
-    RandomState::new().build_hasher().finish()
+    #[allow(clippy::disallowed_types)]
+    std::collections::hash_map::RandomState::new().build_hasher().finish()
 }
diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs
index 239a89f9b2d..15e68fc575d 100644
--- a/crates/syntax/src/fuzz.rs
+++ b/crates/syntax/src/fuzz.rs
@@ -46,6 +46,7 @@ impl CheckReparse {
         Some(CheckReparse { text, edit, edited_text })
     }
 
+    #[allow(clippy::print_stderr)]
     pub fn run(&self) {
         let parse = SourceFile::parse(&self.text);
         let new_parse = parse.reparse(&self.edit);
diff --git a/crates/syntax/src/tests/sourcegen_ast.rs b/crates/syntax/src/tests/sourcegen_ast.rs
index c2e921e4b6f..ccb13a0d933 100644
--- a/crates/syntax/src/tests/sourcegen_ast.rs
+++ b/crates/syntax/src/tests/sourcegen_ast.rs
@@ -3,14 +3,12 @@
 //! Specifically, it generates the `SyntaxKind` enum and a number of newtype
 //! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
 
-use std::{
-    collections::{BTreeSet, HashSet},
-    fmt::Write,
-};
+use std::{collections::BTreeSet, fmt::Write};
 
 use itertools::Itertools;
 use proc_macro2::{Punct, Spacing};
 use quote::{format_ident, quote};
+use rustc_hash::FxHashSet;
 use ungrammar::{Grammar, Rule};
 
 use crate::tests::ast_src::{
@@ -278,7 +276,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
             }
         });
 
-    let defined_nodes: HashSet<_> = node_names.collect();
+    let defined_nodes: FxHashSet<_> = node_names.collect();
 
     for node in kinds
         .nodes
diff --git a/crates/test-utils/src/lib.rs b/crates/test-utils/src/lib.rs
index e48b2731306..854b613ddf7 100644
--- a/crates/test-utils/src/lib.rs
+++ b/crates/test-utils/src/lib.rs
@@ -7,6 +7,7 @@
 //! * marks (see the eponymous module).
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stderr)]
 
 mod assert_linear;
 pub mod bench_fixture;
@@ -424,7 +425,7 @@ pub fn format_diff(chunks: Vec<dissimilar::Chunk<'_>>) -> String {
 ///
 /// A benchmark test looks like this:
 ///
-/// ```
+/// ```ignore
 /// #[test]
 /// fn benchmark_foo() {
 ///     if skip_slow_tests() { return; }
diff --git a/docs/dev/lsp-extensions.md b/docs/dev/lsp-extensions.md
index 3251dd75268..bb01ca9ae6d 100644
--- a/docs/dev/lsp-extensions.md
+++ b/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: dff0b009e82ef06a
+lsp/ext.rs hash: c0bf3acd7a9e72b2
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
diff --git a/lib/la-arena/src/lib.rs b/lib/la-arena/src/lib.rs
index d195bdd156b..1ded3b00a60 100644
--- a/lib/la-arena/src/lib.rs
+++ b/lib/la-arena/src/lib.rs
@@ -374,7 +374,7 @@ impl<T> Arena<T> {
     /// ```
     pub fn iter(
         &self,
-    ) -> impl Iterator<Item = (Idx<T>, &T)> + ExactSizeIterator + DoubleEndedIterator + Clone {
+    ) -> impl ExactSizeIterator<Item = (Idx<T>, &T)> + DoubleEndedIterator + Clone {
         self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
     }
 
@@ -394,7 +394,7 @@ impl<T> Arena<T> {
     /// ```
     pub fn iter_mut(
         &mut self,
-    ) -> impl Iterator<Item = (Idx<T>, &mut T)> + ExactSizeIterator + DoubleEndedIterator {
+    ) -> impl ExactSizeIterator<Item = (Idx<T>, &mut T)> + DoubleEndedIterator {
         self.data
             .iter_mut()
             .enumerate()
@@ -414,7 +414,7 @@ impl<T> Arena<T> {
     /// assert_eq!(iterator.next(), Some(&40));
     /// assert_eq!(iterator.next(), Some(&60));
     /// ```
-    pub fn values(&self) -> impl Iterator<Item = &T> + ExactSizeIterator + DoubleEndedIterator {
+    pub fn values(&self) -> impl ExactSizeIterator<Item = &T> + DoubleEndedIterator {
         self.data.iter()
     }
 
@@ -432,9 +432,7 @@ impl<T> Arena<T> {
     ///
     /// assert_eq!(arena[idx1], 10);
     /// ```
-    pub fn values_mut(
-        &mut self,
-    ) -> impl Iterator<Item = &mut T> + ExactSizeIterator + DoubleEndedIterator {
+    pub fn values_mut(&mut self) -> impl ExactSizeIterator<Item = &mut T> + DoubleEndedIterator {
         self.data.iter_mut()
     }
 
diff --git a/lib/la-arena/src/map.rs b/lib/la-arena/src/map.rs
index c6a43d8f9a6..6e7528c4f7f 100644
--- a/lib/la-arena/src/map.rs
+++ b/lib/la-arena/src/map.rs
@@ -73,17 +73,17 @@ impl<T, V> ArenaMap<Idx<T>, V> {
     }
 
     /// Returns an iterator over the values in the map.
-    pub fn values(&self) -> impl Iterator<Item = &V> + DoubleEndedIterator {
+    pub fn values(&self) -> impl DoubleEndedIterator<Item = &V> {
         self.v.iter().filter_map(|o| o.as_ref())
     }
 
     /// Returns an iterator over mutable references to the values in the map.
-    pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> + DoubleEndedIterator {
+    pub fn values_mut(&mut self) -> impl DoubleEndedIterator<Item = &mut V> {
         self.v.iter_mut().filter_map(|o| o.as_mut())
     }
 
     /// Returns an iterator over the arena indexes and values in the map.
-    pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> + DoubleEndedIterator {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Idx<T>, &V)> {
         self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
     }
 
diff --git a/lib/lsp-server/examples/goto_def.rs b/lib/lsp-server/examples/goto_def.rs
index 71f66254069..6ad144b85fe 100644
--- a/lib/lsp-server/examples/goto_def.rs
+++ b/lib/lsp-server/examples/goto_def.rs
@@ -41,6 +41,9 @@
 //!
 //! {"jsonrpc": "2.0", "method": "exit", "params": null}
 //! ```
+
+#![allow(clippy::print_stderr)]
+
 use std::error::Error;
 
 use lsp_types::OneOf;
diff --git a/lib/lsp-server/src/lib.rs b/lib/lsp-server/src/lib.rs
index e476f8c2d13..e40fc878a77 100644
--- a/lib/lsp-server/src/lib.rs
+++ b/lib/lsp-server/src/lib.rs
@@ -5,6 +5,7 @@
 //! Run with `RUST_LOG=lsp_server=debug` to see all the messages.
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stdout, clippy::disallowed_types)]
 
 mod error;
 mod msg;
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index 2d40ceb737d..df4d9810e6f 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -9,6 +9,7 @@
 //! `.cargo/config`.
 
 #![warn(rust_2018_idioms, unused_lifetimes)]
+#![allow(clippy::print_stderr, clippy::print_stdout)]
 
 mod flags;