about summary refs log tree commit diff
diff options
context:
space:
mode:
author	bors[bot] <26634292+bors[bot]@users.noreply.github.com>	2021-01-10 18:16:29 +0000
committer	GitHub <noreply@github.com>	2021-01-10 18:16:29 +0000
commit	607b9ea160149bacca41c0638f16d372c3b235cd (patch)
tree	c1ef9b29af2f080530fd3d79b9bb6622bcff0a2a
parent	3e32e39da765632dd5c61d31b846bfa93738e786 (diff)
parent	d4621197447d6906305ed30f8ab4fb48d657ec86 (diff)
download	rust-607b9ea160149bacca41c0638f16d372c3b235cd.tar.gz
rust-607b9ea160149bacca41c0638f16d372c3b235cd.zip
Merge #7218
7218: Fix typos r=Veykril a=regexident

Apart from the very last commit on this PR (which fixes a public type's name) all changes are non-breaking.

Co-authored-by: Vincent Esche <regexident@gmail.com>
-rw-r--r--crates/hir/src/from_id.rs2
-rw-r--r--crates/hir_def/src/body/lower.rs4
-rw-r--r--crates/hir_def/src/expr.rs2
-rw-r--r--crates/hir_def/src/nameres/collector.rs8
-rw-r--r--crates/hir_def/src/resolver.rs2
-rw-r--r--crates/hir_expand/src/db.rs2
-rw-r--r--crates/hir_ty/src/diagnostics/expr.rs4
-rw-r--r--crates/hir_ty/src/diagnostics/match_check.rs4
-rw-r--r--crates/hir_ty/src/lower.rs6
-rw-r--r--crates/ide/src/doc_links.rs4
-rw-r--r--crates/ide/src/hover.rs20
-rw-r--r--crates/ide/src/references.rs2
-rw-r--r--crates/ide/src/references/rename.rs4
-rw-r--r--crates/ide_db/src/imports_locator.rs2
-rw-r--r--crates/mbe/src/lib.rs4
-rw-r--r--crates/mbe/src/mbe_expander/matcher.rs2
-rw-r--r--crates/mbe/src/mbe_expander/transcriber.rs8
-rw-r--r--crates/mbe/src/syntax_bridge.rs12
-rw-r--r--crates/mbe/src/tests.rs2
-rw-r--r--crates/parser/src/grammar/items/use_item.rs2
-rw-r--r--crates/proc_macro_api/src/msg.rs2
-rw-r--r--crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs2
-rw-r--r--crates/proc_macro_srv/src/rustc_server.rs2
-rw-r--r--crates/rust-analyzer/src/lsp_utils.rs10
-rw-r--r--crates/rust-analyzer/src/markdown.rs2
-rw-r--r--crates/syntax/src/algo.rs4
-rw-r--r--crates/syntax/src/ast/make.rs2
-rw-r--r--crates/syntax/src/parsing/lexer.rs8
-rw-r--r--crates/syntax/src/validation.rs2
-rw-r--r--crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast98
-rw-r--r--crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs2
-rw-r--r--crates/test_utils/src/lib.rs2
32 files changed, 114 insertions, 118 deletions
diff --git a/crates/hir/src/from_id.rs b/crates/hir/src/from_id.rs
index 3e47a5e9dbc..c8c5fecd706 100644
--- a/crates/hir/src/from_id.rs
+++ b/crates/hir/src/from_id.rs
@@ -1,6 +1,6 @@
 //! Utility module for converting between hir_def ids and code_model wrappers.
 //!
-//! It's unclear if we need this long-term, but it's definitelly useful while we
+//! It's unclear if we need this long-term, but it's definitely useful while we
 //! are splitting the hir.
 
 use hir_def::{
diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs
index 6be1eaade87..3dc33f248fa 100644
--- a/crates/hir_def/src/body/lower.rs
+++ b/crates/hir_def/src/body/lower.rs
@@ -581,7 +581,7 @@ impl ExprCollector<'_> {
         match res.value {
             Some((mark, expansion)) => {
                 // FIXME: Statements are too complicated to recover from error for now.
-                // It is because we don't have any hygenine for local variable expansion right now.
+                // It is because we don't have any hygiene for local variable expansion right now.
                 if T::can_cast(syntax::SyntaxKind::MACRO_STMTS) && res.err.is_some() {
                     self.expander.exit(self.db, mark);
                     collector(self, None);
@@ -959,7 +959,7 @@ impl ExprCollector<'_> {
 
     fn collect_tuple_pat(&mut self, args: AstChildren<ast::Pat>) -> (Vec<PatId>, Option<usize>) {
         // Find the location of the `..`, if there is one. Note that we do not
-        // consider the possiblity of there being multiple `..` here.
+        // consider the possibility of there being multiple `..` here.
         let ellipsis = args.clone().position(|p| matches!(p, ast::Pat::RestPat(_)));
         // We want to skip the `..` pattern here, since we account for it above.
         let args = args
diff --git a/crates/hir_def/src/expr.rs b/crates/hir_def/src/expr.rs
index 6a481769dbc..76f5721e587 100644
--- a/crates/hir_def/src/expr.rs
+++ b/crates/hir_def/src/expr.rs
@@ -1,6 +1,6 @@
 //! This module describes hir-level representation of expressions.
 //!
-//! This representaion is:
+//! This representation is:
 //!
 //! 1. Identity-based. Each expression has an `id`, so we can distinguish
 //!    between different `1` in `1 + 1`.
diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs
index 77017e4ea97..f027fd48d35 100644
--- a/crates/hir_def/src/nameres/collector.rs
+++ b/crates/hir_def/src/nameres/collector.rs
@@ -267,7 +267,7 @@ impl DefCollector<'_> {
 
         // Resolve all indeterminate resolved imports again
         // As some of the macros will expand newly import shadowing partial resolved imports
-        // FIXME: We maybe could skip this, if we handle the Indetermine imports in `resolve_imports`
+        // FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
         // correctly
         let partial_resolved = self.resolved_imports.iter().filter_map(|directive| {
             if let PartialResolvedImport::Indeterminate(_) = directive.status {
@@ -402,7 +402,7 @@ impl DefCollector<'_> {
 
     /// Define a proc macro
     ///
-    /// A proc macro is similar to normal macro scope, but it would not visiable in legacy textual scoped.
+    /// A proc macro is similar to normal macro scope, but it would not visible in legacy textual scoped.
     /// And unconditionally exported.
     fn define_proc_macro(&mut self, name: Name, macro_: MacroDefId) {
         self.update(
@@ -592,7 +592,7 @@ impl DefCollector<'_> {
                     // XXX: urgh, so this works by accident! Here, we look at
                     // the enum data, and, in theory, this might require us to
                     // look back at the crate_def_map, creating a cycle. For
-                    // example, `enum E { crate::some_macro!(); }`. Luckely, the
+                    // example, `enum E { crate::some_macro!(); }`. Luckily, the
                     // only kind of macro that is allowed inside enum is a
                     // `cfg_macro`, and we don't need to run name resolution for
                     // it, but this is sheer luck!
@@ -655,7 +655,7 @@ impl DefCollector<'_> {
         &mut self,
         module_id: LocalModuleId,
         resolutions: &[(Option<Name>, PerNs)],
-        // All resolutions are imported with this visibility; the visibilies in
+        // All resolutions are imported with this visibility; the visibilities in
         // the `PerNs` values are ignored and overwritten
         vis: Visibility,
         import_type: ImportType,
diff --git a/crates/hir_def/src/resolver.rs b/crates/hir_def/src/resolver.rs
index 129f1dbac99..e4152a0be4b 100644
--- a/crates/hir_def/src/resolver.rs
+++ b/crates/hir_def/src/resolver.rs
@@ -27,7 +27,7 @@ use crate::{
 
 #[derive(Debug, Clone, Default)]
 pub struct Resolver {
-    // FIXME: all usages generally call `.rev`, so maybe reverse once in consturciton?
+    // FIXME: all usages generally call `.rev`, so maybe reverse once in construction?
     scopes: Vec<Scope>,
 }
 
diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs
index ab2637b8ca0..c6208639050 100644
--- a/crates/hir_expand/src/db.rs
+++ b/crates/hir_expand/src/db.rs
@@ -40,7 +40,7 @@ impl TokenExpander {
             // FIXME switch these to ExpandResult as well
             TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt).into(),
             TokenExpander::ProcMacro(_) => {
-                // We store the result in salsa db to prevent non-determinisc behavior in
+                // We store the result in salsa db to prevent non-deterministic behavior in
                 // some proc-macro implementation
                 // See #4315 for details
                 db.expand_proc_macro(id.into()).into()
diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs
index a1c484fdff4..107417c2780 100644
--- a/crates/hir_ty/src/diagnostics/expr.rs
+++ b/crates/hir_ty/src/diagnostics/expr.rs
@@ -379,7 +379,7 @@ pub fn record_literal_missing_fields(
     id: ExprId,
     expr: &Expr,
 ) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
-    let (fields, exhausitve) = match expr {
+    let (fields, exhaustive) = match expr {
         Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
         _ => return None,
     };
@@ -400,7 +400,7 @@ pub fn record_literal_missing_fields(
     if missed_fields.is_empty() {
         return None;
     }
-    Some((variant_def, missed_fields, exhausitve))
+    Some((variant_def, missed_fields, exhaustive))
 }
 
 pub fn record_pattern_missing_fields(
diff --git a/crates/hir_ty/src/diagnostics/match_check.rs b/crates/hir_ty/src/diagnostics/match_check.rs
index 62c329731d4..61c47eec876 100644
--- a/crates/hir_ty/src/diagnostics/match_check.rs
+++ b/crates/hir_ty/src/diagnostics/match_check.rs
@@ -14,7 +14,7 @@
 //! The algorithm implemented here is a modified version of the one described in
 //! <http://moscova.inria.fr/~maranget/papers/warn/index.html>.
 //! However, to save future implementors from reading the original paper, we
-//! summarise the algorithm here to hopefully save time and be a little clearer
+//! summarize the algorithm here to hopefully save time and be a little clearer
 //! (without being so rigorous).
 //!
 //! The core of the algorithm revolves about a "usefulness" check. In particular, we
@@ -132,7 +132,7 @@
 //! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns).
 //! That means we're going to check the components from left-to-right, so the algorithm
 //! operates principally on the first component of the matrix and new pattern-stack `p`.
-//! This algorithm is realised in the `is_useful` function.
+//! This algorithm is realized in the `is_useful` function.
 //!
 //! Base case (`n = 0`, i.e., an empty tuple pattern):
 //! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), then
diff --git a/crates/hir_ty/src/lower.rs b/crates/hir_ty/src/lower.rs
index 222f61a11d5..9594cce8b7f 100644
--- a/crates/hir_ty/src/lower.rs
+++ b/crates/hir_ty/src/lower.rs
@@ -491,16 +491,16 @@ impl Ty {
     fn from_hir_path_inner(
         ctx: &TyLoweringContext<'_>,
         segment: PathSegment<'_>,
-        typable: TyDefId,
+        typeable: TyDefId,
         infer_args: bool,
     ) -> Ty {
-        let generic_def = match typable {
+        let generic_def = match typeable {
             TyDefId::BuiltinType(_) => None,
             TyDefId::AdtId(it) => Some(it.into()),
             TyDefId::TypeAliasId(it) => Some(it.into()),
         };
         let substs = substs_from_path_segment(ctx, segment, generic_def, infer_args);
-        ctx.db.ty(typable).subst(&substs)
+        ctx.db.ty(typeable).subst(&substs)
     }
 
     /// Collect generic arguments from a path into a `Substs`. See also
diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs
index 91f4241f944..de10406bc62 100644
--- a/crates/ide/src/doc_links.rs
+++ b/crates/ide/src/doc_links.rs
@@ -39,7 +39,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: &Defi
         if target.contains("://") {
             (target.to_string(), title.to_string())
         } else {
-            // Two posibilities:
+            // Two possibilities:
             // * path-based links: `../../module/struct.MyStruct.html`
             // * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
             if let Some(rewritten) = rewrite_intra_doc_link(db, *definition, target, title) {
@@ -442,7 +442,7 @@ fn get_symbol_fragment(db: &dyn HirDatabase, field_or_assoc: &FieldOrAssocItem)
                     function.as_assoc_item(db).map(|assoc| assoc.container(db)),
                     Some(AssocItemContainer::Trait(..))
                 );
-                // This distinction may get more complicated when specialisation is available.
+                // This distinction may get more complicated when specialization is available.
                 // Rustdoc makes this decision based on whether a method 'has defaultness'.
                 // Currently this is only the case for provided trait methods.
                 if is_trait_method && !function.has_body(db) {
diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs
index e331f8886c8..e892d5588f5 100644
--- a/crates/ide/src/hover.rs
+++ b/crates/ide/src/hover.rs
@@ -1953,16 +1953,16 @@ struct S {
 /// Test cases:
 /// case 1.  bare URL: https://www.example.com/
 /// case 2.  inline URL with title: [example](https://www.example.com/)
-/// case 3.  code refrence: [`Result`]
-/// case 4.  code refrence but miss footnote: [`String`]
+/// case 3.  code reference: [`Result`]
+/// case 4.  code reference but miss footnote: [`String`]
 /// case 5.  autolink: <http://www.example.com/>
 /// case 6.  email address: <test@example.com>
-/// case 7.  refrence: [example][example]
+/// case 7.  reference: [example][example]
 /// case 8.  collapsed link: [example][]
 /// case 9.  shortcut link: [example]
 /// case 10. inline without URL: [example]()
-/// case 11. refrence: [foo][foo]
-/// case 12. refrence: [foo][bar]
+/// case 11. reference: [foo][foo]
+/// case 12. reference: [foo][bar]
 /// case 13. collapsed link: [foo][]
 /// case 14. shortcut link: [foo]
 /// case 15. inline without URL: [foo]()
@@ -1989,16 +1989,16 @@ pub fn fo$0o() {}
                 Test cases:
                 case 1.  bare URL: https://www.example.com/
                 case 2.  inline URL with title: [example](https://www.example.com/)
-                case 3.  code refrence: `Result`
-                case 4.  code refrence but miss footnote: `String`
+                case 3.  code reference: `Result`
+                case 4.  code reference but miss footnote: `String`
                 case 5.  autolink: http://www.example.com/
                 case 6.  email address: test@example.com
-                case 7.  refrence: example
+                case 7.  reference: example
                 case 8.  collapsed link: example
                 case 9.  shortcut link: example
                 case 10. inline without URL: example
-                case 11. refrence: foo
-                case 12. refrence: foo
+                case 11. reference: foo
+                case 12. reference: foo
                 case 13. collapsed link: foo
                 case 14. shortcut link: foo
                 case 15. inline without URL: foo
diff --git a/crates/ide/src/references.rs b/crates/ide/src/references.rs
index d44d96dd403..b774a2be1ba 100644
--- a/crates/ide/src/references.rs
+++ b/crates/ide/src/references.rs
@@ -3,7 +3,7 @@
 //! or `ast::NameRef`. If it's a `ast::NameRef`, at the classification step we
 //! try to resolve the direct tree parent of this element, otherwise we
 //! already have a definition and just need to get its HIR together with
-//! some information that is needed for futher steps of searching.
+//! some information that is needed for further steps of searching.
 //! After that, we collect files that might contain references and look
 //! for text occurrences of the identifier. If there's an `ast::NameRef`
 //! at the index that the match starts at and its tree parent is
diff --git a/crates/ide/src/references/rename.rs b/crates/ide/src/references/rename.rs
index 53d79333ce4..099900673aa 100644
--- a/crates/ide/src/references/rename.rs
+++ b/crates/ide/src/references/rename.rs
@@ -945,7 +945,7 @@ use crate::foo$0::FooContent;
 //- /lib.rs
 mod fo$0o;
 //- /foo/mod.rs
-// emtpy
+// empty
 "#,
             expect![[r#"
                 RangeInfo {
@@ -995,7 +995,7 @@ mod fo$0o;
 mod outer { mod fo$0o; }
 
 //- /outer/foo.rs
-// emtpy
+// empty
 "#,
             expect![[r#"
                 RangeInfo {
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs
index 0782ab07095..e9f23adf838 100644
--- a/crates/ide_db/src/imports_locator.rs
+++ b/crates/ide_db/src/imports_locator.rs
@@ -1,4 +1,4 @@
-//! This module contains an import search funcionality that is provided to the assists module.
+//! This module contains an import search functionality that is provided to the assists module.
 //! Later, this should be moved away to a separate crate that is accessible from the assists module.
 
 use hir::{import_map, AsAssocItem, Crate, MacroDef, ModuleDef, Semantics};
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index b3472879d1e..19543d777a4 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -24,7 +24,7 @@ use crate::{
 #[derive(Debug, PartialEq, Eq)]
 pub enum ParseError {
     Expected(String),
-    RepetitionEmtpyTokenTree,
+    RepetitionEmptyTokenTree,
 }
 
 #[derive(Debug, PartialEq, Eq, Clone)]
@@ -270,7 +270,7 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
                         }
                         false
                     }) {
-                        return Err(ParseError::RepetitionEmtpyTokenTree);
+                        return Err(ParseError::RepetitionEmptyTokenTree);
                     }
                 }
                 validate(subtree)?
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs
index c6d615c81be..d32e6052147 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/mbe_expander/matcher.rs
@@ -378,7 +378,7 @@ pub(super) fn match_repeat(
     src: &mut TtIter,
 ) -> Result<(), ExpandError> {
     // Dirty hack to make macro-expansion terminate.
-    // This should be replaced by a propper macro-by-example implementation
+    // This should be replaced by a proper macro-by-example implementation
     let mut limit = 65536;
     let mut counter = 0;
 
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs
index 27b2ac777ae..59a3c80a8b6 100644
--- a/crates/mbe/src/mbe_expander/transcriber.rs
+++ b/crates/mbe/src/mbe_expander/transcriber.rs
@@ -67,7 +67,7 @@ struct NestingState {
     /// because there is no variable in use by the current repetition
     hit: bool,
     /// `at_end` is currently necessary to tell `expand_repeat` if it should stop
-    /// because there is no more value avaible for the current repetition
+    /// because there is no more value available for the current repetition
     at_end: bool,
 }
 
@@ -179,11 +179,7 @@ fn expand_repeat(
 
         counter += 1;
         if counter == limit {
-            log::warn!(
-                "expand_tt excced in repeat pattern exceed limit => {:#?}\n{:#?}",
-                template,
-                ctx
-            );
+            log::warn!("expand_tt in repeat pattern exceed limit => {:#?}\n{:#?}", template, ctx);
             break;
         }
 
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 671036e1ca6..e648519f9b9 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -149,7 +149,7 @@ impl TokenMap {
     }
 
     fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidently quadratic
+        // FIXME: This could be accidentally quadratic
         self.entries.remove(idx);
     }
 }
@@ -476,14 +476,14 @@ impl Convertor {
 
 #[derive(Debug)]
 enum SynToken {
-    Ordiniary(SyntaxToken),
+    Ordinary(SyntaxToken),
     Punch(SyntaxToken, TextSize),
 }
 
 impl SynToken {
     fn token(&self) -> &SyntaxToken {
         match self {
-            SynToken::Ordiniary(it) => it,
+            SynToken::Ordinary(it) => it,
             SynToken::Punch(it, _) => it,
         }
     }
@@ -495,7 +495,7 @@ impl SrcToken for SynToken {
     }
     fn to_char(&self) -> Option<char> {
         match self {
-            SynToken::Ordiniary(_) => None,
+            SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
         }
     }
@@ -535,7 +535,7 @@ impl TokenConvertor for Convertor {
         } else {
             self.punct_offset = None;
             let range = curr.text_range();
-            (SynToken::Ordiniary(curr), range)
+            (SynToken::Ordinary(curr), range)
         };
 
         Some(token)
@@ -557,7 +557,7 @@ impl TokenConvertor for Convertor {
         let token = if curr.kind().is_punct() {
             SynToken::Punch(curr, 0.into())
         } else {
-            SynToken::Ordiniary(curr)
+            SynToken::Ordinary(curr)
         };
         Some(token)
     }
diff --git a/crates/mbe/src/tests.rs b/crates/mbe/src/tests.rs
index d854985c5b0..ecea15c114f 100644
--- a/crates/mbe/src/tests.rs
+++ b/crates/mbe/src/tests.rs
@@ -1967,7 +1967,7 @@ fn test_no_space_after_semi_colon() {
 #[test]
 fn test_rustc_issue_57597() {
     fn test_error(fixture: &str) {
-        assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmtpyTokenTree);
+        assert_eq!(parse_macro_error(fixture), ParseError::RepetitionEmptyTokenTree);
     }
 
     test_error("macro_rules! foo { ($($($i:ident)?)+) => {}; }");
diff --git a/crates/parser/src/grammar/items/use_item.rs b/crates/parser/src/grammar/items/use_item.rs
index 20e6a13cf96..5cb8b08e715 100644
--- a/crates/parser/src/grammar/items/use_item.rs
+++ b/crates/parser/src/grammar/items/use_item.rs
@@ -46,7 +46,7 @@ fn use_tree(p: &mut Parser, top_level: bool) {
         // test use_tree_list
         // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
         // use {path::from::root}; // Rust 2015
-        // use ::{some::arbritrary::path}; // Rust 2015
+        // use ::{some::arbitrary::path}; // Rust 2015
         // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting
         T!['{'] => {
             use_tree_list(p);
diff --git a/crates/proc_macro_api/src/msg.rs b/crates/proc_macro_api/src/msg.rs
index 4cd57210161..970f165edc3 100644
--- a/crates/proc_macro_api/src/msg.rs
+++ b/crates/proc_macro_api/src/msg.rs
@@ -79,7 +79,7 @@ impl Message for Response {}
 fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
     let mut buf = String::new();
     inp.read_line(&mut buf)?;
-    buf.pop(); // Remove traling '\n'
+    buf.pop(); // Remove trailing '\n'
     Ok(match buf.len() {
         0 => None,
         _ => Some(buf),
diff --git a/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs b/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
index 3528d5c9966..bd1e7c2fced 100644
--- a/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
+++ b/crates/proc_macro_srv/src/proc_macro/bridge/rpc.rs
@@ -251,7 +251,7 @@ impl<S> DecodeMut<'_, '_, S> for String {
     }
 }
 
-/// Simplied version of panic payloads, ignoring
+/// Simplified version of panic payloads, ignoring
 /// types other than `&'static str` and `String`.
 #[derive(Debug)]
 pub enum PanicMessage {
diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs
index b54aa1f3bf1..e6006a3c8ce 100644
--- a/crates/proc_macro_srv/src/rustc_server.rs
+++ b/crates/proc_macro_srv/src/rustc_server.rs
@@ -4,7 +4,7 @@
 //! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
 //! we could provide any TokenStream implementation.
 //! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better intergation with RA.
+//! we use tt instead for better integration with RA.
 //!
 //! FIXME: No span and source file information is implemented yet
 
diff --git a/crates/rust-analyzer/src/lsp_utils.rs b/crates/rust-analyzer/src/lsp_utils.rs
index 40de56dadc4..2d06fe538d7 100644
--- a/crates/rust-analyzer/src/lsp_utils.rs
+++ b/crates/rust-analyzer/src/lsp_utils.rs
@@ -130,7 +130,7 @@ pub(crate) fn apply_document_changes(
 }
 
 /// Checks that the edits inside the completion and the additional edits do not overlap.
-/// LSP explicitly forbits the additional edits to overlap both with the main edit and themselves.
+/// LSP explicitly forbids the additional edits to overlap both with the main edit and themselves.
 pub(crate) fn all_edits_are_disjoint(
     completion: &lsp_types::CompletionItem,
     additional_edits: &[lsp_types::TextEdit],
@@ -290,7 +290,7 @@ mod tests {
             Some(vec![disjoint_edit.clone(), joint_edit.clone()]);
         assert!(
             !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-            "Completion with disjoint edits fails the validaton even with empty extra edits"
+            "Completion with disjoint edits fails the validation even with empty extra edits"
         );
 
         completion_with_joint_edits.text_edit =
@@ -298,7 +298,7 @@ mod tests {
         completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit.clone()]);
         assert!(
             !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-            "Completion with disjoint edits fails the validaton even with empty extra edits"
+            "Completion with disjoint edits fails the validation even with empty extra edits"
         );
 
         completion_with_joint_edits.text_edit =
@@ -310,7 +310,7 @@ mod tests {
         completion_with_joint_edits.additional_text_edits = None;
         assert!(
             !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-            "Completion with disjoint edits fails the validaton even with empty extra edits"
+            "Completion with disjoint edits fails the validation even with empty extra edits"
         );
 
         completion_with_joint_edits.text_edit =
@@ -322,7 +322,7 @@ mod tests {
         completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit]);
         assert!(
             !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
-            "Completion with disjoint edits fails the validaton even with empty extra edits"
+            "Completion with disjoint edits fails the validation even with empty extra edits"
         );
     }
 
diff --git a/crates/rust-analyzer/src/markdown.rs b/crates/rust-analyzer/src/markdown.rs
index a49a58c0002..865eaae9b62 100644
--- a/crates/rust-analyzer/src/markdown.rs
+++ b/crates/rust-analyzer/src/markdown.rs
@@ -106,7 +106,7 @@ mod tests {
 
     #[test]
     fn test_format_docs_preserves_newlines() {
-        let comment = "this\nis\nultiline";
+        let comment = "this\nis\nmultiline";
         assert_eq!(format_docs(comment), comment);
     }
 
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 22ab36cd2b0..384d031e714 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -88,8 +88,8 @@ pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNod
     let keep = u_depth.min(v_depth);
 
     let u_candidates = u.ancestors().skip(u_depth - keep);
-    let v_canidates = v.ancestors().skip(v_depth - keep);
-    let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?;
+    let v_candidates = v.ancestors().skip(v_depth - keep);
+    let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
     Some(res)
 }
 
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index cafa4c19892..1ed8a96e5fc 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -241,7 +241,7 @@ pub fn wildcard_pat() -> ast::WildcardPat {
     }
 }
 
-/// Creates a tuple of patterns from an interator of patterns.
+/// Creates a tuple of patterns from an iterator of patterns.
 ///
 /// Invariant: `pats` must be length > 1
 ///
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 0cbba73c5cb..7c8d0a4c48c 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -24,7 +24,7 @@ pub struct Token {
 /// Beware that it checks for shebang first and its length contributes to resulting
 /// tokens offsets.
 pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
-    // non-empty string is a precondtion of `rustc_lexer::strip_shebang()`.
+    // non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
     if text.is_empty() {
         return Default::default();
     }
@@ -76,7 +76,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 }
 
 /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
-/// returns `None` if any tokenization error occured.
+/// returns `None` if any tokenization error occurred.
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
@@ -96,7 +96,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
 ///
 /// Beware that unescape errors are not checked at tokenization time.
 fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
-    // non-empty string is a precondtion of `rustc_lexer::first_token()`.
+    // non-empty string is a precondition of `rustc_lexer::first_token()`.
     if text.is_empty() {
         return None;
     }
@@ -117,7 +117,7 @@ fn rustc_token_kind_to_syntax_kind(
     token_text: &str,
 ) -> (SyntaxKind, Option<&'static str>) {
     // A note on an intended tradeoff:
-    // We drop some useful infromation here (see patterns with double dots `..`)
+    // We drop some useful information here (see patterns with double dots `..`)
     // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
     // being `u16` that come from `rowan::SyntaxKind`.
 
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7f908838212..bfa2dc4ba09 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -173,7 +173,7 @@ pub(crate) fn validate_block_structure(root: &SyntaxNode) {
                     assert_eq!(
                         node.parent(),
                         pair.parent(),
-                        "\nunpaired curleys:\n{}\n{:#?}\n",
+                        "\nunpaired curlys:\n{}\n{:#?}\n",
                         root.text(),
                         root,
                     );
diff --git a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
index b1fb75ed1e6..f40500e3890 100644
--- a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
+++ b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rast
@@ -1,4 +1,4 @@
-SOURCE_FILE@0..249
+SOURCE_FILE@0..248
   USE@0..58
     USE_KW@0..3 "use"
     WHITESPACE@3..4 " "
@@ -75,62 +75,62 @@ SOURCE_FILE@0..249
         R_CURLY@119..120 "}"
     SEMICOLON@120..121 ";"
   WHITESPACE@121..122 " "
-  USE@122..166
+  USE@122..165
     COMMENT@122..134 "// Rust 2015"
     WHITESPACE@134..135 "\n"
     USE_KW@135..138 "use"
     WHITESPACE@138..139 " "
-    USE_TREE@139..165
+    USE_TREE@139..164
       COLON2@139..141 "::"
-      USE_TREE_LIST@141..165
+      USE_TREE_LIST@141..164
         L_CURLY@141..142 "{"
-        USE_TREE@142..164
-          PATH@142..164
-            PATH@142..158
+        USE_TREE@142..163
+          PATH@142..163
+            PATH@142..157
               PATH@142..146
                 PATH_SEGMENT@142..146
                   NAME_REF@142..146
                     IDENT@142..146 "some"
               COLON2@146..148 "::"
-              PATH_SEGMENT@148..158
-                NAME_REF@148..158
-                  IDENT@148..158 "arbritrary"
-            COLON2@158..160 "::"
-            PATH_SEGMENT@160..164
-              NAME_REF@160..164
-                IDENT@160..164 "path"
-        R_CURLY@164..165 "}"
-    SEMICOLON@165..166 ";"
-  WHITESPACE@166..167 " "
-  USE@167..205
-    COMMENT@167..179 "// Rust 2015"
-    WHITESPACE@179..180 "\n"
-    USE_KW@180..183 "use"
-    WHITESPACE@183..184 " "
-    USE_TREE@184..204
-      COLON2@184..186 "::"
-      USE_TREE_LIST@186..204
-        L_CURLY@186..187 "{"
-        USE_TREE@187..203
-          USE_TREE_LIST@187..203
-            L_CURLY@187..188 "{"
-            USE_TREE@188..202
-              USE_TREE_LIST@188..202
-                L_CURLY@188..189 "{"
-                USE_TREE@189..201
-                  PATH@189..201
-                    PATH@189..193
-                      PATH_SEGMENT@189..193
-                        NAME_REF@189..193
-                          IDENT@189..193 "root"
-                    COLON2@193..195 "::"
-                    PATH_SEGMENT@195..201
-                      NAME_REF@195..201
-                        IDENT@195..201 "export"
-                R_CURLY@201..202 "}"
-            R_CURLY@202..203 "}"
-        R_CURLY@203..204 "}"
-    SEMICOLON@204..205 ";"
-  WHITESPACE@205..206 " "
-  COMMENT@206..248 "// Nonsensical but pe ..."
-  WHITESPACE@248..249 "\n"
+              PATH_SEGMENT@148..157
+                NAME_REF@148..157
+                  IDENT@148..157 "arbitrary"
+            COLON2@157..159 "::"
+            PATH_SEGMENT@159..163
+              NAME_REF@159..163
+                IDENT@159..163 "path"
+        R_CURLY@163..164 "}"
+    SEMICOLON@164..165 ";"
+  WHITESPACE@165..166 " "
+  USE@166..204
+    COMMENT@166..178 "// Rust 2015"
+    WHITESPACE@178..179 "\n"
+    USE_KW@179..182 "use"
+    WHITESPACE@182..183 " "
+    USE_TREE@183..203
+      COLON2@183..185 "::"
+      USE_TREE_LIST@185..203
+        L_CURLY@185..186 "{"
+        USE_TREE@186..202
+          USE_TREE_LIST@186..202
+            L_CURLY@186..187 "{"
+            USE_TREE@187..201
+              USE_TREE_LIST@187..201
+                L_CURLY@187..188 "{"
+                USE_TREE@188..200
+                  PATH@188..200
+                    PATH@188..192
+                      PATH_SEGMENT@188..192
+                        NAME_REF@188..192
+                          IDENT@188..192 "root"
+                    COLON2@192..194 "::"
+                    PATH_SEGMENT@194..200
+                      NAME_REF@194..200
+                        IDENT@194..200 "export"
+                R_CURLY@200..201 "}"
+            R_CURLY@201..202 "}"
+        R_CURLY@202..203 "}"
+    SEMICOLON@203..204 ";"
+  WHITESPACE@204..205 " "
+  COMMENT@205..247 "// Nonsensical but pe ..."
+  WHITESPACE@247..248 "\n"
diff --git a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
index 381cba1e29e..02af4b446e1 100644
--- a/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
+++ b/crates/syntax/test_data/parser/inline/ok/0002_use_tree_list.rs
@@ -1,4 +1,4 @@
 use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`)
 use {path::from::root}; // Rust 2015
-use ::{some::arbritrary::path}; // Rust 2015
+use ::{some::arbitrary::path}; // Rust 2015
 use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index 84c1d7ebbdf..e19d2ad616e 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -63,7 +63,7 @@ pub fn extract_offset(text: &str) -> (TextSize, String) {
     }
 }
 
-/// Returns the offset of the first occurence of `$0` marker and the copy of `text`
+/// Returns the offset of the first occurrence of `$0` marker and the copy of `text`
 /// without the marker.
 fn try_extract_offset(text: &str) -> Option<(TextSize, String)> {
     let cursor_pos = text.find(CURSOR_MARKER)?;