about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs5
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs11
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/edition.rs9
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar.rs36
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs28
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/items.rs22
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs24
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs3
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs335
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rast139
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rs6
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rast51
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast31
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs1
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs3
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram138
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs13
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs84
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/lib.rs109
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing.rs4
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs10
-rw-r--r--src/tools/rust-analyzer/xtask/src/codegen.rs3
-rw-r--r--src/tools/rust-analyzer/xtask/src/codegen/grammar.rs60
-rw-r--r--src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs381
39 files changed, 816 insertions, 757 deletions
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index b6d43af2eb0..e2a5f353136 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -301,7 +301,10 @@ impl ExprCollector<'_> {
                         result_expr_id
                     })
                 }
-                None => self.collect_block(e),
+                // FIXME
+                Some(ast::BlockModifier::AsyncGen(_)) | Some(ast::BlockModifier::Gen(_)) | None => {
+                    self.collect_block(e)
+                }
             },
             ast::Expr::LoopExpr(e) => {
                 let label = e.label().map(|label| self.collect_label(label));
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index e3290f53432..c4884b8d99b 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -728,6 +728,8 @@ fn include_expand(
         }
     };
     match parse_to_token_tree(
+        // FIXME
+        Edition::CURRENT,
         SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
         SyntaxContextId::ROOT,
         &db.file_text(file_id),
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index cb34f8d81a3..0ef71a38661 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -3,7 +3,7 @@ use syntax::{
     ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
     ted, NodeOrToken,
     SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
-    SyntaxNode,
+    SyntaxNode, T,
 };
 
 use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
@@ -26,8 +26,8 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let node = if ctx.has_empty_selection() {
-        if let Some(expr_stmt) = ctx.find_node_at_offset::<ast::ExprStmt>() {
-            expr_stmt.syntax().clone()
+        if let Some(t) = ctx.token_at_offset().find(|it| it.kind() == T![;]) {
+            t.parent().and_then(ast::ExprStmt::cast)?.syntax().clone()
         } else if let Some(expr) = ctx.find_node_at_offset::<ast::Expr>() {
             expr.syntax().ancestors().find_map(valid_target_expr)?.syntax().clone()
         } else {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
index f2a097afc86..457e8baaca0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/suggest_name.rs
@@ -184,7 +184,7 @@ fn normalize(name: &str) -> Option<String> {
 
 fn is_valid_name(name: &str) -> bool {
     matches!(
-        ide_db::syntax_helpers::LexedStr::single_token(name),
+        ide_db::syntax_helpers::LexedStr::single_token(syntax::Edition::CURRENT, name),
         Some((syntax::SyntaxKind::IDENT, _error))
     )
 }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index 484c65c2b01..7f9e08cb8a9 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -25,7 +25,7 @@ use std::fmt;
 use base_db::{AnchoredPathBuf, FileId, FileRange};
 use either::Either;
 use hir::{FieldSource, HirFileIdExt, InFile, ModuleSource, Semantics};
-use span::SyntaxContextId;
+use span::{Edition, SyntaxContextId};
 use stdx::{never, TupleExt};
 use syntax::{
     ast::{self, HasName},
@@ -227,7 +227,8 @@ fn rename_mod(
     module: hir::Module,
     new_name: &str,
 ) -> Result<SourceChange> {
-    if IdentifierKind::classify(new_name)? != IdentifierKind::Ident {
+    if IdentifierKind::classify(module.krate().edition(sema.db), new_name)? != IdentifierKind::Ident
+    {
         bail!("Invalid name `{0}`: cannot rename module to {0}", new_name);
     }
 
@@ -313,7 +314,12 @@ fn rename_reference(
     def: Definition,
     new_name: &str,
 ) -> Result<SourceChange> {
-    let ident_kind = IdentifierKind::classify(new_name)?;
+    let ident_kind = IdentifierKind::classify(
+        def.krate(sema.db)
+            .ok_or_else(|| RenameError("definition has no krate?".into()))?
+            .edition(sema.db),
+        new_name,
+    )?;
 
     if matches!(
         def,
@@ -605,8 +611,8 @@ pub enum IdentifierKind {
 }
 
 impl IdentifierKind {
-    pub fn classify(new_name: &str) -> Result<IdentifierKind> {
-        match parser::LexedStr::single_token(new_name) {
+    pub fn classify(edition: Edition, new_name: &str) -> Result<IdentifierKind> {
+        match parser::LexedStr::single_token(edition, new_name) {
             Some(res) => match res {
                 (SyntaxKind::IDENT, _) => {
                     if let Some(inner) = new_name.strip_prefix("r#") {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
index 4f706e26af2..c301e100341 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/node_ext.rs
@@ -277,6 +277,8 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) {
                     });
                 }
                 Some(ast::BlockModifier::Unsafe(_)) => (),
+                Some(ast::BlockModifier::Gen(_)) => (),
+                Some(ast::BlockModifier::AsyncGen(_)) => (),
                 None => (),
             }
             if let Some(stmt_list) = b.stmt_list() {
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
index 2f91271c465..e752ee3d775 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/parsing.rs
@@ -255,7 +255,7 @@ fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> {
 }
 
 fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> {
-    let lexed = parser::LexedStr::new(source);
+    let lexed = parser::LexedStr::new(parser::Edition::CURRENT, source);
     if let Some((_, first_error)) = lexed.errors().next() {
         bail!("Failed to parse pattern: {}", first_error);
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index e5f86328716..2527b92665d 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -13,6 +13,7 @@ use ide_db::{
     RootDatabase,
 };
 use itertools::Itertools;
+use span::Edition;
 use stdx::{always, never};
 use syntax::{
     ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxKind, SyntaxNode, TextRange, TextSize,
@@ -99,7 +100,7 @@ pub(crate) fn rename(
             // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can
             // properly find "direct" usages/references.
             .map(|(.., def)| {
-                match IdentifierKind::classify(new_name)? {
+                match IdentifierKind::classify(Edition::CURRENT, new_name)? {
                     IdentifierKind::Ident => (),
                     IdentifierKind::Lifetime => {
                         bail!("Cannot alias reference to a lifetime identifier")
@@ -391,7 +392,7 @@ fn rename_self_to_param(
         return Ok(SourceChange::default());
     }
 
-    let identifier_kind = IdentifierKind::classify(new_name)?;
+    let identifier_kind = IdentifierKind::classify(Edition::CURRENT, new_name)?;
 
     let InFile { file_id, value: self_param } =
         sema.source(self_param).ok_or_else(|| format_err!("cannot find function source"))?;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 7ff89631080..58edd9900ad 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -169,6 +169,7 @@ where
 /// Convert a string to a `TokenTree`. The spans of the subtree will be anchored to the provided
 /// anchor with the given context.
 pub fn parse_to_token_tree<Ctx>(
+    edition: Edition,
     anchor: SpanAnchor,
     ctx: Ctx,
     text: &str,
@@ -177,7 +178,7 @@ where
     SpanData<Ctx>: Copy + fmt::Debug,
     Ctx: Copy,
 {
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     if lexed.errors().next().is_some() {
         return None;
     }
@@ -187,11 +188,15 @@ where
 }
 
 /// Convert a string to a `TokenTree`. The passed span will be used for all spans of the produced subtree.
-pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>>
+pub fn parse_to_token_tree_static_span<S>(
+    edition: Edition,
+    span: S,
+    text: &str,
+) -> Option<tt::Subtree<S>>
 where
     S: Copy + fmt::Debug,
 {
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     if lexed.errors().next().is_some() {
         return None;
     }
diff --git a/src/tools/rust-analyzer/crates/parser/src/edition.rs b/src/tools/rust-analyzer/crates/parser/src/edition.rs
index 26178544f9b..831a482059f 100644
--- a/src/tools/rust-analyzer/crates/parser/src/edition.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/edition.rs
@@ -12,8 +12,17 @@ pub enum Edition {
 }
 
 impl Edition {
+    /// The current latest stable edition, note this is usually not the right choice in code.
     pub const CURRENT: Edition = Edition::Edition2021;
     pub const DEFAULT: Edition = Edition::Edition2015;
+
+    pub fn at_least_2024(self) -> bool {
+        self >= Edition::Edition2024
+    }
+
+    pub fn at_least_2018(self) -> bool {
+        self >= Edition::Edition2018
+    }
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar.rs b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
index 2930190cb33..7ae1e5f82e5 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar.rs
@@ -165,42 +165,6 @@ pub(crate) mod entry {
             }
             m.complete(p, ERROR);
         }
-
-        pub(crate) fn eager_macro_input(p: &mut Parser<'_>) {
-            let m = p.start();
-
-            let closing_paren_kind = match p.current() {
-                T!['{'] => T!['}'],
-                T!['('] => T![')'],
-                T!['['] => T![']'],
-                _ => {
-                    p.error("expected `{`, `[`, `(`");
-                    while !p.at(EOF) {
-                        p.bump_any();
-                    }
-                    m.complete(p, ERROR);
-                    return;
-                }
-            };
-            p.bump_any();
-            while !p.at(EOF) && !p.at(closing_paren_kind) {
-                if expressions::expr(p).is_none() {
-                    break;
-                }
-                if !p.at(EOF) && !p.at(closing_paren_kind) {
-                    p.expect(T![,]);
-                }
-            }
-            p.expect(closing_paren_kind);
-            if p.at(EOF) {
-                m.complete(p, MACRO_EAGER_INPUT);
-                return;
-            }
-            while !p.at(EOF) {
-                p.bump_any();
-            }
-            m.complete(p, ERROR);
-        }
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
index 54ed5f0ba23..54c874d06c0 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -51,6 +51,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet =
         T![const],
         T![continue],
         T![do],
+        T![gen],
         T![for],
         T![if],
         T![let],
@@ -138,15 +139,37 @@ pub(super) fn atom_expr(
         // fn f() { const { } }
         // fn f() { async { } }
         // fn f() { async move { } }
-        T![const] | T![unsafe] | T![async] if la == T!['{'] => {
+        T![const] | T![unsafe] | T![async] | T![gen] if la == T!['{'] => {
             let m = p.start();
             p.bump_any();
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
         }
-        T![async] if la == T![move] && p.nth(2) == T!['{'] => {
+        // test_err gen_blocks
+        // pub fn main() {
+        //     gen { yield ""; };
+        //     async gen { yield ""; };
+        //     gen move { yield ""; };
+        //     async gen move { yield ""; };
+        // }
+        T![async] if la == T![gen] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump(T![async]);
+            p.eat(T![gen]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] | T![gen] if la == T![move] && p.nth(2) == T!['{'] => {
+            let m = p.start();
+            p.bump_any();
+            p.bump(T![move]);
+            stmt_list(p);
+            m.complete(p, BLOCK_EXPR)
+        }
+        T![async] if la == T![gen] && p.nth(2) == T![move] && p.nth(3) == T!['{'] => {
             let m = p.start();
             p.bump(T![async]);
+            p.bump(T![gen]);
             p.bump(T![move]);
             stmt_list(p);
             m.complete(p, BLOCK_EXPR)
@@ -355,6 +378,7 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
     p.eat(T![const]);
     p.eat(T![static]);
     p.eat(T![async]);
+    p.eat(T![gen]);
     p.eat(T![move]);
 
     if !p.at(T![|]) {
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
index 99bbf47654b..d8468ba3cb6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs
@@ -112,11 +112,22 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
 
     // test_err async_without_semicolon
     // fn foo() { let _ = async {} }
-    if p.at(T![async]) && !matches!(p.nth(1), T!['{'] | T![move] | T![|]) {
+    if p.at(T![async])
+        && (!matches!(p.nth(1), T!['{'] | T![gen] | T![move] | T![|])
+            || matches!((p.nth(1), p.nth(2)), (T![gen], T![fn])))
+    {
         p.eat(T![async]);
         has_mods = true;
     }
 
+    // test_err gen_fn
+    // gen fn gen_fn() {}
+    // async gen fn async_gen_fn() {}
+    if p.at(T![gen]) && p.nth(1) == T![fn] {
+        p.eat(T![gen]);
+        has_mods = true;
+    }
+
     // test_err unsafe_block_in_mod
     // fn foo(){} unsafe { } fn bar(){}
     if p.at(T![unsafe]) && p.nth(1) != T!['{'] {
@@ -173,13 +184,6 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
         }
     }
 
-    // test existential_type
-    // existential type Foo: Fn() -> usize;
-    if p.at_contextual_kw(T![existential]) && p.nth(1) == T![type] {
-        p.bump_remap(T![existential]);
-        has_mods = true;
-    }
-
     // items
     match p.current() {
         T![fn] => fn_(p, m),
@@ -201,7 +205,7 @@ pub(super) fn opt_item(p: &mut Parser<'_>, m: Marker) -> Result<(), Marker> {
 
         _ if has_visibility || has_mods => {
             if has_mods {
-                p.error("expected existential, fn, trait or impl");
+                p.error("expected fn, trait or impl");
             } else {
                 p.error("expected an item");
             }
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index 52b24b73725..13fc61074d0 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -13,6 +13,7 @@ use std::ops;
 use rustc_lexer::unescape::{EscapeError, Mode};
 
 use crate::{
+    Edition,
     SyntaxKind::{self, *},
     T,
 };
@@ -30,9 +31,9 @@ struct LexError {
 }
 
 impl<'a> LexedStr<'a> {
-    pub fn new(text: &'a str) -> LexedStr<'a> {
+    pub fn new(edition: Edition, text: &'a str) -> LexedStr<'a> {
         let _p = tracing::info_span!("LexedStr::new").entered();
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
             conv.res.push(SHEBANG, conv.offset);
             conv.offset = shebang_len;
@@ -47,7 +48,7 @@ impl<'a> LexedStr<'a> {
         conv.finalize_with_eof()
     }
 
-    pub fn single_token(text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
+    pub fn single_token(edition: Edition, text: &'a str) -> Option<(SyntaxKind, Option<String>)> {
         if text.is_empty() {
             return None;
         }
@@ -57,7 +58,7 @@ impl<'a> LexedStr<'a> {
             return None;
         }
 
-        let mut conv = Converter::new(text);
+        let mut conv = Converter::new(edition, text);
         conv.extend_token(&token.kind, text);
         match &*conv.res.kind {
             [kind] => Some((*kind, conv.res.error.pop().map(|it| it.msg))),
@@ -129,13 +130,15 @@ impl<'a> LexedStr<'a> {
 struct Converter<'a> {
     res: LexedStr<'a>,
     offset: usize,
+    edition: Edition,
 }
 
 impl<'a> Converter<'a> {
-    fn new(text: &'a str) -> Self {
+    fn new(edition: Edition, text: &'a str) -> Self {
         Self {
             res: LexedStr { text, kind: Vec::new(), start: Vec::new(), error: Vec::new() },
             offset: 0,
+            edition,
         }
     }
 
@@ -175,6 +178,17 @@ impl<'a> Converter<'a> {
                 rustc_lexer::TokenKind::Whitespace => WHITESPACE,
 
                 rustc_lexer::TokenKind::Ident if token_text == "_" => UNDERSCORE,
+                rustc_lexer::TokenKind::Ident
+                    if ["async", "await", "dyn", "try"].contains(&token_text)
+                        && !self.edition.at_least_2018() =>
+                {
+                    IDENT
+                }
+                rustc_lexer::TokenKind::Ident
+                    if token_text == "gen" && !self.edition.at_least_2024() =>
+                {
+                    IDENT
+                }
                 rustc_lexer::TokenKind::Ident => {
                     SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
                 }
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 738ed239a7c..679492066a3 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -82,8 +82,6 @@ pub enum TopEntryPoint {
     /// Edge case -- macros generally don't expand to attributes, with the
     /// exception of `cfg_attr` which does!
     MetaItem,
-    /// Edge case 2 -- eager macros expand their input to a delimited list of comma separated expressions
-    MacroEagerInput,
 }
 
 impl TopEntryPoint {
@@ -97,7 +95,6 @@ impl TopEntryPoint {
             TopEntryPoint::Type => grammar::entry::top::type_,
             TopEntryPoint::Expr => grammar::entry::top::expr,
             TopEntryPoint::MetaItem => grammar::entry::top::meta_item,
-            TopEntryPoint::MacroEagerInput => grammar::entry::top::eager_macro_input,
         };
         let mut p = parser::Parser::new(input, edition);
         entry_point(&mut p);
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
index ad3398453be..7bddf887401 100644
--- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs
@@ -9,6 +9,7 @@ pub enum SyntaxKind {
     TOMBSTONE,
     #[doc(hidden)]
     EOF,
+    DOLLAR,
     SEMICOLON,
     COMMA,
     L_PAREN,
@@ -23,7 +24,6 @@ pub enum SyntaxKind {
     POUND,
     TILDE,
     QUESTION,
-    DOLLAR,
     AMP,
     PIPE,
     PLUS,
@@ -61,6 +61,7 @@ pub enum SyntaxKind {
     SHR,
     SHLEQ,
     SHREQ,
+    SELF_TYPE_KW,
     ABSTRACT_KW,
     AS_KW,
     ASYNC_KW,
@@ -80,6 +81,7 @@ pub enum SyntaxKind {
     FINAL_KW,
     FN_KW,
     FOR_KW,
+    GEN_KW,
     IF_KW,
     IMPL_KW,
     IN_KW,
@@ -96,7 +98,6 @@ pub enum SyntaxKind {
     REF_KW,
     RETURN_KW,
     SELF_KW,
-    SELF_TYPE_KW,
     STATIC_KW,
     STRUCT_KW,
     SUPER_KW,
@@ -112,173 +113,184 @@ pub enum SyntaxKind {
     WHERE_KW,
     WHILE_KW,
     YIELD_KW,
+    ASM_KW,
     AUTO_KW,
     BUILTIN_KW,
     DEFAULT_KW,
-    EXISTENTIAL_KW,
-    UNION_KW,
-    RAW_KW,
+    FORMAT_ARGS_KW,
     MACRO_RULES_KW,
-    YEET_KW,
     OFFSET_OF_KW,
-    ASM_KW,
-    FORMAT_ARGS_KW,
-    INT_NUMBER,
-    FLOAT_NUMBER,
-    CHAR,
+    RAW_KW,
+    UNION_KW,
+    YEET_KW,
     BYTE,
-    STRING,
     BYTE_STRING,
+    CHAR,
     C_STRING,
+    FLOAT_NUMBER,
+    INT_NUMBER,
+    RAW_BYTE_STRING,
+    RAW_C_STRING,
+    RAW_STRING,
+    STRING,
+    COMMENT,
     ERROR,
     IDENT,
-    WHITESPACE,
     LIFETIME_IDENT,
-    COMMENT,
+    NEWLINE,
     SHEBANG,
-    SOURCE_FILE,
-    STRUCT,
-    UNION,
+    WHITESPACE,
+    ABI,
+    ADT,
+    ARG_LIST,
+    ARRAY_EXPR,
+    ARRAY_TYPE,
+    ASM_EXPR,
+    ASSOC_ITEM,
+    ASSOC_ITEM_LIST,
+    ASSOC_TYPE_ARG,
+    ATTR,
+    AWAIT_EXPR,
+    BECOME_EXPR,
+    BIN_EXPR,
+    BLOCK_EXPR,
+    BOX_PAT,
+    BREAK_EXPR,
+    CALL_EXPR,
+    CAST_EXPR,
+    CLOSURE_BINDER,
+    CLOSURE_EXPR,
+    CONST,
+    CONST_ARG,
+    CONST_BLOCK_PAT,
+    CONST_PARAM,
+    CONTINUE_EXPR,
+    DYN_TRAIT_TYPE,
     ENUM,
-    FN,
-    RET_TYPE,
+    EXPR,
+    EXPR_STMT,
+    EXTERN_BLOCK,
     EXTERN_CRATE,
-    MODULE,
-    USE,
-    STATIC,
-    CONST,
-    TRAIT,
-    TRAIT_ALIAS,
-    IMPL,
-    TYPE_ALIAS,
-    MACRO_CALL,
-    MACRO_RULES,
-    MACRO_ARM,
-    TOKEN_TREE,
-    MACRO_DEF,
-    PAREN_TYPE,
-    TUPLE_TYPE,
-    MACRO_TYPE,
-    NEVER_TYPE,
-    PATH_TYPE,
-    PTR_TYPE,
-    ARRAY_TYPE,
-    SLICE_TYPE,
-    REF_TYPE,
-    INFER_TYPE,
+    EXTERN_ITEM,
+    EXTERN_ITEM_LIST,
+    FIELD_EXPR,
+    FIELD_LIST,
+    FN,
     FN_PTR_TYPE,
+    FORMAT_ARGS_ARG,
+    FORMAT_ARGS_EXPR,
+    FOR_EXPR,
     FOR_TYPE,
-    IMPL_TRAIT_TYPE,
-    DYN_TRAIT_TYPE,
-    OR_PAT,
-    PAREN_PAT,
-    REF_PAT,
-    BOX_PAT,
+    GENERIC_ARG,
+    GENERIC_ARG_LIST,
+    GENERIC_PARAM,
+    GENERIC_PARAM_LIST,
     IDENT_PAT,
-    WILDCARD_PAT,
-    REST_PAT,
-    PATH_PAT,
-    RECORD_PAT,
-    RECORD_PAT_FIELD_LIST,
-    RECORD_PAT_FIELD,
-    TUPLE_STRUCT_PAT,
-    TUPLE_PAT,
-    SLICE_PAT,
-    RANGE_PAT,
-    LITERAL_PAT,
-    MACRO_PAT,
-    CONST_BLOCK_PAT,
-    TUPLE_EXPR,
-    ARRAY_EXPR,
-    PAREN_EXPR,
-    PATH_EXPR,
-    CLOSURE_EXPR,
     IF_EXPR,
-    WHILE_EXPR,
-    LOOP_EXPR,
-    FOR_EXPR,
-    CONTINUE_EXPR,
-    BREAK_EXPR,
+    IMPL,
+    IMPL_TRAIT_TYPE,
+    INDEX_EXPR,
+    INFER_TYPE,
+    ITEM,
+    ITEM_LIST,
     LABEL,
-    BLOCK_EXPR,
-    STMT_LIST,
-    RETURN_EXPR,
-    BECOME_EXPR,
-    YIELD_EXPR,
-    YEET_EXPR,
+    LET_ELSE,
     LET_EXPR,
-    UNDERSCORE_EXPR,
+    LET_STMT,
+    LIFETIME,
+    LIFETIME_ARG,
+    LIFETIME_PARAM,
+    LITERAL,
+    LITERAL_PAT,
+    LOOP_EXPR,
+    MACRO_CALL,
+    MACRO_DEF,
     MACRO_EXPR,
-    MATCH_EXPR,
-    MATCH_ARM_LIST,
+    MACRO_ITEMS,
+    MACRO_PAT,
+    MACRO_RULES,
+    MACRO_STMTS,
+    MACRO_TYPE,
     MATCH_ARM,
+    MATCH_ARM_LIST,
+    MATCH_EXPR,
     MATCH_GUARD,
-    RECORD_EXPR,
-    RECORD_EXPR_FIELD_LIST,
-    RECORD_EXPR_FIELD,
-    OFFSET_OF_EXPR,
-    ASM_EXPR,
-    FORMAT_ARGS_EXPR,
-    FORMAT_ARGS_ARG,
-    CALL_EXPR,
-    INDEX_EXPR,
+    META,
     METHOD_CALL_EXPR,
-    FIELD_EXPR,
-    AWAIT_EXPR,
-    TRY_EXPR,
-    CAST_EXPR,
-    REF_EXPR,
+    MODULE,
+    NAME,
+    NAME_REF,
+    NEVER_TYPE,
+    OFFSET_OF_EXPR,
+    OR_PAT,
+    PARAM,
+    PARAM_LIST,
+    PAREN_EXPR,
+    PAREN_PAT,
+    PAREN_TYPE,
+    PAT,
+    PATH,
+    PATH_EXPR,
+    PATH_PAT,
+    PATH_SEGMENT,
+    PATH_TYPE,
     PREFIX_EXPR,
+    PTR_TYPE,
     RANGE_EXPR,
-    BIN_EXPR,
-    EXTERN_BLOCK,
-    EXTERN_ITEM_LIST,
-    VARIANT,
-    RECORD_FIELD_LIST,
+    RANGE_PAT,
+    RECORD_EXPR,
+    RECORD_EXPR_FIELD,
+    RECORD_EXPR_FIELD_LIST,
     RECORD_FIELD,
-    TUPLE_FIELD_LIST,
+    RECORD_FIELD_LIST,
+    RECORD_PAT,
+    RECORD_PAT_FIELD,
+    RECORD_PAT_FIELD_LIST,
+    REF_EXPR,
+    REF_PAT,
+    REF_TYPE,
+    RENAME,
+    REST_PAT,
+    RETURN_EXPR,
+    RET_TYPE,
+    SELF_PARAM,
+    SLICE_PAT,
+    SLICE_TYPE,
+    SOURCE_FILE,
+    STATIC,
+    STMT,
+    STMT_LIST,
+    STRUCT,
+    TOKEN_TREE,
+    TRAIT,
+    TRAIT_ALIAS,
+    TRY_EXPR,
+    TUPLE_EXPR,
     TUPLE_FIELD,
-    VARIANT_LIST,
-    ITEM_LIST,
-    ASSOC_ITEM_LIST,
-    ATTR,
-    META,
+    TUPLE_FIELD_LIST,
+    TUPLE_PAT,
+    TUPLE_STRUCT_PAT,
+    TUPLE_TYPE,
+    TYPE,
+    TYPE_ALIAS,
+    TYPE_ARG,
+    TYPE_BOUND,
+    TYPE_BOUND_LIST,
+    TYPE_PARAM,
+    UNDERSCORE_EXPR,
+    UNION,
+    USE,
     USE_TREE,
     USE_TREE_LIST,
-    PATH,
-    PATH_SEGMENT,
-    LITERAL,
-    RENAME,
+    VARIANT,
+    VARIANT_LIST,
     VISIBILITY,
     WHERE_CLAUSE,
     WHERE_PRED,
-    ABI,
-    NAME,
-    NAME_REF,
-    LET_STMT,
-    LET_ELSE,
-    EXPR_STMT,
-    GENERIC_PARAM_LIST,
-    GENERIC_PARAM,
-    LIFETIME_PARAM,
-    TYPE_PARAM,
-    RETURN_TYPE_ARG,
-    CONST_PARAM,
-    GENERIC_ARG_LIST,
-    LIFETIME,
-    LIFETIME_ARG,
-    TYPE_ARG,
-    ASSOC_TYPE_ARG,
-    CONST_ARG,
-    PARAM_LIST,
-    PARAM,
-    SELF_PARAM,
-    ARG_LIST,
-    TYPE_BOUND,
-    TYPE_BOUND_LIST,
-    MACRO_ITEMS,
-    MACRO_STMTS,
-    MACRO_EAGER_INPUT,
+    WHILE_EXPR,
+    WILDCARD_PAT,
+    YEET_EXPR,
+    YIELD_EXPR,
     #[doc(hidden)]
     __LAST,
 }
@@ -287,7 +299,8 @@ impl SyntaxKind {
     pub fn is_keyword(self) -> bool {
         matches!(
             self,
-            ABSTRACT_KW
+            SELF_TYPE_KW
+                | ABSTRACT_KW
                 | AS_KW
                 | ASYNC_KW
                 | AWAIT_KW
@@ -306,6 +319,7 @@ impl SyntaxKind {
                 | FINAL_KW
                 | FN_KW
                 | FOR_KW
+                | GEN_KW
                 | IF_KW
                 | IMPL_KW
                 | IN_KW
@@ -322,7 +336,6 @@ impl SyntaxKind {
                 | REF_KW
                 | RETURN_KW
                 | SELF_KW
-                | SELF_TYPE_KW
                 | STATIC_KW
                 | STRUCT_KW
                 | SUPER_KW
@@ -338,23 +351,23 @@ impl SyntaxKind {
                 | WHERE_KW
                 | WHILE_KW
                 | YIELD_KW
+                | ASM_KW
                 | AUTO_KW
                 | BUILTIN_KW
                 | DEFAULT_KW
-                | EXISTENTIAL_KW
-                | UNION_KW
-                | RAW_KW
+                | FORMAT_ARGS_KW
                 | MACRO_RULES_KW
-                | YEET_KW
                 | OFFSET_OF_KW
-                | ASM_KW
-                | FORMAT_ARGS_KW
+                | RAW_KW
+                | UNION_KW
+                | YEET_KW
         )
     }
     pub fn is_punct(self) -> bool {
         matches!(
             self,
-            SEMICOLON
+            DOLLAR
+                | SEMICOLON
                 | COMMA
                 | L_PAREN
                 | R_PAREN
@@ -368,7 +381,6 @@ impl SyntaxKind {
                 | POUND
                 | TILDE
                 | QUESTION
-                | DOLLAR
                 | AMP
                 | PIPE
                 | PLUS
@@ -409,10 +421,22 @@ impl SyntaxKind {
         )
     }
     pub fn is_literal(self) -> bool {
-        matches!(self, INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING | C_STRING)
+        matches!(
+            self,
+            BYTE | BYTE_STRING
+                | CHAR
+                | C_STRING
+                | FLOAT_NUMBER
+                | INT_NUMBER
+                | RAW_BYTE_STRING
+                | RAW_C_STRING
+                | RAW_STRING
+                | STRING
+        )
     }
     pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
         let kw = match ident {
+            "Self" => SELF_TYPE_KW,
             "abstract" => ABSTRACT_KW,
             "as" => AS_KW,
             "async" => ASYNC_KW,
@@ -432,6 +456,7 @@ impl SyntaxKind {
             "final" => FINAL_KW,
             "fn" => FN_KW,
             "for" => FOR_KW,
+            "gen" => GEN_KW,
             "if" => IF_KW,
             "impl" => IMPL_KW,
             "in" => IN_KW,
@@ -448,7 +473,6 @@ impl SyntaxKind {
             "ref" => REF_KW,
             "return" => RETURN_KW,
             "self" => SELF_KW,
-            "Self" => SELF_TYPE_KW,
             "static" => STATIC_KW,
             "struct" => STRUCT_KW,
             "super" => SUPER_KW,
@@ -470,23 +494,23 @@ impl SyntaxKind {
     }
     pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
         let kw = match ident {
+            "asm" => ASM_KW,
             "auto" => AUTO_KW,
             "builtin" => BUILTIN_KW,
             "default" => DEFAULT_KW,
-            "existential" => EXISTENTIAL_KW,
-            "union" => UNION_KW,
-            "raw" => RAW_KW,
+            "format_args" => FORMAT_ARGS_KW,
             "macro_rules" => MACRO_RULES_KW,
-            "yeet" => YEET_KW,
             "offset_of" => OFFSET_OF_KW,
-            "asm" => ASM_KW,
-            "format_args" => FORMAT_ARGS_KW,
+            "raw" => RAW_KW,
+            "union" => UNION_KW,
+            "yeet" => YEET_KW,
             _ => return None,
         };
         Some(kw)
     }
     pub fn from_char(c: char) -> Option<SyntaxKind> {
         let tok = match c {
+            '$' => DOLLAR,
             ';' => SEMICOLON,
             ',' => COMMA,
             '(' => L_PAREN,
@@ -501,7 +525,6 @@ impl SyntaxKind {
             '#' => POUND,
             '~' => TILDE,
             '?' => QUESTION,
-            '$' => DOLLAR,
             '&' => AMP,
             '|' => PIPE,
             '+' => PLUS,
@@ -521,4 +544,4 @@ impl SyntaxKind {
     }
 }
 #[macro_export]
-macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] 
=> { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; 
[format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
+macro_rules ! T { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } 
; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; 
[yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; }
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests.rs b/src/tools/rust-analyzer/crates/parser/src/tests.rs
index a38689791c4..b837387be10 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests.rs
@@ -9,7 +9,7 @@ use std::{
 
 use expect_test::expect_file;
 
-use crate::{LexedStr, TopEntryPoint};
+use crate::{Edition, LexedStr, TopEntryPoint};
 
 #[test]
 fn lex_ok() {
@@ -30,7 +30,7 @@ fn lex_err() {
 }
 
 fn lex(text: &str) -> String {
-    let lexed = LexedStr::new(text);
+    let lexed = LexedStr::new(Edition::CURRENT, text);
 
     let mut res = String::new();
     for i in 0..lexed.len() {
@@ -85,9 +85,9 @@ fn parse_inline_err() {
 }
 
 fn parse(entry: TopEntryPoint, text: &str) -> (String, bool) {
-    let lexed = LexedStr::new(text);
+    let lexed = LexedStr::new(Edition::CURRENT, text);
     let input = lexed.to_input();
-    let output = entry.parse(&input, crate::Edition::CURRENT);
+    let output = entry.parse(&input, Edition::CURRENT);
 
     let mut buf = String::new();
     let mut errors = Vec::new();
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
index f92b39edb76..4d4ab345d96 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -1,4 +1,4 @@
-use crate::{LexedStr, PrefixEntryPoint, Step};
+use crate::{Edition, LexedStr, PrefixEntryPoint, Step};
 
 #[test]
 fn vis() {
@@ -82,11 +82,11 @@ fn meta_item() {
 
 #[track_caller]
 fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
-    let lexed = LexedStr::new(input);
+    let lexed = LexedStr::new(Edition::CURRENT, input);
     let input = lexed.to_input();
 
     let mut n_tokens = 0;
-    for step in entry.parse(&input, crate::Edition::CURRENT).iter() {
+    for step in entry.parse(&input, Edition::CURRENT).iter() {
         match step {
             Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
             Step::FloatSplit { .. } => n_tokens += 1,
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
index bd5ec4b7c29..249bfeeeeee 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0011_extern_struct.rast
@@ -10,4 +10,4 @@ SOURCE_FILE
       IDENT "Foo"
     SEMICOLON ";"
   WHITESPACE "\n"
-error 6: expected existential, fn, trait or impl
+error 6: expected fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
index 1cdc6e6e719..d6d2e75cca6 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0042_weird_blocks.rast
@@ -69,7 +69,7 @@ SOURCE_FILE
         WHITESPACE "\n"
         R_CURLY "}"
   WHITESPACE "\n"
-error 24: expected existential, fn, trait or impl
-error 41: expected existential, fn, trait or impl
+error 24: expected fn, trait or impl
+error 41: expected fn, trait or impl
 error 56: expected a block
 error 75: expected a loop or block
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
index 96e471a69a7..76464bf7cc2 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0044_item_modifiers.rast
@@ -39,7 +39,7 @@ SOURCE_FILE
     L_CURLY "{"
     R_CURLY "}"
   WHITESPACE "\n"
-error 6: expected existential, fn, trait or impl
+error 6: expected fn, trait or impl
 error 38: expected a name
 error 40: missing type for `const` or `static`
 error 40: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast
index 4b2a740362e..a56d692335f 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/err/0047_repeated_extern_modifier.rast
@@ -11,5 +11,5 @@ SOURCE_FILE
       WHITESPACE " "
       STRING "\"C\""
   WHITESPACE "\n"
-error 10: expected existential, fn, trait or impl
-error 21: expected existential, fn, trait or impl
+error 10: expected fn, trait or impl
+error 21: expected fn, trait or impl
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rast
new file mode 100644
index 00000000000..08a85891ed0
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rast
@@ -0,0 +1,139 @@
+SOURCE_FILE
+  FN
+    VISIBILITY
+      PUB_KW "pub"
+    WHITESPACE " "
+    FN_KW "fn"
+    WHITESPACE " "
+    NAME
+      IDENT "main"
+    PARAM_LIST
+      L_PAREN "("
+      R_PAREN ")"
+    WHITESPACE " "
+    BLOCK_EXPR
+      STMT_LIST
+        L_CURLY "{"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          RECORD_EXPR
+            PATH
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "gen"
+            WHITESPACE " "
+            RECORD_EXPR_FIELD_LIST
+              L_CURLY "{"
+              WHITESPACE " "
+              ERROR
+                YIELD_KW "yield"
+              WHITESPACE " "
+              ERROR
+                STRING "\"\""
+              ERROR
+                SEMICOLON ";"
+              WHITESPACE " "
+              R_CURLY "}"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        ERROR
+          ASYNC_KW "async"
+        WHITESPACE " "
+        EXPR_STMT
+          RECORD_EXPR
+            PATH
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "gen"
+            WHITESPACE " "
+            RECORD_EXPR_FIELD_LIST
+              L_CURLY "{"
+              WHITESPACE " "
+              ERROR
+                YIELD_KW "yield"
+              WHITESPACE " "
+              ERROR
+                STRING "\"\""
+              ERROR
+                SEMICOLON ";"
+              WHITESPACE " "
+              R_CURLY "}"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          PATH_EXPR
+            PATH
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "gen"
+        WHITESPACE " "
+        EXPR_STMT
+          CLOSURE_EXPR
+            MOVE_KW "move"
+        WHITESPACE " "
+        EXPR_STMT
+          BLOCK_EXPR
+            STMT_LIST
+              L_CURLY "{"
+              WHITESPACE " "
+              EXPR_STMT
+                YIELD_EXPR
+                  YIELD_KW "yield"
+                  WHITESPACE " "
+                  LITERAL
+                    STRING "\"\""
+                SEMICOLON ";"
+              WHITESPACE " "
+              R_CURLY "}"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        ERROR
+          ASYNC_KW "async"
+        WHITESPACE " "
+        EXPR_STMT
+          PATH_EXPR
+            PATH
+              PATH_SEGMENT
+                NAME_REF
+                  IDENT "gen"
+        WHITESPACE " "
+        EXPR_STMT
+          CLOSURE_EXPR
+            MOVE_KW "move"
+        WHITESPACE " "
+        EXPR_STMT
+          BLOCK_EXPR
+            STMT_LIST
+              L_CURLY "{"
+              WHITESPACE " "
+              EXPR_STMT
+                YIELD_EXPR
+                  YIELD_KW "yield"
+                  WHITESPACE " "
+                  LITERAL
+                    STRING "\"\""
+                SEMICOLON ";"
+              WHITESPACE " "
+              R_CURLY "}"
+          SEMICOLON ";"
+        WHITESPACE "\n"
+        R_CURLY "}"
+  WHITESPACE "\n"
+error 26: expected identifier
+error 31: expected COMMA
+error 32: expected identifier
+error 34: expected COMMA
+error 34: expected identifier
+error 48: expected fn, trait or impl
+error 55: expected identifier
+error 60: expected COMMA
+error 61: expected identifier
+error 63: expected COMMA
+error 63: expected identifier
+error 75: expected SEMICOLON
+error 80: expected `|`
+error 80: expected SEMICOLON
+error 105: expected fn, trait or impl
+error 109: expected SEMICOLON
+error 114: expected `|`
+error 114: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rs
new file mode 100644
index 00000000000..f7687331d6d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0035_gen_blocks.rs
@@ -0,0 +1,6 @@
+pub fn main() {
+    gen { yield ""; };
+    async gen { yield ""; };
+    gen move { yield ""; };
+    async gen move { yield ""; };
+}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rast
new file mode 100644
index 00000000000..9609ece77df
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rast
@@ -0,0 +1,51 @@
+SOURCE_FILE
+  MACRO_CALL
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "gen"
+  WHITESPACE " "
+  FN
+    FN_KW "fn"
+    WHITESPACE " "
+    NAME
+      IDENT "gen_fn"
+    PARAM_LIST
+      L_PAREN "("
+      R_PAREN ")"
+    WHITESPACE " "
+    BLOCK_EXPR
+      STMT_LIST
+        L_CURLY "{"
+        R_CURLY "}"
+  WHITESPACE "\n"
+  ERROR
+    ASYNC_KW "async"
+  WHITESPACE " "
+  MACRO_CALL
+    PATH
+      PATH_SEGMENT
+        NAME_REF
+          IDENT "gen"
+  WHITESPACE " "
+  FN
+    FN_KW "fn"
+    WHITESPACE " "
+    NAME
+      IDENT "async_gen_fn"
+    PARAM_LIST
+      L_PAREN "("
+      R_PAREN ")"
+    WHITESPACE " "
+    BLOCK_EXPR
+      STMT_LIST
+        L_CURLY "{"
+        R_CURLY "}"
+  WHITESPACE "\n"
+error 3: expected BANG
+error 3: expected `{`, `[`, `(`
+error 3: expected SEMICOLON
+error 24: expected fn, trait or impl
+error 28: expected BANG
+error 28: expected `{`, `[`, `(`
+error 28: expected SEMICOLON
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rs
new file mode 100644
index 00000000000..80882e0a404
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0036_gen_fn.rs
@@ -0,0 +1,2 @@
+gen fn gen_fn() {}
+async gen fn async_gen_fn() {}
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
deleted file mode 100644
index b73780261ba..00000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rast
+++ /dev/null
@@ -1,31 +0,0 @@
-SOURCE_FILE
-  TYPE_ALIAS
-    EXISTENTIAL_KW "existential"
-    WHITESPACE " "
-    TYPE_KW "type"
-    WHITESPACE " "
-    NAME
-      IDENT "Foo"
-    COLON ":"
-    WHITESPACE " "
-    TYPE_BOUND_LIST
-      TYPE_BOUND
-        PATH_TYPE
-          PATH
-            PATH_SEGMENT
-              NAME_REF
-                IDENT "Fn"
-              PARAM_LIST
-                L_PAREN "("
-                R_PAREN ")"
-              WHITESPACE " "
-              RET_TYPE
-                THIN_ARROW "->"
-                WHITESPACE " "
-                PATH_TYPE
-                  PATH
-                    PATH_SEGMENT
-                      NAME_REF
-                        IDENT "usize"
-    SEMICOLON ";"
-  WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
deleted file mode 100644
index 23baf7145cc..00000000000
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0131_existential_type.rs
+++ /dev/null
@@ -1 +0,0 @@
-existential type Foo: Fn() -> usize;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
index b1a448427c6..c382a5b2411 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/server_impl/token_stream.rs
@@ -127,7 +127,8 @@ pub(super) mod token_stream {
     impl<S: Copy + fmt::Debug> TokenStream<S> {
         pub(crate) fn from_str(src: &str, call_site: S) -> Result<TokenStream<S>, String> {
             let subtree =
-                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?;
+                mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src)
+                    .ok_or("lexing error")?;
 
             Ok(TokenStream::with_subtree(subtree))
         }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
index 03b1117a5bd..1080e8c9932 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/utils.rs
@@ -9,7 +9,7 @@ use crate::{dylib, proc_macro_test_dylib_path, EnvSnapshot, ProcMacroSrv};
 
 fn parse_string(call_site: TokenId, src: &str) -> crate::server_impl::TokenStream<TokenId> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree_static_span(call_site, src).unwrap(),
+        mbe::parse_to_token_tree_static_span(span::Edition::CURRENT, call_site, src).unwrap(),
     )
 }
 
@@ -19,7 +19,7 @@ fn parse_string_spanned(
     src: &str,
 ) -> crate::server_impl::TokenStream<Span> {
     crate::server_impl::TokenStream::with_subtree(
-        mbe::parse_to_token_tree(anchor, call_site, src).unwrap(),
+        mbe::parse_to_token_tree(span::Edition::CURRENT, anchor, call_site, src).unwrap(),
     )
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 8c772b9c7a2..000ac9c8f6a 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -8,7 +8,11 @@
 //
 //   //          -- comment
 //   Name =      -- non-terminal definition
-//   'ident'     -- token (terminal)
+//   'ident'     -- keyword or punct token (terminal)
+//   '?ident'    -- contextual keyword (terminal)
+//                  (parsed as an identifier; treated as a keyword in specific contexts too)
+//   '#ident'    -- generic token (terminal)
+//   '@ident'    -- literal token (terminal)
 //   A B         -- sequence
 //   A | B       -- alternation
 //   A*          -- zero or more repetition
@@ -17,17 +21,17 @@
 //   label:A     -- suggested name for field of AST node
 
 //*************************//
-// Names, Paths and Macros //
+//         Paths           //
 //*************************//
 
 Name =
-  'ident' | 'self'
+  '#ident' | 'self'
 
 NameRef =
-  'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self'
+  '#ident' | '@int_number' | 'self' | 'super' | 'crate' | 'Self'
 
 Lifetime =
-  'lifetime_ident'
+  '#lifetime_ident'
 
 Path =
   (qualifier:Path '::')? segment:PathSegment
@@ -38,6 +42,11 @@ PathSegment =
 | NameRef ParamList RetType?
 | '<' Type ('as' PathType)? '>'
 
+
+//*************************//
+//        Generics         //
+//*************************//
+
 GenericArgList =
   '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
 
@@ -61,6 +70,36 @@ LifetimeArg =
 ConstArg =
   Expr
 
+GenericParamList =
+  '<' (GenericParam (',' GenericParam)* ','?)? '>'
+
+GenericParam =
+  ConstParam
+| LifetimeParam
+| TypeParam
+
+TypeParam =
+  Attr* Name (':' TypeBoundList?)?
+  ('=' default_type:Type)?
+
+ConstParam =
+  Attr* 'const' Name ':' Type
+  ('=' default_val:ConstArg)?
+
+LifetimeParam =
+  Attr* Lifetime (':' TypeBoundList?)?
+
+WhereClause =
+  'where' predicates:(WherePred (',' WherePred)* ','?)
+
+WherePred =
+  ('for' GenericParamList)?  (Lifetime | Type) ':' TypeBoundList?
+
+
+//*************************//
+//          Macro          //
+//*************************//
+
 MacroCall =
   Attr* Path '!' TokenTree ';'?
 
@@ -72,22 +111,23 @@ TokenTree =
 MacroItems =
   Item*
 
-MacroEagerInput =
-  '(' (Expr (',' Expr)* ','?)? ')'
-| '{' (Expr (',' Expr)* ','?)? '}'
-| '[' (Expr (',' Expr)* ','?)? ']'
-
-
 MacroStmts =
   statements:Stmt*
   Expr?
 
+Attr =
+  '#' '!'? '[' Meta ']'
+
+Meta =
+  'unsafe' '(' Path ('=' Expr | TokenTree)? ')'
+| Path ('=' Expr | TokenTree)?
+
 //*************************//
 //          Items          //
 //*************************//
 
 SourceFile =
-  'shebang'?
+  '#shebang'?
   Attr*
   Item*
 
@@ -112,7 +152,7 @@ Item =
 
 MacroRules =
   Attr* Visibility?
-  'macro_rules' '!' Name
+  '?macro_rules' '!' Name
   TokenTree
 
 MacroDef =
@@ -148,12 +188,12 @@ UseTreeList =
 
 Fn =
  Attr* Visibility?
- 'default'? 'const'? 'async'? 'unsafe'? Abi?
+ '?default'? 'const'? 'async'? 'unsafe'? Abi?
  'fn' Name GenericParamList? ParamList RetType? WhereClause?
  (body:BlockExpr | ';')
 
 Abi =
-  'extern' 'string'?
+  'extern' '@string'?
 
 ParamList =
   '('(
@@ -180,7 +220,7 @@ RetType =
 
 TypeAlias =
   Attr* Visibility?
-  'default'?
+  '?default'?
   'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
   ('=' Type)? ';'
 
@@ -223,7 +263,7 @@ Variant =
 
 Union =
   Attr* Visibility?
-  'union' Name GenericParamList? WhereClause?
+  '?union' Name GenericParamList? WhereClause?
   RecordFieldList
 
 // A Data Type.
@@ -236,7 +276,7 @@ Adt =
 
 Const =
   Attr* Visibility?
-  'default'?
+  '?default'?
   'const' (Name | '_') ':' Type
   ('=' body:Expr)? ';'
 
@@ -247,7 +287,7 @@ Static =
 
 Trait =
   Attr* Visibility?
-  'unsafe'? 'auto'?
+  'unsafe'? '?auto'?
   'trait' Name GenericParamList?
   (':' TypeBoundList?)? WhereClause? AssocItemList
 
@@ -266,7 +306,7 @@ AssocItem =
 
 Impl =
   Attr* Visibility?
-  'default'? 'unsafe'?
+  '?default'? 'unsafe'?
   'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause?
   AssocItemList
 
@@ -282,41 +322,9 @@ ExternItem =
 | Static
 | TypeAlias
 
-GenericParamList =
-  '<' (GenericParam (',' GenericParam)* ','?)? '>'
-
-GenericParam =
-  ConstParam
-| LifetimeParam
-| TypeParam
-
-TypeParam =
-  Attr* Name (':' TypeBoundList?)?
-  ('=' default_type:Type)?
-
-ConstParam =
-  Attr* 'const' Name ':' Type
-  ('=' default_val:ConstArg)?
-
-LifetimeParam =
-  Attr* Lifetime (':' TypeBoundList?)?
-
-WhereClause =
-  'where' predicates:(WherePred (',' WherePred)* ','?)
-
-WherePred =
-  ('for' GenericParamList)?  (Lifetime | Type) ':' TypeBoundList?
-
 Visibility =
   'pub' ('(' 'in'? Path ')')?
 
-Attr =
-  '#' '!'? '[' Meta ']'
-
-Meta =
-  'unsafe' '(' Path ('=' Expr | TokenTree)? ')'
-| Path ('=' Expr | TokenTree)?
-
 
 //****************************//
 // Statements and Expressions //
@@ -379,13 +387,13 @@ Expr =
 | UnderscoreExpr
 
 OffsetOfExpr =
-  Attr* 'builtin' '#' 'offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')'
+  Attr* '?builtin' '#' '?offset_of' '(' Type ',' fields:(NameRef ('.' NameRef)* ) ')'
 
 AsmExpr =
-  Attr* 'builtin' '#' 'asm' '(' Expr ')'
+  Attr* '?builtin' '#' '?asm' '(' Expr ')'
 
 FormatArgsExpr =
-  Attr* 'builtin' '#' 'format_args' '('
+  Attr* '?builtin' '#' '?format_args' '('
   template:Expr
   (',' args:(FormatArgsArg (',' FormatArgsArg)* ','?)? )?
   ')'
@@ -398,11 +406,12 @@ MacroExpr =
 
 Literal =
   Attr* value:(
-    'int_number' | 'float_number'
-  | 'string' | 'raw_string'
-  | 'byte_string' | 'raw_byte_string'
+    '@int_number' | '@float_number'
+  | '@string' | '@raw_string'
+  | '@byte_string' | '@raw_byte_string'
+  | '@c_string' | '@raw_c_string'
+  | '@char' | '@byte'
   | 'true' | 'false'
-  | 'char' | 'byte'
   )
 
 PathExpr =
@@ -416,13 +425,13 @@ StmtList =
   '}'
 
 RefExpr =
-  Attr* '&' (('raw' 'const'?)| ('raw'? 'mut') ) Expr
+  Attr* '&' (('?raw' 'const'?)| ('?raw'? 'mut') ) Expr
 
 TryExpr =
   Attr* Expr '?'
 
 BlockExpr =
-  Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList
+  Attr* Label? ('try' | 'unsafe' | ('async' 'move'?) | ('gen' 'move'?) | 'const') StmtList
 
 PrefixExpr =
   Attr* op:('-' | '!' | '*') Expr
@@ -482,9 +491,12 @@ FieldExpr =
   Attr* Expr '.' NameRef
 
 ClosureExpr =
-  Attr* ('for' GenericParamList)? 'const'? 'static'? 'async'? 'move'?  ParamList RetType?
+  Attr* ClosureBinder? 'const'? 'static'? 'async'? 'gen'? 'move'?  ParamList RetType?
   body:Expr
 
+ClosureBinder =
+  'for' GenericParamList
+
 IfExpr =
   Attr* 'if' condition:Expr then_branch:BlockExpr
   ('else' else_branch:(IfExpr | BlockExpr))?
@@ -538,7 +550,7 @@ YieldExpr =
   Attr* 'yield' Expr?
 
 YeetExpr =
-  Attr* 'do' 'yeet' Expr?
+  Attr* 'do' '?yeet' Expr?
 
 LetExpr =
   Attr* 'let' Pat '=' Expr
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
index b0ee9dfd507..6ed205e2856 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/expr_ext.rs
@@ -352,13 +352,22 @@ pub enum BlockModifier {
     Unsafe(SyntaxToken),
     Try(SyntaxToken),
     Const(SyntaxToken),
+    AsyncGen(SyntaxToken),
+    Gen(SyntaxToken),
     Label(ast::Label),
 }
 
 impl ast::BlockExpr {
     pub fn modifier(&self) -> Option<BlockModifier> {
-        self.async_token()
-            .map(BlockModifier::Async)
+        self.gen_token()
+            .map(|v| {
+                if self.async_token().is_some() {
+                    BlockModifier::AsyncGen(v)
+                } else {
+                    BlockModifier::Gen(v)
+                }
+            })
+            .or_else(|| self.async_token().map(BlockModifier::Async))
             .or_else(|| self.unsafe_token().map(BlockModifier::Unsafe))
             .or_else(|| self.try_token().map(BlockModifier::Try))
             .or_else(|| self.const_token().map(BlockModifier::Const))
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index 0373e7c5529..ceecffba5d2 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -14,6 +14,8 @@ pub struct Abi {
 impl Abi {
     #[inline]
     pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+    #[inline]
+    pub fn string_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![string]) }
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -182,6 +184,10 @@ impl BlockExpr {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
     #[inline]
+    pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
+    #[inline]
+    pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+    #[inline]
     pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
     #[inline]
     pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
@@ -238,13 +244,24 @@ impl CastExpr {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureBinder {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ClosureBinder {
+    #[inline]
+    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    #[inline]
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ClosureExpr {
     pub(crate) syntax: SyntaxNode,
 }
 impl ast::HasAttrs for ClosureExpr {}
 impl ClosureExpr {
     #[inline]
-    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn closure_binder(&self) -> Option<ClosureBinder> { support::child(&self.syntax) }
     #[inline]
     pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
     #[inline]
@@ -254,7 +271,7 @@ impl ClosureExpr {
     #[inline]
     pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
     #[inline]
-    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn gen_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![gen]) }
     #[inline]
     pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
     #[inline]
@@ -834,27 +851,6 @@ impl MacroDef {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct MacroEagerInput {
-    pub(crate) syntax: SyntaxNode,
-}
-impl MacroEagerInput {
-    #[inline]
-    pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
-    #[inline]
-    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
-    #[inline]
-    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
-    #[inline]
-    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
-    #[inline]
-    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
-    #[inline]
-    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
-    #[inline]
-    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct MacroExpr {
     pub(crate) syntax: SyntaxNode,
 }
@@ -1050,6 +1046,10 @@ impl NameRef {
     #[inline]
     pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
     #[inline]
+    pub fn int_number_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![int_number])
+    }
+    #[inline]
     pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
     #[inline]
     pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
@@ -2461,6 +2461,20 @@ impl AstNode for CastExpr {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
+impl AstNode for ClosureBinder {
+    #[inline]
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_BINDER }
+    #[inline]
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    #[inline]
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
 impl AstNode for ClosureExpr {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
@@ -3021,20 +3035,6 @@ impl AstNode for MacroDef {
     #[inline]
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
-impl AstNode for MacroEagerInput {
-    #[inline]
-    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EAGER_INPUT }
-    #[inline]
-    fn cast(syntax: SyntaxNode) -> Option<Self> {
-        if Self::can_cast(syntax.kind()) {
-            Some(Self { syntax })
-        } else {
-            None
-        }
-    }
-    #[inline]
-    fn syntax(&self) -> &SyntaxNode { &self.syntax }
-}
 impl AstNode for MacroExpr {
     #[inline]
     fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
@@ -5541,6 +5541,11 @@ impl std::fmt::Display for CastExpr {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
+impl std::fmt::Display for ClosureBinder {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
 impl std::fmt::Display for ClosureExpr {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
@@ -5741,11 +5746,6 @@ impl std::fmt::Display for MacroDef {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
-impl std::fmt::Display for MacroEagerInput {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        std::fmt::Display::fmt(self.syntax(), f)
-    }
-}
 impl std::fmt::Display for MacroExpr {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
index abff16b6f55..6d21ca17471 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs
@@ -211,115 +211,6 @@ impl SourceFile {
     }
 }
 
-impl ast::TokenTree {
-    pub fn reparse_as_comma_separated_expr(
-        self,
-        edition: parser::Edition,
-    ) -> Parse<ast::MacroEagerInput> {
-        let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
-
-        let mut parser_input = parser::Input::default();
-        let mut was_joint = false;
-        for t in tokens {
-            let kind = t.kind();
-            if kind.is_trivia() {
-                was_joint = false
-            } else if kind == SyntaxKind::IDENT {
-                let token_text = t.text();
-                let contextual_kw =
-                    SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
-                parser_input.push_ident(contextual_kw);
-            } else {
-                if was_joint {
-                    parser_input.was_joint();
-                }
-                parser_input.push(kind);
-                // Tag the token as joint if it is float with a fractional part
-                // we use this jointness to inform the parser about what token split
-                // event to emit when we encounter a float literal in a field access
-                if kind == SyntaxKind::FLOAT_NUMBER {
-                    if !t.text().ends_with('.') {
-                        parser_input.was_joint();
-                    } else {
-                        was_joint = false;
-                    }
-                } else {
-                    was_joint = true;
-                }
-            }
-        }
-
-        let parser_output = parser::TopEntryPoint::MacroEagerInput.parse(&parser_input, edition);
-
-        let mut tokens =
-            self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
-        let mut text = String::new();
-        let mut pos = TextSize::from(0);
-        let mut builder = SyntaxTreeBuilder::default();
-        for event in parser_output.iter() {
-            match event {
-                parser::Step::Token { kind, n_input_tokens } => {
-                    let mut token = tokens.next().unwrap();
-                    while token.kind().is_trivia() {
-                        let text = token.text();
-                        pos += TextSize::from(text.len() as u32);
-                        builder.token(token.kind(), text);
-
-                        token = tokens.next().unwrap();
-                    }
-                    text.push_str(token.text());
-                    for _ in 1..n_input_tokens {
-                        let token = tokens.next().unwrap();
-                        text.push_str(token.text());
-                    }
-
-                    pos += TextSize::from(text.len() as u32);
-                    builder.token(kind, &text);
-                    text.clear();
-                }
-                parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
-                    let token = tokens.next().unwrap();
-                    let text = token.text();
-
-                    match text.split_once('.') {
-                        Some((left, right)) => {
-                            assert!(!left.is_empty());
-                            builder.start_node(SyntaxKind::NAME_REF);
-                            builder.token(SyntaxKind::INT_NUMBER, left);
-                            builder.finish_node();
-
-                            // here we move the exit up, the original exit has been deleted in process
-                            builder.finish_node();
-
-                            builder.token(SyntaxKind::DOT, ".");
-
-                            if has_pseudo_dot {
-                                assert!(right.is_empty(), "{left}.{right}");
-                            } else {
-                                assert!(!right.is_empty(), "{left}.{right}");
-                                builder.start_node(SyntaxKind::NAME_REF);
-                                builder.token(SyntaxKind::INT_NUMBER, right);
-                                builder.finish_node();
-
-                                // the parser creates an unbalanced start node, we are required to close it here
-                                builder.finish_node();
-                            }
-                        }
-                        None => unreachable!(),
-                    }
-                    pos += TextSize::from(text.len() as u32);
-                }
-                parser::Step::Enter { kind } => builder.start_node(kind),
-                parser::Step::Exit => builder.finish_node(),
-                parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
-            }
-        }
-
-        let (green, errors) = builder.finish_raw();
-        Parse::new(green, errors)
-    }
-}
-
 /// Matches a `SyntaxNode` against an `ast` type.
 ///
 /// # Example:
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
index 4bf2a032791..e52daa42f17 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing.rs
@@ -11,7 +11,7 @@ pub(crate) use crate::parsing::reparsing::incremental_reparse;
 
 pub(crate) fn parse_text(text: &str, edition: parser::Edition) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text").entered();
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input();
     let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
@@ -24,7 +24,7 @@ pub(crate) fn parse_text_at(
     edition: parser::Edition,
 ) -> (GreenNode, Vec<SyntaxError>) {
     let _p = tracing::info_span!("parse_text_at").entered();
-    let lexed = parser::LexedStr::new(text);
+    let lexed = parser::LexedStr::new(edition, text);
     let parser_input = lexed.to_input();
     let parser_output = entry.parse(&parser_input, edition);
     let (node, errors, _eof) = build_tree(lexed, parser_output);
diff --git a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
index 354b89fd490..1e1a02f4459 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/parsing/reparsing.rs
@@ -6,7 +6,7 @@
 //!   - otherwise, we search for the nearest `{}` block which contains the edit
 //!     and try to parse only this block.
 
-use parser::Reparser;
+use parser::{Edition, Reparser};
 use text_edit::Indel;
 
 use crate::{
@@ -51,7 +51,8 @@ fn reparse_token(
             }
 
             let mut new_text = get_text_after_edit(prev_token.clone().into(), edit);
-            let (new_token_kind, new_err) = parser::LexedStr::single_token(&new_text)?;
+            let (new_token_kind, new_err) =
+                parser::LexedStr::single_token(Edition::CURRENT, &new_text)?;
 
             if new_token_kind != prev_token_kind
                 || (new_token_kind == IDENT && is_contextual_kw(&new_text))
@@ -64,7 +65,8 @@ fn reparse_token(
             // `b` no longer remains an identifier, but becomes a part of byte string literal
             if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
                 new_text.push(next_char);
-                let token_with_next_char = parser::LexedStr::single_token(&new_text);
+                let token_with_next_char =
+                    parser::LexedStr::single_token(Edition::CURRENT, &new_text);
                 if let Some((_kind, _error)) = token_with_next_char {
                     return None;
                 }
@@ -91,7 +93,7 @@ fn reparse_block(
     let (node, reparser) = find_reparsable_node(root, edit.delete)?;
     let text = get_text_after_edit(node.clone().into(), edit);
 
-    let lexed = parser::LexedStr::new(text.as_str());
+    let lexed = parser::LexedStr::new(Edition::CURRENT, text.as_str());
     let parser_input = lexed.to_input();
     if !is_balanced(&lexed) {
         return None;
diff --git a/src/tools/rust-analyzer/xtask/src/codegen.rs b/src/tools/rust-analyzer/xtask/src/codegen.rs
index acaa65129df..2491952f52f 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen.rs
@@ -163,8 +163,9 @@ fn add_preamble(cg: CodegenType, mut text: String) -> String {
 /// case, updates the file and then fails the test.
 #[allow(clippy::print_stderr)]
 fn ensure_file_contents(cg: CodegenType, file: &Path, contents: &str, check: bool) {
+    let contents = normalize_newlines(contents);
     if let Ok(old_contents) = fs::read_to_string(file) {
-        if normalize_newlines(&old_contents) == normalize_newlines(contents) {
+        if normalize_newlines(&old_contents) == contents {
             // File is already up to date.
             return;
         }
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
index 45fa2d37c8f..44f12ba4ad9 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar.rs
@@ -17,15 +17,22 @@ use quote::{format_ident, quote};
 use ungrammar::{Grammar, Rule};
 
 use crate::{
-    codegen::{add_preamble, ensure_file_contents, reformat},
+    codegen::{add_preamble, ensure_file_contents, grammar::ast_src::generate_kind_src, reformat},
     project_root,
 };
 
 mod ast_src;
-use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC};
+use self::ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc};
 
 pub(crate) fn generate(check: bool) {
-    let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
+    let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram"))
+        .unwrap()
+        .parse()
+        .unwrap();
+    let ast = lower(&grammar);
+    let kinds_src = generate_kind_src(&ast.nodes, &ast.enums, &grammar);
+
+    let syntax_kinds = generate_syntax_kinds(kinds_src);
     let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
     ensure_file_contents(
         crate::flags::CodegenType::Grammar,
@@ -34,12 +41,6 @@ pub(crate) fn generate(check: bool) {
         check,
     );
 
-    let grammar = fs::read_to_string(project_root().join("crates/syntax/rust.ungram"))
-        .unwrap()
-        .parse()
-        .unwrap();
-    let ast = lower(&grammar);
-
     let ast_tokens = generate_tokens(&ast);
     let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
     ensure_file_contents(
@@ -49,7 +50,7 @@ pub(crate) fn generate(check: bool) {
         check,
     );
 
-    let ast_nodes = generate_nodes(KINDS_SRC, &ast);
+    let ast_nodes = generate_nodes(kinds_src, &ast);
     let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
     ensure_file_contents(
         crate::flags::CodegenType::Grammar,
@@ -96,7 +97,7 @@ fn generate_tokens(grammar: &AstSrc) -> String {
     .replace("#[derive", "\n#[derive")
 }
 
-fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
+fn generate_nodes(kinds: KindsSrc, grammar: &AstSrc) -> String {
     let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
         .nodes
         .iter()
@@ -117,7 +118,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
                 });
 
             let methods = node.fields.iter().map(|field| {
-                let method_name = field.method_name();
+                let method_name = format_ident!("{}", field.method_name());
                 let ty = field.ty();
 
                 if field.is_many() {
@@ -366,7 +367,7 @@ fn write_doc_comment(contents: &[String], dest: &mut String) {
     }
 }
 
-fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
+fn generate_syntax_kinds(grammar: KindsSrc) -> String {
     let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
         .punct
         .iter()
@@ -481,7 +482,9 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
             #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
             #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
             [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
+            [int_number] => { $crate::SyntaxKind::INT_NUMBER };
             [ident] => { $crate::SyntaxKind::IDENT };
+            [string] => { $crate::SyntaxKind::STRING };
             [shebang] => { $crate::SyntaxKind::SHEBANG };
         }
     };
@@ -550,7 +553,7 @@ impl Field {
             _ => None,
         }
     }
-    fn method_name(&self) -> proc_macro2::Ident {
+    fn method_name(&self) -> String {
         match self {
             Field::Token(name) => {
                 let name = match name.as_str() {
@@ -585,13 +588,13 @@ impl Field {
                     "~" => "tilde",
                     _ => name,
                 };
-                format_ident!("{}_token", name)
+                format!("{name}_token",)
             }
             Field::Node { name, .. } => {
                 if name == "type" {
-                    format_ident!("ty")
+                    String::from("ty")
                 } else {
-                    format_ident!("{}", name)
+                    name.to_owned()
                 }
             }
         }
@@ -604,6 +607,15 @@ impl Field {
     }
 }
 
+fn clean_token_name(name: &str) -> String {
+    let cleaned = name.trim_start_matches(['@', '#', '?']);
+    if cleaned.is_empty() {
+        name.to_owned()
+    } else {
+        cleaned.to_owned()
+    }
+}
+
 fn lower(grammar: &Grammar) -> AstSrc {
     let mut res = AstSrc {
         tokens:
@@ -683,14 +695,12 @@ fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, r
         }
         Rule::Token(token) => {
             assert!(label.is_none());
-            let mut name = grammar[*token].name.clone();
-            if name != "int_number" && name != "string" {
-                if "[]{}()".contains(&name) {
-                    name = format!("'{name}'");
-                }
-                let field = Field::Token(name);
-                acc.push(field);
+            let mut name = clean_token_name(&grammar[*token].name);
+            if "[]{}()".contains(&name) {
+                name = format!("'{name}'");
             }
+            let field = Field::Token(name);
+            acc.push(field);
         }
         Rule::Rep(inner) => {
             if let Rule::Node(node) = &**inner {
@@ -863,7 +873,7 @@ fn extract_struct_traits(ast: &mut AstSrc) {
 fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
     let mut to_remove = Vec::new();
     for (i, field) in node.fields.iter().enumerate() {
-        let method_name = field.method_name().to_string();
+        let method_name = field.method_name();
         if methods.iter().any(|&it| it == method_name) {
             to_remove.push(i);
         }
diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
index c246ee9950c..3444f89908b 100644
--- a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
+++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs
@@ -1,241 +1,154 @@
 //! Defines input for code generation process.
 
-pub(crate) struct KindsSrc<'a> {
-    pub(crate) punct: &'a [(&'a str, &'a str)],
-    pub(crate) keywords: &'a [&'a str],
-    pub(crate) contextual_keywords: &'a [&'a str],
-    pub(crate) literals: &'a [&'a str],
-    pub(crate) tokens: &'a [&'a str],
-    pub(crate) nodes: &'a [&'a str],
+use crate::codegen::grammar::to_upper_snake_case;
+
+#[derive(Copy, Clone, Debug)]
+pub(crate) struct KindsSrc {
+    pub(crate) punct: &'static [(&'static str, &'static str)],
+    pub(crate) keywords: &'static [&'static str],
+    pub(crate) contextual_keywords: &'static [&'static str],
+    pub(crate) literals: &'static [&'static str],
+    pub(crate) tokens: &'static [&'static str],
+    pub(crate) nodes: &'static [&'static str],
 }
 
-pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc {
-    punct: &[
-        (";", "SEMICOLON"),
-        (",", "COMMA"),
-        ("(", "L_PAREN"),
-        (")", "R_PAREN"),
-        ("{", "L_CURLY"),
-        ("}", "R_CURLY"),
-        ("[", "L_BRACK"),
-        ("]", "R_BRACK"),
-        ("<", "L_ANGLE"),
-        (">", "R_ANGLE"),
-        ("@", "AT"),
-        ("#", "POUND"),
-        ("~", "TILDE"),
-        ("?", "QUESTION"),
-        ("$", "DOLLAR"),
-        ("&", "AMP"),
-        ("|", "PIPE"),
-        ("+", "PLUS"),
-        ("*", "STAR"),
-        ("/", "SLASH"),
-        ("^", "CARET"),
-        ("%", "PERCENT"),
-        ("_", "UNDERSCORE"),
-        (".", "DOT"),
-        ("..", "DOT2"),
-        ("...", "DOT3"),
-        ("..=", "DOT2EQ"),
-        (":", "COLON"),
-        ("::", "COLON2"),
-        ("=", "EQ"),
-        ("==", "EQ2"),
-        ("=>", "FAT_ARROW"),
-        ("!", "BANG"),
-        ("!=", "NEQ"),
-        ("-", "MINUS"),
-        ("->", "THIN_ARROW"),
-        ("<=", "LTEQ"),
-        (">=", "GTEQ"),
-        ("+=", "PLUSEQ"),
-        ("-=", "MINUSEQ"),
-        ("|=", "PIPEEQ"),
-        ("&=", "AMPEQ"),
-        ("^=", "CARETEQ"),
-        ("/=", "SLASHEQ"),
-        ("*=", "STAREQ"),
-        ("%=", "PERCENTEQ"),
-        ("&&", "AMP2"),
-        ("||", "PIPE2"),
-        ("<<", "SHL"),
-        (">>", "SHR"),
-        ("<<=", "SHLEQ"),
-        (">>=", "SHREQ"),
-    ],
-    keywords: &[
-        "abstract", "as", "async", "await", "become", "box", "break", "const", "continue", "crate",
-        "do", "dyn", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in",
-        "let", "loop", "macro", "match", "mod", "move", "mut", "override", "priv", "pub", "ref",
-        "return", "self", "Self", "static", "struct", "super", "trait", "true", "try", "type",
-        "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
-    ],
-    contextual_keywords: &[
-        "auto",
-        "builtin",
-        "default",
-        "existential",
-        "union",
-        "raw",
-        "macro_rules",
-        "yeet",
-        "offset_of",
-        "asm",
-        "format_args",
-    ],
-    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING", "C_STRING"],
-    tokens: &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"],
-    nodes: &[
-        "SOURCE_FILE",
-        "STRUCT",
-        "UNION",
-        "ENUM",
-        "FN",
-        "RET_TYPE",
-        "EXTERN_CRATE",
-        "MODULE",
-        "USE",
-        "STATIC",
-        "CONST",
-        "TRAIT",
-        "TRAIT_ALIAS",
-        "IMPL",
-        "TYPE_ALIAS",
-        "MACRO_CALL",
-        "MACRO_RULES",
-        "MACRO_ARM",
-        "TOKEN_TREE",
-        "MACRO_DEF",
-        "PAREN_TYPE",
-        "TUPLE_TYPE",
-        "MACRO_TYPE",
-        "NEVER_TYPE",
-        "PATH_TYPE",
-        "PTR_TYPE",
-        "ARRAY_TYPE",
-        "SLICE_TYPE",
-        "REF_TYPE",
-        "INFER_TYPE",
-        "FN_PTR_TYPE",
-        "FOR_TYPE",
-        "IMPL_TRAIT_TYPE",
-        "DYN_TRAIT_TYPE",
-        "OR_PAT",
-        "PAREN_PAT",
-        "REF_PAT",
-        "BOX_PAT",
-        "IDENT_PAT",
-        "WILDCARD_PAT",
-        "REST_PAT",
-        "PATH_PAT",
-        "RECORD_PAT",
-        "RECORD_PAT_FIELD_LIST",
-        "RECORD_PAT_FIELD",
-        "TUPLE_STRUCT_PAT",
-        "TUPLE_PAT",
-        "SLICE_PAT",
-        "RANGE_PAT",
-        "LITERAL_PAT",
-        "MACRO_PAT",
-        "CONST_BLOCK_PAT",
-        // atoms
-        "TUPLE_EXPR",
-        "ARRAY_EXPR",
-        "PAREN_EXPR",
-        "PATH_EXPR",
-        "CLOSURE_EXPR",
-        "IF_EXPR",
-        "WHILE_EXPR",
-        "LOOP_EXPR",
-        "FOR_EXPR",
-        "CONTINUE_EXPR",
-        "BREAK_EXPR",
-        "LABEL",
-        "BLOCK_EXPR",
-        "STMT_LIST",
-        "RETURN_EXPR",
-        "BECOME_EXPR",
-        "YIELD_EXPR",
-        "YEET_EXPR",
-        "LET_EXPR",
-        "UNDERSCORE_EXPR",
-        "MACRO_EXPR",
-        "MATCH_EXPR",
-        "MATCH_ARM_LIST",
-        "MATCH_ARM",
-        "MATCH_GUARD",
-        "RECORD_EXPR",
-        "RECORD_EXPR_FIELD_LIST",
-        "RECORD_EXPR_FIELD",
-        "OFFSET_OF_EXPR",
-        "ASM_EXPR",
-        "FORMAT_ARGS_EXPR",
-        "FORMAT_ARGS_ARG",
-        // postfix
-        "CALL_EXPR",
-        "INDEX_EXPR",
-        "METHOD_CALL_EXPR",
-        "FIELD_EXPR",
-        "AWAIT_EXPR",
-        "TRY_EXPR",
-        "CAST_EXPR",
-        // unary
-        "REF_EXPR",
-        "PREFIX_EXPR",
-        "RANGE_EXPR", // just weird
-        "BIN_EXPR",
-        "EXTERN_BLOCK",
-        "EXTERN_ITEM_LIST",
-        "VARIANT",
-        "RECORD_FIELD_LIST",
-        "RECORD_FIELD",
-        "TUPLE_FIELD_LIST",
-        "TUPLE_FIELD",
-        "VARIANT_LIST",
-        "ITEM_LIST",
-        "ASSOC_ITEM_LIST",
-        "ATTR",
-        "META",
-        "USE_TREE",
-        "USE_TREE_LIST",
-        "PATH",
-        "PATH_SEGMENT",
-        "LITERAL",
-        "RENAME",
-        "VISIBILITY",
-        "WHERE_CLAUSE",
-        "WHERE_PRED",
-        "ABI",
-        "NAME",
-        "NAME_REF",
-        "LET_STMT",
-        "LET_ELSE",
-        "EXPR_STMT",
-        "GENERIC_PARAM_LIST",
-        "GENERIC_PARAM",
-        "LIFETIME_PARAM",
-        "TYPE_PARAM",
-        "RETURN_TYPE_ARG",
-        "CONST_PARAM",
-        "GENERIC_ARG_LIST",
-        "LIFETIME",
-        "LIFETIME_ARG",
-        "TYPE_ARG",
-        "ASSOC_TYPE_ARG",
-        "CONST_ARG",
-        "PARAM_LIST",
-        "PARAM",
-        "SELF_PARAM",
-        "ARG_LIST",
-        "TYPE_BOUND",
-        "TYPE_BOUND_LIST",
-        // macro related
-        "MACRO_ITEMS",
-        "MACRO_STMTS",
-        "MACRO_EAGER_INPUT",
-    ],
-};
+/// The punctuations of the language.
+const PUNCT: &[(&str, &str)] = &[
+    // KEEP THE DOLLAR AT THE TOP, IT'S SPECIAL
+    ("$", "DOLLAR"),
+    (";", "SEMICOLON"),
+    (",", "COMMA"),
+    ("(", "L_PAREN"),
+    (")", "R_PAREN"),
+    ("{", "L_CURLY"),
+    ("}", "R_CURLY"),
+    ("[", "L_BRACK"),
+    ("]", "R_BRACK"),
+    ("<", "L_ANGLE"),
+    (">", "R_ANGLE"),
+    ("@", "AT"),
+    ("#", "POUND"),
+    ("~", "TILDE"),
+    ("?", "QUESTION"),
+    ("&", "AMP"),
+    ("|", "PIPE"),
+    ("+", "PLUS"),
+    ("*", "STAR"),
+    ("/", "SLASH"),
+    ("^", "CARET"),
+    ("%", "PERCENT"),
+    ("_", "UNDERSCORE"),
+    (".", "DOT"),
+    ("..", "DOT2"),
+    ("...", "DOT3"),
+    ("..=", "DOT2EQ"),
+    (":", "COLON"),
+    ("::", "COLON2"),
+    ("=", "EQ"),
+    ("==", "EQ2"),
+    ("=>", "FAT_ARROW"),
+    ("!", "BANG"),
+    ("!=", "NEQ"),
+    ("-", "MINUS"),
+    ("->", "THIN_ARROW"),
+    ("<=", "LTEQ"),
+    (">=", "GTEQ"),
+    ("+=", "PLUSEQ"),
+    ("-=", "MINUSEQ"),
+    ("|=", "PIPEEQ"),
+    ("&=", "AMPEQ"),
+    ("^=", "CARETEQ"),
+    ("/=", "SLASHEQ"),
+    ("*=", "STAREQ"),
+    ("%=", "PERCENTEQ"),
+    ("&&", "AMP2"),
+    ("||", "PIPE2"),
+    ("<<", "SHL"),
+    (">>", "SHR"),
+    ("<<=", "SHLEQ"),
+    (">>=", "SHREQ"),
+];
+const TOKENS: &[&str] = &["ERROR", "WHITESPACE", "NEWLINE", "COMMENT"];
+// &["ERROR", "IDENT", "WHITESPACE", "LIFETIME_IDENT", "COMMENT", "SHEBANG"]
+
+const EOF: &str = "EOF";
+
+const RESERVED: &[&str] = &[
+    "abstract", "become", "box", "do", "final", "macro", "override", "priv", "typeof", "unsized",
+    "virtual", "yield", "try",
+];
+const CONTEXTUAL_RESERVED: &[&str] = &[];
+
+pub(crate) fn generate_kind_src(
+    nodes: &[AstNodeSrc],
+    enums: &[AstEnumSrc],
+    grammar: &ungrammar::Grammar,
+) -> KindsSrc {
+    let mut keywords: Vec<&_> = Vec::new();
+    let mut contextual_keywords: Vec<&_> = Vec::new();
+    let mut tokens: Vec<&_> = TOKENS.to_vec();
+    let mut literals: Vec<&_> = Vec::new();
+    let mut used_puncts = vec![false; PUNCT.len()];
+    // Mark $ as used
+    used_puncts[0] = true;
+    grammar.tokens().for_each(|token| {
+        let name = &*grammar[token].name;
+        if name == EOF {
+            return;
+        }
+        match name.split_at(1) {
+            ("@", lit) if !lit.is_empty() => {
+                literals.push(String::leak(to_upper_snake_case(lit)));
+            }
+            ("#", token) if !token.is_empty() => {
+                tokens.push(String::leak(to_upper_snake_case(token)));
+            }
+            ("?", kw) if !kw.is_empty() => {
+                contextual_keywords.push(String::leak(kw.to_owned()));
+            }
+            _ if name.chars().all(char::is_alphabetic) => {
+                keywords.push(String::leak(name.to_owned()));
+            }
+            _ => {
+                let idx = PUNCT
+                    .iter()
+                    .position(|(punct, _)| punct == &name)
+                    .unwrap_or_else(|| panic!("Grammar references unknown punctuation {name:?}"));
+                used_puncts[idx] = true;
+            }
+        }
+    });
+    PUNCT.iter().zip(used_puncts).filter(|(_, used)| !used).for_each(|((punct, _), _)| {
+        panic!("Punctuation {punct:?} is not used in grammar");
+    });
+    keywords.extend(RESERVED.iter().copied());
+    keywords.sort();
+    keywords.dedup();
+    contextual_keywords.extend(CONTEXTUAL_RESERVED.iter().copied());
+    contextual_keywords.sort();
+    contextual_keywords.dedup();
+
+    // We leak things here for simplicity; that way we don't have to deal with lifetimes.
+    // The execution is a one-shot job, so that's fine.
+    let nodes = nodes
+        .iter()
+        .map(|it| &it.name)
+        .chain(enums.iter().map(|it| &it.name))
+        .map(|it| to_upper_snake_case(it))
+        .map(String::leak)
+        .map(|it| &*it)
+        .collect();
+    let nodes = Vec::leak(nodes);
+    nodes.sort();
+    let keywords = Vec::leak(keywords);
+    let contextual_keywords = Vec::leak(contextual_keywords);
+    let literals = Vec::leak(literals);
+    literals.sort();
+    let tokens = Vec::leak(tokens);
+    tokens.sort();
+
+    KindsSrc { punct: PUNCT, nodes, keywords, contextual_keywords, literals, tokens }
+}
 
 #[derive(Default, Debug)]
 pub(crate) struct AstSrc {