path: root/xtask/src
author    Aleksey Kladov <aleksey.kladov@gmail.com>  2021-07-03 22:11:03 +0300
committer Aleksey Kladov <aleksey.kladov@gmail.com>  2021-07-03 22:11:03 +0300
commit    58d2ece88a17030668e09e4aade7bb2ed27dcaac (patch)
tree      38ffd8178f472d0154c02ed111b4a980fe087edc /xtask/src
parent    668d0612452913624ef8aa4f17d7fef9ac08a75f (diff)
internal: overhaul code generation
* Keep codegen adjacent to the relevant crates.
* Remove codegen deps from xtask, speeding up from-source installation.

This regresses the release process a bit, as it now needs to run the
tests (and, by extension, compile the code).
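The new setup turns each generator into an ordinary test in the crate that owns the
generated file, mirroring the `ensure_file_contents` helper visible in the diff below.
A minimal sketch of that pattern follows; the paths, the generator body, and the test
name here are illustrative only, not the actual rust-analyzer code:

    use std::{fs, path::Path};

    /// Stand-in for a real generator such as the old `generate_syntax_kinds`.
    fn generate_contents() -> String {
        "//! Generated file, do not edit by hand.\npub const N: u32 = 1;\n".to_string()
    }

    #[test]
    fn generated_file_is_fresh() {
        let file = Path::new("src/generated.rs"); // hypothetical target path
        let contents = generate_contents();
        // If the checked-in file already matches, the test passes and nothing is touched.
        if fs::read_to_string(file).map_or(false, |old| old == contents) {
            return;
        }
        // Otherwise rewrite the file and fail, so CI flags stale output while a local
        // `cargo test` run leaves the refreshed file on disk, ready to commit.
        fs::write(file, &contents).unwrap();
        panic!("{} was stale and has been regenerated; re-run the tests", file.display());
    }

With codegen expressed as tests, `cargo test` both checks and refreshes the generated
sources, which is why the release process now has to compile and run the test suite.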
Diffstat (limited to 'xtask/src')
-rw-r--r--  xtask/src/ast_src.rs                       258
-rw-r--r--  xtask/src/codegen.rs                       166
-rw-r--r--  xtask/src/codegen/gen_assists_docs.rs      167
-rw-r--r--  xtask/src/codegen/gen_diagnostic_docs.rs    76
-rw-r--r--  xtask/src/codegen/gen_feature_docs.rs       79
-rw-r--r--  xtask/src/codegen/gen_lint_completions.rs  170
-rw-r--r--  xtask/src/codegen/gen_parser_tests.rs      132
-rw-r--r--  xtask/src/codegen/gen_syntax.rs            747
-rw-r--r--  xtask/src/main.rs                           43
-rw-r--r--  xtask/src/release.rs                         7
-rw-r--r--  xtask/src/tidy.rs                           71
11 files changed, 46 insertions, 1870 deletions
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs
deleted file mode 100644
index fe37d024518..00000000000
--- a/xtask/src/ast_src.rs
+++ /dev/null
@@ -1,258 +0,0 @@
-//! Defines input for code generation process.
-
-pub(crate) struct KindsSrc<'a> {
-    pub(crate) punct: &'a [(&'a str, &'a str)],
-    pub(crate) keywords: &'a [&'a str],
-    pub(crate) contextual_keywords: &'a [&'a str],
-    pub(crate) literals: &'a [&'a str],
-    pub(crate) tokens: &'a [&'a str],
-    pub(crate) nodes: &'a [&'a str],
-}
-
-pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
-    punct: &[
-        (";", "SEMICOLON"),
-        (",", "COMMA"),
-        ("(", "L_PAREN"),
-        (")", "R_PAREN"),
-        ("{", "L_CURLY"),
-        ("}", "R_CURLY"),
-        ("[", "L_BRACK"),
-        ("]", "R_BRACK"),
-        ("<", "L_ANGLE"),
-        (">", "R_ANGLE"),
-        ("@", "AT"),
-        ("#", "POUND"),
-        ("~", "TILDE"),
-        ("?", "QUESTION"),
-        ("$", "DOLLAR"),
-        ("&", "AMP"),
-        ("|", "PIPE"),
-        ("+", "PLUS"),
-        ("*", "STAR"),
-        ("/", "SLASH"),
-        ("^", "CARET"),
-        ("%", "PERCENT"),
-        ("_", "UNDERSCORE"),
-        (".", "DOT"),
-        ("..", "DOT2"),
-        ("...", "DOT3"),
-        ("..=", "DOT2EQ"),
-        (":", "COLON"),
-        ("::", "COLON2"),
-        ("=", "EQ"),
-        ("==", "EQ2"),
-        ("=>", "FAT_ARROW"),
-        ("!", "BANG"),
-        ("!=", "NEQ"),
-        ("-", "MINUS"),
-        ("->", "THIN_ARROW"),
-        ("<=", "LTEQ"),
-        (">=", "GTEQ"),
-        ("+=", "PLUSEQ"),
-        ("-=", "MINUSEQ"),
-        ("|=", "PIPEEQ"),
-        ("&=", "AMPEQ"),
-        ("^=", "CARETEQ"),
-        ("/=", "SLASHEQ"),
-        ("*=", "STAREQ"),
-        ("%=", "PERCENTEQ"),
-        ("&&", "AMP2"),
-        ("||", "PIPE2"),
-        ("<<", "SHL"),
-        (">>", "SHR"),
-        ("<<=", "SHLEQ"),
-        (">>=", "SHREQ"),
-    ],
-    keywords: &[
-        "as", "async", "await", "box", "break", "const", "continue", "crate", "dyn", "else",
-        "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro",
-        "match", "mod", "move", "mut", "pub", "ref", "return", "self", "static", "struct", "super",
-        "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield",
-    ],
-    contextual_keywords: &["auto", "default", "existential", "union", "raw", "macro_rules"],
-    literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
-    tokens: &[
-        "ERROR",
-        "IDENT",
-        "WHITESPACE",
-        "LIFETIME_IDENT",
-        "COMMENT",
-        "SHEBANG",
-        "L_DOLLAR",
-        "R_DOLLAR",
-    ],
-    nodes: &[
-        "SOURCE_FILE",
-        "STRUCT",
-        "UNION",
-        "ENUM",
-        "FN",
-        "RET_TYPE",
-        "EXTERN_CRATE",
-        "MODULE",
-        "USE",
-        "STATIC",
-        "CONST",
-        "TRAIT",
-        "IMPL",
-        "TYPE_ALIAS",
-        "MACRO_CALL",
-        "MACRO_RULES",
-        "MACRO_ARM",
-        "TOKEN_TREE",
-        "MACRO_DEF",
-        "PAREN_TYPE",
-        "TUPLE_TYPE",
-        "MACRO_TYPE",
-        "NEVER_TYPE",
-        "PATH_TYPE",
-        "PTR_TYPE",
-        "ARRAY_TYPE",
-        "SLICE_TYPE",
-        "REF_TYPE",
-        "INFER_TYPE",
-        "FN_PTR_TYPE",
-        "FOR_TYPE",
-        "IMPL_TRAIT_TYPE",
-        "DYN_TRAIT_TYPE",
-        "OR_PAT",
-        "PAREN_PAT",
-        "REF_PAT",
-        "BOX_PAT",
-        "IDENT_PAT",
-        "WILDCARD_PAT",
-        "REST_PAT",
-        "PATH_PAT",
-        "RECORD_PAT",
-        "RECORD_PAT_FIELD_LIST",
-        "RECORD_PAT_FIELD",
-        "TUPLE_STRUCT_PAT",
-        "TUPLE_PAT",
-        "SLICE_PAT",
-        "RANGE_PAT",
-        "LITERAL_PAT",
-        "MACRO_PAT",
-        "CONST_BLOCK_PAT",
-        // atoms
-        "TUPLE_EXPR",
-        "ARRAY_EXPR",
-        "PAREN_EXPR",
-        "PATH_EXPR",
-        "CLOSURE_EXPR",
-        "IF_EXPR",
-        "WHILE_EXPR",
-        "CONDITION",
-        "LOOP_EXPR",
-        "FOR_EXPR",
-        "CONTINUE_EXPR",
-        "BREAK_EXPR",
-        "LABEL",
-        "BLOCK_EXPR",
-        "RETURN_EXPR",
-        "YIELD_EXPR",
-        "MATCH_EXPR",
-        "MATCH_ARM_LIST",
-        "MATCH_ARM",
-        "MATCH_GUARD",
-        "RECORD_EXPR",
-        "RECORD_EXPR_FIELD_LIST",
-        "RECORD_EXPR_FIELD",
-        "EFFECT_EXPR",
-        "BOX_EXPR",
-        // postfix
-        "CALL_EXPR",
-        "INDEX_EXPR",
-        "METHOD_CALL_EXPR",
-        "FIELD_EXPR",
-        "AWAIT_EXPR",
-        "TRY_EXPR",
-        "CAST_EXPR",
-        // unary
-        "REF_EXPR",
-        "PREFIX_EXPR",
-        "RANGE_EXPR", // just weird
-        "BIN_EXPR",
-        "EXTERN_BLOCK",
-        "EXTERN_ITEM_LIST",
-        "VARIANT",
-        "RECORD_FIELD_LIST",
-        "RECORD_FIELD",
-        "TUPLE_FIELD_LIST",
-        "TUPLE_FIELD",
-        "VARIANT_LIST",
-        "ITEM_LIST",
-        "ASSOC_ITEM_LIST",
-        "ATTR",
-        "META",
-        "USE_TREE",
-        "USE_TREE_LIST",
-        "PATH",
-        "PATH_SEGMENT",
-        "LITERAL",
-        "RENAME",
-        "VISIBILITY",
-        "WHERE_CLAUSE",
-        "WHERE_PRED",
-        "ABI",
-        "NAME",
-        "NAME_REF",
-        "LET_STMT",
-        "EXPR_STMT",
-        "GENERIC_PARAM_LIST",
-        "GENERIC_PARAM",
-        "LIFETIME_PARAM",
-        "TYPE_PARAM",
-        "CONST_PARAM",
-        "GENERIC_ARG_LIST",
-        "LIFETIME",
-        "LIFETIME_ARG",
-        "TYPE_ARG",
-        "ASSOC_TYPE_ARG",
-        "CONST_ARG",
-        "PARAM_LIST",
-        "PARAM",
-        "SELF_PARAM",
-        "ARG_LIST",
-        "TYPE_BOUND",
-        "TYPE_BOUND_LIST",
-        // macro related
-        "MACRO_ITEMS",
-        "MACRO_STMTS",
-    ],
-};
-
-#[derive(Default, Debug)]
-pub(crate) struct AstSrc {
-    pub(crate) tokens: Vec<String>,
-    pub(crate) nodes: Vec<AstNodeSrc>,
-    pub(crate) enums: Vec<AstEnumSrc>,
-}
-
-#[derive(Debug)]
-pub(crate) struct AstNodeSrc {
-    pub(crate) doc: Vec<String>,
-    pub(crate) name: String,
-    pub(crate) traits: Vec<String>,
-    pub(crate) fields: Vec<Field>,
-}
-
-#[derive(Debug, Eq, PartialEq)]
-pub(crate) enum Field {
-    Token(String),
-    Node { name: String, ty: String, cardinality: Cardinality },
-}
-
-#[derive(Debug, Eq, PartialEq)]
-pub(crate) enum Cardinality {
-    Optional,
-    Many,
-}
-
-#[derive(Debug)]
-pub(crate) struct AstEnumSrc {
-    pub(crate) doc: Vec<String>,
-    pub(crate) name: String,
-    pub(crate) traits: Vec<String>,
-    pub(crate) variants: Vec<String>,
-}
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
deleted file mode 100644
index 518e17e3897..00000000000
--- a/xtask/src/codegen.rs
+++ /dev/null
@@ -1,166 +0,0 @@
-//! We use code generation heavily in rust-analyzer.
-//!
-//! Rather than doing it via proc-macros, we use the old-school way of just dumping
-//! the source code.
-//!
-//! This module's submodules define specific bits that we generate.
-
-mod gen_syntax;
-mod gen_parser_tests;
-mod gen_lint_completions;
-mod gen_assists_docs;
-mod gen_feature_docs;
-mod gen_diagnostic_docs;
-
-use std::{
-    fmt, mem,
-    path::{Path, PathBuf},
-};
-use xshell::{cmd, pushenv};
-
-use crate::{ensure_rustfmt, project_root, Result};
-
-pub(crate) use self::{
-    gen_assists_docs::generate_assists_tests, gen_lint_completions::generate_lint_completions,
-    gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax,
-};
-
-pub(crate) fn docs() -> Result<()> {
-    // We don't commit docs to the repo, so we can just overwrite them.
-    gen_assists_docs::generate_assists_docs()?;
-    gen_feature_docs::generate_feature_docs()?;
-    gen_diagnostic_docs::generate_diagnostic_docs()?;
-    Ok(())
-}
-
-#[allow(unused)]
-fn used() {
-    generate_parser_tests();
-    generate_assists_tests();
-    generate_syntax();
-    generate_lint_completions();
-}
-
-/// Checks that the `file` has the specified `contents`. If that is not the
-/// case, updates the file and then fails the test.
-pub(crate) fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> {
-    match std::fs::read_to_string(file) {
-        Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => {
-            return Ok(())
-        }
-        _ => (),
-    }
-    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
-    eprintln!(
-        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
-        display_path.display()
-    );
-    if std::env::var("CI").is_ok() {
-        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
-    }
-    if let Some(parent) = file.parent() {
-        let _ = std::fs::create_dir_all(parent);
-    }
-    std::fs::write(file, contents).unwrap();
-    anyhow::bail!("some file was not up to date and has been updated, simply re-run the tests")
-}
-
-fn normalize_newlines(s: &str) -> String {
-    s.replace("\r\n", "\n")
-}
-
-const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`";
-
-fn reformat(text: &str) -> Result<String> {
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    ensure_rustfmt()?;
-    let rustfmt_toml = project_root().join("rustfmt.toml");
-    let stdout = cmd!("rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
-        .stdin(text)
-        .read()?;
-    Ok(format!("//! {}\n\n{}\n", PREAMBLE, stdout))
-}
-
-fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> {
-    do_extract_comment_blocks(text, false).into_iter().map(|(_line, block)| block).collect()
-}
-
-fn extract_comment_blocks_with_empty_lines(tag: &str, text: &str) -> Vec<CommentBlock> {
-    assert!(tag.starts_with(char::is_uppercase));
-    let tag = format!("{}:", tag);
-    let mut res = Vec::new();
-    for (line, mut block) in do_extract_comment_blocks(text, true) {
-        let first = block.remove(0);
-        if first.starts_with(&tag) {
-            let id = first[tag.len()..].trim().to_string();
-            let block = CommentBlock { id, line, contents: block };
-            res.push(block);
-        }
-    }
-    res
-}
-
-struct CommentBlock {
-    id: String,
-    line: usize,
-    contents: Vec<String>,
-}
-
-fn do_extract_comment_blocks(
-    text: &str,
-    allow_blocks_with_empty_lines: bool,
-) -> Vec<(usize, Vec<String>)> {
-    let mut res = Vec::new();
-
-    let prefix = "// ";
-    let lines = text.lines().map(str::trim_start);
-
-    let mut block = (0, vec![]);
-    for (line_num, line) in lines.enumerate() {
-        if line == "//" && allow_blocks_with_empty_lines {
-            block.1.push(String::new());
-            continue;
-        }
-
-        let is_comment = line.starts_with(prefix);
-        if is_comment {
-            block.1.push(line[prefix.len()..].to_string());
-        } else {
-            if !block.1.is_empty() {
-                res.push(mem::take(&mut block));
-            }
-            block.0 = line_num + 2;
-        }
-    }
-    if !block.1.is_empty() {
-        res.push(block)
-    }
-    res
-}
-
-#[derive(Debug)]
-struct Location {
-    file: PathBuf,
-    line: usize,
-}
-
-impl Location {
-    fn new(file: PathBuf, line: usize) -> Self {
-        Self { file, line }
-    }
-}
-
-impl fmt::Display for Location {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
-        let path = path.replace('\\', "/");
-        let name = self.file.file_name().unwrap();
-        write!(
-            f,
-            "https://github.com/rust-analyzer/rust-analyzer/blob/master/{}#L{}[{}]",
-            path,
-            self.line,
-            name.to_str().unwrap()
-        )
-    }
-}
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs
deleted file mode 100644
index c91716409eb..00000000000
--- a/xtask/src/codegen/gen_assists_docs.rs
+++ /dev/null
@@ -1,167 +0,0 @@
-//! Generates `assists.md` documentation.
-
-use std::{fmt, path::Path};
-
-use xshell::write_file;
-
-use crate::{
-    codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE},
-    project_root, rust_files_in, Result,
-};
-
-pub(crate) fn generate_assists_tests() -> Result<()> {
-    let assists = Assist::collect()?;
-    generate_tests(&assists)
-}
-
-pub(crate) fn generate_assists_docs() -> Result<()> {
-    let assists = Assist::collect()?;
-    let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
-    let dst = project_root().join("docs/user/generated_assists.adoc");
-    write_file(dst, &contents)?;
-    Ok(())
-}
-
-#[derive(Debug)]
-struct Assist {
-    id: String,
-    location: Location,
-    doc: String,
-    before: String,
-    after: String,
-}
-
-impl Assist {
-    fn collect() -> Result<Vec<Assist>> {
-        let mut res = Vec::new();
-        for path in rust_files_in(&project_root().join("crates/ide_assists/src/handlers")) {
-            collect_file(&mut res, path.as_path())?;
-        }
-        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
-        return Ok(res);
-
-        fn collect_file(acc: &mut Vec<Assist>, path: &Path) -> Result<()> {
-            let text = xshell::read_file(path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Assist", &text);
-
-            for block in comment_blocks {
-                // FIXME: doesn't support blank lines yet, need to tweak
-                // `extract_comment_blocks` for that.
-                let id = block.id;
-                assert!(
-                    id.chars().all(|it| it.is_ascii_lowercase() || it == '_'),
-                    "invalid assist id: {:?}",
-                    id
-                );
-                let mut lines = block.contents.iter();
-
-                let doc = take_until(lines.by_ref(), "```").trim().to_string();
-                assert!(
-                    doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'),
-                    "\n\n{}: assist docs should be proper sentences, with capitalization and a full stop at the end.\n\n{}\n\n",
-                    id, doc,
-                );
-
-                let before = take_until(lines.by_ref(), "```");
-
-                assert_eq!(lines.next().unwrap().as_str(), "->");
-                assert_eq!(lines.next().unwrap().as_str(), "```");
-                let after = take_until(lines.by_ref(), "```");
-                let location = Location::new(path.to_path_buf(), block.line);
-                acc.push(Assist { id, location, doc, before, after })
-            }
-
-            fn take_until<'a>(lines: impl Iterator<Item = &'a String>, marker: &str) -> String {
-                let mut buf = Vec::new();
-                for line in lines {
-                    if line == marker {
-                        break;
-                    }
-                    buf.push(line.clone());
-                }
-                buf.join("\n")
-            }
-            Ok(())
-        }
-    }
-}
-
-impl fmt::Display for Assist {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let before = self.before.replace("$0", "┃"); // Unicode pseudo-graphics bar
-        let after = self.after.replace("$0", "┃");
-        writeln!(
-            f,
-            "[discrete]\n=== `{}`
-**Source:** {}
-
-{}
-
-.Before
-```rust
-{}```
-
-.After
-```rust
-{}```",
-            self.id,
-            self.location,
-            self.doc,
-            hide_hash_comments(&before),
-            hide_hash_comments(&after)
-        )
-    }
-}
-
-fn generate_tests(assists: &[Assist]) -> Result<()> {
-    let mut buf = String::from("use super::check_doc_test;\n");
-
-    for assist in assists.iter() {
-        let test = format!(
-            r######"
-#[test]
-fn doctest_{}() {{
-    check_doc_test(
-        "{}",
-r#####"
-{}"#####, r#####"
-{}"#####)
-}}
-"######,
-            assist.id,
-            assist.id,
-            reveal_hash_comments(&assist.before),
-            reveal_hash_comments(&assist.after)
-        );
-
-        buf.push_str(&test)
-    }
-    let buf = reformat(&buf)?;
-    codegen::ensure_file_contents(
-        &project_root().join("crates/ide_assists/src/tests/generated.rs"),
-        &buf,
-    )
-}
-
-fn hide_hash_comments(text: &str) -> String {
-    text.split('\n') // want final newline
-        .filter(|&it| !(it.starts_with("# ") || it == "#"))
-        .map(|it| format!("{}\n", it))
-        .collect()
-}
-
-fn reveal_hash_comments(text: &str) -> String {
-    text.split('\n') // want final newline
-        .map(|it| {
-            if let Some(stripped) = it.strip_prefix("# ") {
-                stripped
-            } else if it == "#" {
-                ""
-            } else {
-                it
-            }
-        })
-        .map(|it| format!("{}\n", it))
-        .collect()
-}
diff --git a/xtask/src/codegen/gen_diagnostic_docs.rs b/xtask/src/codegen/gen_diagnostic_docs.rs
deleted file mode 100644
index 9cf4d0a88a3..00000000000
--- a/xtask/src/codegen/gen_diagnostic_docs.rs
+++ /dev/null
@@ -1,76 +0,0 @@
-//! Generates `generated_diagnostic.adoc` documentation.
-
-use std::{fmt, path::PathBuf};
-
-use xshell::write_file;
-
-use crate::{
-    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
-    project_root, rust_files, Result,
-};
-
-pub(crate) fn generate_diagnostic_docs() -> Result<()> {
-    let diagnostics = Diagnostic::collect()?;
-    let contents =
-        diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
-    let dst = project_root().join("docs/user/generated_diagnostic.adoc");
-    write_file(&dst, &contents)?;
-    Ok(())
-}
-
-#[derive(Debug)]
-struct Diagnostic {
-    id: String,
-    location: Location,
-    doc: String,
-}
-
-impl Diagnostic {
-    fn collect() -> Result<Vec<Diagnostic>> {
-        let mut res = Vec::new();
-        for path in rust_files() {
-            collect_file(&mut res, path)?;
-        }
-        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
-        return Ok(res);
-
-        fn collect_file(acc: &mut Vec<Diagnostic>, path: PathBuf) -> Result<()> {
-            let text = xshell::read_file(&path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Diagnostic", &text);
-
-            for block in comment_blocks {
-                let id = block.id;
-                if let Err(msg) = is_valid_diagnostic_name(&id) {
-                    panic!("invalid diagnostic name: {:?}:\n  {}", id, msg)
-                }
-                let doc = block.contents.join("\n");
-                let location = Location::new(path.clone(), block.line);
-                acc.push(Diagnostic { id, location, doc })
-            }
-
-            Ok(())
-        }
-    }
-}
-
-fn is_valid_diagnostic_name(diagnostic: &str) -> Result<(), String> {
-    let diagnostic = diagnostic.trim();
-    if diagnostic.find(char::is_whitespace).is_some() {
-        return Err("Diagnostic names can't contain whitespace symbols".into());
-    }
-    if diagnostic.chars().any(|c| c.is_ascii_uppercase()) {
-        return Err("Diagnostic names can't contain uppercase symbols".into());
-    }
-    if diagnostic.chars().any(|c| !c.is_ascii()) {
-        return Err("Diagnostic can't contain non-ASCII symbols".into());
-    }
-
-    Ok(())
-}
-
-impl fmt::Display for Diagnostic {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
-    }
-}
diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs
deleted file mode 100644
index c373d7d70f5..00000000000
--- a/xtask/src/codegen/gen_feature_docs.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-//! Generates `generated_features.adoc` documentation.
-
-use std::{fmt, path::PathBuf};
-
-use xshell::write_file;
-
-use crate::{
-    codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE},
-    project_root, rust_files, Result,
-};
-
-pub(crate) fn generate_feature_docs() -> Result<()> {
-    let features = Feature::collect()?;
-    let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
-    let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
-    let dst = project_root().join("docs/user/generated_features.adoc");
-    write_file(&dst, &contents)?;
-    Ok(())
-}
-
-#[derive(Debug)]
-struct Feature {
-    id: String,
-    location: Location,
-    doc: String,
-}
-
-impl Feature {
-    fn collect() -> Result<Vec<Feature>> {
-        let mut res = Vec::new();
-        for path in rust_files() {
-            collect_file(&mut res, path)?;
-        }
-        res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
-        return Ok(res);
-
-        fn collect_file(acc: &mut Vec<Feature>, path: PathBuf) -> Result<()> {
-            let text = xshell::read_file(&path)?;
-            let comment_blocks = extract_comment_blocks_with_empty_lines("Feature", &text);
-
-            for block in comment_blocks {
-                let id = block.id;
-                if let Err(msg) = is_valid_feature_name(&id) {
-                    panic!("invalid feature name: {:?}:\n  {}", id, msg)
-                }
-                let doc = block.contents.join("\n");
-                let location = Location::new(path.clone(), block.line);
-                acc.push(Feature { id, location, doc })
-            }
-
-            Ok(())
-        }
-    }
-}
-
-fn is_valid_feature_name(feature: &str) -> Result<(), String> {
-    'word: for word in feature.split_whitespace() {
-        for &short in ["to", "and"].iter() {
-            if word == short {
-                continue 'word;
-            }
-        }
-        for &short in ["To", "And"].iter() {
-            if word == short {
-                return Err(format!("Don't capitalize {:?}", word));
-            }
-        }
-        if !word.starts_with(char::is_uppercase) {
-            return Err(format!("Capitalize {:?}", word));
-        }
-    }
-    Ok(())
-}
-
-impl fmt::Display for Feature {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        writeln!(f, "=== {}\n**Source:** {}\n{}", self.id, self.location, self.doc)
-    }
-}
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
deleted file mode 100644
index 54fcaa0e64a..00000000000
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ /dev/null
@@ -1,170 +0,0 @@
-//! Generates descriptor structures for unstable features from the Unstable Book.
-use std::borrow::Cow;
-use std::fmt::Write;
-use std::path::{Path, PathBuf};
-
-use walkdir::WalkDir;
-use xshell::{cmd, read_file};
-
-use crate::codegen::{ensure_file_contents, project_root, reformat, Result};
-
-pub(crate) fn generate_lint_completions() -> Result<()> {
-    if !project_root().join("./target/rust").exists() {
-        cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
-    }
-
-    let mut contents = String::from(
-        r#"pub struct Lint {
-    pub label: &'static str,
-    pub description: &'static str,
-}
-
-"#,
-    );
-    generate_lint_descriptor(&mut contents)?;
-    contents.push('\n');
-
-    generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
-    contents.push('\n');
-
-    cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
-    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
-    let contents = reformat(&contents)?;
-
-    let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
-    ensure_file_contents(destination.as_path(), &contents)?;
-
-    Ok(())
-}
-
-fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
-    let stdout = cmd!("rustc -W help").read()?;
-    let start_lints =
-        stdout.find("----  -------  -------").ok_or_else(|| anyhow::format_err!(""))?;
-    let start_lint_groups =
-        stdout.find("----  ---------").ok_or_else(|| anyhow::format_err!(""))?;
-    let end_lints =
-        stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!(""))?;
-    let end_lint_groups = stdout
-        .find("Lint tools like Clippy can provide additional lints and lint groups.")
-        .ok_or_else(|| anyhow::format_err!(""))?;
-    buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
-    buf.push('\n');
-    let mut lints = stdout[start_lints..end_lints]
-        .lines()
-        .skip(1)
-        .filter(|l| !l.is_empty())
-        .map(|line| {
-            let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
-            let (_default_level, description) =
-                rest.trim().split_once(char::is_whitespace).unwrap();
-            (name.trim(), Cow::Borrowed(description.trim()))
-        })
-        .collect::<Vec<_>>();
-    lints.extend(
-        stdout[start_lint_groups..end_lint_groups].lines().skip(1).filter(|l| !l.is_empty()).map(
-            |line| {
-                let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
-                (name.trim(), format!("lint group for: {}", lints.trim()).into())
-            },
-        ),
-    );
-
-    lints.sort_by(|(ident, _), (ident2, _)| ident.cmp(ident2));
-    lints.into_iter().for_each(|(name, description)| {
-        push_lint_completion(buf, &name.replace("-", "_"), &description)
-    });
-    buf.push_str("];\n");
-    Ok(())
-}
-
-fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
-    buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
-    buf.push('\n');
-    let mut vec = ["language-features", "library-features"]
-        .iter()
-        .flat_map(|it| WalkDir::new(src_dir.join(it)))
-        .filter_map(|e| e.ok())
-        .filter(|entry| {
-            // Get all `.md` files
-            entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
-        })
-        .map(|entry| {
-            let path = entry.path();
-            let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
-            let doc = read_file(path).unwrap();
-            (feature_ident, doc)
-        })
-        .collect::<Vec<_>>();
-    vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
-    vec.into_iter()
-        .for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
-    buf.push_str("];\n");
-    Ok(())
-}
-
-#[derive(Default)]
-struct ClippyLint {
-    help: String,
-    id: String,
-}
-
-fn unescape(s: &str) -> String {
-    s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
-}
-
-fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
-    let file_content = read_file(path)?;
-    let mut clippy_lints: Vec<ClippyLint> = vec![];
-
-    for line in file_content.lines().map(|line| line.trim()) {
-        if line.starts_with(r#""id":"#) {
-            let clippy_lint = ClippyLint {
-                id: line
-                    .strip_prefix(r#""id": ""#)
-                    .expect("should be prefixed by id")
-                    .strip_suffix(r#"","#)
-                    .expect("should be suffixed by comma")
-                    .into(),
-                help: String::new(),
-            };
-            clippy_lints.push(clippy_lint)
-        } else if line.starts_with(r#""What it does":"#) {
-            // Typical line to strip: "What it does": "Here is my useful content",
-            let prefix_to_strip = r#""What it does": ""#;
-            let suffix_to_strip = r#"","#;
-
-            let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
-            clippy_lint.help = line
-                .strip_prefix(prefix_to_strip)
-                .expect("should be prefixed by what it does")
-                .strip_suffix(suffix_to_strip)
-                .map(unescape)
-                .expect("should be suffixed by comma");
-        }
-    }
-    clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
-    buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
-    buf.push('\n');
-    clippy_lints.into_iter().for_each(|clippy_lint| {
-        let lint_ident = format!("clippy::{}", clippy_lint.id);
-        let doc = clippy_lint.help;
-        push_lint_completion(buf, &lint_ident, &doc);
-    });
-
-    buf.push_str("];\n");
-
-    Ok(())
-}
-
-fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
-    writeln!(
-        buf,
-        r###"    Lint {{
-        label: "{}",
-        description: r##"{}"##
-    }},"###,
-        label, description
-    )
-    .unwrap();
-}
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs
deleted file mode 100644
index 2fecb9b5bd8..00000000000
--- a/xtask/src/codegen/gen_parser_tests.rs
+++ /dev/null
@@ -1,132 +0,0 @@
-//! This module greps the parser's code for specially formatted comments and turns
-//! them into tests.
-
-use std::{
-    collections::HashMap,
-    fs, iter,
-    path::{Path, PathBuf},
-};
-
-use crate::{
-    codegen::{ensure_file_contents, extract_comment_blocks},
-    project_root, Result,
-};
-
-pub(crate) fn generate_parser_tests() -> Result<()> {
-    let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
-    fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> {
-        let tests_dir = project_root().join(into);
-        if !tests_dir.is_dir() {
-            fs::create_dir_all(&tests_dir)?;
-        }
-        // ok is never actually read, but it needs to be specified to create a Test in existing_tests
-        let existing = existing_tests(&tests_dir, true)?;
-        for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
-            panic!("Test is deleted: {}", t);
-        }
-
-        let mut new_idx = existing.len() + 1;
-        for (name, test) in tests {
-            let path = match existing.get(name) {
-                Some((path, _test)) => path.clone(),
-                None => {
-                    let file_name = format!("{:04}_{}.rs", new_idx, name);
-                    new_idx += 1;
-                    tests_dir.join(file_name)
-                }
-            };
-            ensure_file_contents(&path, &test.text)?;
-        }
-        Ok(())
-    }
-    install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?;
-    install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err")
-}
-
-#[derive(Debug)]
-struct Test {
-    name: String,
-    text: String,
-    ok: bool,
-}
-
-#[derive(Default, Debug)]
-struct Tests {
-    ok: HashMap<String, Test>,
-    err: HashMap<String, Test>,
-}
-
-fn collect_tests(s: &str) -> Vec<Test> {
-    let mut res = Vec::new();
-    for comment_block in extract_comment_blocks(s) {
-        let first_line = &comment_block[0];
-        let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
-            (name.to_string(), true)
-        } else if let Some(name) = first_line.strip_prefix("test_err ") {
-            (name.to_string(), false)
-        } else {
-            continue;
-        };
-        let text: String = comment_block[1..]
-            .iter()
-            .cloned()
-            .chain(iter::once(String::new()))
-            .collect::<Vec<_>>()
-            .join("\n");
-        assert!(!text.trim().is_empty() && text.ends_with('\n'));
-        res.push(Test { name, text, ok })
-    }
-    res
-}
-
-fn tests_from_dir(dir: &Path) -> Result<Tests> {
-    let mut res = Tests::default();
-    for entry in ::walkdir::WalkDir::new(dir) {
-        let entry = entry.unwrap();
-        if !entry.file_type().is_file() {
-            continue;
-        }
-        if entry.path().extension().unwrap_or_default() != "rs" {
-            continue;
-        }
-        process_file(&mut res, entry.path())?;
-    }
-    let grammar_rs = dir.parent().unwrap().join("grammar.rs");
-    process_file(&mut res, &grammar_rs)?;
-    return Ok(res);
-    fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
-        let text = fs::read_to_string(path)?;
-
-        for test in collect_tests(&text) {
-            if test.ok {
-                if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
-                    anyhow::bail!("Duplicate test: {}", old_test.name);
-                }
-            } else if let Some(old_test) = res.err.insert(test.name.clone(), test) {
-                anyhow::bail!("Duplicate test: {}", old_test.name);
-            }
-        }
-        Ok(())
-    }
-}
-
-fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
-    let mut res = HashMap::new();
-    for file in fs::read_dir(dir)? {
-        let file = file?;
-        let path = file.path();
-        if path.extension().unwrap_or_default() != "rs" {
-            continue;
-        }
-        let name = {
-            let file_name = path.file_name().unwrap().to_str().unwrap();
-            file_name[5..file_name.len() - 3].to_string()
-        };
-        let text = xshell::read_file(&path)?;
-        let test = Test { name: name.clone(), text, ok };
-        if let Some(old) = res.insert(name, (path, test)) {
-            println!("Duplicate test: {:?}", old);
-        }
-    }
-    Ok(res)
-}
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
deleted file mode 100644
index 5435da76e41..00000000000
--- a/xtask/src/codegen/gen_syntax.rs
+++ /dev/null
@@ -1,747 +0,0 @@
-//! This module generates the AST datatypes used by rust-analyzer.
-//!
-//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
-//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
-
-use std::{
-    collections::{BTreeSet, HashSet},
-    fmt::Write,
-};
-
-use proc_macro2::{Punct, Spacing};
-use quote::{format_ident, quote};
-use ungrammar::{rust_grammar, Grammar, Rule};
-
-use crate::{
-    ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC},
-    codegen::{ensure_file_contents, reformat},
-    project_root, Result,
-};
-
-pub(crate) fn generate_syntax() -> Result<()> {
-    let grammar = rust_grammar();
-    let ast = lower(&grammar);
-
-    let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs");
-    let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?;
-    ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?;
-
-    let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs");
-    let contents = generate_tokens(&ast)?;
-    ensure_file_contents(ast_tokens_file.as_path(), &contents)?;
-
-    let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs");
-    let contents = generate_nodes(KINDS_SRC, &ast)?;
-    ensure_file_contents(ast_nodes_file.as_path(), &contents)?;
-
-    Ok(())
-}
-
-fn generate_tokens(grammar: &AstSrc) -> Result<String> {
-    let tokens = grammar.tokens.iter().map(|token| {
-        let name = format_ident!("{}", token);
-        let kind = format_ident!("{}", to_upper_snake_case(token));
-        quote! {
-            #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-            pub struct #name {
-                pub(crate) syntax: SyntaxToken,
-            }
-            impl std::fmt::Display for #name {
-                fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                    std::fmt::Display::fmt(&self.syntax, f)
-                }
-            }
-            impl AstToken for #name {
-                fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
-                fn cast(syntax: SyntaxToken) -> Option<Self> {
-                    if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
-                }
-                fn syntax(&self) -> &SyntaxToken { &self.syntax }
-            }
-        }
-    });
-
-    let pretty = reformat(
-        &quote! {
-            use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
-            #(#tokens)*
-        }
-        .to_string(),
-    )?
-    .replace("#[derive", "\n#[derive");
-    Ok(pretty)
-}
-
-fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
-    let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
-        .nodes
-        .iter()
-        .map(|node| {
-            let name = format_ident!("{}", node.name);
-            let kind = format_ident!("{}", to_upper_snake_case(&node.name));
-            let traits = node.traits.iter().map(|trait_name| {
-                let trait_name = format_ident!("{}", trait_name);
-                quote!(impl ast::#trait_name for #name {})
-            });
-
-            let methods = node.fields.iter().map(|field| {
-                let method_name = field.method_name();
-                let ty = field.ty();
-
-                if field.is_many() {
-                    quote! {
-                        pub fn #method_name(&self) -> AstChildren<#ty> {
-                            support::children(&self.syntax)
-                        }
-                    }
-                } else if let Some(token_kind) = field.token_kind() {
-                    quote! {
-                        pub fn #method_name(&self) -> Option<#ty> {
-                            support::token(&self.syntax, #token_kind)
-                        }
-                    }
-                } else {
-                    quote! {
-                        pub fn #method_name(&self) -> Option<#ty> {
-                            support::child(&self.syntax)
-                        }
-                    }
-                }
-            });
-            (
-                quote! {
-                    #[pretty_doc_comment_placeholder_workaround]
-                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-                    pub struct #name {
-                        pub(crate) syntax: SyntaxNode,
-                    }
-
-                    #(#traits)*
-
-                    impl #name {
-                        #(#methods)*
-                    }
-                },
-                quote! {
-                    impl AstNode for #name {
-                        fn can_cast(kind: SyntaxKind) -> bool {
-                            kind == #kind
-                        }
-                        fn cast(syntax: SyntaxNode) -> Option<Self> {
-                            if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
-                        }
-                        fn syntax(&self) -> &SyntaxNode { &self.syntax }
-                    }
-                },
-            )
-        })
-        .unzip();
-
-    let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
-        .enums
-        .iter()
-        .map(|en| {
-            let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
-            let name = format_ident!("{}", en.name);
-            let kinds: Vec<_> = variants
-                .iter()
-                .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
-                .collect();
-            let traits = en.traits.iter().map(|trait_name| {
-                let trait_name = format_ident!("{}", trait_name);
-                quote!(impl ast::#trait_name for #name {})
-            });
-
-            let ast_node = if en.name == "Stmt" {
-                quote! {}
-            } else {
-                quote! {
-                    impl AstNode for #name {
-                        fn can_cast(kind: SyntaxKind) -> bool {
-                            match kind {
-                                #(#kinds)|* => true,
-                                _ => false,
-                            }
-                        }
-                        fn cast(syntax: SyntaxNode) -> Option<Self> {
-                            let res = match syntax.kind() {
-                                #(
-                                #kinds => #name::#variants(#variants { syntax }),
-                                )*
-                                _ => return None,
-                            };
-                            Some(res)
-                        }
-                        fn syntax(&self) -> &SyntaxNode {
-                            match self {
-                                #(
-                                #name::#variants(it) => &it.syntax,
-                                )*
-                            }
-                        }
-                    }
-                }
-            };
-
-            (
-                quote! {
-                    #[pretty_doc_comment_placeholder_workaround]
-                    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-                    pub enum #name {
-                        #(#variants(#variants),)*
-                    }
-
-                    #(#traits)*
-                },
-                quote! {
-                    #(
-                        impl From<#variants> for #name {
-                            fn from(node: #variants) -> #name {
-                                #name::#variants(node)
-                            }
-                        }
-                    )*
-                    #ast_node
-                },
-            )
-        })
-        .unzip();
-
-    let enum_names = grammar.enums.iter().map(|it| &it.name);
-    let node_names = grammar.nodes.iter().map(|it| &it.name);
-
-    let display_impls =
-        enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
-            quote! {
-                impl std::fmt::Display for #name {
-                    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                        std::fmt::Display::fmt(self.syntax(), f)
-                    }
-                }
-            }
-        });
-
-    let defined_nodes: HashSet<_> = node_names.collect();
-
-    for node in kinds
-        .nodes
-        .iter()
-        .map(|kind| to_pascal_case(kind))
-        .filter(|name| !defined_nodes.iter().any(|&it| it == name))
-    {
-        drop(node)
-        // TODO: restore this
-        // eprintln!("Warning: node {} not defined in ast source", node);
-    }
-
-    let ast = quote! {
-        use crate::{
-            SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
-            ast::{self, AstNode, AstChildren, support},
-            T,
-        };
-
-        #(#node_defs)*
-        #(#enum_defs)*
-        #(#node_boilerplate_impls)*
-        #(#enum_boilerplate_impls)*
-        #(#display_impls)*
-    };
-
-    let ast = ast.to_string().replace("T ! [", "T![");
-
-    let mut res = String::with_capacity(ast.len() * 2);
-
-    let mut docs =
-        grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
-
-    for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
-        res.push_str(chunk);
-        if let Some(doc) = docs.next() {
-            write_doc_comment(doc, &mut res);
-        }
-    }
-
-    let pretty = reformat(&res)?;
-    Ok(pretty)
-}
-
-fn write_doc_comment(contents: &[String], dest: &mut String) {
-    for line in contents {
-        writeln!(dest, "///{}", line).unwrap();
-    }
-}
-
-fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
-    let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
-        .punct
-        .iter()
-        .filter(|(token, _name)| token.len() == 1)
-        .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
-        .unzip();
-
-    let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
-        if "{}[]()".contains(token) {
-            let c = token.chars().next().unwrap();
-            quote! { #c }
-        } else {
-            let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
-            quote! { #(#cs)* }
-        }
-    });
-    let punctuation =
-        grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
-
-    let full_keywords_values = &grammar.keywords;
-    let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
-
-    let all_keywords_values =
-        grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
-    let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
-    let all_keywords = all_keywords_values
-        .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
-        .collect::<Vec<_>>();
-
-    let literals =
-        grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
-
-    let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
-
-    let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
-
-    let ast = quote! {
-        #![allow(bad_style, missing_docs, unreachable_pub)]
-        /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
-        #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
-        #[repr(u16)]
-        pub enum SyntaxKind {
-            // Technical SyntaxKinds: they appear temporarily during parsing,
-            // but never end up in the final tree
-            #[doc(hidden)]
-            TOMBSTONE,
-            #[doc(hidden)]
-            EOF,
-            #(#punctuation,)*
-            #(#all_keywords,)*
-            #(#literals,)*
-            #(#tokens,)*
-            #(#nodes,)*
-
-            // Technical kind so that we can cast from u16 safely
-            #[doc(hidden)]
-            __LAST,
-        }
-        use self::SyntaxKind::*;
-
-        impl SyntaxKind {
-            pub fn is_keyword(self) -> bool {
-                match self {
-                    #(#all_keywords)|* => true,
-                    _ => false,
-                }
-            }
-
-            pub fn is_punct(self) -> bool {
-                match self {
-                    #(#punctuation)|* => true,
-                    _ => false,
-                }
-            }
-
-            pub fn is_literal(self) -> bool {
-                match self {
-                    #(#literals)|* => true,
-                    _ => false,
-                }
-            }
-
-            pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
-                let kw = match ident {
-                    #(#full_keywords_values => #full_keywords,)*
-                    _ => return None,
-                };
-                Some(kw)
-            }
-
-            pub fn from_char(c: char) -> Option<SyntaxKind> {
-                let tok = match c {
-                    #(#single_byte_tokens_values => #single_byte_tokens,)*
-                    _ => return None,
-                };
-                Some(tok)
-            }
-        }
-
-        #[macro_export]
-        macro_rules! T {
-            #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
-            #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
-            [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
-            [ident] => { $crate::SyntaxKind::IDENT };
-            [shebang] => { $crate::SyntaxKind::SHEBANG };
-        }
-    };
-
-    reformat(&ast.to_string())
-}
-
-fn to_upper_snake_case(s: &str) -> String {
-    let mut buf = String::with_capacity(s.len());
-    let mut prev = false;
-    for c in s.chars() {
-        if c.is_ascii_uppercase() && prev {
-            buf.push('_')
-        }
-        prev = true;
-
-        buf.push(c.to_ascii_uppercase());
-    }
-    buf
-}
-
-fn to_lower_snake_case(s: &str) -> String {
-    let mut buf = String::with_capacity(s.len());
-    let mut prev = false;
-    for c in s.chars() {
-        if c.is_ascii_uppercase() && prev {
-            buf.push('_')
-        }
-        prev = true;
-
-        buf.push(c.to_ascii_lowercase());
-    }
-    buf
-}
-
-fn to_pascal_case(s: &str) -> String {
-    let mut buf = String::with_capacity(s.len());
-    let mut prev_is_underscore = true;
-    for c in s.chars() {
-        if c == '_' {
-            prev_is_underscore = true;
-        } else if prev_is_underscore {
-            buf.push(c.to_ascii_uppercase());
-            prev_is_underscore = false;
-        } else {
-            buf.push(c.to_ascii_lowercase());
-        }
-    }
-    buf
-}
-
-fn pluralize(s: &str) -> String {
-    format!("{}s", s)
-}
-
-impl Field {
-    fn is_many(&self) -> bool {
-        matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
-    }
-    fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
-        match self {
-            Field::Token(token) => {
-                let token: proc_macro2::TokenStream = token.parse().unwrap();
-                Some(quote! { T![#token] })
-            }
-            _ => None,
-        }
-    }
-    fn method_name(&self) -> proc_macro2::Ident {
-        match self {
-            Field::Token(name) => {
-                let name = match name.as_str() {
-                    ";" => "semicolon",
-                    "->" => "thin_arrow",
-                    "'{'" => "l_curly",
-                    "'}'" => "r_curly",
-                    "'('" => "l_paren",
-                    "')'" => "r_paren",
-                    "'['" => "l_brack",
-                    "']'" => "r_brack",
-                    "<" => "l_angle",
-                    ">" => "r_angle",
-                    "=" => "eq",
-                    "!" => "excl",
-                    "*" => "star",
-                    "&" => "amp",
-                    "_" => "underscore",
-                    "." => "dot",
-                    ".." => "dotdot",
-                    "..." => "dotdotdot",
-                    "..=" => "dotdoteq",
-                    "=>" => "fat_arrow",
-                    "@" => "at",
-                    ":" => "colon",
-                    "::" => "coloncolon",
-                    "#" => "pound",
-                    "?" => "question_mark",
-                    "," => "comma",
-                    "|" => "pipe",
-                    _ => name,
-                };
-                format_ident!("{}_token", name)
-            }
-            Field::Node { name, .. } => {
-                if name == "type" {
-                    format_ident!("ty")
-                } else {
-                    format_ident!("{}", name)
-                }
-            }
-        }
-    }
-    fn ty(&self) -> proc_macro2::Ident {
-        match self {
-            Field::Token(_) => format_ident!("SyntaxToken"),
-            Field::Node { ty, .. } => format_ident!("{}", ty),
-        }
-    }
-}
-
-fn lower(grammar: &Grammar) -> AstSrc {
-    let mut res = AstSrc::default();
-
-    res.tokens = "Whitespace Comment String ByteString IntNumber FloatNumber"
-        .split_ascii_whitespace()
-        .map(|it| it.to_string())
-        .collect::<Vec<_>>();
-
-    let nodes = grammar.iter().collect::<Vec<_>>();
-
-    for &node in &nodes {
-        let name = grammar[node].name.clone();
-        let rule = &grammar[node].rule;
-        match lower_enum(grammar, rule) {
-            Some(variants) => {
-                let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
-                res.enums.push(enum_src);
-            }
-            None => {
-                let mut fields = Vec::new();
-                lower_rule(&mut fields, grammar, None, rule);
-                res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
-            }
-        }
-    }
-
-    deduplicate_fields(&mut res);
-    extract_enums(&mut res);
-    extract_struct_traits(&mut res);
-    extract_enum_traits(&mut res);
-    res
-}
-
-fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
-    let alternatives = match rule {
-        Rule::Alt(it) => it,
-        _ => return None,
-    };
-    let mut variants = Vec::new();
-    for alternative in alternatives {
-        match alternative {
-            Rule::Node(it) => variants.push(grammar[*it].name.clone()),
-            Rule::Token(it) if grammar[*it].name == ";" => (),
-            _ => return None,
-        }
-    }
-    Some(variants)
-}
-
-fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
-    if lower_comma_list(acc, grammar, label, rule) {
-        return;
-    }
-
-    match rule {
-        Rule::Node(node) => {
-            let ty = grammar[*node].name.clone();
-            let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
-            let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
-            acc.push(field);
-        }
-        Rule::Token(token) => {
-            assert!(label.is_none());
-            let mut name = grammar[*token].name.clone();
-            if name != "int_number" && name != "string" {
-                if "[]{}()".contains(&name) {
-                    name = format!("'{}'", name);
-                }
-                let field = Field::Token(name);
-                acc.push(field);
-            }
-        }
-        Rule::Rep(inner) => {
-            if let Rule::Node(node) = &**inner {
-                let ty = grammar[*node].name.clone();
-                let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
-                let field = Field::Node { name, ty, cardinality: Cardinality::Many };
-                acc.push(field);
-                return;
-            }
-            todo!("{:?}", rule)
-        }
-        Rule::Labeled { label: l, rule } => {
-            assert!(label.is_none());
-            let manually_implemented = matches!(
-                l.as_str(),
-                "lhs"
-                    | "rhs"
-                    | "then_branch"
-                    | "else_branch"
-                    | "start"
-                    | "end"
-                    | "op"
-                    | "index"
-                    | "base"
-                    | "value"
-                    | "trait"
-                    | "self_ty"
-            );
-            if manually_implemented {
-                return;
-            }
-            lower_rule(acc, grammar, Some(l), rule);
-        }
-        Rule::Seq(rules) | Rule::Alt(rules) => {
-            for rule in rules {
-                lower_rule(acc, grammar, label, rule)
-            }
-        }
-        Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
-    }
-}
-
-// (T (',' T)* ','?)
-fn lower_comma_list(
-    acc: &mut Vec<Field>,
-    grammar: &Grammar,
-    label: Option<&String>,
-    rule: &Rule,
-) -> bool {
-    let rule = match rule {
-        Rule::Seq(it) => it,
-        _ => return false,
-    };
-    let (node, repeat, trailing_comma) = match rule.as_slice() {
-        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
-            (node, repeat, trailing_comma)
-        }
-        _ => return false,
-    };
-    let repeat = match &**repeat {
-        Rule::Seq(it) => it,
-        _ => return false,
-    };
-    match repeat.as_slice() {
-        [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
-        _ => return false,
-    }
-    let ty = grammar[*node].name.clone();
-    let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
-    let field = Field::Node { name, ty, cardinality: Cardinality::Many };
-    acc.push(field);
-    true
-}
-
-fn deduplicate_fields(ast: &mut AstSrc) {
-    for node in &mut ast.nodes {
-        let mut i = 0;
-        'outer: while i < node.fields.len() {
-            for j in 0..i {
-                let f1 = &node.fields[i];
-                let f2 = &node.fields[j];
-                if f1 == f2 {
-                    node.fields.remove(i);
-                    continue 'outer;
-                }
-            }
-            i += 1;
-        }
-    }
-}
-
-fn extract_enums(ast: &mut AstSrc) {
-    for node in &mut ast.nodes {
-        for enm in &ast.enums {
-            let mut to_remove = Vec::new();
-            for (i, field) in node.fields.iter().enumerate() {
-                let ty = field.ty().to_string();
-                if enm.variants.iter().any(|it| it == &ty) {
-                    to_remove.push(i);
-                }
-            }
-            if to_remove.len() == enm.variants.len() {
-                node.remove_field(to_remove);
-                let ty = enm.name.clone();
-                let name = to_lower_snake_case(&ty);
-                node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
-            }
-        }
-    }
-}
-
-fn extract_struct_traits(ast: &mut AstSrc) {
-    let traits: &[(&str, &[&str])] = &[
-        ("AttrsOwner", &["attrs"]),
-        ("NameOwner", &["name"]),
-        ("VisibilityOwner", &["visibility"]),
-        ("GenericParamsOwner", &["generic_param_list", "where_clause"]),
-        ("TypeBoundsOwner", &["type_bound_list", "colon_token"]),
-        ("ModuleItemOwner", &["items"]),
-        ("LoopBodyOwner", &["label", "loop_body"]),
-        ("ArgListOwner", &["arg_list"]),
-    ];
-
-    for node in &mut ast.nodes {
-        for (name, methods) in traits {
-            extract_struct_trait(node, name, methods);
-        }
-    }
-}
-
-fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
-    let mut to_remove = Vec::new();
-    for (i, field) in node.fields.iter().enumerate() {
-        let method_name = field.method_name().to_string();
-        if methods.iter().any(|&it| it == method_name) {
-            to_remove.push(i);
-        }
-    }
-    if to_remove.len() == methods.len() {
-        node.traits.push(trait_name.to_string());
-        node.remove_field(to_remove);
-    }
-}
-
-fn extract_enum_traits(ast: &mut AstSrc) {
-    for enm in &mut ast.enums {
-        if enm.name == "Stmt" {
-            continue;
-        }
-        let nodes = &ast.nodes;
-        let mut variant_traits = enm
-            .variants
-            .iter()
-            .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
-            .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());
-
-        let mut enum_traits = match variant_traits.next() {
-            Some(it) => it,
-            None => continue,
-        };
-        for traits in variant_traits {
-            enum_traits = enum_traits.intersection(&traits).cloned().collect();
-        }
-        enm.traits = enum_traits.into_iter().collect();
-    }
-}
-
-impl AstNodeSrc {
-    fn remove_field(&mut self, to_remove: Vec<usize>) {
-        to_remove.into_iter().rev().for_each(|idx| {
-            self.fields.remove(idx);
-        });
-    }
-}
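For reference, a minimal standalone sketch of the comma-list lowering removed above, using a hypothetical, simplified Rule enum rather than the real ungrammar types: a rule of the shape `T (',' T)* ','?` is detected as a whole and collapsed into a single repeated element instead of three separate fields.

// Standalone sketch (hypothetical Rule type, not the ungrammar API): detect
// the `T (',' T)* ','?` shape and report the element node, mirroring what
// lower_comma_list does before it pushes a `Many` field.
enum Rule {
    Node(String),
    Token(String),
    Seq(Vec<Rule>),
    Rep(Box<Rule>),
    Opt(Box<Rule>),
}

fn comma_list_element(rule: &Rule) -> Option<&str> {
    // The whole rule must be a three-element sequence:
    // head node, repetition, optional trailing separator.
    let seq = match rule {
        Rule::Seq(it) => it,
        _ => return None,
    };
    match seq.as_slice() {
        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing)] => {
            match (&**repeat, &**trailing) {
                // The repeated part must be `separator node`, with the same
                // separator as the trailing one and the same node as the head.
                (Rule::Seq(inner), Rule::Token(sep)) => match inner.as_slice() {
                    [Rule::Token(comma), Rule::Node(n)] if comma == sep && n == node => {
                        Some(node.as_str())
                    }
                    _ => None,
                },
                _ => None,
            }
        }
        _ => None,
    }
}

fn main() {
    // `Expr (',' Expr)* ','?`, the shape of e.g. an argument list.
    let rule = Rule::Seq(vec![
        Rule::Node("Expr".to_string()),
        Rule::Rep(Box::new(Rule::Seq(vec![
            Rule::Token(",".to_string()),
            Rule::Node("Expr".to_string()),
        ]))),
        Rule::Opt(Box::new(Rule::Token(",".to_string()))),
    ]);
    assert_eq!(comma_list_element(&rule), Some("Expr"));
}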
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index 063e11a5a6e..42e91749a97 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -9,8 +9,6 @@
 //! `.cargo/config`.
 mod flags;
 
-mod codegen;
-mod ast_src;
 #[cfg(test)]
 mod tidy;
 
@@ -24,7 +22,6 @@ use std::{
     env,
     path::{Path, PathBuf},
 };
-use walkdir::{DirEntry, WalkDir};
 use xshell::{cmd, cp, pushd, pushenv};
 
 fn main() -> Result<()> {
@@ -63,31 +60,6 @@ fn project_root() -> PathBuf {
     .to_path_buf()
 }
 
-fn rust_files() -> impl Iterator<Item = PathBuf> {
-    rust_files_in(&project_root().join("crates"))
-}
-
-#[cfg(test)]
-fn cargo_files() -> impl Iterator<Item = PathBuf> {
-    files_in(&project_root(), "toml")
-        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
-}
-
-fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
-    files_in(path, "rs")
-}
-
-fn ensure_rustfmt() -> Result<()> {
-    let out = cmd!("rustfmt --version").read()?;
-    if !out.contains("stable") {
-        bail!(
-            "Failed to run rustfmt from toolchain 'stable'. \
-             Please run `rustup component add rustfmt --toolchain stable` to install it.",
-        )
-    }
-    Ok(())
-}
-
 fn run_fuzzer() -> Result<()> {
     let _d = pushd("./crates/syntax")?;
     let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
@@ -113,18 +85,3 @@ fn date_iso() -> Result<String> {
 fn is_release_tag(tag: &str) -> bool {
     tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
 }
-
-fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
-    let iter = WalkDir::new(path);
-    return iter
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .map(|e| e.unwrap())
-        .filter(|e| !e.file_type().is_dir())
-        .map(|e| e.into_path())
-        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
-
-    fn is_hidden(entry: &DirEntry) -> bool {
-        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
-    }
-}
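The codegen and ast_src modules removed from main.rs are superseded by crate-local `sourcegen_*` tests (see the release and tidy diffs below). A minimal sketch of that pattern, with hypothetical paths and helper names since the real helpers are not shown in this diff: the test regenerates its output and fails if the checked-in file was stale, so running the tests both verifies and refreshes generated sources.

use std::{fs, path::Path};

// Hypothetical helper: rewrite `path` if its contents differ, and fail the
// test so the regenerated file gets reviewed and committed.
fn ensure_file_contents(path: &Path, contents: &str) {
    if let Ok(old) = fs::read_to_string(path) {
        if old == contents {
            return;
        }
    }
    fs::create_dir_all(path.parent().unwrap()).unwrap();
    fs::write(path, contents).unwrap();
    panic!("{} was stale and has been regenerated; please commit it", path.display());
}

#[test]
fn sourcegen_example() {
    // Placeholder content; the real tests assemble code and docs from grammar
    // and handler sources.
    let generated = "//! Generated file, do not edit by hand.\n";
    ensure_file_contents(Path::new("src/generated/example.rs"), generated);
}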
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 2c04767789f..37de5b36f18 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -2,7 +2,7 @@ mod changelog;
 
 use xshell::{cmd, pushd, read_dir, read_file, write_file};
 
-use crate::{codegen, date_iso, flags, is_release_tag, project_root, Result};
+use crate::{date_iso, flags, is_release_tag, project_root, Result};
 
 impl flags::Release {
     pub(crate) fn run(self) -> Result<()> {
@@ -21,7 +21,10 @@ impl flags::Release {
             // to delete old tags.
             cmd!("git push --force").run()?;
         }
-        codegen::docs()?;
+
+        // Generates bits of manual.adoc.
+        cmd!("cargo test -p ide_assists -p ide_diagnostics -p rust-analyzer -- sourcegen_")
+            .run()?;
 
         let website_root = project_root().join("../rust-analyzer.github.io");
         let changelog_dir = website_root.join("./thisweek/_posts");
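The trailing `sourcegen_` argument above is a libtest name filter, so only tests whose names contain that substring run. The same generation step can be reproduced outside xtask; a sketch using std::process::Command, assumed equivalent to the cmd! invocation in the hunk above:

use std::process::Command;

fn run_sourcegen() -> std::io::Result<()> {
    // Run only the generation tests in the crates that produce bits of manual.adoc.
    let status = Command::new("cargo")
        .args(&[
            "test",
            "-p", "ide_assists",
            "-p", "ide_diagnostics",
            "-p", "rust-analyzer",
            "--", "sourcegen_",
        ])
        .status()?;
    assert!(status.success(), "sourcegen tests failed");
    Ok(())
}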
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index a9d434e20fb..fc5bf17a28f 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -3,38 +3,24 @@ use std::{
     path::{Path, PathBuf},
 };
 
+use walkdir::{DirEntry, WalkDir};
 use xshell::{cmd, pushd, pushenv, read_file};
 
-use crate::{cargo_files, codegen, project_root, rust_files};
-
-#[test]
-fn generate_grammar() {
-    codegen::generate_syntax().unwrap()
-}
-
-#[test]
-fn generate_parser_tests() {
-    codegen::generate_parser_tests().unwrap()
-}
-
-#[test]
-fn generate_assists_tests() {
-    codegen::generate_assists_tests().unwrap();
-}
-
-/// This clones the rustc repo, and so is not worth keeping up-to-date. We
-/// update it manually by un-ignoring the test from time to time.
-#[test]
-#[ignore]
-fn generate_lint_completions() {
-    codegen::generate_lint_completions().unwrap()
-}
+use crate::project_root;
 
 #[test]
 fn check_code_formatting() {
     let _dir = pushd(project_root()).unwrap();
     let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    crate::ensure_rustfmt().unwrap();
+
+    let out = cmd!("rustfmt --version").read().unwrap();
+    if !out.contains("stable") {
+        panic!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+                 Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        )
+    }
+
     let res = cmd!("cargo fmt -- --check").run();
     if res.is_err() {
         let _ = cmd!("cargo fmt").run();
@@ -43,11 +29,6 @@ fn check_code_formatting() {
 }
 
 #[test]
-fn smoke_test_generate_documentation() {
-    codegen::docs().unwrap()
-}
-
-#[test]
 fn check_lsp_extensions_docs() {
     let expected_hash = {
         let lsp_ext_rs =
@@ -344,6 +325,8 @@ fn check_test_attrs(path: &Path, text: &str) {
         // A legit test which needs to be ignored, as it takes too long to run
         // :(
         "hir_def/src/nameres/collector.rs",
+        // Long sourcegen test to generate lint completions.
+        "ide_completion/src/tests/sourcegen.rs",
         // Obviously needs ignore.
         "ide_assists/src/handlers/toggle_ignore.rs",
         // See above.
@@ -498,3 +481,31 @@ fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
     let text = &text[..idx];
     Some(text)
 }
+
+fn rust_files() -> impl Iterator<Item = PathBuf> {
+    rust_files_in(&project_root().join("crates"))
+}
+
+fn cargo_files() -> impl Iterator<Item = PathBuf> {
+    files_in(&project_root(), "toml")
+        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
+}
+
+fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
+    files_in(path, "rs")
+}
+
+fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
+    let iter = WalkDir::new(path);
+    return iter
+        .into_iter()
+        .filter_entry(|e| !is_hidden(e))
+        .map(|e| e.unwrap())
+        .filter(|e| !e.file_type().is_dir())
+        .map(|e| e.into_path())
+        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
+
+    fn is_hidden(entry: &DirEntry) -> bool {
+        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
+    }
+}
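The file-walking helpers moved into tidy.rs are only used by the tidy tests. As an illustration of how they compose (a made-up check, not one of the real tidy tests), a new check built on rust_files() and xshell's read_file could look like this:

#[test]
fn check_trailing_whitespace() {
    // Walk every Rust file under crates/ and reject trailing whitespace.
    for path in rust_files() {
        let text = read_file(&path).unwrap();
        for (i, line) in text.lines().enumerate() {
            assert!(
                !line.ends_with(' ') && !line.ends_with('\t'),
                "{}:{} has trailing whitespace",
                path.display(),
                i + 1
            );
        }
    }
}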