author    Aleksey Kladov <aleksey.kladov@gmail.com> 2020-08-12 18:26:51 +0200
committer Aleksey Kladov <aleksey.kladov@gmail.com> 2020-08-12 18:30:53 +0200
commit    a1c187eef3ba08076aedb5154929f7eda8d1b424 (patch)
tree      9d898eb9600b0c36a74e4f95238f679c683fa566 /crates/syntax/src
parent    3d6889cba72a9d02199f7adaa2ecc69bc30af834 (diff)
Rename ra_syntax -> syntax
Diffstat (limited to 'crates/syntax/src')
-rw-r--r-- crates/syntax/src/algo.rs                        |  406
-rw-r--r-- crates/syntax/src/ast.rs                         |  331
-rw-r--r-- crates/syntax/src/ast/edit.rs                    |  642
-rw-r--r-- crates/syntax/src/ast/expr_ext.rs                |  418
-rw-r--r-- crates/syntax/src/ast/generated.rs               |   41
-rw-r--r-- crates/syntax/src/ast/generated/nodes.rs         | 4067
-rw-r--r-- crates/syntax/src/ast/generated/tokens.rs        |   91
-rw-r--r-- crates/syntax/src/ast/make.rs                    |  392
-rw-r--r-- crates/syntax/src/ast/node_ext.rs                |  485
-rw-r--r-- crates/syntax/src/ast/token_ext.rs               |  538
-rw-r--r-- crates/syntax/src/ast/traits.rs                  |  141
-rw-r--r-- crates/syntax/src/fuzz.rs                        |   73
-rw-r--r-- crates/syntax/src/lib.rs                         |  388
-rw-r--r-- crates/syntax/src/parsing.rs                     |   59
-rw-r--r-- crates/syntax/src/parsing/lexer.rs               |  244
-rw-r--r-- crates/syntax/src/parsing/reparsing.rs           |  455
-rw-r--r-- crates/syntax/src/parsing/text_token_source.rs   |   84
-rw-r--r-- crates/syntax/src/parsing/text_tree_sink.rs      |  183
-rw-r--r-- crates/syntax/src/ptr.rs                         |  105
-rw-r--r-- crates/syntax/src/syntax_error.rs                |   44
-rw-r--r-- crates/syntax/src/syntax_node.rs                 |   77
-rw-r--r-- crates/syntax/src/tests.rs                       |  280
-rw-r--r-- crates/syntax/src/validation.rs                  |  303
-rw-r--r-- crates/syntax/src/validation/block.rs            |   22
24 files changed, 9869 insertions, 0 deletions
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
new file mode 100644
index 00000000000..6254b38ba1c
--- /dev/null
+++ b/crates/syntax/src/algo.rs
@@ -0,0 +1,406 @@
+//! FIXME: write short doc here
+
+use std::{
+    fmt,
+    ops::{self, RangeInclusive},
+};
+
+use itertools::Itertools;
+use rustc_hash::FxHashMap;
+use text_edit::TextEditBuilder;
+
+use crate::{
+    AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
+    SyntaxToken, TextRange, TextSize,
+};
+
+/// Returns ancestors of the node at the offset, sorted by length. This should
+/// do the right thing at an edge, e.g. when searching for expressions at `{
+/// <|>foo }` we will get the name reference instead of the whole block, which
+/// we would get if we just did `find_token_at_offset(...).flat_map(|t|
+/// t.parent().ancestors())`.
+pub fn ancestors_at_offset(
+    node: &SyntaxNode,
+    offset: TextSize,
+) -> impl Iterator<Item = SyntaxNode> {
+    node.token_at_offset(offset)
+        .map(|token| token.parent().ancestors())
+        .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+}
+
+/// Finds a node of a specific AST type at the offset. Note that this is slightly
+/// imprecise: if the cursor is strictly between two nodes of the desired type,
+/// as in
+///
+/// ```no-run
+/// struct Foo {}|struct Bar;
+/// ```
+///
+/// then the shorter node will be silently preferred.
+pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextSize) -> Option<N> {
+    ancestors_at_offset(syntax, offset).find_map(N::cast)
+}
+
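+// A small usage sketch for the lookup above. It leans only on APIs visible in
+// this commit (`SourceFile::parse`, `ast::Name`); the sample text and offset
+// are made up for illustration.
+#[test]
+fn find_node_at_offset_smoke_test() {
+    let file = crate::SourceFile::parse("fn foo() {}").ok().unwrap();
+    // Offset 4 points inside the identifier `foo`, so the nearest `ast::Name`
+    // ancestor of that token is returned.
+    let name =
+        find_node_at_offset::<crate::ast::Name>(file.syntax(), TextSize::from(4)).unwrap();
+    assert_eq!("foo", name.syntax().text().to_string());
+}
+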
+pub fn find_node_at_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
+    find_covering_element(syntax, range).ancestors().find_map(N::cast)
+}
+
+/// Skips to the next non-`trivia` token in the given direction.
+pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option<SyntaxToken> {
+    while token.kind().is_trivia() {
+        token = match direction {
+            Direction::Next => token.next_token()?,
+            Direction::Prev => token.prev_token()?,
+        }
+    }
+    Some(token)
+}
+
+/// Finds the first sibling in the given direction which is not `trivia`
+pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option<SyntaxElement> {
+    return match element {
+        NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia),
+        NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia),
+    };
+
+    fn not_trivia(element: &SyntaxElement) -> bool {
+        match element {
+            NodeOrToken::Node(_) => true,
+            NodeOrToken::Token(token) => !token.kind().is_trivia(),
+        }
+    }
+}
+
+pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
+    root.covering_element(range)
+}
+
+pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option<SyntaxNode> {
+    if u == v {
+        return Some(u.clone());
+    }
+
+    let u_depth = u.ancestors().count();
+    let v_depth = v.ancestors().count();
+    let keep = u_depth.min(v_depth);
+
+    let u_candidates = u.ancestors().skip(u_depth - keep);
+    let v_candidates = v.ancestors().skip(v_depth - keep);
+    let (res, _) = u_candidates.zip(v_candidates).find(|(x, y)| x == y)?;
+    Some(res)
+}
+
+pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
+    me.syntax().siblings(direction).skip(1).find_map(T::cast)
+}
+
+pub fn has_errors(node: &SyntaxNode) -> bool {
+    node.children().any(|it| it.kind() == SyntaxKind::ERROR)
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum InsertPosition<T> {
+    First,
+    Last,
+    Before(T),
+    After(T),
+}
+
+pub struct TreeDiff {
+    replacements: FxHashMap<SyntaxElement, SyntaxElement>,
+}
+
+impl TreeDiff {
+    pub fn into_text_edit(&self, builder: &mut TextEditBuilder) {
+        for (from, to) in self.replacements.iter() {
+            builder.replace(from.text_range(), to.to_string())
+        }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.replacements.is_empty()
+    }
+}
+
+/// Finds the minimal diff which, when applied to `from`, will result in `to`.
+///
+/// Specifically, returns a map whose keys are descendants of `from` and values
+/// are descendants of `to`, such that `replace_descendants(from, map) == to`.
+///
+/// A trivial solution is a singleton map `{ from: to }`, but this function
+/// tries to find a more fine-grained diff.
+pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
+    let mut buf = FxHashMap::default();
+    // FIXME: this is both horribly inefficient and gives a larger-than-necessary
+    // diff. I bet there's a cool algorithm to diff trees properly.
+    go(&mut buf, from.clone().into(), to.clone().into());
+    return TreeDiff { replacements: buf };
+
+    fn go(
+        buf: &mut FxHashMap<SyntaxElement, SyntaxElement>,
+        lhs: SyntaxElement,
+        rhs: SyntaxElement,
+    ) {
+        if lhs.kind() == rhs.kind()
+            && lhs.text_range().len() == rhs.text_range().len()
+            && match (&lhs, &rhs) {
+                (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
+                    lhs.green() == rhs.green() || lhs.text() == rhs.text()
+                }
+                (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
+                _ => false,
+            }
+        {
+            return;
+        }
+        if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) {
+            if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() {
+                for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) {
+                    go(buf, lhs, rhs)
+                }
+                return;
+            }
+        }
+        buf.insert(lhs, rhs);
+    }
+}
+
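+// A minimal sketch of how `diff` behaves, using only items from this file:
+// structurally identical trees yield an empty diff, differing trees do not.
+// The sample sources are arbitrary.
+#[test]
+fn diff_smoke_test() {
+    let parse = |text: &str| crate::SourceFile::parse(text).ok().unwrap();
+    let (old, new) = (parse("fn f() {}"), parse("fn f() {}"));
+    // Same text, freshly parsed twice: every subtree matches, so nothing is replaced.
+    assert!(diff(old.syntax(), new.syntax()).is_empty());
+    let (old, new) = (parse("fn f() {}"), parse("fn g() {}"));
+    // Only the function name differs, so at least one replacement is recorded.
+    assert!(!diff(old.syntax(), new.syntax()).is_empty());
+}
+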
+/// Adds the specified children (tokens or nodes) to the current node at the
+/// specified position.
+///
+/// This is a type-unsafe, low-level editing API; if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: impl IntoIterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut to_insert = to_insert.into_iter();
+    _insert_children(parent, position, &mut to_insert)
+}
+
+fn _insert_children(
+    parent: &SyntaxNode,
+    position: InsertPosition<SyntaxElement>,
+    to_insert: &mut dyn Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut delta = TextSize::default();
+    let to_insert = to_insert.map(|element| {
+        delta += element.text_range().len();
+        to_green_element(element)
+    });
+
+    let mut old_children = parent.green().children().map(|it| match it {
+        NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
+    });
+
+    let new_children = match &position {
+        InsertPosition::First => to_insert.chain(old_children).collect::<Vec<_>>(),
+        InsertPosition::Last => old_children.chain(to_insert).collect::<Vec<_>>(),
+        InsertPosition::Before(anchor) | InsertPosition::After(anchor) => {
+            let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 };
+            let split_at = position_of_child(parent, anchor.clone()) + take_anchor;
+            let before = old_children.by_ref().take(split_at).collect::<Vec<_>>();
+            before.into_iter().chain(to_insert).chain(old_children).collect::<Vec<_>>()
+        }
+    };
+
+    with_children(parent, new_children)
+}
+
+/// Replaces all nodes in `to_delete` with nodes from `to_insert`
+///
+/// This is a type-unsafe, low-level editing API; if you need to use it,
+/// prefer to create a type-safe abstraction on top of it instead.
+pub fn replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: impl IntoIterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let mut to_insert = to_insert.into_iter();
+    _replace_children(parent, to_delete, &mut to_insert)
+}
+
+fn _replace_children(
+    parent: &SyntaxNode,
+    to_delete: RangeInclusive<SyntaxElement>,
+    to_insert: &mut dyn Iterator<Item = SyntaxElement>,
+) -> SyntaxNode {
+    let start = position_of_child(parent, to_delete.start().clone());
+    let end = position_of_child(parent, to_delete.end().clone());
+    let mut old_children = parent.green().children().map(|it| match it {
+        NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()),
+    });
+
+    let before = old_children.by_ref().take(start).collect::<Vec<_>>();
+    let new_children = before
+        .into_iter()
+        .chain(to_insert.map(to_green_element))
+        .chain(old_children.skip(end + 1 - start))
+        .collect::<Vec<_>>();
+    with_children(parent, new_children)
+}
+
+#[derive(Default)]
+pub struct SyntaxRewriter<'a> {
+    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
+    //FIXME: add debug_assertions that all elements are in fact from the same file.
+    replacements: FxHashMap<SyntaxElement, Replacement>,
+}
+
+impl fmt::Debug for SyntaxRewriter<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish()
+    }
+}
+
+impl<'a> SyntaxRewriter<'a> {
+    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
+        SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() }
+    }
+    pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Delete;
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace<T: Clone + Into<SyntaxElement>>(&mut self, what: &T, with: &T) {
+        let what = what.clone().into();
+        let replacement = Replacement::Single(with.clone().into());
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_with_many<T: Clone + Into<SyntaxElement>>(
+        &mut self,
+        what: &T,
+        with: Vec<SyntaxElement>,
+    ) {
+        let what = what.clone().into();
+        let replacement = Replacement::Many(with);
+        self.replacements.insert(what, replacement);
+    }
+    pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
+        self.replace(what.syntax(), with.syntax())
+    }
+
+    pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
+        if self.f.is_none() && self.replacements.is_empty() {
+            return node.clone();
+        }
+        self.rewrite_children(node)
+    }
+
+    pub fn rewrite_ast<N: AstNode>(self, node: &N) -> N {
+        N::cast(self.rewrite(node.syntax())).unwrap()
+    }
+
+    /// Returns a node that encompasses all replacements to be done by this rewriter.
+    ///
+    /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`.
+    ///
+    /// Returns `None` when there are no replacements.
+    pub fn rewrite_root(&self) -> Option<SyntaxNode> {
+        assert!(self.f.is_none());
+        self.replacements
+            .keys()
+            .map(|element| match element {
+                SyntaxElement::Node(it) => it.clone(),
+                SyntaxElement::Token(it) => it.parent(),
+            })
+            // If we only have one replacement, we must return its parent node, since `rewrite` does
+            // not replace the node passed to it.
+            .map(|it| it.parent().unwrap_or(it))
+            .fold1(|a, b| least_common_ancestor(&a, &b).unwrap())
+    }
+
+    fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
+        if let Some(f) = &self.f {
+            assert!(self.replacements.is_empty());
+            return f(element).map(Replacement::Single);
+        }
+        self.replacements.get(element).cloned()
+    }
+
+    fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode {
+        //  FIXME: this could be made much faster.
+        let mut new_children = Vec::new();
+        for child in node.children_with_tokens() {
+            self.rewrite_self(&mut new_children, &child);
+        }
+        with_children(node, new_children)
+    }
+
+    fn rewrite_self(
+        &self,
+        acc: &mut Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
+        element: &SyntaxElement,
+    ) {
+        if let Some(replacement) = self.replacement(&element) {
+            match replacement {
+                Replacement::Single(NodeOrToken::Node(it)) => {
+                    acc.push(NodeOrToken::Node(it.green().clone()))
+                }
+                Replacement::Single(NodeOrToken::Token(it)) => {
+                    acc.push(NodeOrToken::Token(it.green().clone()))
+                }
+                Replacement::Many(replacements) => {
+                    acc.extend(replacements.iter().map(|it| match it {
+                        NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
+                        NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+                    }))
+                }
+                Replacement::Delete => (),
+            };
+            return;
+        }
+        let res = match element {
+            NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
+            NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()),
+        };
+        acc.push(res)
+    }
+}
+
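+// A hedged usage sketch for the rewriter above: queue a single replacement and
+// apply it to the tree's root. The sample sources are arbitrary.
+#[test]
+fn syntax_rewriter_smoke_test() {
+    let from = crate::SourceFile::parse("fn foo() {}").ok().unwrap();
+    let to = crate::SourceFile::parse("fn bar() {}").ok().unwrap();
+    let old_name =
+        from.syntax().descendants().find(|it| it.kind() == SyntaxKind::NAME).unwrap();
+    let new_name = to.syntax().descendants().find(|it| it.kind() == SyntaxKind::NAME).unwrap();
+    let mut rewriter = SyntaxRewriter::default();
+    rewriter.replace(&old_name, &new_name);
+    // `rewrite` leaves `from` untouched and returns a fresh tree with the
+    // replacement applied.
+    assert_eq!(rewriter.rewrite(from.syntax()).to_string(), "fn bar() {}");
+}
+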
+impl ops::AddAssign for SyntaxRewriter<'_> {
+    fn add_assign(&mut self, rhs: SyntaxRewriter) {
+        assert!(rhs.f.is_none());
+        self.replacements.extend(rhs.replacements)
+    }
+}
+
+#[derive(Clone, Debug)]
+enum Replacement {
+    Delete,
+    Single(SyntaxElement),
+    Many(Vec<SyntaxElement>),
+}
+
+fn with_children(
+    parent: &SyntaxNode,
+    new_children: Vec<NodeOrToken<rowan::GreenNode, rowan::GreenToken>>,
+) -> SyntaxNode {
+    let len = new_children.iter().map(|it| it.text_len()).sum::<TextSize>();
+    let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), new_children);
+    let new_root_node = parent.replace_with(new_node);
+    let new_root_node = SyntaxNode::new_root(new_root_node);
+
+    // FIXME: use a more elegant way to re-fetch the node (#1185), make
+    // `range` private afterwards
+    let mut ptr = SyntaxNodePtr::new(parent);
+    ptr.range = TextRange::at(ptr.range.start(), len);
+    ptr.to_node(&new_root_node)
+}
+
+fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
+    parent
+        .children_with_tokens()
+        .position(|it| it == child)
+        .expect("element is not a child of current element")
+}
+
+fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
+    match element {
+        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Token(it) => it.green().clone().into(),
+    }
+}
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
new file mode 100644
index 00000000000..d536bb1e7d6
--- /dev/null
+++ b/crates/syntax/src/ast.rs
@@ -0,0 +1,331 @@
+//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s
+
+mod generated;
+mod traits;
+mod token_ext;
+mod node_ext;
+mod expr_ext;
+pub mod edit;
+pub mod make;
+
+use std::marker::PhantomData;
+
+use crate::{
+    syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
+    SmolStr, SyntaxKind,
+};
+
+pub use self::{
+    expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
+    generated::*,
+    node_ext::{
+        AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents,
+        StructKind, TypeBoundKind, VisibilityKind,
+    },
+    token_ext::*,
+    traits::*,
+};
+
+/// The main trait to go from untyped `SyntaxNode` to a typed AST. The
+/// conversion itself has zero runtime cost: AST and syntax nodes have exactly
+/// the same representation: a pointer to the tree root and a pointer to the
+/// node itself.
+pub trait AstNode {
+    fn can_cast(kind: SyntaxKind) -> bool
+    where
+        Self: Sized;
+
+    fn cast(syntax: SyntaxNode) -> Option<Self>
+    where
+        Self: Sized;
+
+    fn syntax(&self) -> &SyntaxNode;
+}
+
+/// Like `AstNode`, but wraps tokens rather than interior nodes.
+pub trait AstToken {
+    fn can_cast(token: SyntaxKind) -> bool
+    where
+        Self: Sized;
+
+    fn cast(syntax: SyntaxToken) -> Option<Self>
+    where
+        Self: Sized;
+
+    fn syntax(&self) -> &SyntaxToken;
+
+    fn text(&self) -> &SmolStr {
+        self.syntax().text()
+    }
+}
+
+/// An iterator over `SyntaxNode` children of a particular AST type.
+#[derive(Debug, Clone)]
+pub struct AstChildren<N> {
+    inner: SyntaxNodeChildren,
+    ph: PhantomData<N>,
+}
+
+impl<N> AstChildren<N> {
+    fn new(parent: &SyntaxNode) -> Self {
+        AstChildren { inner: parent.children(), ph: PhantomData }
+    }
+}
+
+impl<N: AstNode> Iterator for AstChildren<N> {
+    type Item = N;
+    fn next(&mut self) -> Option<N> {
+        self.inner.find_map(N::cast)
+    }
+}
+
+mod support {
+    use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
+
+    pub(super) fn child<N: AstNode>(parent: &SyntaxNode) -> Option<N> {
+        parent.children().find_map(N::cast)
+    }
+
+    pub(super) fn children<N: AstNode>(parent: &SyntaxNode) -> AstChildren<N> {
+        AstChildren::new(parent)
+    }
+
+    pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option<SyntaxToken> {
+        parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind)
+    }
+}
+
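+// A brief sketch of the zero-cost, cast-based conversion that `AstNode`
+// documents above, mirroring the tests below; the sample source is arbitrary.
+#[test]
+fn ast_node_casting_works() {
+    let file = SourceFile::parse("mod foo {}").ok().unwrap();
+    let node = file.syntax().descendants().find(|it| Module::can_cast(it.kind())).unwrap();
+    // Casting to the matching type succeeds; casting to an unrelated type does not.
+    assert!(Module::cast(node.clone()).is_some());
+    assert!(Struct::cast(node).is_none());
+}
+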
+#[test]
+fn assert_ast_is_object_safe() {
+    fn _f(_: &dyn AstNode, _: &dyn NameOwner) {}
+}
+
+#[test]
+fn test_doc_comment_none() {
+    let file = SourceFile::parse(
+        r#"
+        // non-doc
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert!(module.doc_comment_text().is_none());
+}
+
+#[test]
+fn test_doc_comment_of_items() {
+    let file = SourceFile::parse(
+        r#"
+        //! doc
+        // non-doc
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("doc", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_of_statics() {
+    let file = SourceFile::parse(
+        r#"
+        /// Number of levels
+        static LEVELS: i32 = 0;
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let st = file.syntax().descendants().find_map(Static::cast).unwrap();
+    assert_eq!("Number of levels", st.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_preserves_indents() {
+    let file = SourceFile::parse(
+        r#"
+        /// doc1
+        /// ```
+        /// fn foo() {
+        ///     // ...
+        /// }
+        /// ```
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("doc1\n```\nfn foo() {\n    // ...\n}\n```", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_preserves_newlines() {
+    let file = SourceFile::parse(
+        r#"
+        /// this
+        /// is
+        /// mod
+        /// foo
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("this\nis\nmod\nfoo", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix() {
+    let file = SourceFile::parse(
+        r#"
+        /** this is mod foo*/
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("this is mod foo", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_single_line_block_strips_suffix_whitespace() {
+    let file = SourceFile::parse(
+        r#"
+        /** this is mod foo */
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("this is mod foo ", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_doc_comment_multi_line_block_strips_suffix() {
+    let file = SourceFile::parse(
+        r#"
+        /**
+        this
+        is
+        mod foo
+        */
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!(
+        "        this\n        is\n        mod foo\n        ",
+        module.doc_comment_text().unwrap()
+    );
+}
+
+#[test]
+fn test_comments_preserve_trailing_whitespace() {
+    let file = SourceFile::parse(
+        "\n/// Representation of a Realm.   \n/// In the specification these are called Realm Records.\nstruct Realm {}",
+    )
+    .ok()
+    .unwrap();
+    let def = file.syntax().descendants().find_map(Struct::cast).unwrap();
+    assert_eq!(
+        "Representation of a Realm.   \nIn the specification these are called Realm Records.",
+        def.doc_comment_text().unwrap()
+    );
+}
+
+#[test]
+fn test_four_slash_line_comment() {
+    let file = SourceFile::parse(
+        r#"
+        //// too many slashes to be a doc comment
+        /// doc comment
+        mod foo {}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let module = file.syntax().descendants().find_map(Module::cast).unwrap();
+    assert_eq!("doc comment", module.doc_comment_text().unwrap());
+}
+
+#[test]
+fn test_where_predicates() {
+    fn assert_bound(text: &str, bound: Option<TypeBound>) {
+        assert_eq!(text, bound.unwrap().syntax().text().to_string());
+    }
+
+    let file = SourceFile::parse(
+        r#"
+fn foo()
+where
+   T: Clone + Copy + Debug + 'static,
+   'a: 'b + 'c,
+   Iterator::Item: 'a + Debug,
+   Iterator::Item: Debug + 'a,
+   <T as Iterator>::Item: Debug + 'a,
+   for<'a> F: Fn(&'a str)
+{}
+        "#,
+    )
+    .ok()
+    .unwrap();
+    let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap();
+
+    let mut predicates = where_clause.predicates();
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert!(pred.for_token().is_none());
+    assert!(pred.generic_param_list().is_none());
+    assert_eq!("T", pred.ty().unwrap().syntax().text().to_string());
+    assert_bound("Clone", bounds.next());
+    assert_bound("Copy", bounds.next());
+    assert_bound("Debug", bounds.next());
+    assert_bound("'static", bounds.next());
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert_eq!("'a", pred.lifetime_token().unwrap().text());
+
+    assert_bound("'b", bounds.next());
+    assert_bound("'c", bounds.next());
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+    assert_bound("'a", bounds.next());
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string());
+    assert_bound("Debug", bounds.next());
+    assert_bound("'a", bounds.next());
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert_eq!("<T as Iterator>::Item", pred.ty().unwrap().syntax().text().to_string());
+    assert_bound("Debug", bounds.next());
+    assert_bound("'a", bounds.next());
+
+    let pred = predicates.next().unwrap();
+    let mut bounds = pred.type_bound_list().unwrap().bounds();
+
+    assert!(pred.for_token().is_some());
+    assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string());
+    assert_eq!("F", pred.ty().unwrap().syntax().text().to_string());
+    assert_bound("Fn(&'a str)", bounds.next());
+}
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
new file mode 100644
index 00000000000..5ed123f91fe
--- /dev/null
+++ b/crates/syntax/src/ast/edit.rs
@@ -0,0 +1,642 @@
+//! This module contains functions for editing syntax trees. As the trees are
+//! immutable, all functions here return a fresh copy of the tree instead of
+//! doing an in-place modification.
+use std::{
+    fmt, iter,
+    ops::{self, RangeInclusive},
+};
+
+use arrayvec::ArrayVec;
+
+use crate::{
+    algo::{self, neighbor, SyntaxRewriter},
+    ast::{
+        self,
+        make::{self, tokens},
+        AstNode, TypeBoundsOwner,
+    },
+    AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind,
+    SyntaxKind::{ATTR, COMMENT, WHITESPACE},
+    SyntaxNode, SyntaxToken, T,
+};
+
+impl ast::BinExpr {
+    #[must_use]
+    pub fn replace_op(&self, op: SyntaxKind) -> Option<ast::BinExpr> {
+        let op_node: SyntaxElement = self.op_details()?.0.into();
+        let to_insert: Option<SyntaxElement> = Some(make::token(op).into());
+        Some(self.replace_children(single_node(op_node), to_insert))
+    }
+}
+
+impl ast::Fn {
+    #[must_use]
+    pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn {
+        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
+        let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() {
+            old_body.syntax().clone().into()
+        } else if let Some(semi) = self.semicolon_token() {
+            to_insert.push(make::tokens::single_space().into());
+            semi.into()
+        } else {
+            to_insert.push(make::tokens::single_space().into());
+            to_insert.push(body.syntax().clone().into());
+            return self.insert_children(InsertPosition::Last, to_insert);
+        };
+        to_insert.push(body.syntax().clone().into());
+        self.replace_children(single_node(old_body_or_semi), to_insert)
+    }
+}
+
+fn make_multiline<N>(node: N) -> N
+where
+    N: AstNode + Clone,
+{
+    let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) {
+        Some(it) => it,
+        None => return node,
+    };
+    let sibling = match l_curly.next_sibling_or_token() {
+        Some(it) => it,
+        None => return node,
+    };
+    let existing_ws = match sibling.as_token() {
+        None => None,
+        Some(tok) if tok.kind() != WHITESPACE => None,
+        Some(ws) => {
+            if ws.text().contains('\n') {
+                return node;
+            }
+            Some(ws.clone())
+        }
+    };
+
+    let indent = leading_indent(node.syntax()).unwrap_or_default();
+    let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
+    let to_insert = iter::once(ws.ws().into());
+    match existing_ws {
+        None => node.insert_children(InsertPosition::After(l_curly), to_insert),
+        Some(ws) => node.replace_children(single_node(ws), to_insert),
+    }
+}
+
+impl ast::AssocItemList {
+    #[must_use]
+    pub fn append_items(
+        &self,
+        items: impl IntoIterator<Item = ast::AssocItem>,
+    ) -> ast::AssocItemList {
+        let mut res = self.clone();
+        if !self.syntax().text().contains_char('\n') {
+            res = make_multiline(res);
+        }
+        items.into_iter().for_each(|it| res = res.append_item(it));
+        res
+    }
+
+    #[must_use]
+    pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList {
+        let (indent, position) = match self.assoc_items().last() {
+            Some(it) => (
+                leading_indent(it.syntax()).unwrap_or_default().to_string(),
+                InsertPosition::After(it.syntax().clone().into()),
+            ),
+            None => match self.l_curly_token() {
+                Some(it) => (
+                    "    ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(),
+                    InsertPosition::After(it.into()),
+                ),
+                None => return self.clone(),
+            },
+        };
+        let ws = tokens::WsBuilder::new(&format!("\n{}", indent));
+        let to_insert: ArrayVec<[SyntaxElement; 2]> =
+            [ws.ws().into(), item.syntax().clone().into()].into();
+        self.insert_children(position, to_insert)
+    }
+}
+
+impl ast::RecordExprFieldList {
+    #[must_use]
+    pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList {
+        self.insert_field(InsertPosition::Last, field)
+    }
+
+    #[must_use]
+    pub fn insert_field(
+        &self,
+        position: InsertPosition<&'_ ast::RecordExprField>,
+        field: &ast::RecordExprField,
+    ) -> ast::RecordExprFieldList {
+        let is_multiline = self.syntax().text().contains_char('\n');
+        let ws;
+        let space = if is_multiline {
+            ws = tokens::WsBuilder::new(&format!(
+                "\n{}    ",
+                leading_indent(self.syntax()).unwrap_or_default()
+            ));
+            ws.ws()
+        } else {
+            tokens::single_space()
+        };
+
+        let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new();
+        to_insert.push(space.into());
+        to_insert.push(field.syntax().clone().into());
+        to_insert.push(make::token(T![,]).into());
+
+        macro_rules! after_l_curly {
+            () => {{
+                let anchor = match self.l_curly_token() {
+                    Some(it) => it.into(),
+                    None => return self.clone(),
+                };
+                InsertPosition::After(anchor)
+            }};
+        }
+
+        macro_rules! after_field {
+            ($anchor:expr) => {
+                if let Some(comma) = $anchor
+                    .syntax()
+                    .siblings_with_tokens(Direction::Next)
+                    .find(|it| it.kind() == T![,])
+                {
+                    InsertPosition::After(comma)
+                } else {
+                    to_insert.insert(0, make::token(T![,]).into());
+                    InsertPosition::After($anchor.syntax().clone().into())
+                }
+            };
+        };
+
+        let position = match position {
+            InsertPosition::First => after_l_curly!(),
+            InsertPosition::Last => {
+                if !is_multiline {
+                    // don't insert comma before curly
+                    to_insert.pop();
+                }
+                match self.fields().last() {
+                    Some(it) => after_field!(it),
+                    None => after_l_curly!(),
+                }
+            }
+            InsertPosition::Before(anchor) => {
+                InsertPosition::Before(anchor.syntax().clone().into())
+            }
+            InsertPosition::After(anchor) => after_field!(anchor),
+        };
+
+        self.insert_children(position, to_insert)
+    }
+}
+
+impl ast::TypeAlias {
+    #[must_use]
+    pub fn remove_bounds(&self) -> ast::TypeAlias {
+        let colon = match self.colon_token() {
+            Some(it) => it,
+            None => return self.clone(),
+        };
+        let end = match self.type_bound_list() {
+            Some(it) => it.syntax().clone().into(),
+            None => colon.clone().into(),
+        };
+        self.replace_children(colon.into()..=end, iter::empty())
+    }
+}
+
+impl ast::TypeParam {
+    #[must_use]
+    pub fn remove_bounds(&self) -> ast::TypeParam {
+        let colon = match self.colon_token() {
+            Some(it) => it,
+            None => return self.clone(),
+        };
+        let end = match self.type_bound_list() {
+            Some(it) => it.syntax().clone().into(),
+            None => colon.clone().into(),
+        };
+        self.replace_children(colon.into()..=end, iter::empty())
+    }
+}
+
+impl ast::Path {
+    #[must_use]
+    pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path {
+        if let Some(old) = self.segment() {
+            return self.replace_children(
+                single_node(old.syntax().clone()),
+                iter::once(segment.syntax().clone().into()),
+            );
+        }
+        self.clone()
+    }
+}
+
+impl ast::PathSegment {
+    #[must_use]
+    pub fn with_type_args(&self, type_args: ast::GenericArgList) -> ast::PathSegment {
+        self._with_type_args(type_args, false)
+    }
+
+    #[must_use]
+    pub fn with_turbo_fish(&self, type_args: ast::GenericArgList) -> ast::PathSegment {
+        self._with_type_args(type_args, true)
+    }
+
+    fn _with_type_args(&self, type_args: ast::GenericArgList, turbo: bool) -> ast::PathSegment {
+        if let Some(old) = self.generic_arg_list() {
+            return self.replace_children(
+                single_node(old.syntax().clone()),
+                iter::once(type_args.syntax().clone().into()),
+            );
+        }
+        let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new();
+        if turbo {
+            to_insert.push(make::token(T![::]).into());
+        }
+        to_insert.push(type_args.syntax().clone().into());
+        self.insert_children(InsertPosition::Last, to_insert)
+    }
+}
+
+impl ast::Use {
+    #[must_use]
+    pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use {
+        if let Some(old) = self.use_tree() {
+            return self.replace_descendant(old, use_tree);
+        }
+        self.clone()
+    }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        let next_ws = self
+            .syntax()
+            .next_sibling_or_token()
+            .and_then(|it| it.into_token())
+            .and_then(ast::Whitespace::cast);
+        if let Some(next_ws) = next_ws {
+            let ws_text = next_ws.syntax().text();
+            if ws_text.starts_with('\n') {
+                let rest = &ws_text[1..];
+                if rest.is_empty() {
+                    res.delete(next_ws.syntax())
+                } else {
+                    res.replace(next_ws.syntax(), &make::tokens::whitespace(rest));
+                }
+            }
+        }
+        res
+    }
+}
+
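+// A hedged sketch of how the `SyntaxRewriter` returned by `remove` above is
+// meant to be consumed; the sample source and expected output are assumptions.
+#[test]
+fn remove_use_rewrites_tree() {
+    let file = ast::SourceFile::parse("use foo;\nfn f() {}").ok().unwrap();
+    let use_item = file.syntax().descendants().find_map(ast::Use::cast).unwrap();
+    let rewriter = use_item.remove();
+    // `rewrite_root` picks the smallest node covering all queued deletions;
+    // rewriting it drops both the `use` item and the trailing newline.
+    let root = rewriter.rewrite_root().unwrap();
+    assert_eq!(rewriter.rewrite(&root).to_string(), "fn f() {}");
+}
+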
+impl ast::UseTree {
+    #[must_use]
+    pub fn with_path(&self, path: ast::Path) -> ast::UseTree {
+        if let Some(old) = self.path() {
+            return self.replace_descendant(old, path);
+        }
+        self.clone()
+    }
+
+    #[must_use]
+    pub fn with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree {
+        if let Some(old) = self.use_tree_list() {
+            return self.replace_descendant(old, use_tree_list);
+        }
+        self.clone()
+    }
+
+    #[must_use]
+    pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree {
+        let suffix = match split_path_prefix(&prefix) {
+            Some(it) => it,
+            None => return self.clone(),
+        };
+        let use_tree = make::use_tree(
+            suffix,
+            self.use_tree_list(),
+            self.rename(),
+            self.star_token().is_some(),
+        );
+        let nested = make::use_tree_list(iter::once(use_tree));
+        return make::use_tree(prefix.clone(), Some(nested), None, false);
+
+        fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> {
+            let parent = prefix.parent_path()?;
+            let segment = parent.segment()?;
+            if algo::has_errors(segment.syntax()) {
+                return None;
+            }
+            let mut res = make::path_unqualified(segment);
+            for p in iter::successors(parent.parent_path(), |it| it.parent_path()) {
+                res = make::path_qualified(res, p.segment()?);
+            }
+            Some(res)
+        }
+    }
+
+    pub fn remove(&self) -> SyntaxRewriter<'static> {
+        let mut res = SyntaxRewriter::default();
+        res.delete(self.syntax());
+        for &dir in [Direction::Next, Direction::Prev].iter() {
+            if let Some(nb) = neighbor(self, dir) {
+                self.syntax()
+                    .siblings_with_tokens(dir)
+                    .skip(1)
+                    .take_while(|it| it.as_node() != Some(nb.syntax()))
+                    .for_each(|el| res.delete(&el));
+                return res;
+            }
+        }
+        res
+    }
+}
+
+impl ast::MatchArmList {
+    #[must_use]
+    pub fn append_arms(&self, items: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+        let mut res = self.clone();
+        res = res.strip_if_only_whitespace();
+        if !res.syntax().text().contains_char('\n') {
+            res = make_multiline(res);
+        }
+        items.into_iter().for_each(|it| res = res.append_arm(it));
+        res
+    }
+
+    fn strip_if_only_whitespace(&self) -> ast::MatchArmList {
+        let mut iter = self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']);
+        iter.next(); // Eat the curly
+        let mut inner = iter.take_while(|it| it.kind() != T!['}']);
+        if !inner.clone().all(|it| it.kind() == WHITESPACE) {
+            return self.clone();
+        }
+        let start = match inner.next() {
+            Some(s) => s,
+            None => return self.clone(),
+        };
+        let end = match inner.last() {
+            Some(s) => s,
+            None => start.clone(),
+        };
+        self.replace_children(start..=end, &mut iter::empty())
+    }
+
+    #[must_use]
+    pub fn remove_placeholder(&self) -> ast::MatchArmList {
+        let placeholder =
+            self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_))));
+        if let Some(placeholder) = placeholder {
+            self.remove_arm(&placeholder)
+        } else {
+            self.clone()
+        }
+    }
+
+    #[must_use]
+    fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList {
+        let start = arm.syntax().clone();
+        let end = if let Some(comma) = start
+            .siblings_with_tokens(Direction::Next)
+            .skip(1)
+            .skip_while(|it| it.kind().is_trivia())
+            .next()
+            .filter(|it| it.kind() == T![,])
+        {
+            comma
+        } else {
+            start.clone().into()
+        };
+        self.replace_children(start.into()..=end, None)
+    }
+
+    #[must_use]
+    pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList {
+        let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) {
+            Some(t) => t,
+            None => return self.clone(),
+        };
+        let position = InsertPosition::Before(r_curly.into());
+        let arm_ws = tokens::WsBuilder::new("    ");
+        let match_indent = &leading_indent(self.syntax()).unwrap_or_default();
+        let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent));
+        let to_insert: ArrayVec<[SyntaxElement; 3]> =
+            [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into();
+        self.insert_children(position, to_insert)
+    }
+}
+
+#[must_use]
+pub fn remove_attrs_and_docs<N: ast::AttrsOwner>(node: &N) -> N {
+    N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap()
+}
+
+fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode {
+    while let Some(start) =
+        node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT)
+    {
+        let end = match &start.next_sibling_or_token() {
+            Some(el) if el.kind() == WHITESPACE => el.clone(),
+            Some(_) | None => start.clone(),
+        };
+        node = algo::replace_children(&node, start..=end, &mut iter::empty());
+    }
+    node
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct IndentLevel(pub u8);
+
+impl From<u8> for IndentLevel {
+    fn from(level: u8) -> IndentLevel {
+        IndentLevel(level)
+    }
+}
+
+impl fmt::Display for IndentLevel {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let spaces = "                                        ";
+        let buf;
+        let len = self.0 as usize * 4;
+        let indent = if len <= spaces.len() {
+            &spaces[..len]
+        } else {
+            buf = iter::repeat(' ').take(len).collect::<String>();
+            &buf
+        };
+        fmt::Display::fmt(indent, f)
+    }
+}
+
+impl ops::Add<u8> for IndentLevel {
+    type Output = IndentLevel;
+    fn add(self, rhs: u8) -> IndentLevel {
+        IndentLevel(self.0 + rhs)
+    }
+}
+
+impl IndentLevel {
+    pub fn from_node(node: &SyntaxNode) -> IndentLevel {
+        let first_token = match node.first_token() {
+            Some(it) => it,
+            None => return IndentLevel(0),
+        };
+        for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) {
+            let text = ws.syntax().text();
+            if let Some(pos) = text.rfind('\n') {
+                let level = text[pos + 1..].chars().count() / 4;
+                return IndentLevel(level as u8);
+            }
+        }
+        IndentLevel(0)
+    }
+
+    /// XXX: this intentionally doesn't change the indent of the very first token.
+    /// I.e., in something like
+    /// ```
+    /// fn foo() {
+    ///    92
+    /// }
+    /// ```
+    /// if you indent the block, the `{` token would stay put.
+    fn increase_indent(self, node: SyntaxNode) -> SyntaxNode {
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
+            .filter_map(|el| el.into_token())
+            .filter_map(ast::Whitespace::cast)
+            .filter(|ws| {
+                let text = ws.syntax().text();
+                text.contains('\n')
+            })
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,));
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
+    }
+
+    fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode {
+        let mut rewriter = SyntaxRewriter::default();
+        node.descendants_with_tokens()
+            .filter_map(|el| el.into_token())
+            .filter_map(ast::Whitespace::cast)
+            .filter(|ws| {
+                let text = ws.syntax().text();
+                text.contains('\n')
+            })
+            .for_each(|ws| {
+                let new_ws = make::tokens::whitespace(
+                    &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
+                );
+                rewriter.replace(ws.syntax(), &new_ws)
+            });
+        rewriter.rewrite(&node)
+    }
+}
+
+// FIXME: replace usages with IndentLevel above
+fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> {
+    for token in prev_tokens(node.first_token()?) {
+        if let Some(ws) = ast::Whitespace::cast(token.clone()) {
+            let ws_text = ws.text();
+            if let Some(pos) = ws_text.rfind('\n') {
+                return Some(ws_text[pos + 1..].into());
+            }
+        }
+        if token.text().contains('\n') {
+            break;
+        }
+    }
+    None
+}
+
+fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> {
+    iter::successors(Some(token), |token| token.prev_token())
+}
+
+pub trait AstNodeEdit: AstNode + Clone + Sized {
+    #[must_use]
+    fn insert_children(
+        &self,
+        position: InsertPosition<SyntaxElement>,
+        to_insert: impl IntoIterator<Item = SyntaxElement>,
+    ) -> Self {
+        let new_syntax = algo::insert_children(self.syntax(), position, to_insert);
+        Self::cast(new_syntax).unwrap()
+    }
+
+    #[must_use]
+    fn replace_children(
+        &self,
+        to_replace: RangeInclusive<SyntaxElement>,
+        to_insert: impl IntoIterator<Item = SyntaxElement>,
+    ) -> Self {
+        let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert);
+        Self::cast(new_syntax).unwrap()
+    }
+
+    #[must_use]
+    fn replace_descendant<D: AstNode>(&self, old: D, new: D) -> Self {
+        self.replace_descendants(iter::once((old, new)))
+    }
+
+    #[must_use]
+    fn replace_descendants<D: AstNode>(
+        &self,
+        replacement_map: impl IntoIterator<Item = (D, D)>,
+    ) -> Self {
+        let mut rewriter = SyntaxRewriter::default();
+        for (from, to) in replacement_map {
+            rewriter.replace(from.syntax(), to.syntax())
+        }
+        rewriter.rewrite_ast(self)
+    }
+    #[must_use]
+    fn indent(&self, level: IndentLevel) -> Self {
+        Self::cast(level.increase_indent(self.syntax().clone())).unwrap()
+    }
+    #[must_use]
+    fn dedent(&self, level: IndentLevel) -> Self {
+        Self::cast(level.decrease_indent(self.syntax().clone())).unwrap()
+    }
+    #[must_use]
+    fn reset_indent(&self) -> Self {
+        let level = IndentLevel::from_node(self.syntax());
+        self.dedent(level)
+    }
+}
+
+impl<N: AstNode + Clone> AstNodeEdit for N {}
+
+fn single_node(element: impl Into<SyntaxElement>) -> RangeInclusive<SyntaxElement> {
+    let element = element.into();
+    element.clone()..=element
+}
+
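+// A small sketch of the module-level contract stated at the top of this file:
+// edits return fresh copies and never mutate the tree they start from. It
+// reuses the same `make` constructors as the test below.
+#[test]
+fn test_edits_do_not_mutate_input() {
+    let arm = make::match_arm(iter::once(make::wildcard_pat().into()), make::expr_unit());
+    let arm_list = make::match_arm_list(vec![arm.clone(), arm]);
+    let original = arm_list.syntax().to_string();
+    // `indent` builds a new, re-indented tree...
+    let indented = arm_list.indent(IndentLevel(1));
+    assert_ne!(indented.syntax().to_string(), original);
+    // ...while the tree we started from is left untouched.
+    assert_eq!(arm_list.syntax().to_string(), original);
+}
+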
+#[test]
+fn test_increase_indent() {
+    let arm_list = {
+        let arm = make::match_arm(iter::once(make::wildcard_pat().into()), make::expr_unit());
+        make::match_arm_list(vec![arm.clone(), arm])
+    };
+    assert_eq!(
+        arm_list.syntax().to_string(),
+        "{
+    _ => (),
+    _ => (),
+}"
+    );
+    let indented = arm_list.indent(IndentLevel(2));
+    assert_eq!(
+        indented.syntax().to_string(),
+        "{
+            _ => (),
+            _ => (),
+        }"
+    );
+}
diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs
new file mode 100644
index 00000000000..f5ba872233f
--- /dev/null
+++ b/crates/syntax/src/ast/expr_ext.rs
@@ -0,0 +1,418 @@
+//! Various extension methods for AST `Expr` nodes, which are hard to code-generate.
+
+use crate::{
+    ast::{self, support, AstChildren, AstNode},
+    SmolStr,
+    SyntaxKind::*,
+    SyntaxToken, T,
+};
+
+impl ast::AttrsOwner for ast::Expr {}
+
+impl ast::Expr {
+    pub fn is_block_like(&self) -> bool {
+        match self {
+            ast::Expr::IfExpr(_)
+            | ast::Expr::LoopExpr(_)
+            | ast::Expr::ForExpr(_)
+            | ast::Expr::WhileExpr(_)
+            | ast::Expr::BlockExpr(_)
+            | ast::Expr::MatchExpr(_)
+            | ast::Expr::EffectExpr(_) => true,
+            _ => false,
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ElseBranch {
+    Block(ast::BlockExpr),
+    IfExpr(ast::IfExpr),
+}
+
+impl ast::IfExpr {
+    pub fn then_branch(&self) -> Option<ast::BlockExpr> {
+        self.blocks().next()
+    }
+    pub fn else_branch(&self) -> Option<ElseBranch> {
+        let res = match self.blocks().nth(1) {
+            Some(block) => ElseBranch::Block(block),
+            None => {
+                let elif: ast::IfExpr = support::child(self.syntax())?;
+                ElseBranch::IfExpr(elif)
+            }
+        };
+        Some(res)
+    }
+
+    pub fn blocks(&self) -> AstChildren<ast::BlockExpr> {
+        support::children(self.syntax())
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum PrefixOp {
+    /// The `*` operator for dereferencing
+    Deref,
+    /// The `!` operator for logical inversion
+    Not,
+    /// The `-` operator for negation
+    Neg,
+}
+
+impl ast::PrefixExpr {
+    pub fn op_kind(&self) -> Option<PrefixOp> {
+        match self.op_token()?.kind() {
+            T![*] => Some(PrefixOp::Deref),
+            T![!] => Some(PrefixOp::Not),
+            T![-] => Some(PrefixOp::Neg),
+            _ => None,
+        }
+    }
+
+    pub fn op_token(&self) -> Option<SyntaxToken> {
+        self.syntax().first_child_or_token()?.into_token()
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum BinOp {
+    /// The `||` operator for boolean OR
+    BooleanOr,
+    /// The `&&` operator for boolean AND
+    BooleanAnd,
+    /// The `==` operator for equality testing
+    EqualityTest,
+    /// The `!=` operator for inequality testing
+    NegatedEqualityTest,
+    /// The `<=` operator for lesser-equal testing
+    LesserEqualTest,
+    /// The `>=` operator for greater-equal testing
+    GreaterEqualTest,
+    /// The `<` operator for comparison
+    LesserTest,
+    /// The `>` operator for comparison
+    GreaterTest,
+    /// The `+` operator for addition
+    Addition,
+    /// The `*` operator for multiplication
+    Multiplication,
+    /// The `-` operator for subtraction
+    Subtraction,
+    /// The `/` operator for division
+    Division,
+    /// The `%` operator for remainder after division
+    Remainder,
+    /// The `<<` operator for left shift
+    LeftShift,
+    /// The `>>` operator for right shift
+    RightShift,
+    /// The `^` operator for bitwise XOR
+    BitwiseXor,
+    /// The `|` operator for bitwise OR
+    BitwiseOr,
+    /// The `&` operator for bitwise AND
+    BitwiseAnd,
+    /// The `=` operator for assignment
+    Assignment,
+    /// The `+=` operator for assignment after addition
+    AddAssign,
+    /// The `/=` operator for assignment after division
+    DivAssign,
+    /// The `*=` operator for assignment after multiplication
+    MulAssign,
+    /// The `%=` operator for assignment after remainders
+    RemAssign,
+    /// The `>>=` operator for assignment after shifting right
+    ShrAssign,
+    /// The `<<=` operator for assignment after shifting left
+    ShlAssign,
+    /// The `-=` operator for assignment after subtraction
+    SubAssign,
+    /// The `|=` operator for assignment after bitwise OR
+    BitOrAssign,
+    /// The `&=` operator for assignment after bitwise AND
+    BitAndAssign,
+    /// The `^=` operator for assignment after bitwise XOR
+    BitXorAssign,
+}
+
+impl BinOp {
+    pub fn is_assignment(self) -> bool {
+        match self {
+            BinOp::Assignment
+            | BinOp::AddAssign
+            | BinOp::DivAssign
+            | BinOp::MulAssign
+            | BinOp::RemAssign
+            | BinOp::ShrAssign
+            | BinOp::ShlAssign
+            | BinOp::SubAssign
+            | BinOp::BitOrAssign
+            | BinOp::BitAndAssign
+            | BinOp::BitXorAssign => true,
+            _ => false,
+        }
+    }
+}
+
+impl ast::BinExpr {
+    pub fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
+        self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| {
+            let bin_op = match c.kind() {
+                T![||] => BinOp::BooleanOr,
+                T![&&] => BinOp::BooleanAnd,
+                T![==] => BinOp::EqualityTest,
+                T![!=] => BinOp::NegatedEqualityTest,
+                T![<=] => BinOp::LesserEqualTest,
+                T![>=] => BinOp::GreaterEqualTest,
+                T![<] => BinOp::LesserTest,
+                T![>] => BinOp::GreaterTest,
+                T![+] => BinOp::Addition,
+                T![*] => BinOp::Multiplication,
+                T![-] => BinOp::Subtraction,
+                T![/] => BinOp::Division,
+                T![%] => BinOp::Remainder,
+                T![<<] => BinOp::LeftShift,
+                T![>>] => BinOp::RightShift,
+                T![^] => BinOp::BitwiseXor,
+                T![|] => BinOp::BitwiseOr,
+                T![&] => BinOp::BitwiseAnd,
+                T![=] => BinOp::Assignment,
+                T![+=] => BinOp::AddAssign,
+                T![/=] => BinOp::DivAssign,
+                T![*=] => BinOp::MulAssign,
+                T![%=] => BinOp::RemAssign,
+                T![>>=] => BinOp::ShrAssign,
+                T![<<=] => BinOp::ShlAssign,
+                T![-=] => BinOp::SubAssign,
+                T![|=] => BinOp::BitOrAssign,
+                T![&=] => BinOp::BitAndAssign,
+                T![^=] => BinOp::BitXorAssign,
+                _ => return None,
+            };
+            Some((c, bin_op))
+        })
+    }
+
+    pub fn op_kind(&self) -> Option<BinOp> {
+        self.op_details().map(|t| t.1)
+    }
+
+    pub fn op_token(&self) -> Option<SyntaxToken> {
+        self.op_details().map(|t| t.0)
+    }
+
+    pub fn lhs(&self) -> Option<ast::Expr> {
+        support::children(self.syntax()).next()
+    }
+
+    pub fn rhs(&self) -> Option<ast::Expr> {
+        support::children(self.syntax()).nth(1)
+    }
+
+    pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
+        let mut children = support::children(self.syntax());
+        let first = children.next();
+        let second = children.next();
+        (first, second)
+    }
+}
+
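+// A minimal sketch of the operator classification above; the sample expression
+// is arbitrary and `SourceFile` comes from the crate root.
+#[test]
+fn bin_expr_op_kind_smoke_test() {
+    let file = crate::SourceFile::parse("fn f() { 2 + 2; }").ok().unwrap();
+    let bin = file.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
+    // `op_details` scans the children for an operator token, so both the token
+    // and its classification are available.
+    assert_eq!(bin.op_kind(), Some(BinOp::Addition));
+    assert_eq!("+", bin.op_token().unwrap().text());
+}
+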
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum RangeOp {
+    /// `..`
+    Exclusive,
+    /// `..=`
+    Inclusive,
+}
+
+impl ast::RangeExpr {
+    fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> {
+        self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| {
+            let token = child.into_token()?;
+            let bin_op = match token.kind() {
+                T![..] => RangeOp::Exclusive,
+                T![..=] => RangeOp::Inclusive,
+                _ => return None,
+            };
+            Some((ix, token, bin_op))
+        })
+    }
+
+    pub fn op_kind(&self) -> Option<RangeOp> {
+        self.op_details().map(|t| t.2)
+    }
+
+    pub fn op_token(&self) -> Option<SyntaxToken> {
+        self.op_details().map(|t| t.1)
+    }
+
+    pub fn start(&self) -> Option<ast::Expr> {
+        let op_ix = self.op_details()?.0;
+        self.syntax()
+            .children_with_tokens()
+            .take(op_ix)
+            .find_map(|it| ast::Expr::cast(it.into_node()?))
+    }
+
+    pub fn end(&self) -> Option<ast::Expr> {
+        let op_ix = self.op_details()?.0;
+        self.syntax()
+            .children_with_tokens()
+            .skip(op_ix + 1)
+            .find_map(|it| ast::Expr::cast(it.into_node()?))
+    }
+}
+
+impl ast::IndexExpr {
+    pub fn base(&self) -> Option<ast::Expr> {
+        support::children(self.syntax()).next()
+    }
+    pub fn index(&self) -> Option<ast::Expr> {
+        support::children(self.syntax()).nth(1)
+    }
+}
+
+pub enum ArrayExprKind {
+    Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
+    ElementList(AstChildren<ast::Expr>),
+}
+
+impl ast::ArrayExpr {
+    pub fn kind(&self) -> ArrayExprKind {
+        if self.is_repeat() {
+            ArrayExprKind::Repeat {
+                initializer: support::children(self.syntax()).next(),
+                repeat: support::children(self.syntax()).nth(1),
+            }
+        } else {
+            ArrayExprKind::ElementList(support::children(self.syntax()))
+        }
+    }
+
+    fn is_repeat(&self) -> bool {
+        self.syntax().children_with_tokens().any(|it| it.kind() == T![;])
+    }
+}
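+
+// Hedged example of `kind()` (an assumed sketch, not upstream code): the `;`
+// token is what distinguishes `[0u8; 16]` (a repeat) from `[1, 2, 3]` (an
+// element list).
+//
+//     let parse = ast::SourceFile::parse("const A: [u8; 16] = [0u8; 16];");
+//     let arr = parse.tree().syntax().descendants().find_map(ast::ArrayExpr::cast).unwrap();
+//     assert!(matches!(arr.kind(), ArrayExprKind::Repeat { .. }));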
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum LiteralKind {
+    String,
+    ByteString,
+    Char,
+    Byte,
+    IntNumber { suffix: Option<SmolStr> },
+    FloatNumber { suffix: Option<SmolStr> },
+    Bool(bool),
+}
+
+impl ast::Literal {
+    pub fn token(&self) -> SyntaxToken {
+        self.syntax()
+            .children_with_tokens()
+            .find(|e| e.kind() != ATTR && !e.kind().is_trivia())
+            .and_then(|e| e.into_token())
+            .unwrap()
+    }
+
+    fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> {
+        possible_suffixes
+            .iter()
+            .find(|&suffix| text.ends_with(suffix))
+            .map(|&suffix| SmolStr::new(suffix))
+    }
+
+    pub fn kind(&self) -> LiteralKind {
+        const INT_SUFFIXES: [&str; 12] = [
+            "u64", "u32", "u16", "u8", "usize", "isize", "i64", "i32", "i16", "i8", "u128", "i128",
+        ];
+        const FLOAT_SUFFIXES: [&str; 2] = ["f32", "f64"];
+
+        let token = self.token();
+
+        match token.kind() {
+            INT_NUMBER => {
+                // The lexer treats e.g. `1f64` as an integer literal, which previously
+                // caused a bug here; the `if` below is needed to report such literals
+                // as floats. See
+                // https://github.com/rust-analyzer/rust-analyzer/issues/1592
+                // and the comments on the linked PR.
+
+                let text = token.text();
+                if let suffix @ Some(_) = Self::find_suffix(&text, &FLOAT_SUFFIXES) {
+                    LiteralKind::FloatNumber { suffix }
+                } else {
+                    LiteralKind::IntNumber { suffix: Self::find_suffix(&text, &INT_SUFFIXES) }
+                }
+            }
+            FLOAT_NUMBER => {
+                let text = token.text();
+                LiteralKind::FloatNumber { suffix: Self::find_suffix(&text, &FLOAT_SUFFIXES) }
+            }
+            STRING | RAW_STRING => LiteralKind::String,
+            T![true] => LiteralKind::Bool(true),
+            T![false] => LiteralKind::Bool(false),
+            BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString,
+            CHAR => LiteralKind::Char,
+            BYTE => LiteralKind::Byte,
+            _ => unreachable!(),
+        }
+    }
+}
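+
+// Illustrative check of the suffix handling above (an assumed example, not
+// upstream code): `1f64` is lexed as an INT_NUMBER, but `kind()` reports it as
+// a float thanks to the FLOAT_SUFFIXES probe.
+//
+//     let parse = ast::SourceFile::parse("const C: f64 = 1f64;");
+//     let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+//     assert_eq!(lit.kind(), LiteralKind::FloatNumber { suffix: Some("f64".into()) });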
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Effect {
+    Async(SyntaxToken),
+    Unsafe(SyntaxToken),
+    Try(SyntaxToken),
+    // Very much not an effect, but we stuff it into this node anyway
+    Label(ast::Label),
+}
+
+impl ast::EffectExpr {
+    pub fn effect(&self) -> Effect {
+        if let Some(token) = self.async_token() {
+            return Effect::Async(token);
+        }
+        if let Some(token) = self.unsafe_token() {
+            return Effect::Unsafe(token);
+        }
+        if let Some(token) = self.try_token() {
+            return Effect::Try(token);
+        }
+        if let Some(label) = self.label() {
+            return Effect::Label(label);
+        }
+        unreachable!("ast::EffectExpr without Effect")
+    }
+}
+
+impl ast::BlockExpr {
+    /// Returns `false` if the block is an intrinsic part of the syntax and can't be
+    /// replaced with an arbitrary expression.
+    ///
+    /// ```not_rust
+    /// fn foo() { not_stand_alone }
+    /// const FOO: () = { stand_alone };
+    /// ```
+    pub fn is_standalone(&self) -> bool {
+        let parent = match self.syntax().parent() {
+            Some(it) => it,
+            None => return true,
+        };
+        !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR)
+    }
+}
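+
+// A small sketch of `is_standalone` (assumed, not upstream code): the block of
+// `fn foo() {}` is the function body, so it is not standalone, whereas a
+// `const` initializer block is.
+//
+//     let parse = ast::SourceFile::parse("fn foo() {}");
+//     let block = parse.tree().syntax().descendants().find_map(ast::BlockExpr::cast).unwrap();
+//     assert!(!block.is_standalone());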
+
+#[test]
+fn test_literal_with_attr() {
+    let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
+    let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
+    assert_eq!(lit.token().text(), r#""Hello""#);
+}
+
+impl ast::RecordExprField {
+    pub fn parent_record_lit(&self) -> ast::RecordExpr {
+        self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap()
+    }
+}
diff --git a/crates/syntax/src/ast/generated.rs b/crates/syntax/src/ast/generated.rs
new file mode 100644
index 00000000000..4a6f41ee71f
--- /dev/null
+++ b/crates/syntax/src/ast/generated.rs
@@ -0,0 +1,41 @@
+//! This file is actually hand-written, but the submodules are indeed generated.
+#[rustfmt::skip]
+mod nodes;
+#[rustfmt::skip]
+mod tokens;
+
+use crate::{
+    AstNode,
+    SyntaxKind::{self, *},
+    SyntaxNode,
+};
+
+pub use {nodes::*, tokens::*};
+
+// Stmt is the only nested enum, so it's easier to just hand-write it
+impl AstNode for Stmt {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            LET_STMT | EXPR_STMT => true,
+            _ => Item::can_cast(kind),
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            LET_STMT => Stmt::LetStmt(LetStmt { syntax }),
+            EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }),
+            _ => {
+                let item = Item::cast(syntax)?;
+                Stmt::Item(item)
+            }
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Stmt::LetStmt(it) => &it.syntax,
+            Stmt::ExprStmt(it) => &it.syntax,
+            Stmt::Item(it) => it.syntax(),
+        }
+    }
+}
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
new file mode 100644
index 00000000000..3d49309d148
--- /dev/null
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -0,0 +1,4067 @@
+//! Generated file, do not edit by hand, see `xtask/src/codegen`
+
+use crate::{
+    ast::{self, support, AstChildren, AstNode},
+    SyntaxKind::{self, *},
+    SyntaxNode, SyntaxToken, T,
+};
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Name {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Name {
+    pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NameRef {
+    pub(crate) syntax: SyntaxNode,
+}
+impl NameRef {
+    pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Path {
+    pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+    pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment {
+    pub(crate) syntax: SyntaxNode,
+}
+impl PathSegment {
+    pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+    pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+    pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+    pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+    pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl GenericArgList {
+    pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+    pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) }
+    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParamList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ParamList {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) }
+    pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+    pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RetType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RetType {
+    pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl PathType {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeArg {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TypeArg {
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocTypeArg {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::TypeBoundsOwner for AssocTypeArg {}
+impl AssocTypeArg {
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeArg {
+    pub(crate) syntax: SyntaxNode,
+}
+impl LifetimeArg {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstArg {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ConstArg {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBoundList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TypeBoundList {
+    pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCall {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for MacroCall {}
+impl ast::NameOwner for MacroCall {}
+impl MacroCall {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+    pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Attr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Attr {
+    pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
+    pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+    pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenTree {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TokenTree {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroItems {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::ModuleItemOwner for MacroItems {}
+impl MacroItems {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroStmts {
+    pub(crate) syntax: SyntaxNode,
+}
+impl MacroStmts {
+    pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SourceFile {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for SourceFile {}
+impl ast::ModuleItemOwner for SourceFile {}
+impl SourceFile {
+    pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Const {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Const {}
+impl ast::NameOwner for Const {}
+impl ast::VisibilityOwner for Const {}
+impl Const {
+    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Enum {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Enum {}
+impl ast::NameOwner for Enum {}
+impl ast::VisibilityOwner for Enum {}
+impl ast::GenericParamsOwner for Enum {}
+impl Enum {
+    pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
+    pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ExternBlock {}
+impl ExternBlock {
+    pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+    pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternCrate {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ExternCrate {}
+impl ast::VisibilityOwner for ExternCrate {}
+impl ExternCrate {
+    pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+    pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+    pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Fn {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Fn {}
+impl ast::NameOwner for Fn {}
+impl ast::VisibilityOwner for Fn {}
+impl ast::GenericParamsOwner for Fn {}
+impl Fn {
+    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+    pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+    pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Impl {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Impl {}
+impl ast::VisibilityOwner for Impl {}
+impl ast::GenericParamsOwner for Impl {}
+impl Impl {
+    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Module {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Module {}
+impl ast::NameOwner for Module {}
+impl ast::VisibilityOwner for Module {}
+impl Module {
+    pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
+    pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Static {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Static {}
+impl ast::NameOwner for Static {}
+impl ast::VisibilityOwner for Static {}
+impl Static {
+    pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Struct {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Struct {}
+impl ast::NameOwner for Struct {}
+impl ast::VisibilityOwner for Struct {}
+impl ast::GenericParamsOwner for Struct {}
+impl Struct {
+    pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+    pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Trait {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Trait {}
+impl ast::NameOwner for Trait {}
+impl ast::VisibilityOwner for Trait {}
+impl ast::GenericParamsOwner for Trait {}
+impl ast::TypeBoundsOwner for Trait {}
+impl Trait {
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
+    pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
+    pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for TypeAlias {}
+impl ast::NameOwner for TypeAlias {}
+impl ast::VisibilityOwner for TypeAlias {}
+impl ast::GenericParamsOwner for TypeAlias {}
+impl ast::TypeBoundsOwner for TypeAlias {}
+impl TypeAlias {
+    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+    pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Union {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Union {}
+impl ast::NameOwner for Union {}
+impl ast::VisibilityOwner for Union {}
+impl ast::GenericParamsOwner for Union {}
+impl Union {
+    pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
+    pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Use {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Use {}
+impl ast::VisibilityOwner for Use {}
+impl Use {
+    pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
+    pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Visibility {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Visibility {
+    pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+    pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+    pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ItemList {}
+impl ast::ModuleItemOwner for ItemList {}
+impl ItemList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Rename {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::NameOwner for Rename {}
+impl Rename {
+    pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+    pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTree {
+    pub(crate) syntax: SyntaxNode,
+}
+impl UseTree {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+    pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+    pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
+    pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTreeList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl UseTreeList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Abi {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Abi {
+    pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+    pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+    pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+    pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhereClause {
+    pub(crate) syntax: SyntaxNode,
+}
+impl WhereClause {
+    pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
+    pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BlockExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for BlockExpr {}
+impl BlockExpr {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for SelfParam {}
+impl SelfParam {
+    pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Param {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Param {}
+impl Param {
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordFieldList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RecordFieldList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleFieldList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TupleFieldList {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordField {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RecordField {}
+impl ast::NameOwner for RecordField {}
+impl ast::VisibilityOwner for RecordField {}
+impl RecordField {
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleField {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for TupleField {}
+impl ast::VisibilityOwner for TupleField {}
+impl TupleField {
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct VariantList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl VariantList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Variant {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Variant {}
+impl ast::NameOwner for Variant {}
+impl ast::VisibilityOwner for Variant {}
+impl Variant {
+    pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocItemList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for AssocItemList {}
+impl AssocItemList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternItemList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ExternItemList {}
+impl ExternItemList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ConstParam {}
+impl ast::NameOwner for ConstParam {}
+impl ConstParam {
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn default_val(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for LifetimeParam {}
+impl ast::TypeBoundsOwner for LifetimeParam {}
+impl LifetimeParam {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for TypeParam {}
+impl ast::NameOwner for TypeParam {}
+impl ast::TypeBoundsOwner for TypeParam {}
+impl TypeParam {
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WherePred {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::TypeBoundsOwner for WherePred {}
+impl WherePred {
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for Literal {}
+impl Literal {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExprStmt {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ExprStmt {}
+impl ExprStmt {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetStmt {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for LetStmt {}
+impl LetStmt {
+    pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ArrayExpr {}
+impl ArrayExpr {
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AwaitExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for AwaitExpr {}
+impl AwaitExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+    pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BinExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for BinExpr {}
+impl BinExpr {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for BoxExpr {}
+impl BoxExpr {
+    pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BreakExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for BreakExpr {}
+impl BreakExpr {
+    pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CallExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for CallExpr {}
+impl ast::ArgListOwner for CallExpr {}
+impl CallExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CastExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for CastExpr {}
+impl CastExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ClosureExpr {}
+impl ClosureExpr {
+    pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+    pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+    pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ContinueExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ContinueExpr {}
+impl ContinueExpr {
+    pub fn continue_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![continue])
+    }
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct EffectExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for EffectExpr {}
+impl EffectExpr {
+    pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+    pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+    pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FieldExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for FieldExpr {}
+impl FieldExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ForExpr {}
+impl ast::LoopBodyOwner for ForExpr {}
+impl ForExpr {
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+    pub fn iterable(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IfExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for IfExpr {}
+impl IfExpr {
+    pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+    pub fn condition(&self) -> Option<Condition> { support::child(&self.syntax) }
+    pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IndexExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for IndexExpr {}
+impl IndexExpr {
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LoopExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for LoopExpr {}
+impl ast::LoopBodyOwner for LoopExpr {}
+impl LoopExpr {
+    pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for MatchExpr {}
+impl MatchExpr {
+    pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MethodCallExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for MethodCallExpr {}
+impl ast::ArgListOwner for MethodCallExpr {}
+impl MethodCallExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ParenExpr {}
+impl ParenExpr {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for PathExpr {}
+impl PathExpr {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PrefixExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for PrefixExpr {}
+impl PrefixExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangeExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RangeExpr {}
+impl RangeExpr {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RecordExpr {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> {
+        support::child(&self.syntax)
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RefExpr {}
+impl RefExpr {
+    pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+    pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ReturnExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for ReturnExpr {}
+impl ReturnExpr {
+    pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TryExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for TryExpr {}
+impl TryExpr {
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for TupleExpr {}
+impl TupleExpr {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhileExpr {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for WhileExpr {}
+impl ast::LoopBodyOwner for WhileExpr {}
+impl WhileExpr {
+    pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
+    pub fn condition(&self) -> Option<Condition> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Label {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Label {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprFieldList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RecordExprFieldList {}
+impl RecordExprFieldList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) }
+    pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+    pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprField {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RecordExprField {}
+impl RecordExprField {
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArgList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ArgList {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Condition {
+    pub(crate) syntax: SyntaxNode,
+}
+impl Condition {
+    pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArmList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for MatchArmList {}
+impl MatchArmList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArm {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for MatchArm {}
+impl MatchArm {
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) }
+    pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchGuard {
+    pub(crate) syntax: SyntaxNode,
+}
+impl MatchGuard {
+    pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ArrayType {
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DynTraitType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl DynTraitType {
+    pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
+    pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnPtrType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl FnPtrType {
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+    pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+    pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ForType {
+    pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+    pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplTraitType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ImplTraitType {
+    pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+    pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct InferType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl InferType {
+    pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NeverType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl NeverType {
+    pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ParenType {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PtrType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl PtrType {
+    pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RefType {
+    pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SliceType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl SliceType {
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleType {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TupleType {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBound {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TypeBound {
+    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
+        support::token(&self.syntax, T![lifetime])
+    }
+    pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IdentPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for IdentPat {}
+impl ast::NameOwner for IdentPat {}
+impl IdentPat {
+    pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl BoxPat {
+    pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RestPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RestPat {
+    pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LiteralPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl LiteralPat {
+    pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl MacroPat {
+    pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct OrPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl OrPat {
+    pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ParenPat {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl PathPat {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WildcardPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl WildcardPat {
+    pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangePat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RangePat {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RecordPat {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> {
+        support::child(&self.syntax)
+    }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RefPat {
+    pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SlicePat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl SlicePat {
+    pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+    pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+    pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TuplePat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TuplePat {
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleStructPat {
+    pub(crate) syntax: SyntaxNode,
+}
+impl TupleStructPat {
+    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+    pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatFieldList {
+    pub(crate) syntax: SyntaxNode,
+}
+impl RecordPatFieldList {
+    pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+    pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) }
+    pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+    pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatField {
+    pub(crate) syntax: SyntaxNode,
+}
+impl ast::AttrsOwner for RecordPatField {}
+impl RecordPatField {
+    pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+    pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+    TypeArg(TypeArg),
+    AssocTypeArg(AssocTypeArg),
+    LifetimeArg(LifetimeArg),
+    ConstArg(ConstArg),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Type {
+    ArrayType(ArrayType),
+    DynTraitType(DynTraitType),
+    FnPtrType(FnPtrType),
+    ForType(ForType),
+    ImplTraitType(ImplTraitType),
+    InferType(InferType),
+    NeverType(NeverType),
+    ParenType(ParenType),
+    PathType(PathType),
+    PtrType(PtrType),
+    RefType(RefType),
+    SliceType(SliceType),
+    TupleType(TupleType),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+    ArrayExpr(ArrayExpr),
+    AwaitExpr(AwaitExpr),
+    BinExpr(BinExpr),
+    BlockExpr(BlockExpr),
+    BoxExpr(BoxExpr),
+    BreakExpr(BreakExpr),
+    CallExpr(CallExpr),
+    CastExpr(CastExpr),
+    ClosureExpr(ClosureExpr),
+    ContinueExpr(ContinueExpr),
+    EffectExpr(EffectExpr),
+    FieldExpr(FieldExpr),
+    ForExpr(ForExpr),
+    IfExpr(IfExpr),
+    IndexExpr(IndexExpr),
+    Literal(Literal),
+    LoopExpr(LoopExpr),
+    MacroCall(MacroCall),
+    MatchExpr(MatchExpr),
+    MethodCallExpr(MethodCallExpr),
+    ParenExpr(ParenExpr),
+    PathExpr(PathExpr),
+    PrefixExpr(PrefixExpr),
+    RangeExpr(RangeExpr),
+    RecordExpr(RecordExpr),
+    RefExpr(RefExpr),
+    ReturnExpr(ReturnExpr),
+    TryExpr(TryExpr),
+    TupleExpr(TupleExpr),
+    WhileExpr(WhileExpr),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Item {
+    Const(Const),
+    Enum(Enum),
+    ExternBlock(ExternBlock),
+    ExternCrate(ExternCrate),
+    Fn(Fn),
+    Impl(Impl),
+    MacroCall(MacroCall),
+    Module(Module),
+    Static(Static),
+    Struct(Struct),
+    Trait(Trait),
+    TypeAlias(TypeAlias),
+    Union(Union),
+    Use(Use),
+}
+impl ast::AttrsOwner for Item {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Stmt {
+    ExprStmt(ExprStmt),
+    Item(Item),
+    LetStmt(LetStmt),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Pat {
+    IdentPat(IdentPat),
+    BoxPat(BoxPat),
+    RestPat(RestPat),
+    LiteralPat(LiteralPat),
+    MacroPat(MacroPat),
+    OrPat(OrPat),
+    ParenPat(ParenPat),
+    PathPat(PathPat),
+    WildcardPat(WildcardPat),
+    RangePat(RangePat),
+    RecordPat(RecordPat),
+    RefPat(RefPat),
+    SlicePat(SlicePat),
+    TuplePat(TuplePat),
+    TupleStructPat(TupleStructPat),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldList {
+    RecordFieldList(RecordFieldList),
+    TupleFieldList(TupleFieldList),
+}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AdtDef {
+    Enum(Enum),
+    Struct(Struct),
+    Union(Union),
+}
+impl ast::AttrsOwner for AdtDef {}
+impl ast::GenericParamsOwner for AdtDef {}
+impl ast::NameOwner for AdtDef {}
+impl ast::VisibilityOwner for AdtDef {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+    Const(Const),
+    Fn(Fn),
+    MacroCall(MacroCall),
+    TypeAlias(TypeAlias),
+}
+impl ast::AttrsOwner for AssocItem {}
+impl ast::NameOwner for AssocItem {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ExternItem {
+    Fn(Fn),
+    MacroCall(MacroCall),
+    Static(Static),
+}
+impl ast::AttrsOwner for ExternItem {}
+impl ast::NameOwner for ExternItem {}
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+    ConstParam(ConstParam),
+    LifetimeParam(LifetimeParam),
+    TypeParam(TypeParam),
+}
+impl ast::AttrsOwner for GenericParam {}
+impl AstNode for Name {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NameRef {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Path {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathSegment {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericArgList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParamList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RetType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeArg {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocTypeArg {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeArg {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstArg {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBoundList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroCall {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Attr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TokenTree {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroItems {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroStmts {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SourceFile {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Const {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Enum {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternBlock {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternCrate {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Fn {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Impl {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Module {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Static {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Struct {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Trait {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeAlias {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Union {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Use {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Visibility {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ItemList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Rename {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTree {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTreeList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Abi {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericParamList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhereClause {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BlockExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SelfParam {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Param {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordFieldList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleFieldList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordField {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleField {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for VariantList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Variant {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocItemList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternItemList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstParam {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeParam {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeParam {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WherePred {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Literal {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExprStmt {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetStmt {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AwaitExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BinExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BreakExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CallExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CastExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ClosureExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ContinueExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for EffectExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == EFFECT_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FieldExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IfExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IndexExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LoopExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MethodCallExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PrefixExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangeExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ReturnExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TryExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhileExpr {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Label {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprFieldList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprField {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArgList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Condition {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == CONDITION }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArmList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArm {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchGuard {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for DynTraitType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FnPtrType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ImplTraitType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for InferType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NeverType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PtrType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SliceType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleType {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBound {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IdentPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RestPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LiteralPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for OrPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WildcardPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SlicePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TuplePat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleStructPat {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatFieldList {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatField {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl From<TypeArg> for GenericArg {
+    fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+    fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+    fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+    fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+            ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+            LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+            CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            GenericArg::TypeArg(it) => &it.syntax,
+            GenericArg::AssocTypeArg(it) => &it.syntax,
+            GenericArg::LifetimeArg(it) => &it.syntax,
+            GenericArg::ConstArg(it) => &it.syntax,
+        }
+    }
+}
+impl From<ArrayType> for Type {
+    fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
+}
+impl From<DynTraitType> for Type {
+    fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) }
+}
+impl From<FnPtrType> for Type {
+    fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) }
+}
+impl From<ForType> for Type {
+    fn from(node: ForType) -> Type { Type::ForType(node) }
+}
+impl From<ImplTraitType> for Type {
+    fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) }
+}
+impl From<InferType> for Type {
+    fn from(node: InferType) -> Type { Type::InferType(node) }
+}
+impl From<NeverType> for Type {
+    fn from(node: NeverType) -> Type { Type::NeverType(node) }
+}
+impl From<ParenType> for Type {
+    fn from(node: ParenType) -> Type { Type::ParenType(node) }
+}
+impl From<PathType> for Type {
+    fn from(node: PathType) -> Type { Type::PathType(node) }
+}
+impl From<PtrType> for Type {
+    fn from(node: PtrType) -> Type { Type::PtrType(node) }
+}
+impl From<RefType> for Type {
+    fn from(node: RefType) -> Type { Type::RefType(node) }
+}
+impl From<SliceType> for Type {
+    fn from(node: SliceType) -> Type { Type::SliceType(node) }
+}
+impl From<TupleType> for Type {
+    fn from(node: TupleType) -> Type { Type::TupleType(node) }
+}
+impl AstNode for Type {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            ARRAY_TYPE | DYN_TRAIT_TYPE | FN_PTR_TYPE | FOR_TYPE | IMPL_TRAIT_TYPE | INFER_TYPE
+            | NEVER_TYPE | PAREN_TYPE | PATH_TYPE | PTR_TYPE | REF_TYPE | SLICE_TYPE
+            | TUPLE_TYPE => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }),
+            DYN_TRAIT_TYPE => Type::DynTraitType(DynTraitType { syntax }),
+            FN_PTR_TYPE => Type::FnPtrType(FnPtrType { syntax }),
+            FOR_TYPE => Type::ForType(ForType { syntax }),
+            IMPL_TRAIT_TYPE => Type::ImplTraitType(ImplTraitType { syntax }),
+            INFER_TYPE => Type::InferType(InferType { syntax }),
+            NEVER_TYPE => Type::NeverType(NeverType { syntax }),
+            PAREN_TYPE => Type::ParenType(ParenType { syntax }),
+            PATH_TYPE => Type::PathType(PathType { syntax }),
+            PTR_TYPE => Type::PtrType(PtrType { syntax }),
+            REF_TYPE => Type::RefType(RefType { syntax }),
+            SLICE_TYPE => Type::SliceType(SliceType { syntax }),
+            TUPLE_TYPE => Type::TupleType(TupleType { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Type::ArrayType(it) => &it.syntax,
+            Type::DynTraitType(it) => &it.syntax,
+            Type::FnPtrType(it) => &it.syntax,
+            Type::ForType(it) => &it.syntax,
+            Type::ImplTraitType(it) => &it.syntax,
+            Type::InferType(it) => &it.syntax,
+            Type::NeverType(it) => &it.syntax,
+            Type::ParenType(it) => &it.syntax,
+            Type::PathType(it) => &it.syntax,
+            Type::PtrType(it) => &it.syntax,
+            Type::RefType(it) => &it.syntax,
+            Type::SliceType(it) => &it.syntax,
+            Type::TupleType(it) => &it.syntax,
+        }
+    }
+}
+impl From<ArrayExpr> for Expr {
+    fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
+}
+impl From<AwaitExpr> for Expr {
+    fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) }
+}
+impl From<BinExpr> for Expr {
+    fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) }
+}
+impl From<BlockExpr> for Expr {
+    fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) }
+}
+impl From<BoxExpr> for Expr {
+    fn from(node: BoxExpr) -> Expr { Expr::BoxExpr(node) }
+}
+impl From<BreakExpr> for Expr {
+    fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) }
+}
+impl From<CallExpr> for Expr {
+    fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) }
+}
+impl From<CastExpr> for Expr {
+    fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
+}
+impl From<ClosureExpr> for Expr {
+    fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) }
+}
+impl From<ContinueExpr> for Expr {
+    fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) }
+}
+impl From<EffectExpr> for Expr {
+    fn from(node: EffectExpr) -> Expr { Expr::EffectExpr(node) }
+}
+impl From<FieldExpr> for Expr {
+    fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) }
+}
+impl From<ForExpr> for Expr {
+    fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) }
+}
+impl From<IfExpr> for Expr {
+    fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) }
+}
+impl From<IndexExpr> for Expr {
+    fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) }
+}
+impl From<Literal> for Expr {
+    fn from(node: Literal) -> Expr { Expr::Literal(node) }
+}
+impl From<LoopExpr> for Expr {
+    fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) }
+}
+impl From<MacroCall> for Expr {
+    fn from(node: MacroCall) -> Expr { Expr::MacroCall(node) }
+}
+impl From<MatchExpr> for Expr {
+    fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) }
+}
+impl From<MethodCallExpr> for Expr {
+    fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) }
+}
+impl From<ParenExpr> for Expr {
+    fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) }
+}
+impl From<PathExpr> for Expr {
+    fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) }
+}
+impl From<PrefixExpr> for Expr {
+    fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) }
+}
+impl From<RangeExpr> for Expr {
+    fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) }
+}
+impl From<RecordExpr> for Expr {
+    fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) }
+}
+impl From<RefExpr> for Expr {
+    fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) }
+}
+impl From<ReturnExpr> for Expr {
+    fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) }
+}
+impl From<TryExpr> for Expr {
+    fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
+}
+impl From<TupleExpr> for Expr {
+    fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) }
+}
+impl From<WhileExpr> for Expr {
+    fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) }
+}
+impl AstNode for Expr {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BLOCK_EXPR | BOX_EXPR | BREAK_EXPR | CALL_EXPR
+            | CAST_EXPR | CLOSURE_EXPR | CONTINUE_EXPR | EFFECT_EXPR | FIELD_EXPR | FOR_EXPR
+            | IF_EXPR | INDEX_EXPR | LITERAL | LOOP_EXPR | MACRO_CALL | MATCH_EXPR
+            | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR | PREFIX_EXPR | RANGE_EXPR
+            | RECORD_EXPR | REF_EXPR | RETURN_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }),
+            AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
+            BIN_EXPR => Expr::BinExpr(BinExpr { syntax }),
+            BLOCK_EXPR => Expr::BlockExpr(BlockExpr { syntax }),
+            BOX_EXPR => Expr::BoxExpr(BoxExpr { syntax }),
+            BREAK_EXPR => Expr::BreakExpr(BreakExpr { syntax }),
+            CALL_EXPR => Expr::CallExpr(CallExpr { syntax }),
+            CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
+            CLOSURE_EXPR => Expr::ClosureExpr(ClosureExpr { syntax }),
+            CONTINUE_EXPR => Expr::ContinueExpr(ContinueExpr { syntax }),
+            EFFECT_EXPR => Expr::EffectExpr(EffectExpr { syntax }),
+            FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
+            FOR_EXPR => Expr::ForExpr(ForExpr { syntax }),
+            IF_EXPR => Expr::IfExpr(IfExpr { syntax }),
+            INDEX_EXPR => Expr::IndexExpr(IndexExpr { syntax }),
+            LITERAL => Expr::Literal(Literal { syntax }),
+            LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }),
+            MACRO_CALL => Expr::MacroCall(MacroCall { syntax }),
+            MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }),
+            METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }),
+            PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }),
+            PATH_EXPR => Expr::PathExpr(PathExpr { syntax }),
+            PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
+            RANGE_EXPR => Expr::RangeExpr(RangeExpr { syntax }),
+            RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }),
+            REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
+            RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }),
+            TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+            TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
+            WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Expr::ArrayExpr(it) => &it.syntax,
+            Expr::AwaitExpr(it) => &it.syntax,
+            Expr::BinExpr(it) => &it.syntax,
+            Expr::BlockExpr(it) => &it.syntax,
+            Expr::BoxExpr(it) => &it.syntax,
+            Expr::BreakExpr(it) => &it.syntax,
+            Expr::CallExpr(it) => &it.syntax,
+            Expr::CastExpr(it) => &it.syntax,
+            Expr::ClosureExpr(it) => &it.syntax,
+            Expr::ContinueExpr(it) => &it.syntax,
+            Expr::EffectExpr(it) => &it.syntax,
+            Expr::FieldExpr(it) => &it.syntax,
+            Expr::ForExpr(it) => &it.syntax,
+            Expr::IfExpr(it) => &it.syntax,
+            Expr::IndexExpr(it) => &it.syntax,
+            Expr::Literal(it) => &it.syntax,
+            Expr::LoopExpr(it) => &it.syntax,
+            Expr::MacroCall(it) => &it.syntax,
+            Expr::MatchExpr(it) => &it.syntax,
+            Expr::MethodCallExpr(it) => &it.syntax,
+            Expr::ParenExpr(it) => &it.syntax,
+            Expr::PathExpr(it) => &it.syntax,
+            Expr::PrefixExpr(it) => &it.syntax,
+            Expr::RangeExpr(it) => &it.syntax,
+            Expr::RecordExpr(it) => &it.syntax,
+            Expr::RefExpr(it) => &it.syntax,
+            Expr::ReturnExpr(it) => &it.syntax,
+            Expr::TryExpr(it) => &it.syntax,
+            Expr::TupleExpr(it) => &it.syntax,
+            Expr::WhileExpr(it) => &it.syntax,
+        }
+    }
+}
+impl From<Const> for Item {
+    fn from(node: Const) -> Item { Item::Const(node) }
+}
+impl From<Enum> for Item {
+    fn from(node: Enum) -> Item { Item::Enum(node) }
+}
+impl From<ExternBlock> for Item {
+    fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) }
+}
+impl From<ExternCrate> for Item {
+    fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) }
+}
+impl From<Fn> for Item {
+    fn from(node: Fn) -> Item { Item::Fn(node) }
+}
+impl From<Impl> for Item {
+    fn from(node: Impl) -> Item { Item::Impl(node) }
+}
+impl From<MacroCall> for Item {
+    fn from(node: MacroCall) -> Item { Item::MacroCall(node) }
+}
+impl From<Module> for Item {
+    fn from(node: Module) -> Item { Item::Module(node) }
+}
+impl From<Static> for Item {
+    fn from(node: Static) -> Item { Item::Static(node) }
+}
+impl From<Struct> for Item {
+    fn from(node: Struct) -> Item { Item::Struct(node) }
+}
+impl From<Trait> for Item {
+    fn from(node: Trait) -> Item { Item::Trait(node) }
+}
+impl From<TypeAlias> for Item {
+    fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
+}
+impl From<Union> for Item {
+    fn from(node: Union) -> Item { Item::Union(node) }
+}
+impl From<Use> for Item {
+    fn from(node: Use) -> Item { Item::Use(node) }
+}
+impl AstNode for Item {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE | FN | IMPL | MACRO_CALL | MODULE
+            | STATIC | STRUCT | TRAIT | TYPE_ALIAS | UNION | USE => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            CONST => Item::Const(Const { syntax }),
+            ENUM => Item::Enum(Enum { syntax }),
+            EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
+            EXTERN_CRATE => Item::ExternCrate(ExternCrate { syntax }),
+            FN => Item::Fn(Fn { syntax }),
+            IMPL => Item::Impl(Impl { syntax }),
+            MACRO_CALL => Item::MacroCall(MacroCall { syntax }),
+            MODULE => Item::Module(Module { syntax }),
+            STATIC => Item::Static(Static { syntax }),
+            STRUCT => Item::Struct(Struct { syntax }),
+            TRAIT => Item::Trait(Trait { syntax }),
+            TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
+            UNION => Item::Union(Union { syntax }),
+            USE => Item::Use(Use { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Item::Const(it) => &it.syntax,
+            Item::Enum(it) => &it.syntax,
+            Item::ExternBlock(it) => &it.syntax,
+            Item::ExternCrate(it) => &it.syntax,
+            Item::Fn(it) => &it.syntax,
+            Item::Impl(it) => &it.syntax,
+            Item::MacroCall(it) => &it.syntax,
+            Item::Module(it) => &it.syntax,
+            Item::Static(it) => &it.syntax,
+            Item::Struct(it) => &it.syntax,
+            Item::Trait(it) => &it.syntax,
+            Item::TypeAlias(it) => &it.syntax,
+            Item::Union(it) => &it.syntax,
+            Item::Use(it) => &it.syntax,
+        }
+    }
+}
+impl From<ExprStmt> for Stmt {
+    fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) }
+}
+impl From<Item> for Stmt {
+    fn from(node: Item) -> Stmt { Stmt::Item(node) }
+}
+impl From<LetStmt> for Stmt {
+    fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) }
+}
+impl From<IdentPat> for Pat {
+    fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) }
+}
+impl From<BoxPat> for Pat {
+    fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) }
+}
+impl From<RestPat> for Pat {
+    fn from(node: RestPat) -> Pat { Pat::RestPat(node) }
+}
+impl From<LiteralPat> for Pat {
+    fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) }
+}
+impl From<MacroPat> for Pat {
+    fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) }
+}
+impl From<OrPat> for Pat {
+    fn from(node: OrPat) -> Pat { Pat::OrPat(node) }
+}
+impl From<ParenPat> for Pat {
+    fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) }
+}
+impl From<PathPat> for Pat {
+    fn from(node: PathPat) -> Pat { Pat::PathPat(node) }
+}
+impl From<WildcardPat> for Pat {
+    fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) }
+}
+impl From<RangePat> for Pat {
+    fn from(node: RangePat) -> Pat { Pat::RangePat(node) }
+}
+impl From<RecordPat> for Pat {
+    fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) }
+}
+impl From<RefPat> for Pat {
+    fn from(node: RefPat) -> Pat { Pat::RefPat(node) }
+}
+impl From<SlicePat> for Pat {
+    fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) }
+}
+impl From<TuplePat> for Pat {
+    fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) }
+}
+impl From<TupleStructPat> for Pat {
+    fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) }
+}
+impl AstNode for Pat {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            IDENT_PAT | BOX_PAT | REST_PAT | LITERAL_PAT | MACRO_PAT | OR_PAT | PAREN_PAT
+            | PATH_PAT | WILDCARD_PAT | RANGE_PAT | RECORD_PAT | REF_PAT | SLICE_PAT
+            | TUPLE_PAT | TUPLE_STRUCT_PAT => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            IDENT_PAT => Pat::IdentPat(IdentPat { syntax }),
+            BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
+            REST_PAT => Pat::RestPat(RestPat { syntax }),
+            LITERAL_PAT => Pat::LiteralPat(LiteralPat { syntax }),
+            MACRO_PAT => Pat::MacroPat(MacroPat { syntax }),
+            OR_PAT => Pat::OrPat(OrPat { syntax }),
+            PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
+            PATH_PAT => Pat::PathPat(PathPat { syntax }),
+            WILDCARD_PAT => Pat::WildcardPat(WildcardPat { syntax }),
+            RANGE_PAT => Pat::RangePat(RangePat { syntax }),
+            RECORD_PAT => Pat::RecordPat(RecordPat { syntax }),
+            REF_PAT => Pat::RefPat(RefPat { syntax }),
+            SLICE_PAT => Pat::SlicePat(SlicePat { syntax }),
+            TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }),
+            TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            Pat::IdentPat(it) => &it.syntax,
+            Pat::BoxPat(it) => &it.syntax,
+            Pat::RestPat(it) => &it.syntax,
+            Pat::LiteralPat(it) => &it.syntax,
+            Pat::MacroPat(it) => &it.syntax,
+            Pat::OrPat(it) => &it.syntax,
+            Pat::ParenPat(it) => &it.syntax,
+            Pat::PathPat(it) => &it.syntax,
+            Pat::WildcardPat(it) => &it.syntax,
+            Pat::RangePat(it) => &it.syntax,
+            Pat::RecordPat(it) => &it.syntax,
+            Pat::RefPat(it) => &it.syntax,
+            Pat::SlicePat(it) => &it.syntax,
+            Pat::TuplePat(it) => &it.syntax,
+            Pat::TupleStructPat(it) => &it.syntax,
+        }
+    }
+}
+impl From<RecordFieldList> for FieldList {
+    fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) }
+}
+impl From<TupleFieldList> for FieldList {
+    fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) }
+}
+impl AstNode for FieldList {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            RECORD_FIELD_LIST | TUPLE_FIELD_LIST => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }),
+            TUPLE_FIELD_LIST => FieldList::TupleFieldList(TupleFieldList { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            FieldList::RecordFieldList(it) => &it.syntax,
+            FieldList::TupleFieldList(it) => &it.syntax,
+        }
+    }
+}
+impl From<Enum> for AdtDef {
+    fn from(node: Enum) -> AdtDef { AdtDef::Enum(node) }
+}
+impl From<Struct> for AdtDef {
+    fn from(node: Struct) -> AdtDef { AdtDef::Struct(node) }
+}
+impl From<Union> for AdtDef {
+    fn from(node: Union) -> AdtDef { AdtDef::Union(node) }
+}
+impl AstNode for AdtDef {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            ENUM | STRUCT | UNION => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            ENUM => AdtDef::Enum(Enum { syntax }),
+            STRUCT => AdtDef::Struct(Struct { syntax }),
+            UNION => AdtDef::Union(Union { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            AdtDef::Enum(it) => &it.syntax,
+            AdtDef::Struct(it) => &it.syntax,
+            AdtDef::Union(it) => &it.syntax,
+        }
+    }
+}
+impl From<Const> for AssocItem {
+    fn from(node: Const) -> AssocItem { AssocItem::Const(node) }
+}
+impl From<Fn> for AssocItem {
+    fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) }
+}
+impl From<MacroCall> for AssocItem {
+    fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) }
+}
+impl From<TypeAlias> for AssocItem {
+    fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) }
+}
+impl AstNode for AssocItem {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            CONST | FN | MACRO_CALL | TYPE_ALIAS => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            CONST => AssocItem::Const(Const { syntax }),
+            FN => AssocItem::Fn(Fn { syntax }),
+            MACRO_CALL => AssocItem::MacroCall(MacroCall { syntax }),
+            TYPE_ALIAS => AssocItem::TypeAlias(TypeAlias { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            AssocItem::Const(it) => &it.syntax,
+            AssocItem::Fn(it) => &it.syntax,
+            AssocItem::MacroCall(it) => &it.syntax,
+            AssocItem::TypeAlias(it) => &it.syntax,
+        }
+    }
+}
+impl From<Fn> for ExternItem {
+    fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) }
+}
+impl From<MacroCall> for ExternItem {
+    fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) }
+}
+impl From<Static> for ExternItem {
+    fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
+}
+impl AstNode for ExternItem {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            FN | MACRO_CALL | STATIC => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            FN => ExternItem::Fn(Fn { syntax }),
+            MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
+            STATIC => ExternItem::Static(Static { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            ExternItem::Fn(it) => &it.syntax,
+            ExternItem::MacroCall(it) => &it.syntax,
+            ExternItem::Static(it) => &it.syntax,
+        }
+    }
+}
+impl From<ConstParam> for GenericParam {
+    fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) }
+}
+impl From<LifetimeParam> for GenericParam {
+    fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) }
+}
+impl From<TypeParam> for GenericParam {
+    fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) }
+}
+impl AstNode for GenericParam {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        match kind {
+            CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM => true,
+            _ => false,
+        }
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }),
+            LIFETIME_PARAM => GenericParam::LifetimeParam(LifetimeParam { syntax }),
+            TYPE_PARAM => GenericParam::TypeParam(TypeParam { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            GenericParam::ConstParam(it) => &it.syntax,
+            GenericParam::LifetimeParam(it) => &it.syntax,
+            GenericParam::TypeParam(it) => &it.syntax,
+        }
+    }
+}
+impl std::fmt::Display for GenericArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Type {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Expr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Item {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Stmt {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Pat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for FieldList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for AdtDef {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for AssocItem {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ExternItem {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for GenericParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Name {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for NameRef {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Path {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PathSegment {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for GenericArgList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ParamList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RetType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PathType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TypeArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for AssocTypeArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for LifetimeArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ConstArg {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TypeBoundList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MacroCall {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Attr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TokenTree {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MacroItems {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MacroStmts {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for SourceFile {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Const {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Enum {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ExternBlock {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ExternCrate {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Fn {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Impl {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Module {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Static {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Struct {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Trait {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TypeAlias {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Union {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Use {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Visibility {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ItemList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Rename {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for UseTree {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for UseTreeList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Abi {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for GenericParamList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for WhereClause {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for BlockExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for SelfParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Param {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordFieldList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TupleFieldList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordField {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TupleField {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for VariantList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Variant {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for AssocItemList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ExternItemList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ConstParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for LifetimeParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TypeParam {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for WherePred {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Literal {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ExprStmt {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for LetStmt {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ArrayExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for AwaitExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for BinExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for BoxExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for BreakExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for CallExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for CastExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ClosureExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ContinueExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for EffectExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for FieldExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ForExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for IfExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for IndexExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for LoopExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MatchExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MethodCallExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ParenExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PathExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PrefixExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RangeExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RefExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ReturnExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TryExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TupleExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for WhileExpr {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Label {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordExprFieldList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordExprField {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ArgList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for Condition {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MatchArmList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MatchArm {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MatchGuard {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ArrayType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for DynTraitType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for FnPtrType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ForType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ImplTraitType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for InferType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for NeverType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ParenType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PtrType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RefType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for SliceType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TupleType {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TypeBound {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for IdentPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for BoxPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RestPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for LiteralPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for MacroPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for OrPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for ParenPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for PathPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for WildcardPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RangePat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RefPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for SlicePat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TuplePat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for TupleStructPat {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordPatFieldList {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
+impl std::fmt::Display for RecordPatField {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(self.syntax(), f)
+    }
+}
diff --git a/crates/syntax/src/ast/generated/tokens.rs b/crates/syntax/src/ast/generated/tokens.rs
new file mode 100644
index 00000000000..abadd0b61c6
--- /dev/null
+++ b/crates/syntax/src/ast/generated/tokens.rs
@@ -0,0 +1,91 @@
+//! Generated file, do not edit by hand, see `xtask/src/codegen`
+
+use crate::{
+    ast::AstToken,
+    SyntaxKind::{self, *},
+    SyntaxToken,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Whitespace {
+    pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Whitespace {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.syntax, f)
+    }
+}
+impl AstToken for Whitespace {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == WHITESPACE }
+    fn cast(syntax: SyntaxToken) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Comment {
+    pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for Comment {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.syntax, f)
+    }
+}
+impl AstToken for Comment {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == COMMENT }
+    fn cast(syntax: SyntaxToken) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct String {
+    pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for String {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.syntax, f)
+    }
+}
+impl AstToken for String {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == STRING }
+    fn cast(syntax: SyntaxToken) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RawString {
+    pub(crate) syntax: SyntaxToken,
+}
+impl std::fmt::Display for RawString {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        std::fmt::Display::fmt(&self.syntax, f)
+    }
+}
+impl AstToken for RawString {
+    fn can_cast(kind: SyntaxKind) -> bool { kind == RAW_STRING }
+    fn cast(syntax: SyntaxToken) -> Option<Self> {
+        if Self::can_cast(syntax.kind()) {
+            Some(Self { syntax })
+        } else {
+            None
+        }
+    }
+    fn syntax(&self) -> &SyntaxToken { &self.syntax }
+}
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
new file mode 100644
index 00000000000..254a37fe3e4
--- /dev/null
+++ b/crates/syntax/src/ast/make.rs
@@ -0,0 +1,392 @@
+//! This module contains free-standing functions for creating AST fragments out
+//! of smaller pieces.
+//!
+//! Note that all functions here are intended to be stupid constructors, which
+//! just assemble a finished node from its immediate children. If you want to do
+//! something smarter than that, it probably doesn't belong in this module.
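+//!
+//! A hedged usage sketch (an editor's illustration, not part of the original
+//! docs): every constructor below formats a small source template, parses it,
+//! and extracts the requested node, so the helpers compose like ordinary
+//! functions. The import path is assumed to be `syntax::ast::make`.
+//!
+//! ```no_run
+//! use syntax::ast::{self, make};
+//!
+//! // Build the expression `foo` out of smaller pieces.
+//! let name_ref = make::name_ref("foo");
+//! let path: ast::Path = make::path_unqualified(make::path_segment(name_ref));
+//! let expr: ast::Expr = make::expr_path(path);
+//! assert_eq!(expr.to_string(), "foo");
+//! ```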
+use itertools::Itertools;
+use stdx::format_to;
+
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken};
+
+pub fn name(text: &str) -> ast::Name {
+    ast_from_text(&format!("mod {};", text))
+}
+
+pub fn name_ref(text: &str) -> ast::NameRef {
+    ast_from_text(&format!("fn f() {{ {}; }}", text))
+}
+
+pub fn ty(text: &str) -> ast::Type {
+    ast_from_text(&format!("impl {} for D {{}};", text))
+}
+
+pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
+    ast_from_text(&format!("use {};", name_ref))
+}
+pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
+    path_from_text(&format!("use {}", segment))
+}
+pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
+    path_from_text(&format!("{}::{}", qual, segment))
+}
+pub fn path_from_text(text: &str) -> ast::Path {
+    ast_from_text(text)
+}
+
+pub fn use_tree(
+    path: ast::Path,
+    use_tree_list: Option<ast::UseTreeList>,
+    alias: Option<ast::Rename>,
+    add_star: bool,
+) -> ast::UseTree {
+    let mut buf = "use ".to_string();
+    buf += &path.syntax().to_string();
+    if let Some(use_tree_list) = use_tree_list {
+        format_to!(buf, "::{}", use_tree_list);
+    }
+    if add_star {
+        buf += "::*";
+    }
+
+    if let Some(alias) = alias {
+        format_to!(buf, " {}", alias);
+    }
+    ast_from_text(&buf)
+}
+
+pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
+    let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
+    ast_from_text(&format!("use {{{}}};", use_trees))
+}
+
+pub fn use_(use_tree: ast::UseTree) -> ast::Use {
+    ast_from_text(&format!("use {};", use_tree))
+}
+
+pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
+    return match expr {
+        Some(expr) => from_text(&format!("{}: {}", name, expr)),
+        None => from_text(&name.to_string()),
+    };
+
+    fn from_text(text: &str) -> ast::RecordExprField {
+        ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+    }
+}
+
+pub fn record_field(name: ast::NameRef, ty: ast::Type) -> ast::RecordField {
+    ast_from_text(&format!("struct S {{ {}: {}, }}", name, ty))
+}
+
+pub fn block_expr(
+    stmts: impl IntoIterator<Item = ast::Stmt>,
+    tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+    let mut buf = "{\n".to_string();
+    for stmt in stmts.into_iter() {
+        format_to!(buf, "    {}\n", stmt);
+    }
+    if let Some(tail_expr) = tail_expr {
+        format_to!(buf, "    {}\n", tail_expr)
+    }
+    buf += "}";
+    ast_from_text(&format!("fn f() {}", buf))
+}
+
+pub fn expr_unit() -> ast::Expr {
+    expr_from_text("()")
+}
+pub fn expr_empty_block() -> ast::Expr {
+    expr_from_text("{}")
+}
+pub fn expr_unimplemented() -> ast::Expr {
+    expr_from_text("unimplemented!()")
+}
+pub fn expr_unreachable() -> ast::Expr {
+    expr_from_text("unreachable!()")
+}
+pub fn expr_todo() -> ast::Expr {
+    expr_from_text("todo!()")
+}
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+    expr_from_text(&path.to_string())
+}
+pub fn expr_continue() -> ast::Expr {
+    expr_from_text("continue")
+}
+pub fn expr_break() -> ast::Expr {
+    expr_from_text("break")
+}
+pub fn expr_return() -> ast::Expr {
+    expr_from_text("return")
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+    expr_from_text(&format!("match {} {}", expr, match_arm_list))
+}
+pub fn expr_if(condition: ast::Condition, then_branch: ast::BlockExpr) -> ast::Expr {
+    expr_from_text(&format!("if {} {}", condition, then_branch))
+}
+pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
+    let token = token(op);
+    expr_from_text(&format!("{}{}", token, expr))
+}
+fn expr_from_text(text: &str) -> ast::Expr {
+    ast_from_text(&format!("const C: () = {};", text))
+}
+
+pub fn try_expr_from_text(text: &str) -> Option<ast::Expr> {
+    try_ast_from_text(&format!("const C: () = {};", text))
+}
+
+pub fn condition(expr: ast::Expr, pattern: Option<ast::Pat>) -> ast::Condition {
+    match pattern {
+        None => ast_from_text(&format!("const _: () = while {} {{}};", expr)),
+        Some(pattern) => {
+            ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
+        }
+    }
+}
+
+pub fn ident_pat(name: ast::Name) -> ast::IdentPat {
+    return from_text(name.text());
+
+    fn from_text(text: &str) -> ast::IdentPat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+pub fn wildcard_pat() -> ast::WildcardPat {
+    return from_text("_");
+
+    fn from_text(text: &str) -> ast::WildcardPat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+/// Creates a tuple of patterns from an iterator of patterns.
+///
+/// Invariant: `pats` must have length > 1
+///
+/// FIXME handle `pats` length == 1
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+    let pats_str = pats.into_iter().map(|p| p.to_string()).join(", ");
+    return from_text(&format!("({})", pats_str));
+
+    fn from_text(text: &str) -> ast::TuplePat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+pub fn tuple_struct_pat(
+    path: ast::Path,
+    pats: impl IntoIterator<Item = ast::Pat>,
+) -> ast::TupleStructPat {
+    let pats_str = pats.into_iter().join(", ");
+    return from_text(&format!("{}({})", path, pats_str));
+
+    fn from_text(text: &str) -> ast::TupleStructPat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+    let pats_str = pats.into_iter().join(", ");
+    return from_text(&format!("{} {{ {} }}", path, pats_str));
+
+    fn from_text(text: &str) -> ast::RecordPat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+/// Returns an `IdentPat` if the path has just one segment, a `PathPat` otherwise.
+pub fn path_pat(path: ast::Path) -> ast::Pat {
+    return from_text(&path.to_string());
+    fn from_text(text: &str) -> ast::Pat {
+        ast_from_text(&format!("fn f({}: ())", text))
+    }
+}
+
+pub fn match_arm(pats: impl IntoIterator<Item = ast::Pat>, expr: ast::Expr) -> ast::MatchArm {
+    let pats_str = pats.into_iter().join(" | ");
+    return from_text(&format!("{} => {}", pats_str, expr));
+
+    fn from_text(text: &str) -> ast::MatchArm {
+        ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+    }
+}
+
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+    let arms_str = arms
+        .into_iter()
+        .map(|arm| {
+            let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+            let comma = if needs_comma { "," } else { "" };
+            format!("    {}{}\n", arm.syntax(), comma)
+        })
+        .collect::<String>();
+    return from_text(&arms_str);
+
+    fn from_text(text: &str) -> ast::MatchArmList {
+        ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
+    }
+}
+
+pub fn where_pred(
+    path: ast::Path,
+    bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+    let bounds = bounds.into_iter().join(" + ");
+    return from_text(&format!("{}: {}", path, bounds));
+
+    fn from_text(text: &str) -> ast::WherePred {
+        ast_from_text(&format!("fn f() where {} {{ }}", text))
+    }
+}
+
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+    let preds = preds.into_iter().join(", ");
+    return from_text(preds.as_str());
+
+    fn from_text(text: &str) -> ast::WhereClause {
+        ast_from_text(&format!("fn f() where {} {{ }}", text))
+    }
+}
+
+pub fn let_stmt(pattern: ast::Pat, initializer: Option<ast::Expr>) -> ast::LetStmt {
+    let text = match initializer {
+        Some(it) => format!("let {} = {};", pattern, it),
+        None => format!("let {};", pattern),
+    };
+    ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
+    let semi = if expr.is_block_like() { "" } else { ";" };
+    ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
+}
+
+pub fn token(kind: SyntaxKind) -> SyntaxToken {
+    tokens::SOURCE_FILE
+        .tree()
+        .syntax()
+        .descendants_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find(|it| it.kind() == kind)
+        .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
+}
+
+pub fn param(name: String, ty: String) -> ast::Param {
+    ast_from_text(&format!("fn f({}: {}) {{ }}", name, ty))
+}
+
+pub fn param_list(pats: impl IntoIterator<Item = ast::Param>) -> ast::ParamList {
+    let args = pats.into_iter().join(", ");
+    ast_from_text(&format!("fn f({}) {{ }}", args))
+}
+
+pub fn visibility_pub_crate() -> ast::Visibility {
+    ast_from_text("pub(crate) struct S")
+}
+
+pub fn fn_(
+    visibility: Option<ast::Visibility>,
+    fn_name: ast::Name,
+    type_params: Option<ast::GenericParamList>,
+    params: ast::ParamList,
+    body: ast::BlockExpr,
+) -> ast::Fn {
+    let type_params =
+        if let Some(type_params) = type_params { format!("<{}>", type_params) } else { "".into() };
+    let visibility = match visibility {
+        None => String::new(),
+        Some(it) => format!("{} ", it),
+    };
+    ast_from_text(&format!("{}fn {}{}{} {}", visibility, fn_name, type_params, params, body))
+}
+
+fn ast_from_text<N: AstNode>(text: &str) -> N {
+    let parse = SourceFile::parse(text);
+    let node = match parse.tree().syntax().descendants().find_map(N::cast) {
+        Some(it) => it,
+        None => {
+            panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
+        }
+    };
+    let node = node.syntax().clone();
+    let node = unroot(node);
+    let node = N::cast(node).unwrap();
+    assert_eq!(node.syntax().text_range().start(), 0.into());
+    node
+}
+
+fn try_ast_from_text<N: AstNode>(text: &str) -> Option<N> {
+    let parse = SourceFile::parse(text);
+    let node = parse.tree().syntax().descendants().find_map(N::cast)?;
+    let node = node.syntax().clone();
+    let node = unroot(node);
+    let node = N::cast(node).unwrap();
+    assert_eq!(node.syntax().text_range().start(), 0.into());
+    Some(node)
+}
+
+fn unroot(n: SyntaxNode) -> SyntaxNode {
+    SyntaxNode::new_root(n.green().clone())
+}
+
+pub mod tokens {
+    use once_cell::sync::Lazy;
+
+    use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
+    pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> =
+        Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;"));
+
+    pub fn single_space() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .unwrap()
+    }
+
+    pub fn whitespace(text: &str) -> SyntaxToken {
+        assert!(text.trim().is_empty());
+        let sf = SourceFile::parse(text).ok().unwrap();
+        sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+    }
+
+    pub fn doc_comment(text: &str) -> SyntaxToken {
+        assert!(!text.trim().is_empty());
+        let sf = SourceFile::parse(text).ok().unwrap();
+        sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+    }
+
+    pub fn literal(text: &str) -> SyntaxToken {
+        assert_eq!(text.trim(), text);
+        let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
+        lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
+    }
+
+    pub fn single_newline() -> SyntaxToken {
+        SOURCE_FILE
+            .tree()
+            .syntax()
+            .descendants_with_tokens()
+            .filter_map(|it| it.into_token())
+            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .unwrap()
+    }
+
+    pub struct WsBuilder(SourceFile);
+
+    impl WsBuilder {
+        pub fn new(text: &str) -> WsBuilder {
+            WsBuilder(SourceFile::parse(text).ok().unwrap())
+        }
+        pub fn ws(&self) -> SyntaxToken {
+            self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+        }
+    }
+}
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
new file mode 100644
index 00000000000..50c1c157d87
--- /dev/null
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -0,0 +1,485 @@
+//! Various extension methods for ast nodes that are hard to code-generate.
+//! Extensions for various expressions live in the sibling `expr_ext` module.
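+//!
+//! A hedged usage sketch (an editor's illustration, not part of the original
+//! docs), assuming `SourceFile`, `ast` and `AstNode` are re-exported from the
+//! crate root: find a node in a parsed tree, then call an extension method on it.
+//!
+//! ```no_run
+//! use syntax::{ast, AstNode, SourceFile};
+//!
+//! let file = SourceFile::parse("fn foo() {}").tree();
+//! let name = file.syntax().descendants().find_map(ast::Name::cast).unwrap();
+//! // `Name::text` (defined below) returns the text of the underlying token.
+//! assert_eq!(name.text().as_str(), "foo");
+//! ```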
+
+use std::fmt;
+
+use itertools::Itertools;
+use parser::SyntaxKind;
+
+use crate::{
+    ast::{self, support, AstNode, NameOwner, SyntaxNode},
+    SmolStr, SyntaxElement, SyntaxToken, T,
+};
+
+impl ast::Name {
+    pub fn text(&self) -> &SmolStr {
+        text_of_first_token(self.syntax())
+    }
+}
+
+impl ast::NameRef {
+    pub fn text(&self) -> &SmolStr {
+        text_of_first_token(self.syntax())
+    }
+
+    pub fn as_tuple_field(&self) -> Option<usize> {
+        self.text().parse().ok()
+    }
+}
+
+fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+    node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum AttrKind {
+    Inner,
+    Outer,
+}
+
+impl ast::Attr {
+    pub fn as_simple_atom(&self) -> Option<SmolStr> {
+        if self.eq_token().is_some() || self.token_tree().is_some() {
+            return None;
+        }
+        self.simple_name()
+    }
+
+    pub fn as_simple_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
+        let tt = self.token_tree()?;
+        Some((self.simple_name()?, tt))
+    }
+
+    pub fn as_simple_key_value(&self) -> Option<(SmolStr, SmolStr)> {
+        let lit = self.literal()?;
+        let key = self.simple_name()?;
+        // FIXME: escape? raw string?
+        let value = lit.syntax().first_token()?.text().trim_matches('"').into();
+        Some((key, value))
+    }
+
+    pub fn simple_name(&self) -> Option<SmolStr> {
+        let path = self.path()?;
+        match (path.segment(), path.qualifier()) {
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            _ => None,
+        }
+    }
+
+    pub fn kind(&self) -> AttrKind {
+        let first_token = self.syntax().first_token();
+        let first_token_kind = first_token.as_ref().map(SyntaxToken::kind);
+        let second_token_kind =
+            first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
+
+        match (first_token_kind, second_token_kind) {
+            (Some(SyntaxKind::POUND), Some(T![!])) => AttrKind::Inner,
+            _ => AttrKind::Outer,
+        }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathSegmentKind {
+    Name(ast::NameRef),
+    Type { type_ref: Option<ast::Type>, trait_ref: Option<ast::PathType> },
+    SelfKw,
+    SuperKw,
+    CrateKw,
+}
+
+impl ast::PathSegment {
+    pub fn parent_path(&self) -> ast::Path {
+        self.syntax()
+            .parent()
+            .and_then(ast::Path::cast)
+            .expect("segments are always nested in paths")
+    }
+
+    pub fn kind(&self) -> Option<PathSegmentKind> {
+        let res = if let Some(name_ref) = self.name_ref() {
+            PathSegmentKind::Name(name_ref)
+        } else {
+            match self.syntax().first_child_or_token()?.kind() {
+                T![self] => PathSegmentKind::SelfKw,
+                T![super] => PathSegmentKind::SuperKw,
+                T![crate] => PathSegmentKind::CrateKw,
+                T![<] => {
+                    // <T> or <T as Trait>
+                    // T is any TypeRef, Trait has to be a PathType
+                    let mut type_refs =
+                        self.syntax().children().filter(|node| ast::Type::can_cast(node.kind()));
+                    let type_ref = type_refs.next().and_then(ast::Type::cast);
+                    let trait_ref = type_refs.next().and_then(ast::PathType::cast);
+                    PathSegmentKind::Type { type_ref, trait_ref }
+                }
+                _ => return None,
+            }
+        };
+        Some(res)
+    }
+}
+
+impl ast::Path {
+    pub fn parent_path(&self) -> Option<ast::Path> {
+        self.syntax().parent().and_then(ast::Path::cast)
+    }
+}
+
+impl ast::UseTreeList {
+    pub fn parent_use_tree(&self) -> ast::UseTree {
+        self.syntax()
+            .parent()
+            .and_then(ast::UseTree::cast)
+            .expect("UseTreeLists are always nested in UseTrees")
+    }
+}
+
+impl ast::Impl {
+    pub fn self_ty(&self) -> Option<ast::Type> {
+        match self.target() {
+            (Some(t), None) | (_, Some(t)) => Some(t),
+            _ => None,
+        }
+    }
+
+    pub fn trait_(&self) -> Option<ast::Type> {
+        match self.target() {
+            (Some(t), Some(_)) => Some(t),
+            _ => None,
+        }
+    }
+
+    fn target(&self) -> (Option<ast::Type>, Option<ast::Type>) {
+        let mut types = support::children(self.syntax());
+        let first = types.next();
+        let second = types.next();
+        (first, second)
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum StructKind {
+    Record(ast::RecordFieldList),
+    Tuple(ast::TupleFieldList),
+    Unit,
+}
+
+impl StructKind {
+    fn from_node<N: AstNode>(node: &N) -> StructKind {
+        if let Some(nfdl) = support::child::<ast::RecordFieldList>(node.syntax()) {
+            StructKind::Record(nfdl)
+        } else if let Some(pfl) = support::child::<ast::TupleFieldList>(node.syntax()) {
+            StructKind::Tuple(pfl)
+        } else {
+            StructKind::Unit
+        }
+    }
+}
+
+impl ast::Struct {
+    pub fn kind(&self) -> StructKind {
+        StructKind::from_node(self)
+    }
+}
+
+impl ast::RecordExprField {
+    pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
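+        // In the shorthand form `S { field }` the name ref is wrapped in a path
+        // expression, so the enclosing record field sits a few ancestors up:
+        // NAME_REF -> PATH_SEGMENT -> PATH -> PATH_EXPR -> RECORD_EXPR_FIELD.
+        // `ancestors()` yields the node itself first, hence the `nth(4)` below.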
+        let candidate =
+            field_name.syntax().parent().and_then(ast::RecordExprField::cast).or_else(|| {
+                field_name.syntax().ancestors().nth(4).and_then(ast::RecordExprField::cast)
+            })?;
+        if candidate.field_name().as_ref() == Some(field_name) {
+            Some(candidate)
+        } else {
+            None
+        }
+    }
+
+    /// Deals with field init shorthand
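+    /// For example, in `S { x }` the field name `x` is taken from the shorthand
+    /// initializer expression rather than from an explicit `x:` name ref.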
+    pub fn field_name(&self) -> Option<ast::NameRef> {
+        if let Some(name_ref) = self.name_ref() {
+            return Some(name_ref);
+        }
+        if let Some(ast::Expr::PathExpr(expr)) = self.expr() {
+            let path = expr.path()?;
+            let segment = path.segment()?;
+            let name_ref = segment.name_ref()?;
+            if path.qualifier().is_none() {
+                return Some(name_ref);
+            }
+        }
+        None
+    }
+}
+
+pub enum NameOrNameRef {
+    Name(ast::Name),
+    NameRef(ast::NameRef),
+}
+
+impl fmt::Display for NameOrNameRef {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            NameOrNameRef::Name(it) => fmt::Display::fmt(it, f),
+            NameOrNameRef::NameRef(it) => fmt::Display::fmt(it, f),
+        }
+    }
+}
+
+impl ast::RecordPatField {
+    /// Deals with field init shorthand
+    pub fn field_name(&self) -> Option<NameOrNameRef> {
+        if let Some(name_ref) = self.name_ref() {
+            return Some(NameOrNameRef::NameRef(name_ref));
+        }
+        if let Some(ast::Pat::IdentPat(pat)) = self.pat() {
+            let name = pat.name()?;
+            return Some(NameOrNameRef::Name(name));
+        }
+        None
+    }
+}
+
+impl ast::Variant {
+    pub fn parent_enum(&self) -> ast::Enum {
+        self.syntax()
+            .parent()
+            .and_then(|it| it.parent())
+            .and_then(ast::Enum::cast)
+            .expect("EnumVariants are always nested in Enums")
+    }
+    pub fn kind(&self) -> StructKind {
+        StructKind::from_node(self)
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FieldKind {
+    Name(ast::NameRef),
+    Index(SyntaxToken),
+}
+
+impl ast::FieldExpr {
+    pub fn index_token(&self) -> Option<SyntaxToken> {
+        self.syntax
+            .children_with_tokens()
+            // FIXME: Accepting floats here to reject them in validation later
+            .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
+            .as_ref()
+            .and_then(SyntaxElement::as_token)
+            .cloned()
+    }
+
+    pub fn field_access(&self) -> Option<FieldKind> {
+        if let Some(nr) = self.name_ref() {
+            Some(FieldKind::Name(nr))
+        } else if let Some(tok) = self.index_token() {
+            Some(FieldKind::Index(tok))
+        } else {
+            None
+        }
+    }
+}
+
+pub struct SlicePatComponents {
+    pub prefix: Vec<ast::Pat>,
+    pub slice: Option<ast::Pat>,
+    pub suffix: Vec<ast::Pat>,
+}
+
+impl ast::SlicePat {
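+    /// Splits the slice pattern around the rest pattern, e.g. for
+    /// `[a, b, rest @ .., c]` the prefix is `[a, b]`, the slice is `rest @ ..`,
+    /// and the suffix is `[c]`.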
+    pub fn components(&self) -> SlicePatComponents {
+        let mut args = self.pats().peekable();
+        let prefix = args
+            .peeking_take_while(|p| match p {
+                ast::Pat::RestPat(_) => false,
+                ast::Pat::IdentPat(bp) => match bp.pat() {
+                    Some(ast::Pat::RestPat(_)) => false,
+                    _ => true,
+                },
+                ast::Pat::RefPat(rp) => match rp.pat() {
+                    Some(ast::Pat::RestPat(_)) => false,
+                    Some(ast::Pat::IdentPat(bp)) => match bp.pat() {
+                        Some(ast::Pat::RestPat(_)) => false,
+                        _ => true,
+                    },
+                    _ => true,
+                },
+                _ => true,
+            })
+            .collect();
+        let slice = args.next();
+        let suffix = args.collect();
+
+        SlicePatComponents { prefix, slice, suffix }
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum SelfParamKind {
+    /// self
+    Owned,
+    /// &self
+    Ref,
+    /// &mut self
+    MutRef,
+}
+
+impl ast::SelfParam {
+    pub fn kind(&self) -> SelfParamKind {
+        if self.amp_token().is_some() {
+            if self.mut_token().is_some() {
+                SelfParamKind::MutRef
+            } else {
+                SelfParamKind::Ref
+            }
+        } else {
+            SelfParamKind::Owned
+        }
+    }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+pub enum TypeBoundKind {
+    /// Trait
+    PathType(ast::PathType),
+    /// for<'a> ...
+    ForType(ast::ForType),
+    /// 'a
+    Lifetime(SyntaxToken),
+}
+
+impl ast::TypeBound {
+    pub fn kind(&self) -> TypeBoundKind {
+        if let Some(path_type) = support::children(self.syntax()).next() {
+            TypeBoundKind::PathType(path_type)
+        } else if let Some(for_type) = support::children(self.syntax()).next() {
+            TypeBoundKind::ForType(for_type)
+        } else if let Some(lifetime) = self.lifetime_token() {
+            TypeBoundKind::Lifetime(lifetime)
+        } else {
+            unreachable!()
+        }
+    }
+}
+
+pub enum VisibilityKind {
+    In(ast::Path),
+    PubCrate,
+    PubSuper,
+    PubSelf,
+    Pub,
+}
+
+impl ast::Visibility {
+    pub fn kind(&self) -> VisibilityKind {
+        if let Some(path) = support::children(self.syntax()).next() {
+            VisibilityKind::In(path)
+        } else if self.crate_token().is_some() {
+            VisibilityKind::PubCrate
+        } else if self.super_token().is_some() {
+            VisibilityKind::PubSuper
+        } else if self.self_token().is_some() {
+            VisibilityKind::PubSelf
+        } else {
+            VisibilityKind::Pub
+        }
+    }
+}
+
+impl ast::MacroCall {
+    pub fn is_macro_rules(&self) -> Option<ast::Name> {
+        let name_ref = self.path()?.segment()?.name_ref()?;
+        if name_ref.text() == "macro_rules" {
+            self.name()
+        } else {
+            None
+        }
+    }
+
+    pub fn is_bang(&self) -> bool {
+        self.is_macro_rules().is_none()
+    }
+}
+
+impl ast::LifetimeParam {
+    pub fn lifetime_bounds(&self) -> impl Iterator<Item = SyntaxToken> {
+        self.syntax()
+            .children_with_tokens()
+            .filter_map(|it| it.into_token())
+            .skip_while(|x| x.kind() != T![:])
+            .filter(|it| it.kind() == T![lifetime])
+    }
+}
+
+impl ast::RangePat {
+    pub fn start(&self) -> Option<ast::Pat> {
+        self.syntax()
+            .children_with_tokens()
+            .take_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+            .filter_map(|it| it.into_node())
+            .find_map(ast::Pat::cast)
+    }
+
+    pub fn end(&self) -> Option<ast::Pat> {
+        self.syntax()
+            .children_with_tokens()
+            .skip_while(|it| !(it.kind() == T![..] || it.kind() == T![..=]))
+            .filter_map(|it| it.into_node())
+            .find_map(ast::Pat::cast)
+    }
+}
+
+impl ast::TokenTree {
+    pub fn left_delimiter_token(&self) -> Option<SyntaxToken> {
+        self.syntax()
+            .first_child_or_token()?
+            .into_token()
+            .filter(|it| matches!(it.kind(), T!['{'] | T!['('] | T!['[']))
+    }
+
+    pub fn right_delimiter_token(&self) -> Option<SyntaxToken> {
+        self.syntax()
+            .last_child_or_token()?
+            .into_token()
+            .filter(|it| matches!(it.kind(), T!['}'] | T![')'] | T![']']))
+    }
+}
+
+impl ast::GenericParamList {
+    pub fn lifetime_params(&self) -> impl Iterator<Item = ast::LifetimeParam> {
+        self.generic_params().filter_map(|param| match param {
+            ast::GenericParam::LifetimeParam(it) => Some(it),
+            ast::GenericParam::TypeParam(_) | ast::GenericParam::ConstParam(_) => None,
+        })
+    }
+    pub fn type_params(&self) -> impl Iterator<Item = ast::TypeParam> {
+        self.generic_params().filter_map(|param| match param {
+            ast::GenericParam::TypeParam(it) => Some(it),
+            ast::GenericParam::LifetimeParam(_) | ast::GenericParam::ConstParam(_) => None,
+        })
+    }
+    pub fn const_params(&self) -> impl Iterator<Item = ast::ConstParam> {
+        self.generic_params().filter_map(|param| match param {
+            ast::GenericParam::ConstParam(it) => Some(it),
+            ast::GenericParam::TypeParam(_) | ast::GenericParam::LifetimeParam(_) => None,
+        })
+    }
+}
+
+impl ast::DocCommentsOwner for ast::SourceFile {}
+impl ast::DocCommentsOwner for ast::Fn {}
+impl ast::DocCommentsOwner for ast::Struct {}
+impl ast::DocCommentsOwner for ast::Union {}
+impl ast::DocCommentsOwner for ast::RecordField {}
+impl ast::DocCommentsOwner for ast::TupleField {}
+impl ast::DocCommentsOwner for ast::Enum {}
+impl ast::DocCommentsOwner for ast::Variant {}
+impl ast::DocCommentsOwner for ast::Trait {}
+impl ast::DocCommentsOwner for ast::Module {}
+impl ast::DocCommentsOwner for ast::Static {}
+impl ast::DocCommentsOwner for ast::Const {}
+impl ast::DocCommentsOwner for ast::TypeAlias {}
+impl ast::DocCommentsOwner for ast::Impl {}
+impl ast::DocCommentsOwner for ast::MacroCall {}
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
new file mode 100644
index 00000000000..c5ef92733d8
--- /dev/null
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -0,0 +1,538 @@
+//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+
+use std::{
+    borrow::Cow,
+    convert::{TryFrom, TryInto},
+};
+
+use rustc_lexer::unescape::{unescape_literal, Mode};
+
+use crate::{
+    ast::{AstToken, Comment, RawString, String, Whitespace},
+    TextRange, TextSize,
+};
+
+impl Comment {
+    pub fn kind(&self) -> CommentKind {
+        kind_by_prefix(self.text())
+    }
+
+    pub fn prefix(&self) -> &'static str {
+        for (prefix, k) in COMMENT_PREFIX_TO_KIND.iter() {
+            if *k == self.kind() && self.text().starts_with(prefix) {
+                return prefix;
+            }
+        }
+        unreachable!()
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub struct CommentKind {
+    pub shape: CommentShape,
+    pub doc: Option<CommentPlacement>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentShape {
+    Line,
+    Block,
+}
+
+impl CommentShape {
+    pub fn is_line(self) -> bool {
+        self == CommentShape::Line
+    }
+
+    pub fn is_block(self) -> bool {
+        self == CommentShape::Block
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentPlacement {
+    Inner,
+    Outer,
+}
+
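+// Longer prefixes have to come before the shorter ones they extend (e.g. `////`
+// before `///`), because `kind_by_prefix` below returns the first entry that
+// matches the start of the comment text.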
+const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = {
+    use {CommentPlacement::*, CommentShape::*};
+    &[
+        ("////", CommentKind { shape: Line, doc: None }),
+        ("///", CommentKind { shape: Line, doc: Some(Outer) }),
+        ("//!", CommentKind { shape: Line, doc: Some(Inner) }),
+        ("/**", CommentKind { shape: Block, doc: Some(Outer) }),
+        ("/*!", CommentKind { shape: Block, doc: Some(Inner) }),
+        ("//", CommentKind { shape: Line, doc: None }),
+        ("/*", CommentKind { shape: Block, doc: None }),
+    ]
+};
+
+fn kind_by_prefix(text: &str) -> CommentKind {
+    if text == "/**/" {
+        return CommentKind { shape: CommentShape::Block, doc: None };
+    }
+    for (prefix, kind) in COMMENT_PREFIX_TO_KIND.iter() {
+        if text.starts_with(prefix) {
+            return *kind;
+        }
+    }
+    panic!("bad comment text: {:?}", text)
+}
+
+impl Whitespace {
+    pub fn spans_multiple_lines(&self) -> bool {
+        let text = self.text();
+        text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
+    }
+}
+
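+/// Text ranges of a string literal's quote characters and of the text between
+/// them, e.g. for `"abc"` the quotes are `0..1` and `4..5` and the contents are `1..4`.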
+pub struct QuoteOffsets {
+    pub quotes: (TextRange, TextRange),
+    pub contents: TextRange,
+}
+
+impl QuoteOffsets {
+    fn new(literal: &str) -> Option<QuoteOffsets> {
+        let left_quote = literal.find('"')?;
+        let right_quote = literal.rfind('"')?;
+        if left_quote == right_quote {
+            // `literal` only contains one quote
+            return None;
+        }
+
+        let start = TextSize::from(0);
+        let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+        let right_quote = TextSize::try_from(right_quote).unwrap();
+        let end = TextSize::of(literal);
+
+        let res = QuoteOffsets {
+            quotes: (TextRange::new(start, left_quote), TextRange::new(right_quote, end)),
+            contents: TextRange::new(left_quote, right_quote),
+        };
+        Some(res)
+    }
+}
+
+pub trait HasQuotes: AstToken {
+    fn quote_offsets(&self) -> Option<QuoteOffsets> {
+        let text = self.text().as_str();
+        let offsets = QuoteOffsets::new(text)?;
+        let o = self.syntax().text_range().start();
+        let offsets = QuoteOffsets {
+            quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
+            contents: offsets.contents + o,
+        };
+        Some(offsets)
+    }
+    fn open_quote_text_range(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.quotes.0)
+    }
+
+    fn close_quote_text_range(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.quotes.1)
+    }
+
+    fn text_range_between_quotes(&self) -> Option<TextRange> {
+        self.quote_offsets().map(|it| it.contents)
+    }
+}
+
+impl HasQuotes for String {}
+impl HasQuotes for RawString {}
+
+pub trait HasStringValue: HasQuotes {
+    fn value(&self) -> Option<Cow<'_, str>>;
+}
+
+impl HasStringValue for String {
+    fn value(&self) -> Option<Cow<'_, str>> {
+        let text = self.text().as_str();
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+        let mut buf = std::string::String::with_capacity(text.len());
+        let mut has_error = false;
+        unescape_literal(text, Mode::Str, &mut |_, unescaped_char| match unescaped_char {
+            Ok(c) => buf.push(c),
+            Err(_) => has_error = true,
+        });
+
+        if has_error {
+            return None;
+        }
+        // FIXME: don't actually allocate for borrowed case
+        let res = if buf == text { Cow::Borrowed(text) } else { Cow::Owned(buf) };
+        Some(res)
+    }
+}
+
+impl HasStringValue for RawString {
+    fn value(&self) -> Option<Cow<'_, str>> {
+        let text = self.text().as_str();
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+        Some(Cow::Borrowed(text))
+    }
+}
+
+impl RawString {
+    pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
+        let contents_range = self.text_range_between_quotes()?;
+        assert!(TextRange::up_to(contents_range.len()).contains_range(range));
+        Some(range + contents_range.start())
+    }
+}
+
+#[derive(Debug)]
+pub enum FormatSpecifier {
+    Open,
+    Close,
+    Integer,
+    Identifier,
+    Colon,
+    Fill,
+    Align,
+    Sign,
+    NumberSign,
+    Zero,
+    DollarSign,
+    Dot,
+    Asterisk,
+    QuestionMark,
+}
+
+pub trait HasFormatSpecifier: AstToken {
+    fn char_ranges(
+        &self,
+    ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>>;
+
+    fn lex_format_specifier<F>(&self, mut callback: F)
+    where
+        F: FnMut(TextRange, FormatSpecifier),
+    {
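+        // A rough illustration: for the string contents `{0:>8.3}` the callback
+        // is called with Open, Integer, Colon, Align, Integer, Dot, Integer and
+        // Close, each paired with the range of the corresponding character(s).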
+        let char_ranges = if let Some(char_ranges) = self.char_ranges() {
+            char_ranges
+        } else {
+            return;
+        };
+        let mut chars = char_ranges.iter().peekable();
+
+        while let Some((range, first_char)) = chars.next() {
+            match first_char {
+                Ok('{') => {
+                    // Format specifier, see syntax at https://doc.rust-lang.org/std/fmt/index.html#syntax
+                    if let Some((_, Ok('{'))) = chars.peek() {
+                        // Escaped format specifier, `{{`
+                        chars.next();
+                        continue;
+                    }
+
+                    callback(*range, FormatSpecifier::Open);
+
+                    // check for integer/identifier
+                    match chars
+                        .peek()
+                        .and_then(|next| next.1.as_ref().ok())
+                        .copied()
+                        .unwrap_or_default()
+                    {
+                        '0'..='9' => {
+                            // integer
+                            read_integer(&mut chars, &mut callback);
+                        }
+                        c if c == '_' || c.is_alphabetic() => {
+                            // identifier
+                            read_identifier(&mut chars, &mut callback);
+                        }
+                        _ => {}
+                    }
+
+                    if let Some((_, Ok(':'))) = chars.peek() {
+                        skip_char_and_emit(&mut chars, FormatSpecifier::Colon, &mut callback);
+
+                        // check for fill/align
+                        let mut cloned = chars.clone().take(2);
+                        let first = cloned
+                            .next()
+                            .and_then(|next| next.1.as_ref().ok())
+                            .copied()
+                            .unwrap_or_default();
+                        let second = cloned
+                            .next()
+                            .and_then(|next| next.1.as_ref().ok())
+                            .copied()
+                            .unwrap_or_default();
+                        match second {
+                            '<' | '^' | '>' => {
+                                // alignment specifier; the first char specifies the fill character
+                                skip_char_and_emit(
+                                    &mut chars,
+                                    FormatSpecifier::Fill,
+                                    &mut callback,
+                                );
+                                skip_char_and_emit(
+                                    &mut chars,
+                                    FormatSpecifier::Align,
+                                    &mut callback,
+                                );
+                            }
+                            _ => match first {
+                                '<' | '^' | '>' => {
+                                    skip_char_and_emit(
+                                        &mut chars,
+                                        FormatSpecifier::Align,
+                                        &mut callback,
+                                    );
+                                }
+                                _ => {}
+                            },
+                        }
+
+                        // check for sign
+                        match chars
+                            .peek()
+                            .and_then(|next| next.1.as_ref().ok())
+                            .copied()
+                            .unwrap_or_default()
+                        {
+                            '+' | '-' => {
+                                skip_char_and_emit(
+                                    &mut chars,
+                                    FormatSpecifier::Sign,
+                                    &mut callback,
+                                );
+                            }
+                            _ => {}
+                        }
+
+                        // check for `#`
+                        if let Some((_, Ok('#'))) = chars.peek() {
+                            skip_char_and_emit(
+                                &mut chars,
+                                FormatSpecifier::NumberSign,
+                                &mut callback,
+                            );
+                        }
+
+                        // check for `0`
+                        let mut cloned = chars.clone().take(2);
+                        let first = cloned.next().and_then(|next| next.1.as_ref().ok()).copied();
+                        let second = cloned.next().and_then(|next| next.1.as_ref().ok()).copied();
+
+                        if first == Some('0') && second != Some('$') {
+                            skip_char_and_emit(&mut chars, FormatSpecifier::Zero, &mut callback);
+                        }
+
+                        // width
+                        match chars
+                            .peek()
+                            .and_then(|next| next.1.as_ref().ok())
+                            .copied()
+                            .unwrap_or_default()
+                        {
+                            '0'..='9' => {
+                                read_integer(&mut chars, &mut callback);
+                                if let Some((_, Ok('$'))) = chars.peek() {
+                                    skip_char_and_emit(
+                                        &mut chars,
+                                        FormatSpecifier::DollarSign,
+                                        &mut callback,
+                                    );
+                                }
+                            }
+                            c if c == '_' || c.is_alphabetic() => {
+                                read_identifier(&mut chars, &mut callback);
+                                // can be either a width (indicated by a dollar sign) or a type,
+                                // in which case the next char has to be `}`
+                                let next =
+                                    chars.peek().and_then(|next| next.1.as_ref().ok()).copied();
+                                match next {
+                                    Some('$') => skip_char_and_emit(
+                                        &mut chars,
+                                        FormatSpecifier::DollarSign,
+                                        &mut callback,
+                                    ),
+                                    Some('}') => {
+                                        skip_char_and_emit(
+                                            &mut chars,
+                                            FormatSpecifier::Close,
+                                            &mut callback,
+                                        );
+                                        continue;
+                                    }
+                                    _ => continue,
+                                };
+                            }
+                            _ => {}
+                        }
+
+                        // precision
+                        if let Some((_, Ok('.'))) = chars.peek() {
+                            skip_char_and_emit(&mut chars, FormatSpecifier::Dot, &mut callback);
+
+                            match chars
+                                .peek()
+                                .and_then(|next| next.1.as_ref().ok())
+                                .copied()
+                                .unwrap_or_default()
+                            {
+                                '*' => {
+                                    skip_char_and_emit(
+                                        &mut chars,
+                                        FormatSpecifier::Asterisk,
+                                        &mut callback,
+                                    );
+                                }
+                                '0'..='9' => {
+                                    read_integer(&mut chars, &mut callback);
+                                    if let Some((_, Ok('$'))) = chars.peek() {
+                                        skip_char_and_emit(
+                                            &mut chars,
+                                            FormatSpecifier::DollarSign,
+                                            &mut callback,
+                                        );
+                                    }
+                                }
+                                c if c == '_' || c.is_alphabetic() => {
+                                    read_identifier(&mut chars, &mut callback);
+                                    if chars.peek().and_then(|next| next.1.as_ref().ok()).copied()
+                                        != Some('$')
+                                    {
+                                        continue;
+                                    }
+                                    skip_char_and_emit(
+                                        &mut chars,
+                                        FormatSpecifier::DollarSign,
+                                        &mut callback,
+                                    );
+                                }
+                                _ => {
+                                    continue;
+                                }
+                            }
+                        }
+
+                        // type
+                        match chars
+                            .peek()
+                            .and_then(|next| next.1.as_ref().ok())
+                            .copied()
+                            .unwrap_or_default()
+                        {
+                            '?' => {
+                                skip_char_and_emit(
+                                    &mut chars,
+                                    FormatSpecifier::QuestionMark,
+                                    &mut callback,
+                                );
+                            }
+                            c if c == '_' || c.is_alphabetic() => {
+                                read_identifier(&mut chars, &mut callback);
+                            }
+                            _ => {}
+                        }
+                    }
+
+                    if let Some((_, Ok('}'))) = chars.peek() {
+                        skip_char_and_emit(&mut chars, FormatSpecifier::Close, &mut callback);
+                    } else {
+                        continue;
+                    }
+                }
+                _ => {
+                    while let Some((_, Ok(next_char))) = chars.peek() {
+                        match next_char {
+                            '{' => break,
+                            _ => {}
+                        }
+                        chars.next();
+                    }
+                }
+            };
+        }
+
+        fn skip_char_and_emit<'a, I, F>(
+            chars: &mut std::iter::Peekable<I>,
+            emit: FormatSpecifier,
+            callback: &mut F,
+        ) where
+            I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
+            F: FnMut(TextRange, FormatSpecifier),
+        {
+            let (range, _) = chars.next().unwrap();
+            callback(*range, emit);
+        }
+
+        fn read_integer<'a, I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+        where
+            I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
+            F: FnMut(TextRange, FormatSpecifier),
+        {
+            let (mut range, c) = chars.next().unwrap();
+            assert!(c.as_ref().unwrap().is_ascii_digit());
+            while let Some((r, Ok(next_char))) = chars.peek() {
+                if next_char.is_ascii_digit() {
+                    chars.next();
+                    range = range.cover(*r);
+                } else {
+                    break;
+                }
+            }
+            callback(range, FormatSpecifier::Integer);
+        }
+
+        fn read_identifier<'a, I, F>(chars: &mut std::iter::Peekable<I>, callback: &mut F)
+        where
+            I: Iterator<Item = &'a (TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>,
+            F: FnMut(TextRange, FormatSpecifier),
+        {
+            let (mut range, c) = chars.next().unwrap();
+            assert!(c.as_ref().unwrap().is_alphabetic() || *c.as_ref().unwrap() == '_');
+            while let Some((r, Ok(next_char))) = chars.peek() {
+                if *next_char == '_' || next_char.is_ascii_digit() || next_char.is_alphabetic() {
+                    chars.next();
+                    range = range.cover(*r);
+                } else {
+                    break;
+                }
+            }
+            callback(range, FormatSpecifier::Identifier);
+        }
+    }
+}
+
+impl HasFormatSpecifier for String {
+    fn char_ranges(
+        &self,
+    ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
+        let text = self.text().as_str();
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+        let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
+
+        let mut res = Vec::with_capacity(text.len());
+        unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+            res.push((
+                TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap())
+                    + offset,
+                unescaped_char,
+            ))
+        });
+
+        Some(res)
+    }
+}
+
+impl HasFormatSpecifier for RawString {
+    fn char_ranges(
+        &self,
+    ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
+        let text = self.text().as_str();
+        let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+        let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
+
+        let mut res = Vec::with_capacity(text.len());
+        for (idx, c) in text.char_indices() {
+            res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
+        }
+        Some(res)
+    }
+}
diff --git a/crates/syntax/src/ast/traits.rs b/crates/syntax/src/ast/traits.rs
new file mode 100644
index 00000000000..0bdc22d953f
--- /dev/null
+++ b/crates/syntax/src/ast/traits.rs
@@ -0,0 +1,141 @@
+//! Various traits that are implemented by ast nodes.
+//!
+//! The implementations are usually trivial, and live in generated.rs
+use itertools::Itertools;
+
+use crate::{
+    ast::{self, support, AstChildren, AstNode, AstToken},
+    syntax_node::SyntaxElementChildren,
+    SyntaxToken, T,
+};
+
+pub trait NameOwner: AstNode {
+    fn name(&self) -> Option<ast::Name> {
+        support::child(self.syntax())
+    }
+}
+
+pub trait VisibilityOwner: AstNode {
+    fn visibility(&self) -> Option<ast::Visibility> {
+        support::child(self.syntax())
+    }
+}
+
+pub trait LoopBodyOwner: AstNode {
+    fn loop_body(&self) -> Option<ast::BlockExpr> {
+        support::child(self.syntax())
+    }
+
+    fn label(&self) -> Option<ast::Label> {
+        support::child(self.syntax())
+    }
+}
+
+pub trait ArgListOwner: AstNode {
+    fn arg_list(&self) -> Option<ast::ArgList> {
+        support::child(self.syntax())
+    }
+}
+
+pub trait ModuleItemOwner: AstNode {
+    fn items(&self) -> AstChildren<ast::Item> {
+        support::children(self.syntax())
+    }
+}
+
+pub trait GenericParamsOwner: AstNode {
+    fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+        support::child(self.syntax())
+    }
+
+    fn where_clause(&self) -> Option<ast::WhereClause> {
+        support::child(self.syntax())
+    }
+}
+
+pub trait TypeBoundsOwner: AstNode {
+    fn type_bound_list(&self) -> Option<ast::TypeBoundList> {
+        support::child(self.syntax())
+    }
+
+    fn colon_token(&self) -> Option<SyntaxToken> {
+        support::token(self.syntax(), T![:])
+    }
+}
+
+pub trait AttrsOwner: AstNode {
+    fn attrs(&self) -> AstChildren<ast::Attr> {
+        support::children(self.syntax())
+    }
+    fn has_atom_attr(&self, atom: &str) -> bool {
+        self.attrs().filter_map(|x| x.as_simple_atom()).any(|x| x == atom)
+    }
+}
+
+pub trait DocCommentsOwner: AstNode {
+    fn doc_comments(&self) -> CommentIter {
+        CommentIter { iter: self.syntax().children_with_tokens() }
+    }
+
+    fn doc_comment_text(&self) -> Option<String> {
+        self.doc_comments().doc_comment_text()
+    }
+}
+
+impl CommentIter {
+    pub fn from_syntax_node(syntax_node: &ast::SyntaxNode) -> CommentIter {
+        CommentIter { iter: syntax_node.children_with_tokens() }
+    }
+
+    /// Returns the textual content of a doc comment block as a single string.
+    /// That is, it strips the leading `///` (plus up to one character of
+    /// whitespace) and a trailing `*/`, and then joins the lines.
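+    /// For example, the adjacent comments `/// Foo` and `/// bar` are joined
+    /// into the single string `Foo\nbar`.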
+    pub fn doc_comment_text(self) -> Option<String> {
+        let mut has_comments = false;
+        let docs = self
+            .filter(|comment| comment.kind().doc.is_some())
+            .map(|comment| {
+                has_comments = true;
+                let prefix_len = comment.prefix().len();
+
+                let line: &str = comment.text().as_str();
+
+                // Determine if the prefix or prefix + 1 char is stripped
+                let pos =
+                    if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) {
+                        prefix_len + ws.len_utf8()
+                    } else {
+                        prefix_len
+                    };
+
+                let end = if comment.kind().shape.is_block() && line.ends_with("*/") {
+                    line.len() - 2
+                } else {
+                    line.len()
+                };
+
+                // Note that we do not trim the end of the line here
+                // since whitespace can have special meaning at the end
+                // of a line in markdown.
+                line[pos..end].to_owned()
+            })
+            .join("\n");
+
+        if has_comments {
+            Some(docs)
+        } else {
+            None
+        }
+    }
+}
+
+pub struct CommentIter {
+    iter: SyntaxElementChildren,
+}
+
+impl Iterator for CommentIter {
+    type Item = ast::Comment;
+    fn next(&mut self) -> Option<ast::Comment> {
+        self.iter.by_ref().find_map(|el| el.into_token().and_then(ast::Comment::cast))
+    }
+}
diff --git a/crates/syntax/src/fuzz.rs b/crates/syntax/src/fuzz.rs
new file mode 100644
index 00000000000..fbb97aa2736
--- /dev/null
+++ b/crates/syntax/src/fuzz.rs
@@ -0,0 +1,73 @@
+//! FIXME: write short doc here
+
+use std::{
+    convert::TryInto,
+    str::{self, FromStr},
+};
+
+use text_edit::Indel;
+
+use crate::{validation, AstNode, SourceFile, TextRange};
+
+fn check_file_invariants(file: &SourceFile) {
+    let root = file.syntax();
+    validation::validate_block_structure(root);
+}
+
+pub fn check_parser(text: &str) {
+    let file = SourceFile::parse(text);
+    check_file_invariants(&file.tree());
+}
+
+#[derive(Debug, Clone)]
+pub struct CheckReparse {
+    text: String,
+    edit: Indel,
+    edited_text: String,
+}
+
+impl CheckReparse {
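+    /// Interprets the raw fuzz data as three header lines (delete offset,
+    /// delete length, text to insert) followed by the source text, which is
+    /// wrapped in a small `fn main` body before parsing.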
+    pub fn from_data(data: &[u8]) -> Option<Self> {
+        const PREFIX: &str = "fn main(){\n\t";
+        const SUFFIX: &str = "\n}";
+
+        let data = str::from_utf8(data).ok()?;
+        let mut lines = data.lines();
+        let delete_start = usize::from_str(lines.next()?).ok()? + PREFIX.len();
+        let delete_len = usize::from_str(lines.next()?).ok()?;
+        let insert = lines.next()?.to_string();
+        let text = lines.collect::<Vec<_>>().join("\n");
+        let text = format!("{}{}{}", PREFIX, text, SUFFIX);
+        text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
+        let delete =
+            TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
+        let edited_text =
+            format!("{}{}{}", &text[..delete_start], &insert, &text[delete_start + delete_len..]);
+        let edit = Indel { delete, insert };
+        Some(CheckReparse { text, edit, edited_text })
+    }
+
+    pub fn run(&self) {
+        let parse = SourceFile::parse(&self.text);
+        let new_parse = parse.reparse(&self.edit);
+        check_file_invariants(&new_parse.tree());
+        assert_eq!(&new_parse.tree().syntax().text().to_string(), &self.edited_text);
+        let full_reparse = SourceFile::parse(&self.edited_text);
+        for (a, b) in
+            new_parse.tree().syntax().descendants().zip(full_reparse.tree().syntax().descendants())
+        {
+            if (a.kind(), a.text_range()) != (b.kind(), b.text_range()) {
+                eprint!("original:\n{:#?}", parse.tree().syntax());
+                eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
+                eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
+                assert_eq!(
+                    format!("{:?}", a),
+                    format!("{:?}", b),
+                    "different syntax tree produced by the full reparse"
+                );
+            }
+        }
+        // FIXME
+        // assert_eq!(new_file.errors(), full_reparse.errors());
+    }
+}
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
new file mode 100644
index 00000000000..7f8da66af07
--- /dev/null
+++ b/crates/syntax/src/lib.rs
@@ -0,0 +1,388 @@
+//! Syntax Tree library used throughout the rust analyzer.
+//!
+//! Properties:
+//!   - easy and fast incremental re-parsing
+//!   - graceful handling of errors
+//!   - full-fidelity representation (*any* text can be precisely represented as
+//!     a syntax tree)
+//!
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
+//!
+//! The most interesting modules here are `syntax_node` (which defines the
+//! concrete syntax tree) and `ast` (which defines the abstract syntax tree on
+//! top of the CST). The actual parser lives in a separate `parser` crate, though the
+//! lexer lives in this crate.
+//!
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
+
+#[allow(unused)]
+macro_rules! eprintln {
+    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod syntax_node;
+mod syntax_error;
+mod parsing;
+mod validation;
+mod ptr;
+#[cfg(test)]
+mod tests;
+
+pub mod algo;
+pub mod ast;
+#[doc(hidden)]
+pub mod fuzz;
+
+use std::{marker::PhantomData, sync::Arc};
+
+use stdx::format_to;
+use text_edit::Indel;
+
+pub use crate::{
+    algo::InsertPosition,
+    ast::{AstNode, AstToken},
+    parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token},
+    ptr::{AstPtr, SyntaxNodePtr},
+    syntax_error::SyntaxError,
+    syntax_node::{
+        Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode,
+        SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+    },
+};
+pub use parser::{SyntaxKind, T};
+pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent};
+
+/// `Parse` is the result of parsing: a syntax tree and a collection of
+/// errors.
+///
+/// Note that we always produce a syntax tree, even for completely invalid
+/// files.
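+///
+/// Call `errors()` to inspect the problems and `tree()` (or `ok()`) to obtain
+/// the typed root node; the `api_walkthrough` test below shows this in action.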
+#[derive(Debug, PartialEq, Eq)]
+pub struct Parse<T> {
+    green: GreenNode,
+    errors: Arc<Vec<SyntaxError>>,
+    _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Parse<T> {
+    fn clone(&self) -> Parse<T> {
+        Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
+    }
+}
+
+impl<T> Parse<T> {
+    fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
+        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+    }
+
+    pub fn syntax_node(&self) -> SyntaxNode {
+        SyntaxNode::new_root(self.green.clone())
+    }
+}
+
+impl<T: AstNode> Parse<T> {
+    pub fn to_syntax(self) -> Parse<SyntaxNode> {
+        Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+    }
+
+    pub fn tree(&self) -> T {
+        T::cast(self.syntax_node()).unwrap()
+    }
+
+    pub fn errors(&self) -> &[SyntaxError] {
+        &*self.errors
+    }
+
+    pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
+        if self.errors.is_empty() {
+            Ok(self.tree())
+        } else {
+            Err(self.errors)
+        }
+    }
+}
+
+impl Parse<SyntaxNode> {
+    pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+        if N::cast(self.syntax_node()).is_some() {
+            Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+        } else {
+            None
+        }
+    }
+}
+
+impl Parse<SourceFile> {
+    pub fn debug_dump(&self) -> String {
+        let mut buf = format!("{:#?}", self.tree().syntax());
+        for err in self.errors.iter() {
+            format_to!(buf, "error {:?}: {}\n", err.range(), err);
+        }
+        buf
+    }
+
+    pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+        self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+    }
+
+    fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
+        // FIXME: validation errors are not handled here
+        parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
+            |(green_node, errors, _reparsed_range)| Parse {
+                green: green_node,
+                errors: Arc::new(errors),
+                _ty: PhantomData,
+            },
+        )
+    }
+
+    fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+        let mut text = self.tree().syntax().text().to_string();
+        indel.apply(&mut text);
+        SourceFile::parse(&text)
+    }
+}
+
+/// `SourceFile` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFile;
+
+impl SourceFile {
+    pub fn parse(text: &str) -> Parse<SourceFile> {
+        let (green, mut errors) = parsing::parse_text(text);
+        let root = SyntaxNode::new_root(green.clone());
+
+        if cfg!(debug_assertions) {
+            validation::validate_block_structure(&root);
+        }
+
+        errors.extend(validation::validate(&root));
+
+        assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
+        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+    }
+}
+
+impl ast::Path {
+    /// Returns `text`, parsed as a path, but only if it has no errors.
+    pub fn parse(text: &str) -> Result<Self, ()> {
+        parsing::parse_text_fragment(text, parser::FragmentKind::Path)
+    }
+}
+
+impl ast::Pat {
+    /// Returns `text`, parsed as a pattern, but only if it has no errors.
+    pub fn parse(text: &str) -> Result<Self, ()> {
+        parsing::parse_text_fragment(text, parser::FragmentKind::Pattern)
+    }
+}
+
+impl ast::Expr {
+    /// Returns `text`, parsed as an expression, but only if it has no errors.
+    pub fn parse(text: &str) -> Result<Self, ()> {
+        parsing::parse_text_fragment(text, parser::FragmentKind::Expr)
+    }
+}
+
+impl ast::Item {
+    /// Returns `text`, parsed as an item, but only if it has no errors.
+    pub fn parse(text: &str) -> Result<Self, ()> {
+        parsing::parse_text_fragment(text, parser::FragmentKind::Item)
+    }
+}
+
+impl ast::Type {
+    /// Returns `text`, parsed as a type reference, but only if it has no errors.
+    pub fn parse(text: &str) -> Result<Self, ()> {
+        parsing::parse_text_fragment(text, parser::FragmentKind::Type)
+    }
+}
+
+/// Matches a `SyntaxNode` against an `ast` type.
+///
+/// # Example:
+///
+/// ```ignore
+/// match_ast! {
+///     match node {
+///         ast::CallExpr(it) => { ... },
+///         ast::MethodCallExpr(it) => { ... },
+///         ast::MacroCall(it) => { ... },
+///         _ => None,
+///     }
+/// }
+/// ```
+#[macro_export]
+macro_rules! match_ast {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
+        $( ast::$ast:ident($it:ident) => $res:expr, )*
+        _ => $catch_all:expr $(,)?
+    }) => {{
+        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+        { $catch_all }
+    }};
+}
+
+/// This test does not assert anything and instead just shows off the crate's
+/// API.
+#[test]
+fn api_walkthrough() {
+    use ast::{ModuleItemOwner, NameOwner};
+
+    let source_code = "
+        fn foo() {
+            1 + 1
+        }
+    ";
+    // `SourceFile` is the main entry point.
+    //
+    // The `parse` method returns a `Parse` -- a pair of a syntax tree and a list
+    // of errors. That is, a syntax tree is constructed even in the presence of errors.
+    let parse = SourceFile::parse(source_code);
+    assert!(parse.errors().is_empty());
+
+    // The `tree` method returns an owned syntax node of type `SourceFile`.
+    // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
+    let file: SourceFile = parse.tree();
+
+    // `SourceFile` is the root of the syntax tree. We can iterate file's items.
+    // Let's fetch the `foo` function.
+    let mut func = None;
+    for item in file.items() {
+        match item {
+            ast::Item::Fn(f) => func = Some(f),
+            _ => unreachable!(),
+        }
+    }
+    let func: ast::Fn = func.unwrap();
+
+    // Each AST node has a bunch of getters for children. All getters return
+    // `Option`s though, to account for incomplete code. Some getters are common
+    // for several kinds of node. In this case, a trait like `ast::NameOwner`
+    // usually exists. By convention, all ast types should be used with `ast::`
+    // qualifier.
+    let name: Option<ast::Name> = func.name();
+    let name = name.unwrap();
+    assert_eq!(name.text(), "foo");
+
+    // Let's get the `1 + 1` expression!
+    let body: ast::BlockExpr = func.body().unwrap();
+    let expr: ast::Expr = body.expr().unwrap();
+
+    // Enums are used to group related ast nodes together, and can be used for
+    // matching. However, because there are no public fields, it's possible to
+    // match only the top level enum: that is the price we pay for increased API
+    // flexibility.
+    let bin_expr: &ast::BinExpr = match &expr {
+        ast::Expr::BinExpr(e) => e,
+        _ => unreachable!(),
+    };
+
+    // Besides the "typed" AST API, there's an untyped CST one as well.
+    // To switch from AST to CST, call `.syntax()` method:
+    let expr_syntax: &SyntaxNode = expr.syntax();
+
+    // Note how `expr` and `bin_expr` are in fact the same node underneath:
+    assert!(expr_syntax == bin_expr.syntax());
+
+    // To go from CST to AST, `AstNode::cast` function is used:
+    let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
+        Some(e) => e,
+        None => unreachable!(),
+    };
+
+    // Each syntax node has two properties. The first is its `SyntaxKind`:
+    assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
+
+    // And text range:
+    assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));
+
+    // You can get the node's text as a `SyntaxText` object, which will traverse the
+    // tree, collecting the tokens' text:
+    let text: SyntaxText = expr_syntax.text();
+    assert_eq!(text.to_string(), "1 + 1");
+
+    // There's a bunch of traversal methods on `SyntaxNode`:
+    assert_eq!(expr_syntax.parent().as_ref(), Some(body.syntax()));
+    assert_eq!(body.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
+    assert_eq!(
+        expr_syntax.next_sibling_or_token().map(|it| it.kind()),
+        Some(SyntaxKind::WHITESPACE)
+    );
+
+    // As well as some iterator helpers:
+    let f = expr_syntax.ancestors().find_map(ast::Fn::cast);
+    assert_eq!(f, Some(func));
+    assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
+    assert_eq!(
+        expr_syntax.descendants_with_tokens().count(),
+        8, // 5 tokens `1`, ` `, `+`, ` `, `1`
+           // 2 child literal expressions: `1`, `1`
+           // 1 the node itself: `1 + 1`
+    );
+
+    // There's also a `preorder` method with a more fine-grained iteration control:
+    let mut buf = String::new();
+    let mut indent = 0;
+    for event in expr_syntax.preorder_with_tokens() {
+        match event {
+            WalkEvent::Enter(node) => {
+                let text = match &node {
+                    NodeOrToken::Node(it) => it.text().to_string(),
+                    NodeOrToken::Token(it) => it.text().to_string(),
+                };
+                format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
+                indent += 2;
+            }
+            WalkEvent::Leave(_) => indent -= 2,
+        }
+    }
+    assert_eq!(indent, 0);
+    assert_eq!(
+        buf.trim(),
+        r#"
+"1 + 1" BIN_EXPR
+  "1" LITERAL
+    "1" INT_NUMBER
+  " " WHITESPACE
+  "+" PLUS
+  " " WHITESPACE
+  "1" LITERAL
+    "1" INT_NUMBER
+"#
+        .trim()
+    );
+
+    // To recursively process the tree, there are three approaches:
+    // 1. explicitly call getter methods on AST nodes.
+    // 2. use descendants and `AstNode::cast`.
+    // 3. use descendants and `match_ast!`.
+    //
+    // Here's what the second approach looks like:
+    let exprs_cast: Vec<String> = file
+        .syntax()
+        .descendants()
+        .filter_map(ast::Expr::cast)
+        .map(|expr| expr.syntax().text().to_string())
+        .collect();
+
+    // And the third approach is to use the `match_ast!` macro.
+    let mut exprs_visit = Vec::new();
+    for node in file.syntax().descendants() {
+        match_ast! {
+            match node {
+                ast::Expr(it) => {
+                    let res = it.syntax().text().to_string();
+                    exprs_visit.push(res);
+                },
+                _ => (),
+            }
+        }
+    }
+    assert_eq!(exprs_cast, exprs_visit);
+}
diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs
new file mode 100644
index 00000000000..68a39eb2103
--- /dev/null
+++ b/crates/syntax/src/parsing.rs
@@ -0,0 +1,59 @@
+//! Lexing, bridging to parser (which does the actual parsing) and
+//! incremental reparsing.
+
+mod lexer;
+mod text_token_source;
+mod text_tree_sink;
+mod reparsing;
+
+use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
+use text_token_source::TextTokenSource;
+use text_tree_sink::TextTreeSink;
+
+pub use lexer::*;
+
+pub(crate) use self::reparsing::incremental_reparse;
+use parser::SyntaxKind;
+
+pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
+    let (tokens, lexer_errors) = tokenize(&text);
+
+    let mut token_source = TextTokenSource::new(text, &tokens);
+    let mut tree_sink = TextTreeSink::new(text, &tokens);
+
+    parser::parse(&mut token_source, &mut tree_sink);
+
+    let (tree, mut parser_errors) = tree_sink.finish();
+    parser_errors.extend(lexer_errors);
+
+    (tree, parser_errors)
+}
+
+/// Returns `text` parsed as a `T` provided there are no parse errors.
+pub(crate) fn parse_text_fragment<T: AstNode>(
+    text: &str,
+    fragment_kind: parser::FragmentKind,
+) -> Result<T, ()> {
+    let (tokens, lexer_errors) = tokenize(&text);
+    if !lexer_errors.is_empty() {
+        return Err(());
+    }
+
+    let mut token_source = TextTokenSource::new(text, &tokens);
+    let mut tree_sink = TextTreeSink::new(text, &tokens);
+
+    // TextTreeSink assumes that there's at least some root node to which it can attach errors and
+    // tokens. We arbitrarily give it a SourceFile.
+    use parser::TreeSink;
+    tree_sink.start_node(SyntaxKind::SOURCE_FILE);
+    parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
+    tree_sink.finish_node();
+
+    let (tree, parser_errors) = tree_sink.finish();
+    use parser::TokenSource;
+    if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
+        return Err(());
+    }
+
+    SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
+}
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
new file mode 100644
index 00000000000..fa3be101622
--- /dev/null
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -0,0 +1,244 @@
+//! Lexer analyzes raw input string and produces lexemes (tokens).
+//! It is just a bridge to `rustc_lexer`.
+
+use rustc_lexer::{LiteralKind as LK, RawStrError};
+
+use std::convert::TryInto;
+
+use crate::{
+    SyntaxError,
+    SyntaxKind::{self, *},
+    TextRange, TextSize, T,
+};
+
+/// A token of Rust source.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Token {
+    /// The kind of token.
+    pub kind: SyntaxKind,
+    /// The length of the token.
+    pub len: TextSize,
+}
+
+/// Breaks a string up into its component tokens.
+/// Beware that it checks for a shebang first, and the shebang's length contributes
+/// to the resulting token offsets.
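+///
+/// A minimal usage sketch (the literal input and assertions are illustrative only):
+///
+/// ```no-run
+/// let (tokens, errors) = tokenize("fn main() {}");
+/// assert!(errors.is_empty());
+/// // The tokens cover the whole input, so their lengths sum to the text length.
+/// let total: TextSize = tokens.iter().map(|it| it.len).sum();
+/// assert_eq!(total, TextSize::of("fn main() {}"));
+/// ```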
+pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
+    // A non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
+    if text.is_empty() {
+        return Default::default();
+    }
+
+    let mut tokens = Vec::new();
+    let mut errors = Vec::new();
+
+    let mut offset = match rustc_lexer::strip_shebang(text) {
+        Some(shebang_len) => {
+            tokens.push(Token { kind: SHEBANG, len: shebang_len.try_into().unwrap() });
+            shebang_len
+        }
+        None => 0,
+    };
+
+    let text_without_shebang = &text[offset..];
+
+    for rustc_token in rustc_lexer::tokenize(text_without_shebang) {
+        let token_len: TextSize = rustc_token.len.try_into().unwrap();
+        let token_range = TextRange::at(offset.try_into().unwrap(), token_len);
+
+        let (syntax_kind, err_message) =
+            rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]);
+
+        tokens.push(Token { kind: syntax_kind, len: token_len });
+
+        if let Some(err_message) = err_message {
+            errors.push(SyntaxError::new(err_message, token_range));
+        }
+
+        offset += rustc_token.len;
+    }
+
+    (tokens, errors)
+}
+
+/// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token
+/// encountered at the beginning of the string.
+///
+/// Returns `None` if the string contains zero *or two or more* tokens.
+/// The token is malformed if the returned error is not `None`.
+///
+/// Beware that unescape errors are not checked at tokenization time.
+pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> {
+    lex_first_token(text)
+        .filter(|(token, _)| token.len == TextSize::of(text))
+        .map(|(token, error)| (token.kind, error))
+}
+
+/// The same as `lex_single_syntax_kind()`, but returns only the `SyntaxKind` and
+/// returns `None` if any tokenization error occurred.
+///
+/// Beware that unescape errors are not checked at tokenization time.
+pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
+    lex_first_token(text)
+        .filter(|(token, error)| error.is_none() && token.len == TextSize::of(text))
+        .map(|(token, _error)| token.kind)
+}
+
+/// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token
+/// encountered at the beginning of the string.
+///
+/// Returns `None` if the string contains zero tokens.
+/// The token is malformed if the returned error is not `None`.
+///
+/// Beware that unescape errors are not checked at tokenization time.
+fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
+    // A non-empty string is a precondition of `rustc_lexer::first_token()`.
+    if text.is_empty() {
+        return None;
+    }
+
+    let rustc_token = rustc_lexer::first_token(text);
+    let (syntax_kind, err_message) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text);
+
+    let token = Token { kind: syntax_kind, len: rustc_token.len.try_into().unwrap() };
+    let optional_error = err_message
+        .map(|err_message| SyntaxError::new(err_message, TextRange::up_to(TextSize::of(text))));
+
+    Some((token, optional_error))
+}
+
+/// Returns `SyntaxKind` and an optional tokenize error message.
+fn rustc_token_kind_to_syntax_kind(
+    rustc_token_kind: &rustc_lexer::TokenKind,
+    token_text: &str,
+) -> (SyntaxKind, Option<&'static str>) {
+    // A note on an intended tradeoff:
+    // we drop some useful information here (see the patterns with double dots `..`).
+    // Storing that info in `SyntaxKind` is not possible due to its layout requirement
+    // of being a `u16`, which comes from `rowan::SyntaxKind`.
+
+    let syntax_kind = {
+        match rustc_token_kind {
+            rustc_lexer::TokenKind::LineComment => COMMENT,
+
+            rustc_lexer::TokenKind::BlockComment { terminated: true } => COMMENT,
+            rustc_lexer::TokenKind::BlockComment { terminated: false } => {
+                return (
+                    COMMENT,
+                    Some("Missing trailing `*/` symbols to terminate the block comment"),
+                );
+            }
+
+            rustc_lexer::TokenKind::Whitespace => WHITESPACE,
+
+            rustc_lexer::TokenKind::Ident => {
+                if token_text == "_" {
+                    UNDERSCORE
+                } else {
+                    SyntaxKind::from_keyword(token_text).unwrap_or(IDENT)
+                }
+            }
+
+            rustc_lexer::TokenKind::RawIdent => IDENT,
+            rustc_lexer::TokenKind::Literal { kind, .. } => return match_literal_kind(&kind),
+
+            rustc_lexer::TokenKind::Lifetime { starts_with_number: false } => LIFETIME,
+            rustc_lexer::TokenKind::Lifetime { starts_with_number: true } => {
+                return (LIFETIME, Some("Lifetime name cannot start with a number"))
+            }
+
+            rustc_lexer::TokenKind::Semi => T![;],
+            rustc_lexer::TokenKind::Comma => T![,],
+            rustc_lexer::TokenKind::Dot => T![.],
+            rustc_lexer::TokenKind::OpenParen => T!['('],
+            rustc_lexer::TokenKind::CloseParen => T![')'],
+            rustc_lexer::TokenKind::OpenBrace => T!['{'],
+            rustc_lexer::TokenKind::CloseBrace => T!['}'],
+            rustc_lexer::TokenKind::OpenBracket => T!['['],
+            rustc_lexer::TokenKind::CloseBracket => T![']'],
+            rustc_lexer::TokenKind::At => T![@],
+            rustc_lexer::TokenKind::Pound => T![#],
+            rustc_lexer::TokenKind::Tilde => T![~],
+            rustc_lexer::TokenKind::Question => T![?],
+            rustc_lexer::TokenKind::Colon => T![:],
+            rustc_lexer::TokenKind::Dollar => T![$],
+            rustc_lexer::TokenKind::Eq => T![=],
+            rustc_lexer::TokenKind::Not => T![!],
+            rustc_lexer::TokenKind::Lt => T![<],
+            rustc_lexer::TokenKind::Gt => T![>],
+            rustc_lexer::TokenKind::Minus => T![-],
+            rustc_lexer::TokenKind::And => T![&],
+            rustc_lexer::TokenKind::Or => T![|],
+            rustc_lexer::TokenKind::Plus => T![+],
+            rustc_lexer::TokenKind::Star => T![*],
+            rustc_lexer::TokenKind::Slash => T![/],
+            rustc_lexer::TokenKind::Caret => T![^],
+            rustc_lexer::TokenKind::Percent => T![%],
+            rustc_lexer::TokenKind::Unknown => ERROR,
+        }
+    };
+
+    return (syntax_kind, None);
+
+    fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<&'static str>) {
+        #[rustfmt::skip]
+        let syntax_kind = match *kind {
+            LK::Int { empty_int: false, .. } => INT_NUMBER,
+            LK::Int { empty_int: true, .. } => {
+                return (INT_NUMBER, Some("Missing digits after the integer base prefix"))
+            }
+
+            LK::Float { empty_exponent: false, .. } => FLOAT_NUMBER,
+            LK::Float { empty_exponent: true, .. } => {
+                return (FLOAT_NUMBER, Some("Missing digits after the exponent symbol"))
+            }
+
+            LK::Char { terminated: true } => CHAR,
+            LK::Char { terminated: false } => {
+                return (CHAR, Some("Missing trailing `'` symbol to terminate the character literal"))
+            }
+
+            LK::Byte { terminated: true } => BYTE,
+            LK::Byte { terminated: false } => {
+                return (BYTE, Some("Missing trailing `'` symbol to terminate the byte literal"))
+            }
+
+            LK::Str { terminated: true } => STRING,
+            LK::Str { terminated: false } => {
+                return (STRING, Some("Missing trailing `\"` symbol to terminate the string literal"))
+            }
+
+            LK::ByteStr { terminated: true } => BYTE_STRING,
+            LK::ByteStr { terminated: false } => {
+                return (BYTE_STRING, Some("Missing trailing `\"` symbol to terminate the byte string literal"))
+            }
+
+            LK::RawStr { err, .. } => match err {
+                None => RAW_STRING,
+                Some(RawStrError::InvalidStarter { .. }) => return (RAW_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw string literal")),
+                Some(RawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+                    return (RAW_STRING, Some("Missing trailing `\"` to terminate the raw string literal"))
+                } else {
+                    return (RAW_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw string literal"))
+                },
+                Some(RawStrError::TooManyDelimiters { .. }) => return (RAW_STRING, Some("Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols")),
+            },
+            LK::RawByteStr { err, .. } => match err {
+                None => RAW_BYTE_STRING,
+                Some(RawStrError::InvalidStarter { .. }) => return (RAW_BYTE_STRING, Some("Missing `\"` symbol after `#` symbols to begin the raw byte string literal")),
+                Some(RawStrError::NoTerminator { expected, found, .. }) => if expected == found {
+                    return (RAW_BYTE_STRING, Some("Missing trailing `\"` to terminate the raw byte string literal"))
+                } else {
+                    return (RAW_BYTE_STRING, Some("Missing trailing `\"` with `#` symbols to terminate the raw byte string literal"))
+                },
+                Some(RawStrError::TooManyDelimiters { .. }) => return (RAW_BYTE_STRING, Some("Too many `#` symbols: raw byte strings may be delimited by up to 65535 `#` symbols")),
+            },
+        };
+
+        (syntax_kind, None)
+    }
+}
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
new file mode 100644
index 00000000000..4149f856a83
--- /dev/null
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -0,0 +1,455 @@
+//! Implementation of incremental re-parsing.
+//!
+//! We use two simple strategies for this:
+//!   - if the edit modifies only a single token (like changing an identifier's
+//!     letter), we replace only this token.
+//!   - otherwise, we search for the nearest `{}` block which contains the edit
+//!     and try to parse only this block.
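+//!
+//! A rough sketch of how this is driven (`file`, `ident_range` and `old_errors` are
+//! illustrative names; the real call sites live in higher layers):
+//!
+//! ```no-run
+//! // Replace one identifier with another; this should hit the single-token path.
+//! let edit = Indel::replace(ident_range, "bar".to_string());
+//! if let Some((green, errors, reparsed_range)) = incremental_reparse(file.syntax(), &edit, old_errors) {
+//!     // `reparsed_range` covers only the token (or `{}` block) that was re-parsed.
+//! }
+//! ```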
+
+use parser::Reparser;
+use text_edit::Indel;
+
+use crate::{
+    algo,
+    parsing::{
+        lexer::{lex_single_syntax_kind, tokenize, Token},
+        text_token_source::TextTokenSource,
+        text_tree_sink::TextTreeSink,
+    },
+    syntax_node::{GreenNode, GreenToken, NodeOrToken, SyntaxElement, SyntaxNode},
+    SyntaxError,
+    SyntaxKind::*,
+    TextRange, TextSize, T,
+};
+
+pub(crate) fn incremental_reparse(
+    node: &SyntaxNode,
+    edit: &Indel,
+    errors: Vec<SyntaxError>,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+    if let Some((green, new_errors, old_range)) = reparse_token(node, &edit) {
+        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+    }
+
+    if let Some((green, new_errors, old_range)) = reparse_block(node, &edit) {
+        return Some((green, merge_errors(errors, new_errors, old_range, edit), old_range));
+    }
+    None
+}
+
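+/// Strategy one: if the edit falls entirely inside a single "simple" token
+/// (whitespace, comment, identifier or [raw] string), relex just that token and
+/// splice the new token back into the green tree.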
+fn reparse_token<'node>(
+    root: &'node SyntaxNode,
+    edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+    let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
+    let prev_token_kind = prev_token.kind();
+    match prev_token_kind {
+        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
+            if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
+                // removing a newline may extend the previous token
+                let deleted_range = edit.delete - prev_token.text_range().start();
+                if prev_token.text()[deleted_range].contains('\n') {
+                    return None;
+                }
+            }
+
+            let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit);
+            let (new_token_kind, new_err) = lex_single_syntax_kind(&new_text)?;
+
+            if new_token_kind != prev_token_kind
+                || (new_token_kind == IDENT && is_contextual_kw(&new_text))
+            {
+                return None;
+            }
+
+            // Check that the edited token is not a part of a bigger token.
+            // E.g. if for the source code `bruh"str"` the user removed `ruh`, then
+            // `b` no longer remains an identifier, but becomes a part of a byte string literal.
+            if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) {
+                new_text.push(next_char);
+                let token_with_next_char = lex_single_syntax_kind(&new_text);
+                if let Some((_kind, _error)) = token_with_next_char {
+                    return None;
+                }
+                new_text.pop();
+            }
+
+            let new_token =
+                GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
+            Some((
+                prev_token.replace_with(new_token),
+                new_err.into_iter().collect(),
+                prev_token.text_range(),
+            ))
+        }
+        _ => None,
+    }
+}
+
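+/// Strategy two: find the smallest reparsable ancestor node covering the edit
+/// (typically a `{}`-delimited block), re-lex and re-parse only its text, and splice
+/// the resulting green node back into the tree.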
+fn reparse_block<'node>(
+    root: &'node SyntaxNode,
+    edit: &Indel,
+) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
+    let (node, reparser) = find_reparsable_node(root, edit.delete)?;
+    let text = get_text_after_edit(node.clone().into(), edit);
+
+    let (tokens, new_lexer_errors) = tokenize(&text);
+    if !is_balanced(&tokens) {
+        return None;
+    }
+
+    let mut token_source = TextTokenSource::new(&text, &tokens);
+    let mut tree_sink = TextTreeSink::new(&text, &tokens);
+    reparser.parse(&mut token_source, &mut tree_sink);
+
+    let (green, mut new_parser_errors) = tree_sink.finish();
+    new_parser_errors.extend(new_lexer_errors);
+
+    Some((node.replace_with(green), new_parser_errors, node.text_range()))
+}
+
+fn get_text_after_edit(element: SyntaxElement, edit: &Indel) -> String {
+    let edit = Indel::replace(edit.delete - element.text_range().start(), edit.insert.clone());
+
+    let mut text = match element {
+        NodeOrToken::Token(token) => token.text().to_string(),
+        NodeOrToken::Node(node) => node.text().to_string(),
+    };
+    edit.apply(&mut text);
+    text
+}
+
+fn is_contextual_kw(text: &str) -> bool {
+    matches!(text, "auto" | "default" | "union")
+}
+
+fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
+    let node = algo::find_covering_element(node, range);
+
+    let mut ancestors = match node {
+        NodeOrToken::Token(it) => it.parent().ancestors(),
+        NodeOrToken::Node(it) => it.ancestors(),
+    };
+    ancestors.find_map(|node| {
+        let first_child = node.first_child_or_token().map(|it| it.kind());
+        let parent = node.parent().map(|it| it.kind());
+        Reparser::for_node(node.kind(), first_child, parent).map(|r| (node, r))
+    })
+}
+
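+/// Checks that the tokens form a single braced block: they must start with `{`,
+/// end with `}`, and all braces in between must be balanced.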
+fn is_balanced(tokens: &[Token]) -> bool {
+    if tokens.is_empty()
+        || tokens.first().unwrap().kind != T!['{']
+        || tokens.last().unwrap().kind != T!['}']
+    {
+        return false;
+    }
+    let mut balance = 0usize;
+    for t in &tokens[1..tokens.len() - 1] {
+        match t.kind {
+            T!['{'] => balance += 1,
+            T!['}'] => {
+                balance = match balance.checked_sub(1) {
+                    Some(b) => b,
+                    None => return false,
+                }
+            }
+            _ => (),
+        }
+    }
+    balance == 0
+}
+
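+/// Merges pre-existing errors with the errors produced by the reparse:
+/// - old errors that end before the reparsed range are kept as-is,
+/// - old errors that start after the reparsed range are shifted by the edit's length
+///   delta (inserted length minus deleted length),
+/// - old errors inside the reparsed range are dropped, and the freshly produced
+///   `new_errors` are offset to the start of that range instead.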
+fn merge_errors(
+    old_errors: Vec<SyntaxError>,
+    new_errors: Vec<SyntaxError>,
+    range_before_reparse: TextRange,
+    edit: &Indel,
+) -> Vec<SyntaxError> {
+    let mut res = Vec::new();
+
+    for old_err in old_errors {
+        let old_err_range = old_err.range();
+        if old_err_range.end() <= range_before_reparse.start() {
+            res.push(old_err);
+        } else if old_err_range.start() >= range_before_reparse.end() {
+            let inserted_len = TextSize::of(&edit.insert);
+            res.push(old_err.with_range((old_err_range + inserted_len) - edit.delete.len()));
+            // Note: extra parens are intentional to prevent uint underflow, HWAB (here was a bug)
+        }
+    }
+    res.extend(new_errors.into_iter().map(|new_err| {
+        // fighting borrow checker with a variable ;)
+        let offseted_range = new_err.range() + range_before_reparse.start();
+        new_err.with_range(offseted_range)
+    }));
+    res
+}
+
+#[cfg(test)]
+mod tests {
+    use test_utils::{assert_eq_text, extract_range};
+
+    use super::*;
+    use crate::{AstNode, Parse, SourceFile};
+
+    fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
+        let (range, before) = extract_range(before);
+        let edit = Indel::replace(range, replace_with.to_owned());
+        let after = {
+            let mut after = before.clone();
+            edit.apply(&mut after);
+            after
+        };
+
+        let fully_reparsed = SourceFile::parse(&after);
+        let incrementally_reparsed: Parse<SourceFile> = {
+            let before = SourceFile::parse(&before);
+            let (green, new_errors, range) =
+                incremental_reparse(before.tree().syntax(), &edit, before.errors.to_vec()).unwrap();
+            assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
+            Parse::new(green, new_errors)
+        };
+
+        assert_eq_text!(
+            &format!("{:#?}", fully_reparsed.tree().syntax()),
+            &format!("{:#?}", incrementally_reparsed.tree().syntax()),
+        );
+        assert_eq!(fully_reparsed.errors(), incrementally_reparsed.errors());
+    }
+
+    #[test] // FIXME: some test here actually test token reparsing
+    fn reparse_block_tests() {
+        do_check(
+            r"
+fn foo() {
+    let x = foo + <|>bar<|>
+}
+",
+            "baz",
+            3,
+        );
+        do_check(
+            r"
+fn foo() {
+    let x = foo<|> + bar<|>
+}
+",
+            "baz",
+            25,
+        );
+        do_check(
+            r"
+struct Foo {
+    f: foo<|><|>
+}
+",
+            ",\n    g: (),",
+            14,
+        );
+        do_check(
+            r"
+fn foo {
+    let;
+    1 + 1;
+    <|>92<|>;
+}
+",
+            "62",
+            31, // FIXME: reparse only int literal here
+        );
+        do_check(
+            r"
+mod foo {
+    fn <|><|>
+}
+",
+            "bar",
+            11,
+        );
+
+        do_check(
+            r"
+trait Foo {
+    type <|>Foo<|>;
+}
+",
+            "Output",
+            3,
+        );
+        do_check(
+            r"
+impl IntoIterator<Item=i32> for Foo {
+    f<|><|>
+}
+",
+            "n next(",
+            9,
+        );
+        do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10);
+        do_check(
+            r"
+pub enum A {
+    Foo<|><|>
+}
+",
+            "\nBar;\n",
+            11,
+        );
+        do_check(
+            r"
+foo!{a, b<|><|> d}
+",
+            ", c[3]",
+            8,
+        );
+        do_check(
+            r"
+fn foo() {
+    vec![<|><|>]
+}
+",
+            "123",
+            14,
+        );
+        do_check(
+            r"
+extern {
+    fn<|>;<|>
+}
+",
+            " exit(code: c_int)",
+            11,
+        );
+    }
+
+    #[test]
+    fn reparse_token_tests() {
+        do_check(
+            r"<|><|>
+fn foo() -> i32 { 1 }
+",
+            "\n\n\n   \n",
+            1,
+        );
+        do_check(
+            r"
+fn foo() -> <|><|> {}
+",
+            "  \n",
+            2,
+        );
+        do_check(
+            r"
+fn <|>foo<|>() -> i32 { 1 }
+",
+            "bar",
+            3,
+        );
+        do_check(
+            r"
+fn foo<|><|>foo() {  }
+",
+            "bar",
+            6,
+        );
+        do_check(
+            r"
+fn foo /* <|><|> */ () {}
+",
+            "some comment",
+            6,
+        );
+        do_check(
+            r"
+fn baz <|><|> () {}
+",
+            "    \t\t\n\n",
+            2,
+        );
+        do_check(
+            r"
+fn baz <|><|> () {}
+",
+            "    \t\t\n\n",
+            2,
+        );
+        do_check(
+            r"
+/// foo <|><|>omment
+mod { }
+",
+            "c",
+            14,
+        );
+        do_check(
+            r#"
+fn -> &str { "Hello<|><|>" }
+"#,
+            ", world",
+            7,
+        );
+        do_check(
+            r#"
+fn -> &str { // "Hello<|><|>"
+"#,
+            ", world",
+            10,
+        );
+        do_check(
+            r##"
+fn -> &str { r#"Hello<|><|>"#
+"##,
+            ", world",
+            10,
+        );
+        do_check(
+            r"
+#[derive(<|>Copy<|>)]
+enum Foo {
+
+}
+",
+            "Clone",
+            4,
+        );
+    }
+
+    #[test]
+    fn reparse_str_token_with_error_unchanged() {
+        do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24);
+    }
+
+    #[test]
+    fn reparse_str_token_with_error_fixed() {
+        do_check(r#""unterinated<|><|>"#, "\"", 12);
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_unchanged() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            "23",
+            105,
+        )
+    }
+
+    #[test]
+    fn reparse_block_with_error_in_middle_fixed() {
+        do_check(
+            r#"fn main() {
+                if {}
+                32 + 4<|><|>
+                return
+                if {}
+            }"#,
+            ";",
+            105,
+        )
+    }
+}
diff --git a/crates/syntax/src/parsing/text_token_source.rs b/crates/syntax/src/parsing/text_token_source.rs
new file mode 100644
index 00000000000..df866dc2b75
--- /dev/null
+++ b/crates/syntax/src/parsing/text_token_source.rs
@@ -0,0 +1,84 @@
+//! See `TextTokenSource` docs.
+
+use parser::TokenSource;
+
+use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize};
+
+/// Implementation of `parser::TokenSource` that takes tokens from source code text.
+pub(crate) struct TextTokenSource<'t> {
+    text: &'t str,
+    /// token and its start position (non-whitespace/comment tokens)
+    /// ```non-rust
+    ///  struct Foo;
+    ///  ^------^--^-
+    ///  |      |    \________
+    ///  |      \____         \
+    ///  |           \         |
+    ///  (struct, 0) (Foo, 7) (;, 10)
+    /// ```
+    /// `[(struct, 0), (Foo, 7), (;, 10)]`
+    token_offset_pairs: Vec<(Token, TextSize)>,
+
+    /// Current token and position
+    curr: (parser::Token, usize),
+}
+
+impl<'t> TokenSource for TextTokenSource<'t> {
+    fn current(&self) -> parser::Token {
+        self.curr.0
+    }
+
+    fn lookahead_nth(&self, n: usize) -> parser::Token {
+        mk_token(self.curr.1 + n, &self.token_offset_pairs)
+    }
+
+    fn bump(&mut self) {
+        if self.curr.0.kind == EOF {
+            return;
+        }
+
+        let pos = self.curr.1 + 1;
+        self.curr = (mk_token(pos, &self.token_offset_pairs), pos);
+    }
+
+    fn is_keyword(&self, kw: &str) -> bool {
+        self.token_offset_pairs
+            .get(self.curr.1)
+            .map(|(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw)
+            .unwrap_or(false)
+    }
+}
+
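+/// Builds the `parser::Token` at position `pos`; the token is "jointed" to the next
+/// one when the following significant token starts immediately after it, i.e. with
+/// no trivia in between.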
+fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Token {
+    let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) {
+        Some((token, offset)) => (
+            token.kind,
+            token_offset_pairs
+                .get(pos + 1)
+                .map(|(_, next_offset)| offset + token.len == *next_offset)
+                .unwrap_or(false),
+        ),
+        None => (EOF, false),
+    };
+    parser::Token { kind, is_jointed_to_next }
+}
+
+impl<'t> TextTokenSource<'t> {
+    /// Generates the parser input from raw tokens (excluding comments and whitespace).
+    pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> {
+        let token_offset_pairs: Vec<_> = raw_tokens
+            .iter()
+            .filter_map({
+                let mut len = 0.into();
+                move |token| {
+                    let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) };
+                    len += token.len;
+                    pair
+                }
+            })
+            .collect();
+
+        let first = mk_token(0, &token_offset_pairs);
+        TextTokenSource { text, token_offset_pairs, curr: (first, 0) }
+    }
+}
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
new file mode 100644
index 00000000000..c1b5f246d11
--- /dev/null
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -0,0 +1,183 @@
+//! See `TextTreeSink` docs.
+
+use std::mem;
+
+use parser::{ParseError, TreeSink};
+
+use crate::{
+    parsing::Token,
+    syntax_node::GreenNode,
+    SmolStr, SyntaxError,
+    SyntaxKind::{self, *},
+    SyntaxTreeBuilder, TextRange, TextSize,
+};
+
+/// Bridges the parser with our specific syntax tree representation.
+///
+/// `TextTreeSink` also handles attachment of trivia (whitespace and comments) to nodes.
+pub(crate) struct TextTreeSink<'a> {
+    text: &'a str,
+    tokens: &'a [Token],
+    text_pos: TextSize,
+    token_pos: usize,
+    state: State,
+    inner: SyntaxTreeBuilder,
+}
+
+enum State {
+    PendingStart,
+    Normal,
+    PendingFinish,
+}
+
+impl<'a> TreeSink for TextTreeSink<'a> {
+    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingStart => unreachable!(),
+            State::PendingFinish => self.inner.finish_node(),
+            State::Normal => (),
+        }
+        self.eat_trivias();
+        let n_tokens = n_tokens as usize;
+        let len = self.tokens[self.token_pos..self.token_pos + n_tokens]
+            .iter()
+            .map(|it| it.len)
+            .sum::<TextSize>();
+        self.do_token(kind, len, n_tokens);
+    }
+
+    fn start_node(&mut self, kind: SyntaxKind) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingStart => {
+                self.inner.start_node(kind);
+                // No need to attach trivias to previous node: there is no
+                // previous node.
+                return;
+            }
+            State::PendingFinish => self.inner.finish_node(),
+            State::Normal => (),
+        }
+
+        let n_trivias =
+            self.tokens[self.token_pos..].iter().take_while(|it| it.kind.is_trivia()).count();
+        let leading_trivias = &self.tokens[self.token_pos..self.token_pos + n_trivias];
+        let mut trivia_end =
+            self.text_pos + leading_trivias.iter().map(|it| it.len).sum::<TextSize>();
+
+        let n_attached_trivias = {
+            let leading_trivias = leading_trivias.iter().rev().map(|it| {
+                let next_end = trivia_end - it.len;
+                let range = TextRange::new(next_end, trivia_end);
+                trivia_end = next_end;
+                (it.kind, &self.text[range])
+            });
+            n_attached_trivias(kind, leading_trivias)
+        };
+        self.eat_n_trivias(n_trivias - n_attached_trivias);
+        self.inner.start_node(kind);
+        self.eat_n_trivias(n_attached_trivias);
+    }
+
+    fn finish_node(&mut self) {
+        match mem::replace(&mut self.state, State::PendingFinish) {
+            State::PendingStart => unreachable!(),
+            State::PendingFinish => self.inner.finish_node(),
+            State::Normal => (),
+        }
+    }
+
+    fn error(&mut self, error: ParseError) {
+        self.inner.error(error, self.text_pos)
+    }
+}
+
+impl<'a> TextTreeSink<'a> {
+    pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self {
+        Self {
+            text,
+            tokens,
+            text_pos: 0.into(),
+            token_pos: 0,
+            state: State::PendingStart,
+            inner: SyntaxTreeBuilder::default(),
+        }
+    }
+
+    pub(super) fn finish(mut self) -> (GreenNode, Vec<SyntaxError>) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingFinish => {
+                self.eat_trivias();
+                self.inner.finish_node()
+            }
+            State::PendingStart | State::Normal => unreachable!(),
+        }
+
+        self.inner.finish_raw()
+    }
+
+    fn eat_trivias(&mut self) {
+        while let Some(&token) = self.tokens.get(self.token_pos) {
+            if !token.kind.is_trivia() {
+                break;
+            }
+            self.do_token(token.kind, token.len, 1);
+        }
+    }
+
+    fn eat_n_trivias(&mut self, n: usize) {
+        for _ in 0..n {
+            let token = self.tokens[self.token_pos];
+            assert!(token.kind.is_trivia());
+            self.do_token(token.kind, token.len, 1);
+        }
+    }
+
+    fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
+        let range = TextRange::at(self.text_pos, len);
+        let text: SmolStr = self.text[range].into();
+        self.text_pos += len;
+        self.token_pos += n_tokens;
+        self.inner.token(kind, text);
+    }
+}
+
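+/// Computes how many of the leading trivia tokens should be attached to (nested
+/// inside) the node that is being started, rather than left in its parent. This is
+/// what makes comments and doc comments "belong" to the item that follows them.
+/// `trivias` is expected in reverse order, closest-to-the-node first.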
+fn n_attached_trivias<'a>(
+    kind: SyntaxKind,
+    trivias: impl Iterator<Item = (SyntaxKind, &'a str)>,
+) -> usize {
+    match kind {
+        MACRO_CALL | CONST | TYPE_ALIAS | STRUCT | ENUM | VARIANT | FN | TRAIT | MODULE
+        | RECORD_FIELD | STATIC => {
+            let mut res = 0;
+            let mut trivias = trivias.enumerate().peekable();
+
+            while let Some((i, (kind, text))) = trivias.next() {
+                match kind {
+                    WHITESPACE => {
+                        if text.contains("\n\n") {
+                            // we check whether the next token is a doc-comment
+                            // and skip the whitespace in this case
+                            if let Some((peek_kind, peek_text)) =
+                                trivias.peek().map(|(_, pair)| pair)
+                            {
+                                if *peek_kind == COMMENT
+                                    && peek_text.starts_with("///")
+                                    && !peek_text.starts_with("////")
+                                {
+                                    continue;
+                                }
+                            }
+                            break;
+                        }
+                    }
+                    COMMENT => {
+                        res = i + 1;
+                    }
+                    _ => (),
+                }
+            }
+            res
+        }
+        _ => 0,
+    }
+}
diff --git a/crates/syntax/src/ptr.rs b/crates/syntax/src/ptr.rs
new file mode 100644
index 00000000000..ca795774713
--- /dev/null
+++ b/crates/syntax/src/ptr.rs
@@ -0,0 +1,105 @@
+//! See `SyntaxNodePtr` and `AstPtr` docs.
+
+use std::{
+    hash::{Hash, Hasher},
+    iter::successors,
+    marker::PhantomData,
+};
+
+use crate::{AstNode, SyntaxKind, SyntaxNode, TextRange};
+
+/// A pointer to a syntax node inside a file. It can be used to remember a
+/// specific node across reparses of the same file.
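+///
+/// A round-trip sketch (see `test_local_syntax_ptr` at the bottom of this file for a
+/// concrete version):
+///
+/// ```no-run
+/// let ptr = SyntaxNodePtr::new(&node);
+/// // `root` must be the root of the tree `node` belongs to, or a tree reparsed from
+/// // identical text.
+/// let same_node = ptr.to_node(&root);
+/// ```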
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxNodePtr {
+    pub(crate) range: TextRange,
+    kind: SyntaxKind,
+}
+
+impl SyntaxNodePtr {
+    pub fn new(node: &SyntaxNode) -> SyntaxNodePtr {
+        SyntaxNodePtr { range: node.text_range(), kind: node.kind() }
+    }
+
+    pub fn to_node(&self, root: &SyntaxNode) -> SyntaxNode {
+        assert!(root.parent().is_none());
+        successors(Some(root.clone()), |node| {
+            node.children().find(|it| it.text_range().contains_range(self.range))
+        })
+        .find(|it| it.text_range() == self.range && it.kind() == self.kind)
+        .unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
+    }
+
+    pub fn cast<N: AstNode>(self) -> Option<AstPtr<N>> {
+        if !N::can_cast(self.kind) {
+            return None;
+        }
+        Some(AstPtr { raw: self, _ty: PhantomData })
+    }
+}
+
+/// Like `SyntaxNodePtr`, but remembers the type of node
+#[derive(Debug)]
+pub struct AstPtr<N: AstNode> {
+    raw: SyntaxNodePtr,
+    _ty: PhantomData<fn() -> N>,
+}
+
+impl<N: AstNode> Clone for AstPtr<N> {
+    fn clone(&self) -> AstPtr<N> {
+        AstPtr { raw: self.raw.clone(), _ty: PhantomData }
+    }
+}
+
+impl<N: AstNode> Eq for AstPtr<N> {}
+
+impl<N: AstNode> PartialEq for AstPtr<N> {
+    fn eq(&self, other: &AstPtr<N>) -> bool {
+        self.raw == other.raw
+    }
+}
+
+impl<N: AstNode> Hash for AstPtr<N> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.raw.hash(state)
+    }
+}
+
+impl<N: AstNode> AstPtr<N> {
+    pub fn new(node: &N) -> AstPtr<N> {
+        AstPtr { raw: SyntaxNodePtr::new(node.syntax()), _ty: PhantomData }
+    }
+
+    pub fn to_node(&self, root: &SyntaxNode) -> N {
+        let syntax_node = self.raw.to_node(root);
+        N::cast(syntax_node).unwrap()
+    }
+
+    pub fn syntax_node_ptr(&self) -> SyntaxNodePtr {
+        self.raw.clone()
+    }
+
+    pub fn cast<U: AstNode>(self) -> Option<AstPtr<U>> {
+        if !U::can_cast(self.raw.kind) {
+            return None;
+        }
+        Some(AstPtr { raw: self.raw, _ty: PhantomData })
+    }
+}
+
+impl<N: AstNode> From<AstPtr<N>> for SyntaxNodePtr {
+    fn from(ptr: AstPtr<N>) -> SyntaxNodePtr {
+        ptr.raw
+    }
+}
+
+#[test]
+fn test_local_syntax_ptr() {
+    use crate::{ast, AstNode, SourceFile};
+
+    let file = SourceFile::parse("struct Foo { f: u32, }").ok().unwrap();
+    let field = file.syntax().descendants().find_map(ast::RecordField::cast).unwrap();
+    let ptr = SyntaxNodePtr::new(field.syntax());
+    let field_syntax = ptr.to_node(file.syntax());
+    assert_eq!(field.syntax(), &field_syntax);
+}
diff --git a/crates/syntax/src/syntax_error.rs b/crates/syntax/src/syntax_error.rs
new file mode 100644
index 00000000000..7c4511fece0
--- /dev/null
+++ b/crates/syntax/src/syntax_error.rs
@@ -0,0 +1,44 @@
+//! See docs for `SyntaxError`.
+
+use std::fmt;
+
+use crate::{TextRange, TextSize};
+
+/// Represents the result of unsuccessful tokenization, parsing
+/// or tree validation.
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SyntaxError(String, TextRange);
+
+// FIXME: there used to be an unused `SyntaxErrorKind::InvalidMatchInnerAttr` variant.
+// It was introduced in this PR: https://github.com/rust-analyzer/rust-analyzer/pull/846/files#diff-827da9b03b8f9faa1bade5cdd44d5dafR95
+// and, by mistake, was never removed before the whole enum went away.
+//
+// So, we still need to find a place to put validation for attributes in match clauses.
+// Code before refactor:
+// InvalidMatchInnerAttr => {
+//    write!(f, "Inner attributes are only allowed directly after the opening brace of the match expression")
+// }
+
+impl SyntaxError {
+    pub fn new(message: impl Into<String>, range: TextRange) -> Self {
+        Self(message.into(), range)
+    }
+    pub fn new_at_offset(message: impl Into<String>, offset: TextSize) -> Self {
+        Self(message.into(), TextRange::empty(offset))
+    }
+
+    pub fn range(&self) -> TextRange {
+        self.1
+    }
+
+    pub fn with_range(mut self, range: TextRange) -> Self {
+        self.1 = range;
+        self
+    }
+}
+
+impl fmt::Display for SyntaxError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
new file mode 100644
index 00000000000..b2abcbfbb36
--- /dev/null
+++ b/crates/syntax/src/syntax_node.rs
@@ -0,0 +1,77 @@
+//! This module defines Concrete Syntax Tree (CST), used by rust-analyzer.
+//!
+//! The CST includes comments and whitespace, provides a single node type,
+//! `SyntaxNode`, and a basic traversal API (parent, children, siblings).
+//!
+//! The *real* implementation is in the (language-agnostic) `rowan` crate, this
+//! module just wraps its API.
+
+use rowan::{GreenNodeBuilder, Language};
+
+use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
+
+pub use rowan::GreenNode;
+
+pub(crate) use rowan::GreenToken;
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+    type Kind = SyntaxKind;
+
+    fn kind_from_raw(raw: rowan::SyntaxKind) -> SyntaxKind {
+        SyntaxKind::from(raw.0)
+    }
+
+    fn kind_to_raw(kind: SyntaxKind) -> rowan::SyntaxKind {
+        rowan::SyntaxKind(kind.into())
+    }
+}
+
+pub type SyntaxNode = rowan::SyntaxNode<RustLanguage>;
+pub type SyntaxToken = rowan::SyntaxToken<RustLanguage>;
+pub type SyntaxElement = rowan::SyntaxElement<RustLanguage>;
+pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren<RustLanguage>;
+pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>;
+
+pub use rowan::{Direction, NodeOrToken};
+
+#[derive(Default)]
+pub struct SyntaxTreeBuilder {
+    errors: Vec<SyntaxError>,
+    inner: GreenNodeBuilder<'static>,
+}
+
+impl SyntaxTreeBuilder {
+    pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) {
+        let green = self.inner.finish();
+        (green, self.errors)
+    }
+
+    pub fn finish(self) -> Parse<SyntaxNode> {
+        let (green, errors) = self.finish_raw();
+        if cfg!(debug_assertions) {
+            let node = SyntaxNode::new_root(green.clone());
+            crate::validation::validate_block_structure(&node);
+        }
+        Parse::new(green, errors)
+    }
+
+    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+        let kind = RustLanguage::kind_to_raw(kind);
+        self.inner.token(kind, text)
+    }
+
+    pub fn start_node(&mut self, kind: SyntaxKind) {
+        let kind = RustLanguage::kind_to_raw(kind);
+        self.inner.start_node(kind)
+    }
+
+    pub fn finish_node(&mut self) {
+        self.inner.finish_node()
+    }
+
+    pub fn error(&mut self, error: parser::ParseError, text_pos: TextSize) {
+        self.errors.push(SyntaxError::new_at_offset(*error.0, text_pos))
+    }
+}
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
new file mode 100644
index 00000000000..ddc71836940
--- /dev/null
+++ b/crates/syntax/src/tests.rs
@@ -0,0 +1,280 @@
+use std::{
+    fmt::Write,
+    fs,
+    path::{Path, PathBuf},
+};
+
+use expect::expect_file;
+use rayon::prelude::*;
+use test_utils::project_dir;
+
+use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token};
+
+#[test]
+fn lexer_tests() {
+    // FIXME:
+    // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals
+    // * Add tests for unescape errors
+
+    dir_tests(&test_data_dir(), &["lexer/ok"], "txt", |text, path| {
+        let (tokens, errors) = tokenize(text);
+        assert_errors_are_absent(&errors, path);
+        dump_tokens_and_errors(&tokens, &errors, text)
+    });
+    dir_tests(&test_data_dir(), &["lexer/err"], "txt", |text, path| {
+        let (tokens, errors) = tokenize(text);
+        assert_errors_are_present(&errors, path);
+        dump_tokens_and_errors(&tokens, &errors, text)
+    });
+}
+
+#[test]
+fn parse_smoke_test() {
+    let code = r##"
+fn main() {
+    println!("Hello, world!")
+}
+    "##;
+
+    let parse = SourceFile::parse(code);
+    // eprintln!("{:#?}", parse.syntax_node());
+    assert!(parse.ok().is_ok());
+}
+
+#[test]
+fn parser_tests() {
+    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
+        let parse = SourceFile::parse(text);
+        let errors = parse.errors();
+        assert_errors_are_absent(&errors, path);
+        parse.debug_dump()
+    });
+    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], "rast", |text, path| {
+        let parse = SourceFile::parse(text);
+        let errors = parse.errors();
+        assert_errors_are_present(&errors, path);
+        parse.debug_dump()
+    });
+}
+
+#[test]
+fn expr_parser_tests() {
+    fragment_parser_dir_test(
+        &["parser/fragments/expr/ok"],
+        &["parser/fragments/expr/err"],
+        crate::ast::Expr::parse,
+    );
+}
+
+#[test]
+fn path_parser_tests() {
+    fragment_parser_dir_test(
+        &["parser/fragments/path/ok"],
+        &["parser/fragments/path/err"],
+        crate::ast::Path::parse,
+    );
+}
+
+#[test]
+fn pattern_parser_tests() {
+    fragment_parser_dir_test(
+        &["parser/fragments/pattern/ok"],
+        &["parser/fragments/pattern/err"],
+        crate::ast::Pat::parse,
+    );
+}
+
+#[test]
+fn item_parser_tests() {
+    fragment_parser_dir_test(
+        &["parser/fragments/item/ok"],
+        &["parser/fragments/item/err"],
+        crate::ast::Item::parse,
+    );
+}
+
+#[test]
+fn type_parser_tests() {
+    fragment_parser_dir_test(
+        &["parser/fragments/type/ok"],
+        &["parser/fragments/type/err"],
+        crate::ast::Type::parse,
+    );
+}
+
+#[test]
+fn parser_fuzz_tests() {
+    for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
+        fuzz::check_parser(&text)
+    }
+}
+
+#[test]
+fn reparse_fuzz_tests() {
+    for (_, text) in collect_rust_files(&test_data_dir(), &["reparse/fuzz-failures"]) {
+        let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
+        println!("{:?}", check);
+        check.run();
+    }
+}
+
+/// Test that rust-analyzer can parse and validate its own code base.
+/// FIXME: Use this as a benchmark
+#[test]
+fn self_hosting_parsing() {
+    let dir = project_dir().join("crates");
+    let files = walkdir::WalkDir::new(dir)
+        .into_iter()
+        .filter_entry(|entry| {
+            // Get all files which are not in the crates/syntax/test_data folder
+            !entry.path().components().any(|component| component.as_os_str() == "test_data")
+        })
+        .map(|e| e.unwrap())
+        .filter(|entry| {
+            // Get all `.rs` files
+            !entry.path().is_dir() && (entry.path().extension().unwrap_or_default() == "rs")
+        })
+        .map(|entry| entry.into_path())
+        .collect::<Vec<_>>();
+    assert!(
+        files.len() > 100,
+        "self_hosting_parsing found too few files - is it running in the right directory?"
+    );
+
+    let errors = files
+        .into_par_iter()
+        .filter_map(|file| {
+            let text = read_text(&file);
+            match SourceFile::parse(&text).ok() {
+                Ok(_) => None,
+                Err(err) => Some((file, err)),
+            }
+        })
+        .collect::<Vec<_>>();
+
+    if !errors.is_empty() {
+        let errors = errors
+            .into_iter()
+            .map(|(path, err)| format!("{}: {:?}\n", path.display(), err))
+            .collect::<String>();
+        panic!("Parsing errors:\n{}\n", errors);
+    }
+}
+
+fn test_data_dir() -> PathBuf {
+    project_dir().join("crates/syntax/test_data")
+}
+
+fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
+    assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+}
+fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
+    assert_eq!(
+        errors,
+        &[] as &[SyntaxError],
+        "There should be no errors in the file {:?}",
+        path.display(),
+    );
+}
+
+fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
+    let mut acc = String::new();
+    let mut offset: TextSize = 0.into();
+    for token in tokens {
+        let token_len = token.len;
+        let token_text = &text[TextRange::at(offset, token.len)];
+        offset += token.len;
+        writeln!(acc, "{:?} {:?} {:?}", token.kind, token_len, token_text).unwrap();
+    }
+    for err in errors {
+        writeln!(acc, "> error{:?} token({:?}) msg({})", err.range(), &text[err.range()], err)
+            .unwrap();
+    }
+    acc
+}
+
+fn fragment_parser_dir_test<T, F>(ok_paths: &[&str], err_paths: &[&str], f: F)
+where
+    T: crate::AstNode,
+    F: Fn(&str) -> Result<T, ()>,
+{
+    dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| {
+        if let Ok(node) = f(text) {
+            format!("{:#?}", crate::ast::AstNode::syntax(&node))
+        } else {
+            panic!("Failed to parse '{:?}'", path);
+        }
+    });
+    dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
+        if let Ok(_) = f(text) {
+            panic!("'{:?}' successfully parsed when it should have errored", path);
+        } else {
+            "ERROR\n".to_owned()
+        }
+    });
+}
+
+/// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
+/// subdirectories defined by `paths`.
+///
+/// If the content of the matching output file differs from the output of `f()`
+/// the test will fail.
+///
+/// If there is no matching output file it will be created and filled with the
+/// output of `f()`, but the test will fail.
+fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], outfile_extension: &str, f: F)
+where
+    F: Fn(&str, &Path) -> String,
+{
+    for (path, input_code) in collect_rust_files(test_data_dir, paths) {
+        let actual = f(&input_code, &path);
+        let path = path.with_extension(outfile_extension);
+        expect_file![path].assert_eq(&actual)
+    }
+}
+
+/// Collects all `.rs` files from `dir` subdirectories defined by `paths`.
+fn collect_rust_files(root_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> {
+    paths
+        .iter()
+        .flat_map(|path| {
+            let path = root_dir.to_owned().join(path);
+            rust_files_in_dir(&path).into_iter()
+        })
+        .map(|path| {
+            let text = read_text(&path);
+            (path, text)
+        })
+        .collect()
+}
+
+/// Collects paths to all `.rs` files from `dir` in a sorted `Vec<PathBuf>`.
+fn rust_files_in_dir(dir: &Path) -> Vec<PathBuf> {
+    let mut acc = Vec::new();
+    for file in fs::read_dir(&dir).unwrap() {
+        let file = file.unwrap();
+        let path = file.path();
+        if path.extension().unwrap_or_default() == "rs" {
+            acc.push(path);
+        }
+    }
+    acc.sort();
+    acc
+}
+
+/// Read file and normalize newlines.
+///
+/// `rustc` seems to always normalize `\r\n` newlines to `\n`:
+///
+/// ```
+/// let s = "
+/// ";
+/// assert_eq!(s.as_bytes(), &[10]);
+/// ```
+///
+/// so this should always be correct.
+fn read_text(path: &Path) -> String {
+    fs::read_to_string(path)
+        .unwrap_or_else(|_| panic!("File at {:?} should be valid", path))
+        .replace("\r\n", "\n")
+}
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
new file mode 100644
index 00000000000..2dddaf09a40
--- /dev/null
+++ b/crates/syntax/src/validation.rs
@@ -0,0 +1,303 @@
+//! Syntax tree validation: checks that are easier to perform on a complete tree than
+//! during parsing, e.g. literal escape sequences, visibility placement and path keywords.
+
+mod block;
+
+use crate::{
+    ast, match_ast, AstNode, SyntaxError,
+    SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST, FN, INT_NUMBER, STRING, TYPE_ALIAS},
+    SyntaxNode, SyntaxToken, TextSize, T,
+};
+use rustc_lexer::unescape::{
+    self, unescape_byte, unescape_byte_literal, unescape_char, unescape_literal, Mode,
+};
+use std::convert::TryFrom;
+
+fn rustc_unescape_error_to_string(err: unescape::EscapeError) -> &'static str {
+    use unescape::EscapeError as EE;
+
+    #[rustfmt::skip]
+    let err_message = match err {
+        EE::ZeroChars => {
+            "Literal must not be empty"
+        }
+        EE::MoreThanOneChar => {
+            "Literal must be one character long"
+        }
+        EE::LoneSlash => {
+            "Character must be escaped: `\\`"
+        }
+        EE::InvalidEscape => {
+            "Invalid escape"
+        }
+        EE::BareCarriageReturn | EE::BareCarriageReturnInRawString => {
+            "Character must be escaped: `\r`"
+        }
+        EE::EscapeOnlyChar => {
+            "Escape character `\\` must be escaped itself"
+        }
+        EE::TooShortHexEscape => {
+            "ASCII hex escape code must have exactly two digits"
+        }
+        EE::InvalidCharInHexEscape => {
+            "ASCII hex escape code must contain only hex characters"
+        }
+        EE::OutOfRangeHexEscape => {
+            "ASCII hex escape code must be at most 0x7F"
+        }
+        EE::NoBraceInUnicodeEscape => {
+            "Missing `{` to begin the unicode escape"
+        }
+        EE::InvalidCharInUnicodeEscape => {
+            "Unicode escape must contain only hex characters and underscores"
+        }
+        EE::EmptyUnicodeEscape => {
+            "Unicode escape must not be empty"
+        }
+        EE::UnclosedUnicodeEscape => {
+            "Missing `}` to terminate the unicode escape"
+        }
+        EE::LeadingUnderscoreUnicodeEscape => {
+            "Unicode escape code must not begin with an underscore"
+        }
+        EE::OverlongUnicodeEscape => {
+            "Unicode escape code must have at most 6 digits"
+        }
+        EE::LoneSurrogateUnicodeEscape => {
+            "Unicode escape code must not be a surrogate"
+        }
+        EE::OutOfRangeUnicodeEscape => {
+            "Unicode escape code must be at most 0x10FFFF"
+        }
+        EE::UnicodeEscapeInByte => {
+            "Byte literals must not contain unicode escapes"
+        }
+        EE::NonAsciiCharInByte | EE::NonAsciiCharInByteString => {
+            "Byte literals must not contain non-ASCII characters"
+        }
+    };
+
+    err_message
+}
+
+pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
+    // FIXME:
+    // * Add unescape validation of raw string literals and raw byte string literals
+    // * Add validation of doc comments are being attached to nodes
+
+    let mut errors = Vec::new();
+    for node in root.descendants() {
+        match_ast! {
+            match node {
+                ast::Literal(it) => validate_literal(it, &mut errors),
+                ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors),
+                ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors),
+                ast::RecordExprField(it) => validate_numeric_name(it.name_ref(), &mut errors),
+                ast::Visibility(it) => validate_visibility(it, &mut errors),
+                ast::RangeExpr(it) => validate_range_expr(it, &mut errors),
+                ast::PathSegment(it) => validate_path_keywords(it, &mut errors),
+                _ => (),
+            }
+        }
+    }
+    errors
+}
+
+fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
+    // FIXME: move this function to outer scope (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366196658)
+    fn unquote(text: &str, prefix_len: usize, end_delimiter: char) -> Option<&str> {
+        text.rfind(end_delimiter).and_then(|end| text.get(prefix_len..end))
+    }
+
+    let token = literal.token();
+    let text = token.text().as_str();
+
+    // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
+    let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
+        let off = token.text_range().start() + TextSize::try_from(off + prefix_len).unwrap();
+        acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
+    };
+
+    match token.kind() {
+        BYTE => {
+            if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
+                push_err(2, e);
+            }
+        }
+        CHAR => {
+            if let Some(Err(e)) = unquote(text, 1, '\'').map(unescape_char) {
+                push_err(1, e);
+            }
+        }
+        BYTE_STRING => {
+            if let Some(without_quotes) = unquote(text, 2, '"') {
+                unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
+                    if let Err(err) = char {
+                        push_err(2, (range.start, err));
+                    }
+                })
+            }
+        }
+        STRING => {
+            if let Some(without_quotes) = unquote(text, 1, '"') {
+                unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
+                    if let Err(err) = char {
+                        push_err(1, (range.start, err));
+                    }
+                })
+            }
+        }
+        _ => (),
+    }
+}
+
+pub(crate) fn validate_block_structure(root: &SyntaxNode) {
+    let mut stack = Vec::new();
+    for node in root.descendants() {
+        match node.kind() {
+            T!['{'] => stack.push(node),
+            T!['}'] => {
+                if let Some(pair) = stack.pop() {
+                    assert_eq!(
+                        node.parent(),
+                        pair.parent(),
+                        "\nunpaired curleys:\n{}\n{:#?}\n",
+                        root.text(),
+                        root,
+                    );
+                    assert!(
+                        node.next_sibling().is_none() && pair.prev_sibling().is_none(),
+                        "\nfloating curlys at {:?}\nfile:\n{}\nerror:\n{}\n",
+                        node,
+                        root.text(),
+                        node.text(),
+                    );
+                }
+            }
+            _ => (),
+        }
+    }
+}
+
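+/// Validates tuple (struct) field accesses such as `x.0`: the field "name" must be a
+/// plain decimal integer, so forms like `x.0u32` or `x.1_000` are rejected.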
+fn validate_numeric_name(name_ref: Option<ast::NameRef>, errors: &mut Vec<SyntaxError>) {
+    if let Some(int_token) = int_token(name_ref) {
+        if int_token.text().chars().any(|c| !c.is_digit(10)) {
+            errors.push(SyntaxError::new(
+                "Tuple (struct) field access is only allowed through \
+                decimal integers with no underscores or suffix",
+                int_token.text_range(),
+            ));
+        }
+    }
+
+    fn int_token(name_ref: Option<ast::NameRef>) -> Option<SyntaxToken> {
+        name_ref?.syntax().first_child_or_token()?.into_token().filter(|it| it.kind() == INT_NUMBER)
+    }
+}
+
+fn validate_visibility(vis: ast::Visibility, errors: &mut Vec<SyntaxError>) {
+    let parent = match vis.syntax().parent() {
+        Some(it) => it,
+        None => return,
+    };
+    match parent.kind() {
+        FN | CONST | TYPE_ALIAS => (),
+        _ => return,
+    }
+
+    let impl_def = match parent.parent().and_then(|it| it.parent()).and_then(ast::Impl::cast) {
+        Some(it) => it,
+        None => return,
+    };
+    if impl_def.trait_().is_some() {
+        errors.push(SyntaxError::new("Unnecessary visibility qualifier", vis.syntax.text_range()));
+    }
+}
+
+fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) {
+    if expr.op_kind() == Some(ast::RangeOp::Inclusive) && expr.end().is_none() {
+        errors.push(SyntaxError::new(
+            "An inclusive range must have an end expression",
+            expr.syntax().text_range(),
+        ));
+    }
+}
+
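+/// Validates the placement of the `self`, `crate` and `super` keywords in paths:
+/// `self` is only allowed as the first segment, `crate` additionally must not be
+/// preceded by a `use` tree prefix, and `super` may only be preceded by other
+/// `super` segments (including through `use` tree prefixes).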
+fn validate_path_keywords(segment: ast::PathSegment, errors: &mut Vec<SyntaxError>) {
+    use ast::PathSegmentKind;
+
+    let path = segment.parent_path();
+    let is_path_start = segment.coloncolon_token().is_none() && path.qualifier().is_none();
+
+    if let Some(token) = segment.self_token() {
+        if !is_path_start {
+            errors.push(SyntaxError::new(
+                "The `self` keyword is only allowed as the first segment of a path",
+                token.text_range(),
+            ));
+        }
+    } else if let Some(token) = segment.crate_token() {
+        if !is_path_start || use_prefix(path).is_some() {
+            errors.push(SyntaxError::new(
+                "The `crate` keyword is only allowed as the first segment of a path",
+                token.text_range(),
+            ));
+        }
+    } else if let Some(token) = segment.super_token() {
+        if !all_supers(&path) {
+            errors.push(SyntaxError::new(
+                "The `super` keyword may only be preceded by other `super`s",
+                token.text_range(),
+            ));
+            return;
+        }
+
+        let mut curr_path = path;
+        while let Some(prefix) = use_prefix(curr_path) {
+            if !all_supers(&prefix) {
+                errors.push(SyntaxError::new(
+                    "The `super` keyword may only be preceded by other `super`s",
+                    token.text_range(),
+                ));
+                return;
+            }
+            curr_path = prefix;
+        }
+    }
+
+    fn use_prefix(mut path: ast::Path) -> Option<ast::Path> {
+        for node in path.syntax().ancestors().skip(1) {
+            match_ast! {
+                match node {
+                    ast::UseTree(it) => if let Some(tree_path) = it.path() {
+                        // Even a top-level path exists within a `UseTree`, so we must
+                        // explicitly allow our own path but disallow anything else.
+                        if tree_path != path {
+                            return Some(tree_path);
+                        }
+                    },
+                    ast::UseTreeList(_it) => continue,
+                    ast::Path(parent) => path = parent,
+                    _ => return None,
+                }
+            };
+        }
+        return None;
+    }
+
+    fn all_supers(path: &ast::Path) -> bool {
+        let segment = match path.segment() {
+            Some(it) => it,
+            None => return false,
+        };
+
+        if segment.kind() != Some(PathSegmentKind::SuperKw) {
+            return false;
+        }
+
+        if let Some(ref subpath) = path.qualifier() {
+            return all_supers(subpath);
+        }
+
+        return true;
+    }
+}
diff --git a/crates/syntax/src/validation/block.rs b/crates/syntax/src/validation/block.rs
new file mode 100644
index 00000000000..ad990146885
--- /dev/null
+++ b/crates/syntax/src/validation/block.rs
@@ -0,0 +1,22 @@
+//! Logic for validating block expressions i.e. `ast::BlockExpr`.
+
+use crate::{
+    ast::{self, AstNode, AttrsOwner},
+    SyntaxError,
+    SyntaxKind::*,
+};
+
+pub(crate) fn validate_block_expr(block: ast::BlockExpr, errors: &mut Vec<SyntaxError>) {
+    if let Some(parent) = block.syntax().parent() {
+        match parent.kind() {
+            FN | EXPR_STMT | BLOCK_EXPR => return,
+            _ => {}
+        }
+    }
+    errors.extend(block.attrs().map(|attr| {
+        SyntaxError::new(
+            "A block in this position cannot accept inner attributes",
+            attr.syntax().text_range(),
+        )
+    }))
+}