From a1c187eef3ba08076aedb5154929f7eda8d1b424 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Wed, 12 Aug 2020 18:26:51 +0200 Subject: Rename ra_syntax -> syntax --- crates/syntax/src/algo.rs | 406 +++ crates/syntax/src/ast.rs | 331 ++ crates/syntax/src/ast/edit.rs | 642 ++++ crates/syntax/src/ast/expr_ext.rs | 418 +++ crates/syntax/src/ast/generated.rs | 41 + crates/syntax/src/ast/generated/nodes.rs | 4067 ++++++++++++++++++++++++ crates/syntax/src/ast/generated/tokens.rs | 91 + crates/syntax/src/ast/make.rs | 392 +++ crates/syntax/src/ast/node_ext.rs | 485 +++ crates/syntax/src/ast/token_ext.rs | 538 ++++ crates/syntax/src/ast/traits.rs | 141 + crates/syntax/src/fuzz.rs | 73 + crates/syntax/src/lib.rs | 388 +++ crates/syntax/src/parsing.rs | 59 + crates/syntax/src/parsing/lexer.rs | 244 ++ crates/syntax/src/parsing/reparsing.rs | 455 +++ crates/syntax/src/parsing/text_token_source.rs | 84 + crates/syntax/src/parsing/text_tree_sink.rs | 183 ++ crates/syntax/src/ptr.rs | 105 + crates/syntax/src/syntax_error.rs | 44 + crates/syntax/src/syntax_node.rs | 77 + crates/syntax/src/tests.rs | 280 ++ crates/syntax/src/validation.rs | 303 ++ crates/syntax/src/validation/block.rs | 22 + 24 files changed, 9869 insertions(+) create mode 100644 crates/syntax/src/algo.rs create mode 100644 crates/syntax/src/ast.rs create mode 100644 crates/syntax/src/ast/edit.rs create mode 100644 crates/syntax/src/ast/expr_ext.rs create mode 100644 crates/syntax/src/ast/generated.rs create mode 100644 crates/syntax/src/ast/generated/nodes.rs create mode 100644 crates/syntax/src/ast/generated/tokens.rs create mode 100644 crates/syntax/src/ast/make.rs create mode 100644 crates/syntax/src/ast/node_ext.rs create mode 100644 crates/syntax/src/ast/token_ext.rs create mode 100644 crates/syntax/src/ast/traits.rs create mode 100644 crates/syntax/src/fuzz.rs create mode 100644 crates/syntax/src/lib.rs create mode 100644 crates/syntax/src/parsing.rs create mode 100644 crates/syntax/src/parsing/lexer.rs 
create mode 100644 crates/syntax/src/parsing/reparsing.rs create mode 100644 crates/syntax/src/parsing/text_token_source.rs create mode 100644 crates/syntax/src/parsing/text_tree_sink.rs create mode 100644 crates/syntax/src/ptr.rs create mode 100644 crates/syntax/src/syntax_error.rs create mode 100644 crates/syntax/src/syntax_node.rs create mode 100644 crates/syntax/src/tests.rs create mode 100644 crates/syntax/src/validation.rs create mode 100644 crates/syntax/src/validation/block.rs (limited to 'crates/syntax/src') diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs new file mode 100644 index 00000000000..6254b38ba1c --- /dev/null +++ b/crates/syntax/src/algo.rs @@ -0,0 +1,406 @@ +//! FIXME: write short doc here + +use std::{ + fmt, + ops::{self, RangeInclusive}, +}; + +use itertools::Itertools; +use rustc_hash::FxHashMap; +use text_edit::TextEditBuilder; + +use crate::{ + AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr, + SyntaxToken, TextRange, TextSize, +}; + +/// Returns ancestors of the node at the offset, sorted by length. This should +/// do the right thing at an edge, e.g. when searching for expressions at `{ +/// <|>foo }` we will get the name reference instead of the whole block, which +/// we would get if we just did `find_token_at_offset(...).flat_map(|t| +/// t.parent().ancestors())`. +pub fn ancestors_at_offset( + node: &SyntaxNode, + offset: TextSize, +) -> impl Iterator { + node.token_at_offset(offset) + .map(|token| token.parent().ancestors()) + .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) +} + +/// Finds a node of specific Ast type at offset. Note that this is slightly +/// imprecise: if the cursor is strictly between two nodes of the desired type, +/// as in +/// +/// ```no-run +/// struct Foo {}|struct Bar; +/// ``` +/// +/// then the shorter node will be silently preferred. 
+pub fn find_node_at_offset(syntax: &SyntaxNode, offset: TextSize) -> Option { + ancestors_at_offset(syntax, offset).find_map(N::cast) +} + +pub fn find_node_at_range(syntax: &SyntaxNode, range: TextRange) -> Option { + find_covering_element(syntax, range).ancestors().find_map(N::cast) +} + +/// Skip to next non `trivia` token +pub fn skip_trivia_token(mut token: SyntaxToken, direction: Direction) -> Option { + while token.kind().is_trivia() { + token = match direction { + Direction::Next => token.next_token()?, + Direction::Prev => token.prev_token()?, + } + } + Some(token) +} + +/// Finds the first sibling in the given direction which is not `trivia` +pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Option { + return match element { + NodeOrToken::Node(node) => node.siblings_with_tokens(direction).skip(1).find(not_trivia), + NodeOrToken::Token(token) => token.siblings_with_tokens(direction).skip(1).find(not_trivia), + }; + + fn not_trivia(element: &SyntaxElement) -> bool { + match element { + NodeOrToken::Node(_) => true, + NodeOrToken::Token(token) => !token.kind().is_trivia(), + } + } +} + +pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement { + root.covering_element(range) +} + +pub fn least_common_ancestor(u: &SyntaxNode, v: &SyntaxNode) -> Option { + if u == v { + return Some(u.clone()); + } + + let u_depth = u.ancestors().count(); + let v_depth = v.ancestors().count(); + let keep = u_depth.min(v_depth); + + let u_candidates = u.ancestors().skip(u_depth - keep); + let v_canidates = v.ancestors().skip(v_depth - keep); + let (res, _) = u_candidates.zip(v_canidates).find(|(x, y)| x == y)?; + Some(res) +} + +pub fn neighbor(me: &T, direction: Direction) -> Option { + me.syntax().siblings(direction).skip(1).find_map(T::cast) +} + +pub fn has_errors(node: &SyntaxNode) -> bool { + node.children().any(|it| it.kind() == SyntaxKind::ERROR) +} + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum 
InsertPosition { + First, + Last, + Before(T), + After(T), +} + +pub struct TreeDiff { + replacements: FxHashMap, +} + +impl TreeDiff { + pub fn into_text_edit(&self, builder: &mut TextEditBuilder) { + for (from, to) in self.replacements.iter() { + builder.replace(from.text_range(), to.to_string()) + } + } + + pub fn is_empty(&self) -> bool { + self.replacements.is_empty() + } +} + +/// Finds minimal the diff, which, applied to `from`, will result in `to`. +/// +/// Specifically, returns a map whose keys are descendants of `from` and values +/// are descendants of `to`, such that `replace_descendants(from, map) == to`. +/// +/// A trivial solution is a singleton map `{ from: to }`, but this function +/// tries to find a more fine-grained diff. +pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { + let mut buf = FxHashMap::default(); + // FIXME: this is both horrible inefficient and gives larger than + // necessary diff. I bet there's a cool algorithm to diff trees properly. + go(&mut buf, from.clone().into(), to.clone().into()); + return TreeDiff { replacements: buf }; + + fn go( + buf: &mut FxHashMap, + lhs: SyntaxElement, + rhs: SyntaxElement, + ) { + if lhs.kind() == rhs.kind() + && lhs.text_range().len() == rhs.text_range().len() + && match (&lhs, &rhs) { + (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { + lhs.green() == rhs.green() || lhs.text() == rhs.text() + } + (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), + _ => false, + } + { + return; + } + if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) { + if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() { + for (lhs, rhs) in lhs.children_with_tokens().zip(rhs.children_with_tokens()) { + go(buf, lhs, rhs) + } + return; + } + } + buf.insert(lhs, rhs); + } +} + +/// Adds specified children (tokens or nodes) to the current node at the +/// specific position. 
+/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. +pub fn insert_children( + parent: &SyntaxNode, + position: InsertPosition, + to_insert: impl IntoIterator, +) -> SyntaxNode { + let mut to_insert = to_insert.into_iter(); + _insert_children(parent, position, &mut to_insert) +} + +fn _insert_children( + parent: &SyntaxNode, + position: InsertPosition, + to_insert: &mut dyn Iterator, +) -> SyntaxNode { + let mut delta = TextSize::default(); + let to_insert = to_insert.map(|element| { + delta += element.text_range().len(); + to_green_element(element) + }); + + let mut old_children = parent.green().children().map(|it| match it { + NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), + }); + + let new_children = match &position { + InsertPosition::First => to_insert.chain(old_children).collect::>(), + InsertPosition::Last => old_children.chain(to_insert).collect::>(), + InsertPosition::Before(anchor) | InsertPosition::After(anchor) => { + let take_anchor = if let InsertPosition::After(_) = position { 1 } else { 0 }; + let split_at = position_of_child(parent, anchor.clone()) + take_anchor; + let before = old_children.by_ref().take(split_at).collect::>(); + before.into_iter().chain(to_insert).chain(old_children).collect::>() + } + }; + + with_children(parent, new_children) +} + +/// Replaces all nodes in `to_delete` with nodes from `to_insert` +/// +/// This is a type-unsafe low-level editing API, if you need to use it, +/// prefer to create a type-safe abstraction on top of it instead. 
+pub fn replace_children( + parent: &SyntaxNode, + to_delete: RangeInclusive, + to_insert: impl IntoIterator, +) -> SyntaxNode { + let mut to_insert = to_insert.into_iter(); + _replace_children(parent, to_delete, &mut to_insert) +} + +fn _replace_children( + parent: &SyntaxNode, + to_delete: RangeInclusive, + to_insert: &mut dyn Iterator, +) -> SyntaxNode { + let start = position_of_child(parent, to_delete.start().clone()); + let end = position_of_child(parent, to_delete.end().clone()); + let mut old_children = parent.green().children().map(|it| match it { + NodeOrToken::Token(it) => NodeOrToken::Token(it.clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(it.clone()), + }); + + let before = old_children.by_ref().take(start).collect::>(); + let new_children = before + .into_iter() + .chain(to_insert.map(to_green_element)) + .chain(old_children.skip(end + 1 - start)) + .collect::>(); + with_children(parent, new_children) +} + +#[derive(Default)] +pub struct SyntaxRewriter<'a> { + f: Option Option + 'a>>, + //FIXME: add debug_assertions that all elements are in fact from the same file. 
+ replacements: FxHashMap, +} + +impl fmt::Debug for SyntaxRewriter<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SyntaxRewriter").field("replacements", &self.replacements).finish() + } +} + +impl<'a> SyntaxRewriter<'a> { + pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option + 'a) -> SyntaxRewriter<'a> { + SyntaxRewriter { f: Some(Box::new(f)), replacements: FxHashMap::default() } + } + pub fn delete>(&mut self, what: &T) { + let what = what.clone().into(); + let replacement = Replacement::Delete; + self.replacements.insert(what, replacement); + } + pub fn replace>(&mut self, what: &T, with: &T) { + let what = what.clone().into(); + let replacement = Replacement::Single(with.clone().into()); + self.replacements.insert(what, replacement); + } + pub fn replace_with_many>( + &mut self, + what: &T, + with: Vec, + ) { + let what = what.clone().into(); + let replacement = Replacement::Many(with); + self.replacements.insert(what, replacement); + } + pub fn replace_ast(&mut self, what: &T, with: &T) { + self.replace(what.syntax(), with.syntax()) + } + + pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode { + if self.f.is_none() && self.replacements.is_empty() { + return node.clone(); + } + self.rewrite_children(node) + } + + pub fn rewrite_ast(self, node: &N) -> N { + N::cast(self.rewrite(node.syntax())).unwrap() + } + + /// Returns a node that encompasses all replacements to be done by this rewriter. + /// + /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. + /// + /// Returns `None` when there are no replacements. + pub fn rewrite_root(&self) -> Option { + assert!(self.f.is_none()); + self.replacements + .keys() + .map(|element| match element { + SyntaxElement::Node(it) => it.clone(), + SyntaxElement::Token(it) => it.parent(), + }) + // If we only have one replacement, we must return its parent node, since `rewrite` does + // not replace the node passed to it. 
+ .map(|it| it.parent().unwrap_or(it)) + .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) + } + + fn replacement(&self, element: &SyntaxElement) -> Option { + if let Some(f) = &self.f { + assert!(self.replacements.is_empty()); + return f(element).map(Replacement::Single); + } + self.replacements.get(element).cloned() + } + + fn rewrite_children(&self, node: &SyntaxNode) -> SyntaxNode { + // FIXME: this could be made much faster. + let mut new_children = Vec::new(); + for child in node.children_with_tokens() { + self.rewrite_self(&mut new_children, &child); + } + with_children(node, new_children) + } + + fn rewrite_self( + &self, + acc: &mut Vec>, + element: &SyntaxElement, + ) { + if let Some(replacement) = self.replacement(&element) { + match replacement { + Replacement::Single(NodeOrToken::Node(it)) => { + acc.push(NodeOrToken::Node(it.green().clone())) + } + Replacement::Single(NodeOrToken::Token(it)) => { + acc.push(NodeOrToken::Token(it.green().clone())) + } + Replacement::Many(replacements) => { + acc.extend(replacements.iter().map(|it| match it { + NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()), + NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), + })) + } + Replacement::Delete => (), + }; + return; + } + let res = match element { + NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()), + NodeOrToken::Node(it) => NodeOrToken::Node(self.rewrite_children(it).green().clone()), + }; + acc.push(res) + } +} + +impl ops::AddAssign for SyntaxRewriter<'_> { + fn add_assign(&mut self, rhs: SyntaxRewriter) { + assert!(rhs.f.is_none()); + self.replacements.extend(rhs.replacements) + } +} + +#[derive(Clone, Debug)] +enum Replacement { + Delete, + Single(SyntaxElement), + Many(Vec), +} + +fn with_children( + parent: &SyntaxNode, + new_children: Vec>, +) -> SyntaxNode { + let len = new_children.iter().map(|it| it.text_len()).sum::(); + let new_node = rowan::GreenNode::new(rowan::SyntaxKind(parent.kind() as u16), 
new_children); + let new_root_node = parent.replace_with(new_node); + let new_root_node = SyntaxNode::new_root(new_root_node); + + // FIXME: use a more elegant way to re-fetch the node (#1185), make + // `range` private afterwards + let mut ptr = SyntaxNodePtr::new(parent); + ptr.range = TextRange::at(ptr.range.start(), len); + ptr.to_node(&new_root_node) +} + +fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize { + parent + .children_with_tokens() + .position(|it| it == child) + .expect("element is not a child of current element") +} + +fn to_green_element(element: SyntaxElement) -> NodeOrToken { + match element { + NodeOrToken::Node(it) => it.green().clone().into(), + NodeOrToken::Token(it) => it.green().clone().into(), + } +} diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs new file mode 100644 index 00000000000..d536bb1e7d6 --- /dev/null +++ b/crates/syntax/src/ast.rs @@ -0,0 +1,331 @@ +//! Abstract Syntax Tree, layered on top of untyped `SyntaxNode`s + +mod generated; +mod traits; +mod token_ext; +mod node_ext; +mod expr_ext; +pub mod edit; +pub mod make; + +use std::marker::PhantomData; + +use crate::{ + syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken}, + SmolStr, SyntaxKind, +}; + +pub use self::{ + expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp}, + generated::*, + node_ext::{ + AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, + StructKind, TypeBoundKind, VisibilityKind, + }, + token_ext::*, + traits::*, +}; + +/// The main trait to go from untyped `SyntaxNode` to a typed ast. The +/// conversion itself has zero runtime cost: ast and syntax nodes have exactly +/// the same representation: a pointer to the tree root and a pointer to the +/// node itself. 
+pub trait AstNode { + fn can_cast(kind: SyntaxKind) -> bool + where + Self: Sized; + + fn cast(syntax: SyntaxNode) -> Option + where + Self: Sized; + + fn syntax(&self) -> &SyntaxNode; +} + +/// Like `AstNode`, but wraps tokens rather than interior nodes. +pub trait AstToken { + fn can_cast(token: SyntaxKind) -> bool + where + Self: Sized; + + fn cast(syntax: SyntaxToken) -> Option + where + Self: Sized; + + fn syntax(&self) -> &SyntaxToken; + + fn text(&self) -> &SmolStr { + self.syntax().text() + } +} + +/// An iterator over `SyntaxNode` children of a particular AST type. +#[derive(Debug, Clone)] +pub struct AstChildren { + inner: SyntaxNodeChildren, + ph: PhantomData, +} + +impl AstChildren { + fn new(parent: &SyntaxNode) -> Self { + AstChildren { inner: parent.children(), ph: PhantomData } + } +} + +impl Iterator for AstChildren { + type Item = N; + fn next(&mut self) -> Option { + self.inner.find_map(N::cast) + } +} + +mod support { + use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken}; + + pub(super) fn child(parent: &SyntaxNode) -> Option { + parent.children().find_map(N::cast) + } + + pub(super) fn children(parent: &SyntaxNode) -> AstChildren { + AstChildren::new(parent) + } + + pub(super) fn token(parent: &SyntaxNode, kind: SyntaxKind) -> Option { + parent.children_with_tokens().filter_map(|it| it.into_token()).find(|it| it.kind() == kind) + } +} + +#[test] +fn assert_ast_is_object_safe() { + fn _f(_: &dyn AstNode, _: &dyn NameOwner) {} +} + +#[test] +fn test_doc_comment_none() { + let file = SourceFile::parse( + r#" + // non-doc + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert!(module.doc_comment_text().is_none()); +} + +#[test] +fn test_doc_comment_of_items() { + let file = SourceFile::parse( + r#" + //! 
doc + // non-doc + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("doc", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_of_statics() { + let file = SourceFile::parse( + r#" + /// Number of levels + static LEVELS: i32 = 0; + "#, + ) + .ok() + .unwrap(); + let st = file.syntax().descendants().find_map(Static::cast).unwrap(); + assert_eq!("Number of levels", st.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_preserves_indents() { + let file = SourceFile::parse( + r#" + /// doc1 + /// ``` + /// fn foo() { + /// // ... + /// } + /// ``` + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("doc1\n```\nfn foo() {\n // ...\n}\n```", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_preserves_newlines() { + let file = SourceFile::parse( + r#" + /// this + /// is + /// mod + /// foo + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("this\nis\nmod\nfoo", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_single_line_block_strips_suffix() { + let file = SourceFile::parse( + r#" + /** this is mod foo*/ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("this is mod foo", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_single_line_block_strips_suffix_whitespace() { + let file = SourceFile::parse( + r#" + /** this is mod foo */ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("this is mod foo ", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_doc_comment_multi_line_block_strips_suffix() { + let file = SourceFile::parse( + r#" + /** + this + is + mod foo + 
*/ + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!( + " this\n is\n mod foo\n ", + module.doc_comment_text().unwrap() + ); +} + +#[test] +fn test_comments_preserve_trailing_whitespace() { + let file = SourceFile::parse( + "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}", + ) + .ok() + .unwrap(); + let def = file.syntax().descendants().find_map(Struct::cast).unwrap(); + assert_eq!( + "Representation of a Realm. \nIn the specification these are called Realm Records.", + def.doc_comment_text().unwrap() + ); +} + +#[test] +fn test_four_slash_line_comment() { + let file = SourceFile::parse( + r#" + //// too many slashes to be a doc comment + /// doc comment + mod foo {} + "#, + ) + .ok() + .unwrap(); + let module = file.syntax().descendants().find_map(Module::cast).unwrap(); + assert_eq!("doc comment", module.doc_comment_text().unwrap()); +} + +#[test] +fn test_where_predicates() { + fn assert_bound(text: &str, bound: Option) { + assert_eq!(text, bound.unwrap().syntax().text().to_string()); + } + + let file = SourceFile::parse( + r#" +fn foo() +where + T: Clone + Copy + Debug + 'static, + 'a: 'b + 'c, + Iterator::Item: 'a + Debug, + Iterator::Item: Debug + 'a, + ::Item: Debug + 'a, + for<'a> F: Fn(&'a str) +{} + "#, + ) + .ok() + .unwrap(); + let where_clause = file.syntax().descendants().find_map(WhereClause::cast).unwrap(); + + let mut predicates = where_clause.predicates(); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert!(pred.for_token().is_none()); + assert!(pred.generic_param_list().is_none()); + assert_eq!("T", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Clone", bounds.next()); + assert_bound("Copy", bounds.next()); + assert_bound("Debug", bounds.next()); + assert_bound("'static", bounds.next()); + + let pred = 
predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("'a", pred.lifetime_token().unwrap().text()); + + assert_bound("'b", bounds.next()); + assert_bound("'c", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("Iterator::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Debug", bounds.next()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert_eq!("::Item", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Debug", bounds.next()); + assert_bound("'a", bounds.next()); + + let pred = predicates.next().unwrap(); + let mut bounds = pred.type_bound_list().unwrap().bounds(); + + assert!(pred.for_token().is_some()); + assert_eq!("<'a>", pred.generic_param_list().unwrap().syntax().text().to_string()); + assert_eq!("F", pred.ty().unwrap().syntax().text().to_string()); + assert_bound("Fn(&'a str)", bounds.next()); +} diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs new file mode 100644 index 00000000000..5ed123f91fe --- /dev/null +++ b/crates/syntax/src/ast/edit.rs @@ -0,0 +1,642 @@ +//! This module contains functions for editing syntax trees. As the trees are +//! immutable, all function here return a fresh copy of the tree, instead of +//! doing an in-place modification. 
+use std::{ + fmt, iter, + ops::{self, RangeInclusive}, +}; + +use arrayvec::ArrayVec; + +use crate::{ + algo::{self, neighbor, SyntaxRewriter}, + ast::{ + self, + make::{self, tokens}, + AstNode, TypeBoundsOwner, + }, + AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, SyntaxKind, + SyntaxKind::{ATTR, COMMENT, WHITESPACE}, + SyntaxNode, SyntaxToken, T, +}; + +impl ast::BinExpr { + #[must_use] + pub fn replace_op(&self, op: SyntaxKind) -> Option { + let op_node: SyntaxElement = self.op_details()?.0.into(); + let to_insert: Option = Some(make::token(op).into()); + Some(self.replace_children(single_node(op_node), to_insert)) + } +} + +impl ast::Fn { + #[must_use] + pub fn with_body(&self, body: ast::BlockExpr) -> ast::Fn { + let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); + let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.body() { + old_body.syntax().clone().into() + } else if let Some(semi) = self.semicolon_token() { + to_insert.push(make::tokens::single_space().into()); + semi.into() + } else { + to_insert.push(make::tokens::single_space().into()); + to_insert.push(body.syntax().clone().into()); + return self.insert_children(InsertPosition::Last, to_insert); + }; + to_insert.push(body.syntax().clone().into()); + self.replace_children(single_node(old_body_or_semi), to_insert) + } +} + +fn make_multiline(node: N) -> N +where + N: AstNode + Clone, +{ + let l_curly = match node.syntax().children_with_tokens().find(|it| it.kind() == T!['{']) { + Some(it) => it, + None => return node, + }; + let sibling = match l_curly.next_sibling_or_token() { + Some(it) => it, + None => return node, + }; + let existing_ws = match sibling.as_token() { + None => None, + Some(tok) if tok.kind() != WHITESPACE => None, + Some(ws) => { + if ws.text().contains('\n') { + return node; + } + Some(ws.clone()) + } + }; + + let indent = leading_indent(node.syntax()).unwrap_or_default(); + let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); + 
let to_insert = iter::once(ws.ws().into()); + match existing_ws { + None => node.insert_children(InsertPosition::After(l_curly), to_insert), + Some(ws) => node.replace_children(single_node(ws), to_insert), + } +} + +impl ast::AssocItemList { + #[must_use] + pub fn append_items( + &self, + items: impl IntoIterator, + ) -> ast::AssocItemList { + let mut res = self.clone(); + if !self.syntax().text().contains_char('\n') { + res = make_multiline(res); + } + items.into_iter().for_each(|it| res = res.append_item(it)); + res + } + + #[must_use] + pub fn append_item(&self, item: ast::AssocItem) -> ast::AssocItemList { + let (indent, position) = match self.assoc_items().last() { + Some(it) => ( + leading_indent(it.syntax()).unwrap_or_default().to_string(), + InsertPosition::After(it.syntax().clone().into()), + ), + None => match self.l_curly_token() { + Some(it) => ( + " ".to_string() + &leading_indent(self.syntax()).unwrap_or_default(), + InsertPosition::After(it.into()), + ), + None => return self.clone(), + }, + }; + let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); + let to_insert: ArrayVec<[SyntaxElement; 2]> = + [ws.ws().into(), item.syntax().clone().into()].into(); + self.insert_children(position, to_insert) + } +} + +impl ast::RecordExprFieldList { + #[must_use] + pub fn append_field(&self, field: &ast::RecordExprField) -> ast::RecordExprFieldList { + self.insert_field(InsertPosition::Last, field) + } + + #[must_use] + pub fn insert_field( + &self, + position: InsertPosition<&'_ ast::RecordExprField>, + field: &ast::RecordExprField, + ) -> ast::RecordExprFieldList { + let is_multiline = self.syntax().text().contains_char('\n'); + let ws; + let space = if is_multiline { + ws = tokens::WsBuilder::new(&format!( + "\n{} ", + leading_indent(self.syntax()).unwrap_or_default() + )); + ws.ws() + } else { + tokens::single_space() + }; + + let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); + to_insert.push(space.into()); + 
to_insert.push(field.syntax().clone().into()); + to_insert.push(make::token(T![,]).into()); + + macro_rules! after_l_curly { + () => {{ + let anchor = match self.l_curly_token() { + Some(it) => it.into(), + None => return self.clone(), + }; + InsertPosition::After(anchor) + }}; + } + + macro_rules! after_field { + ($anchor:expr) => { + if let Some(comma) = $anchor + .syntax() + .siblings_with_tokens(Direction::Next) + .find(|it| it.kind() == T![,]) + { + InsertPosition::After(comma) + } else { + to_insert.insert(0, make::token(T![,]).into()); + InsertPosition::After($anchor.syntax().clone().into()) + } + }; + }; + + let position = match position { + InsertPosition::First => after_l_curly!(), + InsertPosition::Last => { + if !is_multiline { + // don't insert comma before curly + to_insert.pop(); + } + match self.fields().last() { + Some(it) => after_field!(it), + None => after_l_curly!(), + } + } + InsertPosition::Before(anchor) => { + InsertPosition::Before(anchor.syntax().clone().into()) + } + InsertPosition::After(anchor) => after_field!(anchor), + }; + + self.insert_children(position, to_insert) + } +} + +impl ast::TypeAlias { + #[must_use] + pub fn remove_bounds(&self) -> ast::TypeAlias { + let colon = match self.colon_token() { + Some(it) => it, + None => return self.clone(), + }; + let end = match self.type_bound_list() { + Some(it) => it.syntax().clone().into(), + None => colon.clone().into(), + }; + self.replace_children(colon.into()..=end, iter::empty()) + } +} + +impl ast::TypeParam { + #[must_use] + pub fn remove_bounds(&self) -> ast::TypeParam { + let colon = match self.colon_token() { + Some(it) => it, + None => return self.clone(), + }; + let end = match self.type_bound_list() { + Some(it) => it.syntax().clone().into(), + None => colon.clone().into(), + }; + self.replace_children(colon.into()..=end, iter::empty()) + } +} + +impl ast::Path { + #[must_use] + pub fn with_segment(&self, segment: ast::PathSegment) -> ast::Path { + if let Some(old) = 
self.segment() { + return self.replace_children( + single_node(old.syntax().clone()), + iter::once(segment.syntax().clone().into()), + ); + } + self.clone() + } +} + +impl ast::PathSegment { + #[must_use] + pub fn with_type_args(&self, type_args: ast::GenericArgList) -> ast::PathSegment { + self._with_type_args(type_args, false) + } + + #[must_use] + pub fn with_turbo_fish(&self, type_args: ast::GenericArgList) -> ast::PathSegment { + self._with_type_args(type_args, true) + } + + fn _with_type_args(&self, type_args: ast::GenericArgList, turbo: bool) -> ast::PathSegment { + if let Some(old) = self.generic_arg_list() { + return self.replace_children( + single_node(old.syntax().clone()), + iter::once(type_args.syntax().clone().into()), + ); + } + let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); + if turbo { + to_insert.push(make::token(T![::]).into()); + } + to_insert.push(type_args.syntax().clone().into()); + self.insert_children(InsertPosition::Last, to_insert) + } +} + +impl ast::Use { + #[must_use] + pub fn with_use_tree(&self, use_tree: ast::UseTree) -> ast::Use { + if let Some(old) = self.use_tree() { + return self.replace_descendant(old, use_tree); + } + self.clone() + } + + pub fn remove(&self) -> SyntaxRewriter<'static> { + let mut res = SyntaxRewriter::default(); + res.delete(self.syntax()); + let next_ws = self + .syntax() + .next_sibling_or_token() + .and_then(|it| it.into_token()) + .and_then(ast::Whitespace::cast); + if let Some(next_ws) = next_ws { + let ws_text = next_ws.syntax().text(); + if ws_text.starts_with('\n') { + let rest = &ws_text[1..]; + if rest.is_empty() { + res.delete(next_ws.syntax()) + } else { + res.replace(next_ws.syntax(), &make::tokens::whitespace(rest)); + } + } + } + res + } +} + +impl ast::UseTree { + #[must_use] + pub fn with_path(&self, path: ast::Path) -> ast::UseTree { + if let Some(old) = self.path() { + return self.replace_descendant(old, path); + } + self.clone() + } + + #[must_use] + pub fn 
with_use_tree_list(&self, use_tree_list: ast::UseTreeList) -> ast::UseTree { + if let Some(old) = self.use_tree_list() { + return self.replace_descendant(old, use_tree_list); + } + self.clone() + } + + #[must_use] + pub fn split_prefix(&self, prefix: &ast::Path) -> ast::UseTree { + let suffix = match split_path_prefix(&prefix) { + Some(it) => it, + None => return self.clone(), + }; + let use_tree = make::use_tree( + suffix, + self.use_tree_list(), + self.rename(), + self.star_token().is_some(), + ); + let nested = make::use_tree_list(iter::once(use_tree)); + return make::use_tree(prefix.clone(), Some(nested), None, false); + + fn split_path_prefix(prefix: &ast::Path) -> Option { + let parent = prefix.parent_path()?; + let segment = parent.segment()?; + if algo::has_errors(segment.syntax()) { + return None; + } + let mut res = make::path_unqualified(segment); + for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { + res = make::path_qualified(res, p.segment()?); + } + Some(res) + } + } + + pub fn remove(&self) -> SyntaxRewriter<'static> { + let mut res = SyntaxRewriter::default(); + res.delete(self.syntax()); + for &dir in [Direction::Next, Direction::Prev].iter() { + if let Some(nb) = neighbor(self, dir) { + self.syntax() + .siblings_with_tokens(dir) + .skip(1) + .take_while(|it| it.as_node() != Some(nb.syntax())) + .for_each(|el| res.delete(&el)); + return res; + } + } + res + } +} + +impl ast::MatchArmList { + #[must_use] + pub fn append_arms(&self, items: impl IntoIterator) -> ast::MatchArmList { + let mut res = self.clone(); + res = res.strip_if_only_whitespace(); + if !res.syntax().text().contains_char('\n') { + res = make_multiline(res); + } + items.into_iter().for_each(|it| res = res.append_arm(it)); + res + } + + fn strip_if_only_whitespace(&self) -> ast::MatchArmList { + let mut iter = self.syntax().children_with_tokens().skip_while(|it| it.kind() != T!['{']); + iter.next(); // Eat the curly + let mut inner = iter.take_while(|it| 
it.kind() != T!['}']); + if !inner.clone().all(|it| it.kind() == WHITESPACE) { + return self.clone(); + } + let start = match inner.next() { + Some(s) => s, + None => return self.clone(), + }; + let end = match inner.last() { + Some(s) => s, + None => start.clone(), + }; + self.replace_children(start..=end, &mut iter::empty()) + } + + #[must_use] + pub fn remove_placeholder(&self) -> ast::MatchArmList { + let placeholder = + self.arms().find(|arm| matches!(arm.pat(), Some(ast::Pat::WildcardPat(_)))); + if let Some(placeholder) = placeholder { + self.remove_arm(&placeholder) + } else { + self.clone() + } + } + + #[must_use] + fn remove_arm(&self, arm: &ast::MatchArm) -> ast::MatchArmList { + let start = arm.syntax().clone(); + let end = if let Some(comma) = start + .siblings_with_tokens(Direction::Next) + .skip(1) + .skip_while(|it| it.kind().is_trivia()) + .next() + .filter(|it| it.kind() == T![,]) + { + comma + } else { + start.clone().into() + }; + self.replace_children(start.into()..=end, None) + } + + #[must_use] + pub fn append_arm(&self, item: ast::MatchArm) -> ast::MatchArmList { + let r_curly = match self.syntax().children_with_tokens().find(|it| it.kind() == T!['}']) { + Some(t) => t, + None => return self.clone(), + }; + let position = InsertPosition::Before(r_curly.into()); + let arm_ws = tokens::WsBuilder::new(" "); + let match_indent = &leading_indent(self.syntax()).unwrap_or_default(); + let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent)); + let to_insert: ArrayVec<[SyntaxElement; 3]> = + [arm_ws.ws().into(), item.syntax().clone().into(), match_ws.ws().into()].into(); + self.insert_children(position, to_insert) + } +} + +#[must_use] +pub fn remove_attrs_and_docs(node: &N) -> N { + N::cast(remove_attrs_and_docs_inner(node.syntax().clone())).unwrap() +} + +fn remove_attrs_and_docs_inner(mut node: SyntaxNode) -> SyntaxNode { + while let Some(start) = + node.children_with_tokens().find(|it| it.kind() == ATTR || it.kind() == COMMENT) + { 
+ let end = match &start.next_sibling_or_token() { + Some(el) if el.kind() == WHITESPACE => el.clone(), + Some(_) | None => start.clone(), + }; + node = algo::replace_children(&node, start..=end, &mut iter::empty()); + } + node +} + +#[derive(Debug, Clone, Copy)] +pub struct IndentLevel(pub u8); + +impl From for IndentLevel { + fn from(level: u8) -> IndentLevel { + IndentLevel(level) + } +} + +impl fmt::Display for IndentLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let spaces = " "; + let buf; + let len = self.0 as usize * 4; + let indent = if len <= spaces.len() { + &spaces[..len] + } else { + buf = iter::repeat(' ').take(len).collect::(); + &buf + }; + fmt::Display::fmt(indent, f) + } +} + +impl ops::Add for IndentLevel { + type Output = IndentLevel; + fn add(self, rhs: u8) -> IndentLevel { + IndentLevel(self.0 + rhs) + } +} + +impl IndentLevel { + pub fn from_node(node: &SyntaxNode) -> IndentLevel { + let first_token = match node.first_token() { + Some(it) => it, + None => return IndentLevel(0), + }; + for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { + let text = ws.syntax().text(); + if let Some(pos) = text.rfind('\n') { + let level = text[pos + 1..].chars().count() / 4; + return IndentLevel(level as u8); + } + } + IndentLevel(0) + } + + /// XXX: this intentionally doesn't change the indent of the very first token. + /// Ie, in something like + /// ``` + /// fn foo() { + /// 92 + /// } + /// ``` + /// if you indent the block, the `{` token would stay put. 
+ fn increase_indent(self, node: SyntaxNode) -> SyntaxNode { + let mut rewriter = SyntaxRewriter::default(); + node.descendants_with_tokens() + .filter_map(|el| el.into_token()) + .filter_map(ast::Whitespace::cast) + .filter(|ws| { + let text = ws.syntax().text(); + text.contains('\n') + }) + .for_each(|ws| { + let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self,)); + rewriter.replace(ws.syntax(), &new_ws) + }); + rewriter.rewrite(&node) + } + + fn decrease_indent(self, node: SyntaxNode) -> SyntaxNode { + let mut rewriter = SyntaxRewriter::default(); + node.descendants_with_tokens() + .filter_map(|el| el.into_token()) + .filter_map(ast::Whitespace::cast) + .filter(|ws| { + let text = ws.syntax().text(); + text.contains('\n') + }) + .for_each(|ws| { + let new_ws = make::tokens::whitespace( + &ws.syntax().text().replace(&format!("\n{}", self), "\n"), + ); + rewriter.replace(ws.syntax(), &new_ws) + }); + rewriter.rewrite(&node) + } +} + +// FIXME: replace usages with IndentLevel above +fn leading_indent(node: &SyntaxNode) -> Option { + for token in prev_tokens(node.first_token()?) 
{ + if let Some(ws) = ast::Whitespace::cast(token.clone()) { + let ws_text = ws.text(); + if let Some(pos) = ws_text.rfind('\n') { + return Some(ws_text[pos + 1..].into()); + } + } + if token.text().contains('\n') { + break; + } + } + None +} + +fn prev_tokens(token: SyntaxToken) -> impl Iterator { + iter::successors(Some(token), |token| token.prev_token()) +} + +pub trait AstNodeEdit: AstNode + Clone + Sized { + #[must_use] + fn insert_children( + &self, + position: InsertPosition, + to_insert: impl IntoIterator, + ) -> Self { + let new_syntax = algo::insert_children(self.syntax(), position, to_insert); + Self::cast(new_syntax).unwrap() + } + + #[must_use] + fn replace_children( + &self, + to_replace: RangeInclusive, + to_insert: impl IntoIterator, + ) -> Self { + let new_syntax = algo::replace_children(self.syntax(), to_replace, to_insert); + Self::cast(new_syntax).unwrap() + } + + #[must_use] + fn replace_descendant(&self, old: D, new: D) -> Self { + self.replace_descendants(iter::once((old, new))) + } + + #[must_use] + fn replace_descendants( + &self, + replacement_map: impl IntoIterator, + ) -> Self { + let mut rewriter = SyntaxRewriter::default(); + for (from, to) in replacement_map { + rewriter.replace(from.syntax(), to.syntax()) + } + rewriter.rewrite_ast(self) + } + #[must_use] + fn indent(&self, level: IndentLevel) -> Self { + Self::cast(level.increase_indent(self.syntax().clone())).unwrap() + } + #[must_use] + fn dedent(&self, level: IndentLevel) -> Self { + Self::cast(level.decrease_indent(self.syntax().clone())).unwrap() + } + #[must_use] + fn reset_indent(&self) -> Self { + let level = IndentLevel::from_node(self.syntax()); + self.dedent(level) + } +} + +impl AstNodeEdit for N {} + +fn single_node(element: impl Into) -> RangeInclusive { + let element = element.into(); + element.clone()..=element +} + +#[test] +fn test_increase_indent() { + let arm_list = { + let arm = make::match_arm(iter::once(make::wildcard_pat().into()), make::expr_unit()); + 
make::match_arm_list(vec![arm.clone(), arm]) + }; + assert_eq!( + arm_list.syntax().to_string(), + "{ + _ => (), + _ => (), +}" + ); + let indented = arm_list.indent(IndentLevel(2)); + assert_eq!( + indented.syntax().to_string(), + "{ + _ => (), + _ => (), + }" + ); +} diff --git a/crates/syntax/src/ast/expr_ext.rs b/crates/syntax/src/ast/expr_ext.rs new file mode 100644 index 00000000000..f5ba872233f --- /dev/null +++ b/crates/syntax/src/ast/expr_ext.rs @@ -0,0 +1,418 @@ +//! Various extension methods to ast Expr Nodes, which are hard to code-generate. + +use crate::{ + ast::{self, support, AstChildren, AstNode}, + SmolStr, + SyntaxKind::*, + SyntaxToken, T, +}; + +impl ast::AttrsOwner for ast::Expr {} + +impl ast::Expr { + pub fn is_block_like(&self) -> bool { + match self { + ast::Expr::IfExpr(_) + | ast::Expr::LoopExpr(_) + | ast::Expr::ForExpr(_) + | ast::Expr::WhileExpr(_) + | ast::Expr::BlockExpr(_) + | ast::Expr::MatchExpr(_) + | ast::Expr::EffectExpr(_) => true, + _ => false, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ElseBranch { + Block(ast::BlockExpr), + IfExpr(ast::IfExpr), +} + +impl ast::IfExpr { + pub fn then_branch(&self) -> Option { + self.blocks().next() + } + pub fn else_branch(&self) -> Option { + let res = match self.blocks().nth(1) { + Some(block) => ElseBranch::Block(block), + None => { + let elif: ast::IfExpr = support::child(self.syntax())?; + ElseBranch::IfExpr(elif) + } + }; + Some(res) + } + + pub fn blocks(&self) -> AstChildren { + support::children(self.syntax()) + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum PrefixOp { + /// The `*` operator for dereferencing + Deref, + /// The `!` operator for logical inversion + Not, + /// The `-` operator for negation + Neg, +} + +impl ast::PrefixExpr { + pub fn op_kind(&self) -> Option { + match self.op_token()?.kind() { + T![*] => Some(PrefixOp::Deref), + T![!] 
=> Some(PrefixOp::Not), + T![-] => Some(PrefixOp::Neg), + _ => None, + } + } + + pub fn op_token(&self) -> Option { + self.syntax().first_child_or_token()?.into_token() + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum BinOp { + /// The `||` operator for boolean OR + BooleanOr, + /// The `&&` operator for boolean AND + BooleanAnd, + /// The `==` operator for equality testing + EqualityTest, + /// The `!=` operator for equality testing + NegatedEqualityTest, + /// The `<=` operator for lesser-equal testing + LesserEqualTest, + /// The `>=` operator for greater-equal testing + GreaterEqualTest, + /// The `<` operator for comparison + LesserTest, + /// The `>` operator for comparison + GreaterTest, + /// The `+` operator for addition + Addition, + /// The `*` operator for multiplication + Multiplication, + /// The `-` operator for subtraction + Subtraction, + /// The `/` operator for division + Division, + /// The `%` operator for remainder after division + Remainder, + /// The `<<` operator for left shift + LeftShift, + /// The `>>` operator for right shift + RightShift, + /// The `^` operator for bitwise XOR + BitwiseXor, + /// The `|` operator for bitwise OR + BitwiseOr, + /// The `&` operator for bitwise AND + BitwiseAnd, + /// The `=` operator for assignment + Assignment, + /// The `+=` operator for assignment after addition + AddAssign, + /// The `/=` operator for assignment after division + DivAssign, + /// The `*=` operator for assignment after multiplication + MulAssign, + /// The `%=` operator for assignment after remainders + RemAssign, + /// The `>>=` operator for assignment after shifting right + ShrAssign, + /// The `<<=` operator for assignment after shifting left + ShlAssign, + /// The `-=` operator for assignment after subtraction + SubAssign, + /// The `|=` operator for assignment after bitwise OR + BitOrAssign, + /// The `&=` operator for assignment after bitwise AND + BitAndAssign, + /// The `^=` operator for assignment after 
bitwise XOR + BitXorAssign, +} + +impl BinOp { + pub fn is_assignment(self) -> bool { + match self { + BinOp::Assignment + | BinOp::AddAssign + | BinOp::DivAssign + | BinOp::MulAssign + | BinOp::RemAssign + | BinOp::ShrAssign + | BinOp::ShlAssign + | BinOp::SubAssign + | BinOp::BitOrAssign + | BinOp::BitAndAssign + | BinOp::BitXorAssign => true, + _ => false, + } + } +} + +impl ast::BinExpr { + pub fn op_details(&self) -> Option<(SyntaxToken, BinOp)> { + self.syntax().children_with_tokens().filter_map(|it| it.into_token()).find_map(|c| { + let bin_op = match c.kind() { + T![||] => BinOp::BooleanOr, + T![&&] => BinOp::BooleanAnd, + T![==] => BinOp::EqualityTest, + T![!=] => BinOp::NegatedEqualityTest, + T![<=] => BinOp::LesserEqualTest, + T![>=] => BinOp::GreaterEqualTest, + T![<] => BinOp::LesserTest, + T![>] => BinOp::GreaterTest, + T![+] => BinOp::Addition, + T![*] => BinOp::Multiplication, + T![-] => BinOp::Subtraction, + T![/] => BinOp::Division, + T![%] => BinOp::Remainder, + T![<<] => BinOp::LeftShift, + T![>>] => BinOp::RightShift, + T![^] => BinOp::BitwiseXor, + T![|] => BinOp::BitwiseOr, + T![&] => BinOp::BitwiseAnd, + T![=] => BinOp::Assignment, + T![+=] => BinOp::AddAssign, + T![/=] => BinOp::DivAssign, + T![*=] => BinOp::MulAssign, + T![%=] => BinOp::RemAssign, + T![>>=] => BinOp::ShrAssign, + T![<<=] => BinOp::ShlAssign, + T![-=] => BinOp::SubAssign, + T![|=] => BinOp::BitOrAssign, + T![&=] => BinOp::BitAndAssign, + T![^=] => BinOp::BitXorAssign, + _ => return None, + }; + Some((c, bin_op)) + }) + } + + pub fn op_kind(&self) -> Option { + self.op_details().map(|t| t.1) + } + + pub fn op_token(&self) -> Option { + self.op_details().map(|t| t.0) + } + + pub fn lhs(&self) -> Option { + support::children(self.syntax()).next() + } + + pub fn rhs(&self) -> Option { + support::children(self.syntax()).nth(1) + } + + pub fn sub_exprs(&self) -> (Option, Option) { + let mut children = support::children(self.syntax()); + let first = children.next(); + let second = 
children.next(); + (first, second) + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub enum RangeOp { + /// `..` + Exclusive, + /// `..=` + Inclusive, +} + +impl ast::RangeExpr { + fn op_details(&self) -> Option<(usize, SyntaxToken, RangeOp)> { + self.syntax().children_with_tokens().enumerate().find_map(|(ix, child)| { + let token = child.into_token()?; + let bin_op = match token.kind() { + T![..] => RangeOp::Exclusive, + T![..=] => RangeOp::Inclusive, + _ => return None, + }; + Some((ix, token, bin_op)) + }) + } + + pub fn op_kind(&self) -> Option { + self.op_details().map(|t| t.2) + } + + pub fn op_token(&self) -> Option { + self.op_details().map(|t| t.1) + } + + pub fn start(&self) -> Option { + let op_ix = self.op_details()?.0; + self.syntax() + .children_with_tokens() + .take(op_ix) + .find_map(|it| ast::Expr::cast(it.into_node()?)) + } + + pub fn end(&self) -> Option { + let op_ix = self.op_details()?.0; + self.syntax() + .children_with_tokens() + .skip(op_ix + 1) + .find_map(|it| ast::Expr::cast(it.into_node()?)) + } +} + +impl ast::IndexExpr { + pub fn base(&self) -> Option { + support::children(self.syntax()).next() + } + pub fn index(&self) -> Option { + support::children(self.syntax()).nth(1) + } +} + +pub enum ArrayExprKind { + Repeat { initializer: Option, repeat: Option }, + ElementList(AstChildren), +} + +impl ast::ArrayExpr { + pub fn kind(&self) -> ArrayExprKind { + if self.is_repeat() { + ArrayExprKind::Repeat { + initializer: support::children(self.syntax()).next(), + repeat: support::children(self.syntax()).nth(1), + } + } else { + ArrayExprKind::ElementList(support::children(self.syntax())) + } + } + + fn is_repeat(&self) -> bool { + self.syntax().children_with_tokens().any(|it| it.kind() == T![;]) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum LiteralKind { + String, + ByteString, + Char, + Byte, + IntNumber { suffix: Option }, + FloatNumber { suffix: Option }, + Bool(bool), +} + +impl ast::Literal { + pub fn 
token(&self) -> SyntaxToken { + self.syntax() + .children_with_tokens() + .find(|e| e.kind() != ATTR && !e.kind().is_trivia()) + .and_then(|e| e.into_token()) + .unwrap() + } + + fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option { + possible_suffixes + .iter() + .find(|&suffix| text.ends_with(suffix)) + .map(|&suffix| SmolStr::new(suffix)) + } + + pub fn kind(&self) -> LiteralKind { + const INT_SUFFIXES: [&str; 12] = [ + "u64", "u32", "u16", "u8", "usize", "isize", "i64", "i32", "i16", "i8", "u128", "i128", + ]; + const FLOAT_SUFFIXES: [&str; 2] = ["f32", "f64"]; + + let token = self.token(); + + match token.kind() { + INT_NUMBER => { + // FYI: there was a bug here previously, thus the if statement below is necessary. + // The lexer treats e.g. `1f64` as an integer literal. See + // https://github.com/rust-analyzer/rust-analyzer/issues/1592 + // and the comments on the linked PR. + + let text = token.text(); + if let suffix @ Some(_) = Self::find_suffix(&text, &FLOAT_SUFFIXES) { + LiteralKind::FloatNumber { suffix } + } else { + LiteralKind::IntNumber { suffix: Self::find_suffix(&text, &INT_SUFFIXES) } + } + } + FLOAT_NUMBER => { + let text = token.text(); + LiteralKind::FloatNumber { suffix: Self::find_suffix(&text, &FLOAT_SUFFIXES) } + } + STRING | RAW_STRING => LiteralKind::String, + T![true] => LiteralKind::Bool(true), + T![false] => LiteralKind::Bool(false), + BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString, + CHAR => LiteralKind::Char, + BYTE => LiteralKind::Byte, + _ => unreachable!(), + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Effect { + Async(SyntaxToken), + Unsafe(SyntaxToken), + Try(SyntaxToken), + // Very much not an effect, but we stuff it into this node anyway + Label(ast::Label), +} + +impl ast::EffectExpr { + pub fn effect(&self) -> Effect { + if let Some(token) = self.async_token() { + return Effect::Async(token); + } + if let Some(token) = self.unsafe_token() { + return Effect::Unsafe(token); + } + if 
let Some(token) = self.try_token() { + return Effect::Try(token); + } + if let Some(label) = self.label() { + return Effect::Label(label); + } + unreachable!("ast::EffectExpr without Effect") + } +} + +impl ast::BlockExpr { + /// false if the block is an intrinsic part of the syntax and can't be + /// replaced with arbitrary expression. + /// + /// ```not_rust + /// fn foo() { not_stand_alone } + /// const FOO: () = { stand_alone }; + /// ``` + pub fn is_standalone(&self) -> bool { + let parent = match self.syntax().parent() { + Some(it) => it, + None => return true, + }; + !matches!(parent.kind(), FN | IF_EXPR | WHILE_EXPR | LOOP_EXPR | EFFECT_EXPR) + } +} + +#[test] +fn test_literal_with_attr() { + let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); + let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap(); + assert_eq!(lit.token().text(), r#""Hello""#); +} + +impl ast::RecordExprField { + pub fn parent_record_lit(&self) -> ast::RecordExpr { + self.syntax().ancestors().find_map(ast::RecordExpr::cast).unwrap() + } +} diff --git a/crates/syntax/src/ast/generated.rs b/crates/syntax/src/ast/generated.rs new file mode 100644 index 00000000000..4a6f41ee71f --- /dev/null +++ b/crates/syntax/src/ast/generated.rs @@ -0,0 +1,41 @@ +//! This file is actually hand-written, but the submodules are indeed generated. 
+#[rustfmt::skip] +mod nodes; +#[rustfmt::skip] +mod tokens; + +use crate::{ + AstNode, + SyntaxKind::{self, *}, + SyntaxNode, +}; + +pub use {nodes::*, tokens::*}; + +// Stmt is the only nested enum, so it's easier to just hand-write it +impl AstNode for Stmt { + fn can_cast(kind: SyntaxKind) -> bool { + match kind { + LET_STMT | EXPR_STMT => true, + _ => Item::can_cast(kind), + } + } + fn cast(syntax: SyntaxNode) -> Option { + let res = match syntax.kind() { + LET_STMT => Stmt::LetStmt(LetStmt { syntax }), + EXPR_STMT => Stmt::ExprStmt(ExprStmt { syntax }), + _ => { + let item = Item::cast(syntax)?; + Stmt::Item(item) + } + }; + Some(res) + } + fn syntax(&self) -> &SyntaxNode { + match self { + Stmt::LetStmt(it) => &it.syntax, + Stmt::ExprStmt(it) => &it.syntax, + Stmt::Item(it) => it.syntax(), + } + } +} diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs new file mode 100644 index 00000000000..3d49309d148 --- /dev/null +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -0,0 +1,4067 @@ +//! 
Generated file, do not edit by hand, see `xtask/src/codegen` + +use crate::{ + ast::{self, support, AstChildren, AstNode}, + SyntaxKind::{self, *}, + SyntaxNode, SyntaxToken, T, +}; +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Name { + pub(crate) syntax: SyntaxNode, +} +impl Name { + pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct NameRef { + pub(crate) syntax: SyntaxNode, +} +impl NameRef { + pub fn ident_token(&self) -> Option { support::token(&self.syntax, T![ident]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Path { + pub(crate) syntax: SyntaxNode, +} +impl Path { + pub fn qualifier(&self) -> Option { support::child(&self.syntax) } + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn segment(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct PathSegment { + pub(crate) syntax: SyntaxNode, +} +impl PathSegment { + pub fn crate_token(&self) -> Option { support::token(&self.syntax, T![crate]) } + pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } + pub fn super_token(&self) -> Option { support::token(&self.syntax, T![super]) } + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn generic_arg_list(&self) -> Option { support::child(&self.syntax) } + pub fn param_list(&self) -> Option { support::child(&self.syntax) } + pub fn ret_type(&self) -> Option { support::child(&self.syntax) } + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn path_type(&self) -> Option { support::child(&self.syntax) } + pub fn as_token(&self) -> Option { support::token(&self.syntax, T![as]) } + pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} +#[derive(Debug, Clone, 
PartialEq, Eq, Hash)] +pub struct GenericArgList { + pub(crate) syntax: SyntaxNode, +} +impl GenericArgList { + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn generic_args(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ParamList { + pub(crate) syntax: SyntaxNode, +} +impl ParamList { + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + pub fn self_param(&self) -> Option { support::child(&self.syntax) } + pub fn comma_token(&self) -> Option { support::token(&self.syntax, T![,]) } + pub fn params(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RetType { + pub(crate) syntax: SyntaxNode, +} +impl RetType { + pub fn thin_arrow_token(&self) -> Option { support::token(&self.syntax, T![->]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct PathType { + pub(crate) syntax: SyntaxNode, +} +impl PathType { + pub fn path(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeArg { + pub(crate) syntax: SyntaxNode, +} +impl TypeArg { + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AssocTypeArg { + pub(crate) syntax: SyntaxNode, +} +impl ast::TypeBoundsOwner for AssocTypeArg {} +impl AssocTypeArg { + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, 
Clone, PartialEq, Eq, Hash)] +pub struct LifetimeArg { + pub(crate) syntax: SyntaxNode, +} +impl LifetimeArg { + pub fn lifetime_token(&self) -> Option { + support::token(&self.syntax, T![lifetime]) + } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstArg { + pub(crate) syntax: SyntaxNode, +} +impl ConstArg { + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeBoundList { + pub(crate) syntax: SyntaxNode, +} +impl TypeBoundList { + pub fn bounds(&self) -> AstChildren { support::children(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroCall { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for MacroCall {} +impl ast::NameOwner for MacroCall {} +impl MacroCall { + pub fn path(&self) -> Option { support::child(&self.syntax) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn token_tree(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Attr { + pub(crate) syntax: SyntaxNode, +} +impl Attr { + pub fn pound_token(&self) -> Option { support::token(&self.syntax, T![#]) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn path(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn literal(&self) -> Option { support::child(&self.syntax) } + pub fn token_tree(&self) -> Option { support::child(&self.syntax) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TokenTree { + pub(crate) syntax: SyntaxNode, +} +impl TokenTree { + pub fn l_paren_token(&self) -> Option { 
support::token(&self.syntax, T!['(']) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroItems { + pub(crate) syntax: SyntaxNode, +} +impl ast::ModuleItemOwner for MacroItems {} +impl MacroItems {} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct MacroStmts { + pub(crate) syntax: SyntaxNode, +} +impl MacroStmts { + pub fn statements(&self) -> AstChildren { support::children(&self.syntax) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct SourceFile { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for SourceFile {} +impl ast::ModuleItemOwner for SourceFile {} +impl SourceFile { + pub fn shebang_token(&self) -> Option { support::token(&self.syntax, T![shebang]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Const { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Const {} +impl ast::NameOwner for Const {} +impl ast::VisibilityOwner for Const {} +impl Const { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn underscore_token(&self) -> Option { support::token(&self.syntax, T![_]) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn body(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { 
support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Enum { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Enum {} +impl ast::NameOwner for Enum {} +impl ast::VisibilityOwner for Enum {} +impl ast::GenericParamsOwner for Enum {} +impl Enum { + pub fn enum_token(&self) -> Option { support::token(&self.syntax, T![enum]) } + pub fn variant_list(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternBlock { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ExternBlock {} +impl ExternBlock { + pub fn abi(&self) -> Option { support::child(&self.syntax) } + pub fn extern_item_list(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternCrate { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ExternCrate {} +impl ast::VisibilityOwner for ExternCrate {} +impl ExternCrate { + pub fn extern_token(&self) -> Option { support::token(&self.syntax, T![extern]) } + pub fn crate_token(&self) -> Option { support::token(&self.syntax, T![crate]) } + pub fn name_ref(&self) -> Option { support::child(&self.syntax) } + pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } + pub fn rename(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Fn { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Fn {} +impl ast::NameOwner for Fn {} +impl ast::VisibilityOwner for Fn {} +impl ast::GenericParamsOwner for Fn {} +impl Fn { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn unsafe_token(&self) -> Option { 
support::token(&self.syntax, T![unsafe]) } + pub fn abi(&self) -> Option { support::child(&self.syntax) } + pub fn fn_token(&self) -> Option { support::token(&self.syntax, T![fn]) } + pub fn param_list(&self) -> Option { support::child(&self.syntax) } + pub fn ret_type(&self) -> Option { support::child(&self.syntax) } + pub fn body(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Impl { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Impl {} +impl ast::VisibilityOwner for Impl {} +impl ast::GenericParamsOwner for Impl {} +impl Impl { + pub fn default_token(&self) -> Option { support::token(&self.syntax, T![default]) } + pub fn unsafe_token(&self) -> Option { support::token(&self.syntax, T![unsafe]) } + pub fn impl_token(&self) -> Option { support::token(&self.syntax, T![impl]) } + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn excl_token(&self) -> Option { support::token(&self.syntax, T![!]) } + pub fn for_token(&self) -> Option { support::token(&self.syntax, T![for]) } + pub fn assoc_item_list(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Module { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Module {} +impl ast::NameOwner for Module {} +impl ast::VisibilityOwner for Module {} +impl Module { + pub fn mod_token(&self) -> Option { support::token(&self.syntax, T![mod]) } + pub fn item_list(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Static { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Static {} +impl ast::NameOwner for Static {} +impl ast::VisibilityOwner for Static {} +impl Static { + pub fn static_token(&self) -> Option { 
support::token(&self.syntax, T![static]) }
    // NOTE(review): every generic argument list in this chunk was stripped during
    // extraction (`Option<...>` -> `Option`, `AstChildren<...>` -> `AstChildren`).
    // Restored from the file's own accessor conventions: `*_token` accessors wrap
    // `support::token` and return `Option<SyntaxToken>`; `support::child` accessors
    // return the node type named by the method. Confirm against the AST codegen output.
    pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
    pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
    pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Struct {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for Struct {}
impl ast::NameOwner for Struct {}
impl ast::VisibilityOwner for Struct {}
impl ast::GenericParamsOwner for Struct {}
impl Struct {
    pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
    // NOTE(review): struct bodies may be record or tuple shaped, hence the
    // `FieldList` supertype rather than `RecordFieldList` — confirm with codegen.
    pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Trait {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for Trait {}
impl ast::NameOwner for Trait {}
impl ast::VisibilityOwner for Trait {}
impl ast::GenericParamsOwner for Trait {}
impl ast::TypeBoundsOwner for Trait {}
impl Trait {
    pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
    pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
    pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
    pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TypeAlias {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for TypeAlias {}
impl ast::NameOwner for TypeAlias {}
impl ast::VisibilityOwner for TypeAlias {}
impl ast::GenericParamsOwner for TypeAlias {}
impl ast::TypeBoundsOwner for TypeAlias {}
impl TypeAlias {
    pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
    pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
    pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Union {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for Union {}
impl ast::NameOwner for Union {}
impl ast::VisibilityOwner for Union {}
impl ast::GenericParamsOwner for Union {}
impl Union {
    pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
    pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Use {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for Use {}
impl ast::VisibilityOwner for Use {}
impl Use {
    pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
    pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
    pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Visibility {
    pub(crate) syntax: SyntaxNode,
}
impl Visibility {
    pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
    pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
    pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
    pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
    pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
    pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
    pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
    pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ItemList {} +impl ast::ModuleItemOwner for ItemList {} +impl ItemList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Rename { + pub(crate) syntax: SyntaxNode, +} +impl ast::NameOwner for Rename {} +impl Rename { + pub fn as_token(&self) -> Option { support::token(&self.syntax, T![as]) } + pub fn underscore_token(&self) -> Option { support::token(&self.syntax, T![_]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UseTree { + pub(crate) syntax: SyntaxNode, +} +impl UseTree { + pub fn path(&self) -> Option { support::child(&self.syntax) } + pub fn coloncolon_token(&self) -> Option { support::token(&self.syntax, T![::]) } + pub fn star_token(&self) -> Option { support::token(&self.syntax, T![*]) } + pub fn use_tree_list(&self) -> Option { support::child(&self.syntax) } + pub fn rename(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct UseTreeList { + pub(crate) syntax: SyntaxNode, +} +impl UseTreeList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn use_trees(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Abi { + pub(crate) syntax: SyntaxNode, +} +impl Abi { + pub fn extern_token(&self) -> Option { support::token(&self.syntax, T![extern]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GenericParamList { + pub(crate) syntax: SyntaxNode, +} +impl GenericParamList { + pub fn l_angle_token(&self) -> Option { support::token(&self.syntax, T![<]) } + pub fn generic_params(&self) -> AstChildren { support::children(&self.syntax) } + 
pub fn r_angle_token(&self) -> Option { support::token(&self.syntax, T![>]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WhereClause { + pub(crate) syntax: SyntaxNode, +} +impl WhereClause { + pub fn where_token(&self) -> Option { support::token(&self.syntax, T![where]) } + pub fn predicates(&self) -> AstChildren { support::children(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BlockExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for BlockExpr {} +impl BlockExpr { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn statements(&self) -> AstChildren { support::children(&self.syntax) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct SelfParam { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for SelfParam {} +impl SelfParam { + pub fn amp_token(&self) -> Option { support::token(&self.syntax, T![&]) } + pub fn lifetime_token(&self) -> Option { + support::token(&self.syntax, T![lifetime]) + } + pub fn mut_token(&self) -> Option { support::token(&self.syntax, T![mut]) } + pub fn self_token(&self) -> Option { support::token(&self.syntax, T![self]) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Param { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Param {} +impl Param { + pub fn pat(&self) -> Option { support::child(&self.syntax) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn dotdotdot_token(&self) -> Option { support::token(&self.syntax, T![...]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RecordFieldList { + 
pub(crate) syntax: SyntaxNode, +} +impl RecordFieldList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn fields(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TupleFieldList { + pub(crate) syntax: SyntaxNode, +} +impl TupleFieldList { + pub fn l_paren_token(&self) -> Option { support::token(&self.syntax, T!['(']) } + pub fn fields(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_paren_token(&self) -> Option { support::token(&self.syntax, T![')']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct RecordField { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for RecordField {} +impl ast::NameOwner for RecordField {} +impl ast::VisibilityOwner for RecordField {} +impl RecordField { + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TupleField { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for TupleField {} +impl ast::VisibilityOwner for TupleField {} +impl TupleField { + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct VariantList { + pub(crate) syntax: SyntaxNode, +} +impl VariantList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn variants(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Variant { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Variant {} +impl ast::NameOwner for Variant {} +impl ast::VisibilityOwner for Variant {} +impl Variant { + pub fn field_list(&self) -> Option { 
support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AssocItemList { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for AssocItemList {} +impl AssocItemList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn assoc_items(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExternItemList { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ExternItemList {} +impl ExternItemList { + pub fn l_curly_token(&self) -> Option { support::token(&self.syntax, T!['{']) } + pub fn extern_items(&self) -> AstChildren { support::children(&self.syntax) } + pub fn r_curly_token(&self) -> Option { support::token(&self.syntax, T!['}']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ConstParam { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ConstParam {} +impl ast::NameOwner for ConstParam {} +impl ConstParam { + pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn default_val(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LifetimeParam { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for LifetimeParam {} +impl ast::TypeBoundsOwner for LifetimeParam {} +impl LifetimeParam { + pub fn lifetime_token(&self) -> Option { + support::token(&self.syntax, T![lifetime]) + } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct TypeParam { + pub(crate) 
syntax: SyntaxNode, +} +impl ast::AttrsOwner for TypeParam {} +impl ast::NameOwner for TypeParam {} +impl ast::TypeBoundsOwner for TypeParam {} +impl TypeParam { + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn default_type(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct WherePred { + pub(crate) syntax: SyntaxNode, +} +impl ast::TypeBoundsOwner for WherePred {} +impl WherePred { + pub fn for_token(&self) -> Option { support::token(&self.syntax, T![for]) } + pub fn generic_param_list(&self) -> Option { support::child(&self.syntax) } + pub fn lifetime_token(&self) -> Option { + support::token(&self.syntax, T![lifetime]) + } + pub fn ty(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Literal { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for Literal {} +impl Literal {} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ExprStmt { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ExprStmt {} +impl ExprStmt { + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct LetStmt { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for LetStmt {} +impl LetStmt { + pub fn let_token(&self) -> Option { support::token(&self.syntax, T![let]) } + pub fn pat(&self) -> Option { support::child(&self.syntax) } + pub fn colon_token(&self) -> Option { support::token(&self.syntax, T![:]) } + pub fn ty(&self) -> Option { support::child(&self.syntax) } + pub fn eq_token(&self) -> Option { support::token(&self.syntax, T![=]) } + pub fn initializer(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct ArrayExpr { 
+ pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for ArrayExpr {} +impl ArrayExpr { + pub fn l_brack_token(&self) -> Option { support::token(&self.syntax, T!['[']) } + pub fn exprs(&self) -> AstChildren { support::children(&self.syntax) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn semicolon_token(&self) -> Option { support::token(&self.syntax, T![;]) } + pub fn r_brack_token(&self) -> Option { support::token(&self.syntax, T![']']) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct AwaitExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for AwaitExpr {} +impl AwaitExpr { + pub fn expr(&self) -> Option { support::child(&self.syntax) } + pub fn dot_token(&self) -> Option { support::token(&self.syntax, T![.]) } + pub fn await_token(&self) -> Option { support::token(&self.syntax, T![await]) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BinExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for BinExpr {} +impl BinExpr {} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BoxExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for BoxExpr {} +impl BoxExpr { + pub fn box_token(&self) -> Option { support::token(&self.syntax, T![box]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BreakExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for BreakExpr {} +impl BreakExpr { + pub fn break_token(&self) -> Option { support::token(&self.syntax, T![break]) } + pub fn lifetime_token(&self) -> Option { + support::token(&self.syntax, T![lifetime]) + } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CallExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::AttrsOwner for CallExpr {} +impl ast::ArgListOwner for CallExpr {} +impl CallExpr { + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} 
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct CastExpr {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for CastExpr {}
impl CastExpr {
    // NOTE(review): generic arguments below were stripped during extraction and
    // restored from the file's accessor conventions — confirm against codegen.
    pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
    pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
    pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ClosureExpr {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for ClosureExpr {}
impl ClosureExpr {
    pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
    pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
    pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
    pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
    pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
    pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct ContinueExpr {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for ContinueExpr {}
impl ContinueExpr {
    pub fn continue_token(&self) -> Option<SyntaxToken> {
        support::token(&self.syntax, T![continue])
    }
    pub fn lifetime_token(&self) -> Option<SyntaxToken> {
        support::token(&self.syntax, T![lifetime])
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct EffectExpr {
    pub(crate) syntax: SyntaxNode,
}
impl ast::AttrsOwner for EffectExpr {}
impl EffectExpr {
    pub fn label(&self) -> Option