Diffstat (limited to 'compiler/rustc_session/src/utils.rs'):
 compiler/rustc_session/src/utils.rs | 58 -------------------------
 1 file changed, 0 insertions(+), 58 deletions(-)
diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs
index 9fda5373fcc..bda7b314308 100644
--- a/compiler/rustc_session/src/utils.rs
+++ b/compiler/rustc_session/src/utils.rs
@@ -1,13 +1,7 @@
-use crate::parse::ParseSess;
 use crate::session::Session;
-use rustc_ast::token::{self, Delimiter, Nonterminal, Token};
-use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
-use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_data_structures::profiling::VerboseTimingGuard;
 use std::path::{Path, PathBuf};
 
-pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
-
 impl Session {
     pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
         self.prof.verbose_generic_activity(what)
@@ -94,55 +88,3 @@ impl CanonicalizedPath {
         &self.original
     }
 }
-
-// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
-// and needs to access `ParseSess
-pub struct FlattenNonterminals<'a> {
-    pub parse_sess: &'a ParseSess,
-    pub synthesize_tokens: CanSynthesizeMissingTokens,
-    pub nt_to_tokenstream: NtToTokenstream,
-}
-
-impl<'a> FlattenNonterminals<'a> {
-    pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
-        fn can_skip(stream: &TokenStream) -> bool {
-            stream.trees().all(|tree| match tree {
-                TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
-                TokenTree::Delimited(_, _, inner) => can_skip(inner),
-            })
-        }
-
-        if can_skip(&tokens) {
-            return tokens;
-        }
-
-        tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
-    }
-
-    pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
-        match tree {
-            TokenTree::Token(token) => self.process_token(token),
-            TokenTree::Delimited(span, delim, tts) => {
-                TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
-            }
-        }
-    }
-
-    pub fn process_token(&mut self, token: Token) -> TokenStream {
-        match token.kind {
-            token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = *nt => {
-                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span)).into()
-            }
-            token::Interpolated(nt) => {
-                let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
-                TokenTree::Delimited(
-                    DelimSpan::from_single(token.span),
-                    Delimiter::Invisible,
-                    self.process_token_stream(tts),
-                )
-                .into()
-            }
-            _ => TokenTree::Token(token).into(),
-        }
-    }
-}
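
The hunks above delete the FlattenNonterminals helper and its NtToTokenstream callback type. The helper recursively replaced every interpolated (token::Interpolated) token in a TokenStream with plain tokens: identifier nonterminals became ordinary Ident tokens, while all other nonterminals were converted through the supplied callback and wrapped in invisible delimiters. The sketch below shows how a caller such as rustc_ast_lowering could have driven this API before the removal; it is an illustration only, assumes the rustc-internal crates of this era, and names rustc_parse::nt_to_tokenstream as the usual callback, which is not part of this diff.

    use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
    use rustc_session::parse::ParseSess;
    use rustc_session::utils::FlattenNonterminals;

    // Sketch only: flatten every nonterminal in `tokens` back into plain tokens.
    fn flatten_all_nonterminals(parse_sess: &ParseSess, tokens: TokenStream) -> TokenStream {
        let mut flatten = FlattenNonterminals {
            parse_sess,
            synthesize_tokens: CanSynthesizeMissingTokens::Yes,
            // Assumed callback: in-tree callers passed `rustc_parse::nt_to_tokenstream`
            // to satisfy the `NtToTokenstream` function-pointer type.
            nt_to_tokenstream: rustc_parse::nt_to_tokenstream,
        };
        flatten.process_token_stream(tokens)
    }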