Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--  src/libsyntax/ext/base.rs               |   4
-rw-r--r--  src/libsyntax/ext/derive.rs             |  72
-rw-r--r--  src/libsyntax/ext/expand.rs             |   2
-rw-r--r--  src/libsyntax/ext/proc_macro.rs         | 249
-rw-r--r--  src/libsyntax/ext/proc_macro_server.rs  | 715
-rw-r--r--  src/libsyntax/ext/source_util.rs        | 165
6 files changed, 967 insertions(+), 240 deletions(-)
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 0ab14bee160..bb7834a133f 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -1,6 +1,6 @@
 use crate::ast::{self, Attribute, Name, PatKind};
 use crate::attr::{HasAttrs, Stability, Deprecation};
-use crate::source_map::{SourceMap, Spanned, FileName, respan};
+use crate::source_map::{SourceMap, Spanned, respan};
 use crate::edition::Edition;
 use crate::ext::expand::{self, AstFragment, Invocation};
 use crate::ext::hygiene::{ExpnId, SyntaxContext, Transparency};
@@ -14,7 +14,7 @@ use crate::tokenstream::{self, TokenStream, TokenTree};
 
 use errors::{DiagnosticBuilder, DiagnosticId};
 use smallvec::{smallvec, SmallVec};
-use syntax_pos::{Span, MultiSpan, DUMMY_SP};
+use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP};
 use syntax_pos::hygiene::{ExpnInfo, ExpnKind};
 
 use rustc_data_structures::fx::FxHashMap;
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
deleted file mode 100644
index ff9ad46deec..00000000000
--- a/src/libsyntax/ext/derive.rs
+++ /dev/null
@@ -1,72 +0,0 @@
-use crate::attr::HasAttrs;
-use crate::ast;
-use crate::source_map::{ExpnInfo, ExpnKind};
-use crate::ext::base::{ExtCtxt, MacroKind};
-use crate::ext::build::AstBuilder;
-use crate::parse::parser::PathStyle;
-use crate::symbol::{Symbol, sym};
-use crate::errors::Applicability;
-
-use syntax_pos::Span;
-use rustc_data_structures::fx::FxHashSet;
-
-pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
-    let mut result = Vec::new();
-    attrs.retain(|attr| {
-        if attr.path != sym::derive {
-            return true;
-        }
-        if !attr.is_meta_item_list() {
-            cx.struct_span_err(attr.span, "malformed `derive` attribute input")
-                .span_suggestion(
-                    attr.span,
-                    "missing traits to be derived",
-                    "#[derive(Trait1, Trait2, ...)]".to_owned(),
-                    Applicability::HasPlaceholders,
-                ).emit();
-            return false;
-        }
-
-        match attr.parse_list(cx.parse_sess,
-                              |parser| parser.parse_path_allowing_meta(PathStyle::Mod)) {
-            Ok(traits) => {
-                result.extend(traits);
-                true
-            }
-            Err(mut e) => {
-                e.emit();
-                false
-            }
-        }
-    });
-    result
-}
-
-pub fn add_derived_markers<T>(cx: &mut ExtCtxt<'_>, span: Span, traits: &[ast::Path], item: &mut T)
-    where T: HasAttrs,
-{
-    let (mut names, mut pretty_name) = (FxHashSet::default(), String::new());
-    for (i, path) in traits.iter().enumerate() {
-        if i > 0 {
-            pretty_name.push_str(", ");
-        }
-        pretty_name.push_str(&path.to_string());
-        names.insert(unwrap_or!(path.segments.get(0), continue).ident.name);
-    }
-
-    let span = span.fresh_expansion(cx.current_expansion.id, ExpnInfo::allow_unstable(
-        ExpnKind::Macro(MacroKind::Derive, Symbol::intern(&pretty_name)), span,
-        cx.parse_sess.edition, cx.allow_derive_markers.clone(),
-    ));
-
-    item.visit_attrs(|attrs| {
-        if names.contains(&sym::Eq) && names.contains(&sym::PartialEq) {
-            let meta = cx.meta_word(span, sym::structural_match);
-            attrs.push(cx.attribute(span, meta));
-        }
-        if names.contains(&sym::Copy) {
-            let meta = cx.meta_word(span, sym::rustc_copy_clone_marker);
-            attrs.push(cx.attribute(span, meta));
-        }
-    });
-}
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 640daaccc3a..cd602d08c5b 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -4,7 +4,7 @@ use crate::attr::{self, HasAttrs};
 use crate::source_map::{dummy_spanned, respan};
 use crate::config::StripUnconfigured;
 use crate::ext::base::*;
-use crate::ext::derive::{add_derived_markers, collect_derives};
+use crate::ext::proc_macro::{add_derived_markers, collect_derives};
 use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind};
 use crate::ext::placeholders::{placeholder, PlaceholderExpander};
 use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
diff --git a/src/libsyntax/ext/proc_macro.rs b/src/libsyntax/ext/proc_macro.rs
new file mode 100644
index 00000000000..425b9813f59
--- /dev/null
+++ b/src/libsyntax/ext/proc_macro.rs
@@ -0,0 +1,249 @@
+use crate::ast::{self, ItemKind, Attribute, Mac};
+use crate::attr::{mark_used, mark_known, HasAttrs};
+use crate::errors::{Applicability, FatalError};
+use crate::ext::base::{self, *};
+use crate::ext::build::AstBuilder;
+use crate::ext::proc_macro_server;
+use crate::parse::{self, token};
+use crate::parse::parser::PathStyle;
+use crate::symbol::{sym, Symbol};
+use crate::tokenstream::{self, TokenStream};
+use crate::visit::Visitor;
+
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::hygiene::{ExpnInfo, ExpnKind};
+use syntax_pos::{Span, DUMMY_SP};
+
+const EXEC_STRATEGY: proc_macro::bridge::server::SameThread =
+    proc_macro::bridge::server::SameThread;
+
+pub struct BangProcMacro {
+    pub client: proc_macro::bridge::client::Client<
+        fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
+    >,
+}
+
+impl base::ProcMacro for BangProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   input: TokenStream)
+                   -> TokenStream {
+        let server = proc_macro_server::Rustc::new(ecx);
+        match self.client.run(&EXEC_STRATEGY, server, input) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "proc macro panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        }
+    }
+}
+
+pub struct AttrProcMacro {
+    pub client: proc_macro::bridge::client::Client<
+        fn(proc_macro::TokenStream, proc_macro::TokenStream) -> proc_macro::TokenStream,
+    >,
+}
+
+impl base::AttrProcMacro for AttrProcMacro {
+    fn expand<'cx>(&self,
+                   ecx: &'cx mut ExtCtxt<'_>,
+                   span: Span,
+                   annotation: TokenStream,
+                   annotated: TokenStream)
+                   -> TokenStream {
+        let server = proc_macro_server::Rustc::new(ecx);
+        match self.client.run(&EXEC_STRATEGY, server, annotation, annotated) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "custom attribute panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        }
+    }
+}
+
+pub struct ProcMacroDerive {
+    pub client: proc_macro::bridge::client::Client<
+        fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
+    >,
+    pub attrs: Vec<ast::Name>,
+}
+
+impl MultiItemModifier for ProcMacroDerive {
+    fn expand(&self,
+              ecx: &mut ExtCtxt<'_>,
+              span: Span,
+              _meta_item: &ast::MetaItem,
+              item: Annotatable)
+              -> Vec<Annotatable> {
+        let item = match item {
+            Annotatable::Item(item) => item,
+            Annotatable::ImplItem(_) |
+            Annotatable::TraitItem(_) |
+            Annotatable::ForeignItem(_) |
+            Annotatable::Stmt(_) |
+            Annotatable::Expr(_) => {
+                ecx.span_err(span, "proc-macro derives may only be \
+                                    applied to a struct, enum, or union");
+                return Vec::new()
+            }
+        };
+        match item.node {
+            ItemKind::Struct(..) |
+            ItemKind::Enum(..) |
+            ItemKind::Union(..) => {},
+            _ => {
+                ecx.span_err(span, "proc-macro derives may only be \
+                                    applied to a struct, enum, or union");
+                return Vec::new()
+            }
+        }
+
+        // Mark attributes as known, and used.
+        MarkAttrs(&self.attrs).visit_item(&item);
+
+        let token = token::Interpolated(Lrc::new(token::NtItem(item)));
+        let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
+
+        let server = proc_macro_server::Rustc::new(ecx);
+        let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
+            Ok(stream) => stream,
+            Err(e) => {
+                let msg = "proc-macro derive panicked";
+                let mut err = ecx.struct_span_fatal(span, msg);
+                if let Some(s) = e.as_str() {
+                    err.help(&format!("message: {}", s));
+                }
+
+                err.emit();
+                FatalError.raise();
+            }
+        };
+
+        let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
+        let msg = "proc-macro derive produced unparseable tokens";
+
+        let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
+        let mut items = vec![];
+
+        loop {
+            match parser.parse_item() {
+                Ok(None) => break,
+                Ok(Some(item)) => {
+                    items.push(Annotatable::Item(item))
+                }
+                Err(mut err) => {
+                    // FIXME: handle this better
+                    err.cancel();
+                    ecx.struct_span_fatal(span, msg).emit();
+                    FatalError.raise();
+                }
+            }
+        }
+
+
+        // fail if there have been errors emitted
+        if ecx.parse_sess.span_diagnostic.err_count() > error_count_before {
+            ecx.struct_span_fatal(span, msg).emit();
+            FatalError.raise();
+        }
+
+        items
+    }
+}
+
+struct MarkAttrs<'a>(&'a [ast::Name]);
+
+impl<'a> Visitor<'a> for MarkAttrs<'a> {
+    fn visit_attribute(&mut self, attr: &Attribute) {
+        if let Some(ident) = attr.ident() {
+            if self.0.contains(&ident.name) {
+                mark_used(attr);
+                mark_known(attr);
+            }
+        }
+    }
+
+    fn visit_mac(&mut self, _mac: &Mac) {}
+}
+
+pub fn is_proc_macro_attr(attr: &Attribute) -> bool {
+    [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
+        .iter().any(|kind| attr.check_name(*kind))
+}
+
+crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
+    let mut result = Vec::new();
+    attrs.retain(|attr| {
+        if attr.path != sym::derive {
+            return true;
+        }
+        if !attr.is_meta_item_list() {
+            cx.struct_span_err(attr.span, "malformed `derive` attribute input")
+                .span_suggestion(
+                    attr.span,
+                    "missing traits to be derived",
+                    "#[derive(Trait1, Trait2, ...)]".to_owned(),
+                    Applicability::HasPlaceholders,
+                ).emit();
+            return false;
+        }
+
+        match attr.parse_list(cx.parse_sess,
+                              |parser| parser.parse_path_allowing_meta(PathStyle::Mod)) {
+            Ok(traits) => {
+                result.extend(traits);
+                true
+            }
+            Err(mut e) => {
+                e.emit();
+                false
+            }
+        }
+    });
+    result
+}
+
+crate fn add_derived_markers<T: HasAttrs>(
+    cx: &mut ExtCtxt<'_>, span: Span, traits: &[ast::Path], item: &mut T
+) {
+    let (mut names, mut pretty_name) = (FxHashSet::default(), String::new());
+    for (i, path) in traits.iter().enumerate() {
+        if i > 0 {
+            pretty_name.push_str(", ");
+        }
+        pretty_name.push_str(&path.to_string());
+        names.insert(unwrap_or!(path.segments.get(0), continue).ident.name);
+    }
+
+    let span = span.fresh_expansion(cx.current_expansion.id, ExpnInfo::allow_unstable(
+        ExpnKind::Macro(MacroKind::Derive, Symbol::intern(&pretty_name)), span,
+        cx.parse_sess.edition, cx.allow_derive_markers.clone(),
+    ));
+
+    item.visit_attrs(|attrs| {
+        if names.contains(&sym::Eq) && names.contains(&sym::PartialEq) {
+            let meta = cx.meta_word(span, sym::structural_match);
+            attrs.push(cx.attribute(span, meta));
+        }
+        if names.contains(&sym::Copy) {
+            let meta = cx.meta_word(span, sym::rustc_copy_clone_marker);
+            attrs.push(cx.attribute(span, meta));
+        }
+    });
+}
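
collect_derives above leans on Vec::retain with a side-collecting closure: `#[derive(...)]` attributes have their trait paths pushed into `result` and are then dropped from the list, while every other attribute is kept in place. A minimal standalone sketch of that retain-and-collect idiom, using a hypothetical (name, arguments) pair in place of ast::Attribute and a hypothetical collect_matching helper:

// Hypothetical attribute shape: (name, list of arguments) instead of ast::Attribute.
fn collect_matching(attrs: &mut Vec<(String, Vec<String>)>, key: &str) -> Vec<String> {
    let mut result = Vec::new();
    attrs.retain(|(name, args)| {
        if name != key {
            return true; // unrelated attribute: keep it in place
        }
        result.extend(args.iter().cloned()); // harvest its arguments...
        false                                // ...and drop the attribute itself
    });
    result
}

fn main() {
    let mut attrs = vec![
        ("derive".to_string(), vec!["Clone".to_string(), "Debug".to_string()]),
        ("inline".to_string(), vec![]),
    ];
    let derives = collect_matching(&mut attrs, "derive");
    assert_eq!(derives, vec!["Clone".to_string(), "Debug".to_string()]);
    assert_eq!(attrs.len(), 1); // only the #[inline]-style entry survives
}
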
diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs
new file mode 100644
index 00000000000..8d0023c9ab1
--- /dev/null
+++ b/src/libsyntax/ext/proc_macro_server.rs
@@ -0,0 +1,715 @@
+use crate::ast;
+use crate::ext::base::ExtCtxt;
+use crate::parse::{self, token, ParseSess};
+use crate::parse::lexer::comments;
+use crate::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
+
+use errors::{Diagnostic, DiagnosticBuilder};
+use rustc_data_structures::sync::Lrc;
+use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
+use syntax_pos::hygiene::{SyntaxContext, Transparency};
+use syntax_pos::symbol::{kw, sym, Symbol};
+
+use proc_macro::{Delimiter, Level, LineColumn, Spacing};
+use proc_macro::bridge::{server, TokenTree};
+use std::{ascii, panic};
+use std::ops::Bound;
+
+trait FromInternal<T> {
+    fn from_internal(x: T) -> Self;
+}
+
+trait ToInternal<T> {
+    fn to_internal(self) -> T;
+}
+
+impl FromInternal<token::DelimToken> for Delimiter {
+    fn from_internal(delim: token::DelimToken) -> Delimiter {
+        match delim {
+            token::Paren => Delimiter::Parenthesis,
+            token::Brace => Delimiter::Brace,
+            token::Bracket => Delimiter::Bracket,
+            token::NoDelim => Delimiter::None,
+        }
+    }
+}
+
+impl ToInternal<token::DelimToken> for Delimiter {
+    fn to_internal(self) -> token::DelimToken {
+        match self {
+            Delimiter::Parenthesis => token::Paren,
+            Delimiter::Brace => token::Brace,
+            Delimiter::Bracket => token::Bracket,
+            Delimiter::None => token::NoDelim,
+        }
+    }
+}
+
+impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
+    for TokenTree<Group, Punct, Ident, Literal>
+{
+    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
+                    -> Self {
+        use crate::parse::token::*;
+
+        let joint = is_joint == Joint;
+        let Token { kind, span } = match tree {
+            tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                let delimiter = Delimiter::from_internal(delim);
+                return TokenTree::Group(Group {
+                    delimiter,
+                    stream: tts.into(),
+                    span,
+                });
+            }
+            tokenstream::TokenTree::Token(token) => token,
+        };
+
+        macro_rules! tt {
+            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
+                TokenTree::$ty(self::$ty {
+                    $($field $(: $value)*,)+
+                    span,
+                })
+            );
+            ($ty:ident::$method:ident($($value:expr),*)) => (
+                TokenTree::$ty(self::$ty::$method($($value,)* span))
+            );
+        }
+        macro_rules! op {
+            ($a:expr) => {
+                tt!(Punct::new($a, joint))
+            };
+            ($a:expr, $b:expr) => {{
+                stack.push(tt!(Punct::new($b, joint)));
+                tt!(Punct::new($a, true))
+            }};
+            ($a:expr, $b:expr, $c:expr) => {{
+                stack.push(tt!(Punct::new($c, joint)));
+                stack.push(tt!(Punct::new($b, true)));
+                tt!(Punct::new($a, true))
+            }};
+        }
+
+        match kind {
+            Eq => op!('='),
+            Lt => op!('<'),
+            Le => op!('<', '='),
+            EqEq => op!('=', '='),
+            Ne => op!('!', '='),
+            Ge => op!('>', '='),
+            Gt => op!('>'),
+            AndAnd => op!('&', '&'),
+            OrOr => op!('|', '|'),
+            Not => op!('!'),
+            Tilde => op!('~'),
+            BinOp(Plus) => op!('+'),
+            BinOp(Minus) => op!('-'),
+            BinOp(Star) => op!('*'),
+            BinOp(Slash) => op!('/'),
+            BinOp(Percent) => op!('%'),
+            BinOp(Caret) => op!('^'),
+            BinOp(And) => op!('&'),
+            BinOp(Or) => op!('|'),
+            BinOp(Shl) => op!('<', '<'),
+            BinOp(Shr) => op!('>', '>'),
+            BinOpEq(Plus) => op!('+', '='),
+            BinOpEq(Minus) => op!('-', '='),
+            BinOpEq(Star) => op!('*', '='),
+            BinOpEq(Slash) => op!('/', '='),
+            BinOpEq(Percent) => op!('%', '='),
+            BinOpEq(Caret) => op!('^', '='),
+            BinOpEq(And) => op!('&', '='),
+            BinOpEq(Or) => op!('|', '='),
+            BinOpEq(Shl) => op!('<', '<', '='),
+            BinOpEq(Shr) => op!('>', '>', '='),
+            At => op!('@'),
+            Dot => op!('.'),
+            DotDot => op!('.', '.'),
+            DotDotDot => op!('.', '.', '.'),
+            DotDotEq => op!('.', '.', '='),
+            Comma => op!(','),
+            Semi => op!(';'),
+            Colon => op!(':'),
+            ModSep => op!(':', ':'),
+            RArrow => op!('-', '>'),
+            LArrow => op!('<', '-'),
+            FatArrow => op!('=', '>'),
+            Pound => op!('#'),
+            Dollar => op!('$'),
+            Question => op!('?'),
+            SingleQuote => op!('\''),
+
+            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
+            Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
+            Lifetime(name) => {
+                let ident = ast::Ident::new(name, span).without_first_quote();
+                stack.push(tt!(Ident::new(ident.name, false)));
+                tt!(Punct::new('\'', true))
+            }
+            Literal(lit) => tt!(Literal { lit }),
+            DocComment(c) => {
+                let style = comments::doc_comment_style(&c.as_str());
+                let stripped = comments::strip_doc_comment_decoration(&c.as_str());
+                let mut escaped = String::new();
+                for ch in stripped.chars() {
+                    escaped.extend(ch.escape_debug());
+                }
+                let stream = vec![
+                    Ident(sym::doc, false),
+                    Eq,
+                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
+                ]
+                .into_iter()
+                .map(|kind| tokenstream::TokenTree::token(kind, span))
+                .collect();
+                stack.push(TokenTree::Group(Group {
+                    delimiter: Delimiter::Bracket,
+                    stream,
+                    span: DelimSpan::from_single(span),
+                }));
+                if style == ast::AttrStyle::Inner {
+                    stack.push(tt!(Punct::new('!', false)));
+                }
+                tt!(Punct::new('#', false))
+            }
+
+            Interpolated(nt) => {
+                let stream = nt.to_tokenstream(sess, span);
+                TokenTree::Group(Group {
+                    delimiter: Delimiter::None,
+                    stream,
+                    span: DelimSpan::from_single(span),
+                })
+            }
+
+            OpenDelim(..) | CloseDelim(..) => unreachable!(),
+            Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
+        }
+    }
+}
+
+impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
+    fn to_internal(self) -> TokenStream {
+        use crate::parse::token::*;
+
+        let (ch, joint, span) = match self {
+            TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
+            TokenTree::Group(Group {
+                delimiter,
+                stream,
+                span,
+            }) => {
+                return tokenstream::TokenTree::Delimited(
+                    span,
+                    delimiter.to_internal(),
+                    stream.into(),
+                )
+                .into();
+            }
+            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
+                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
+            }
+            TokenTree::Literal(self::Literal {
+                lit: token::Lit { kind: token::Integer, symbol, suffix },
+                span,
+            }) if symbol.as_str().starts_with("-") => {
+                let minus = BinOp(BinOpToken::Minus);
+                let symbol = Symbol::intern(&symbol.as_str()[1..]);
+                let integer = TokenKind::lit(token::Integer, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(integer, span);
+                return vec![a, b].into_iter().collect();
+            }
+            TokenTree::Literal(self::Literal {
+                lit: token::Lit { kind: token::Float, symbol, suffix },
+                span,
+            }) if symbol.as_str().starts_with("-") => {
+                let minus = BinOp(BinOpToken::Minus);
+                let symbol = Symbol::intern(&symbol.as_str()[1..]);
+                let float = TokenKind::lit(token::Float, symbol, suffix);
+                let a = tokenstream::TokenTree::token(minus, span);
+                let b = tokenstream::TokenTree::token(float, span);
+                return vec![a, b].into_iter().collect();
+            }
+            TokenTree::Literal(self::Literal { lit, span }) => {
+                return tokenstream::TokenTree::token(Literal(lit), span).into()
+            }
+        };
+
+        let kind = match ch {
+            '=' => Eq,
+            '<' => Lt,
+            '>' => Gt,
+            '!' => Not,
+            '~' => Tilde,
+            '+' => BinOp(Plus),
+            '-' => BinOp(Minus),
+            '*' => BinOp(Star),
+            '/' => BinOp(Slash),
+            '%' => BinOp(Percent),
+            '^' => BinOp(Caret),
+            '&' => BinOp(And),
+            '|' => BinOp(Or),
+            '@' => At,
+            '.' => Dot,
+            ',' => Comma,
+            ';' => Semi,
+            ':' => Colon,
+            '#' => Pound,
+            '$' => Dollar,
+            '?' => Question,
+            '\'' => SingleQuote,
+            _ => unreachable!(),
+        };
+
+        let tree = tokenstream::TokenTree::token(kind, span);
+        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
+    }
+}
+
+impl ToInternal<errors::Level> for Level {
+    fn to_internal(self) -> errors::Level {
+        match self {
+            Level::Error => errors::Level::Error,
+            Level::Warning => errors::Level::Warning,
+            Level::Note => errors::Level::Note,
+            Level::Help => errors::Level::Help,
+            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct TokenStreamIter {
+    cursor: tokenstream::Cursor,
+    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
+}
+
+#[derive(Clone)]
+pub struct Group {
+    delimiter: Delimiter,
+    stream: TokenStream,
+    span: DelimSpan,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Punct {
+    ch: char,
+    // NB. not using `Spacing` here because it doesn't implement `Hash`.
+    joint: bool,
+    span: Span,
+}
+
+impl Punct {
+    fn new(ch: char, joint: bool, span: Span) -> Punct {
+        const LEGAL_CHARS: &[char] = &['=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^',
+                                       '&', '|', '@', '.', ',', ';', ':', '#', '$', '?', '\''];
+        if !LEGAL_CHARS.contains(&ch) {
+            panic!("unsupported character `{:?}`", ch)
+        }
+        Punct { ch, joint, span }
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Ident {
+    sym: Symbol,
+    is_raw: bool,
+    span: Span,
+}
+
+impl Ident {
+    fn is_valid(string: &str) -> bool {
+        let mut chars = string.chars();
+        if let Some(start) = chars.next() {
+            (start == '_' || start.is_xid_start())
+                && chars.all(|cont| cont == '_' || cont.is_xid_continue())
+        } else {
+            false
+        }
+    }
+    fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
+        let string = sym.as_str();
+        if !Self::is_valid(&string) {
+            panic!("`{:?}` is not a valid identifier", string)
+        }
+        // Get rid of gensyms to conservatively check rawness on the string contents only.
+        if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
+            panic!("`{}` cannot be a raw identifier", string);
+        }
+        Ident { sym, is_raw, span }
+    }
+    fn dollar_crate(span: Span) -> Ident {
+        // `$crate` is accepted as an ident only if it comes from the compiler.
+        Ident { sym: kw::DollarCrate, is_raw: false, span }
+    }
+}
+
+// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
+#[derive(Clone, Debug)]
+pub struct Literal {
+    lit: token::Lit,
+    span: Span,
+}
+
+pub(crate) struct Rustc<'a> {
+    sess: &'a ParseSess,
+    def_site: Span,
+    call_site: Span,
+}
+
+impl<'a> Rustc<'a> {
+    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
+        // No way to determine def location for a proc macro right now, so use call location.
+        let location = cx.current_expansion.id.expn_info().unwrap().call_site;
+        let to_span = |transparency| {
+            location.with_ctxt(
+                SyntaxContext::empty()
+                    .apply_mark_with_transparency(cx.current_expansion.id, transparency),
+            )
+        };
+        Rustc {
+            sess: cx.parse_sess,
+            def_site: to_span(Transparency::Opaque),
+            call_site: to_span(Transparency::Transparent),
+        }
+    }
+
+    fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
+        Literal {
+            lit: token::Lit::new(kind, symbol, suffix),
+            span: server::Span::call_site(self),
+        }
+    }
+}
+
+impl server::Types for Rustc<'_> {
+    type TokenStream = TokenStream;
+    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
+    type TokenStreamIter = TokenStreamIter;
+    type Group = Group;
+    type Punct = Punct;
+    type Ident = Ident;
+    type Literal = Literal;
+    type SourceFile = Lrc<SourceFile>;
+    type MultiSpan = Vec<Span>;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+}
+
+impl server::TokenStream for Rustc<'_> {
+    fn new(&mut self) -> Self::TokenStream {
+        TokenStream::empty()
+    }
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        parse::parse_stream_from_source_str(
+            FileName::proc_macro_source_code(src),
+            src.to_string(),
+            self.sess,
+            Some(self.call_site),
+        )
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        stream.to_string()
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+    ) -> Self::TokenStream {
+        tree.to_internal()
+    }
+    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
+        TokenStreamIter {
+            cursor: stream.trees(),
+            stack: vec![],
+        }
+    }
+}
+
+impl server::TokenStreamBuilder for Rustc<'_> {
+    fn new(&mut self) -> Self::TokenStreamBuilder {
+        tokenstream::TokenStreamBuilder::new()
+    }
+    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
+        builder.push(stream);
+    }
+    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+        builder.build()
+    }
+}
+
+impl server::TokenStreamIter for Rustc<'_> {
+    fn next(
+        &mut self,
+        iter: &mut Self::TokenStreamIter,
+    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        loop {
+            let tree = iter.stack.pop().or_else(|| {
+                let next = iter.cursor.next_with_joint()?;
+                Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
+            })?;
+            // HACK: The condition "dummy span + group with empty delimiter" represents an AST
+            // fragment approximately converted into a token stream. This may happen, for
+            // example, with inputs to proc macro attributes, including derives. Such "groups"
+            // need to be flattened during iteration over the stream's token trees.
+            // Eventually this needs to be removed in favor of keeping original token trees
+            // and not doing the roundtrip through AST.
+            if let TokenTree::Group(ref group) = tree {
+                if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
+                    iter.cursor.append(group.stream.clone());
+                    continue;
+                }
+            }
+            return Some(tree);
+        }
+    }
+}
+
+impl server::Group for Rustc<'_> {
+    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
+        Group {
+            delimiter,
+            stream,
+            span: DelimSpan::from_single(server::Span::call_site(self)),
+        }
+    }
+    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
+        group.delimiter
+    }
+    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
+        group.stream.clone()
+    }
+    fn span(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.entire()
+    }
+    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.open
+    }
+    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
+        group.span.close
+    }
+    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
+        group.span = DelimSpan::from_single(span);
+    }
+}
+
+impl server::Punct for Rustc<'_> {
+    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
+        Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
+    }
+    fn as_char(&mut self, punct: Self::Punct) -> char {
+        punct.ch
+    }
+    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
+        if punct.joint {
+            Spacing::Joint
+        } else {
+            Spacing::Alone
+        }
+    }
+    fn span(&mut self, punct: Self::Punct) -> Self::Span {
+        punct.span
+    }
+    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
+        Punct { span, ..punct }
+    }
+}
+
+impl server::Ident for Rustc<'_> {
+    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
+        Ident::new(Symbol::intern(string), is_raw, span)
+    }
+    fn span(&mut self, ident: Self::Ident) -> Self::Span {
+        ident.span
+    }
+    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
+        Ident { span, ..ident }
+    }
+}
+
+impl server::Literal for Rustc<'_> {
+    // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
+    fn debug(&mut self, literal: &Self::Literal) -> String {
+        format!("{:?}", literal)
+    }
+    fn integer(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Integer, Symbol::intern(n), None)
+    }
+    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
+        self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
+    }
+    fn float(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), None)
+    }
+    fn f32(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
+    }
+    fn f64(&mut self, n: &str) -> Self::Literal {
+        self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
+    }
+    fn string(&mut self, string: &str) -> Self::Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_debug());
+        }
+        self.lit(token::Str, Symbol::intern(&escaped), None)
+    }
+    fn character(&mut self, ch: char) -> Self::Literal {
+        let mut escaped = String::new();
+        escaped.extend(ch.escape_unicode());
+        self.lit(token::Char, Symbol::intern(&escaped), None)
+    }
+    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
+        let string = bytes
+            .iter()
+            .cloned()
+            .flat_map(ascii::escape_default)
+            .map(Into::<char>::into)
+            .collect::<String>();
+        self.lit(token::ByteStr, Symbol::intern(&string), None)
+    }
+    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
+        literal.span
+    }
+    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
+        literal.span = span;
+    }
+    fn subspan(
+        &mut self,
+        literal: &Self::Literal,
+        start: Bound<usize>,
+        end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        let span = literal.span;
+        let length = span.hi().to_usize() - span.lo().to_usize();
+
+        let start = match start {
+            Bound::Included(lo) => lo,
+            Bound::Excluded(lo) => lo + 1,
+            Bound::Unbounded => 0,
+        };
+
+        let end = match end {
+            Bound::Included(hi) => hi + 1,
+            Bound::Excluded(hi) => hi,
+            Bound::Unbounded => length,
+        };
+
+        // Bounds check the values, preventing addition overflow and OOB spans.
+        if start > u32::max_value() as usize
+            || end > u32::max_value() as usize
+            || (u32::max_value() - start as u32) < span.lo().to_u32()
+            || (u32::max_value() - end as u32) < span.lo().to_u32()
+            || start >= end
+            || end > length
+        {
+            return None;
+        }
+
+        let new_lo = span.lo() + BytePos::from_usize(start);
+        let new_hi = span.lo() + BytePos::from_usize(end);
+        Some(span.with_lo(new_lo).with_hi(new_hi))
+    }
+}
+
+impl server::SourceFile for Rustc<'_> {
+    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
+        Lrc::ptr_eq(file1, file2)
+    }
+    fn path(&mut self, file: &Self::SourceFile) -> String {
+        match file.name {
+            FileName::Real(ref path) => path
+                .to_str()
+                .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
+                .to_string(),
+            _ => file.name.to_string(),
+        }
+    }
+    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
+        file.is_real_file()
+    }
+}
+
+impl server::MultiSpan for Rustc<'_> {
+    fn new(&mut self) -> Self::MultiSpan {
+        vec![]
+    }
+    fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
+        spans.push(span)
+    }
+}
+
+impl server::Diagnostic for Rustc<'_> {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level.to_internal(), msg);
+        diag.set_span(MultiSpan::from_spans(spans));
+        diag
+    }
+    fn sub(
+        &mut self,
+        diag: &mut Self::Diagnostic,
+        level: Level,
+        msg: &str,
+        spans: Self::MultiSpan,
+    ) {
+        diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
+    }
+    fn emit(&mut self, diag: Self::Diagnostic) {
+        DiagnosticBuilder::new_diagnostic(&self.sess.span_diagnostic, diag).emit()
+    }
+}
+
+impl server::Span for Rustc<'_> {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
+    }
+    fn def_site(&mut self) -> Self::Span {
+        self.def_site
+    }
+    fn call_site(&mut self) -> Self::Span {
+        self.call_site
+    }
+    fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
+        self.sess.source_map().lookup_char_pos(span.lo()).file
+    }
+    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
+        span.ctxt().outer_expn_info().map(|i| i.call_site)
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        span.source_callsite()
+    }
+    fn start(&mut self, span: Self::Span) -> LineColumn {
+        let loc = self.sess.source_map().lookup_char_pos(span.lo());
+        LineColumn {
+            line: loc.line,
+            column: loc.col.to_usize(),
+        }
+    }
+    fn end(&mut self, span: Self::Span) -> LineColumn {
+        let loc = self.sess.source_map().lookup_char_pos(span.hi());
+        LineColumn {
+            line: loc.line,
+            column: loc.col.to_usize(),
+        }
+    }
+    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
+        let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
+        let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
+
+        if self_loc.file.name != other_loc.file.name {
+            return None;
+        }
+
+        Some(first.to(second))
+    }
+    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
+        span.with_ctxt(at.ctxt())
+    }
+    fn source_text(&mut self,  span: Self::Span) -> Option<String> {
+        self.sess.source_map().span_to_snippet(span).ok()
+    }
+}
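
Literal::subspan above first resolves the two std::ops::Bound values into half-open byte offsets, then bounds-checks them before rebuilding the span. A self-contained sketch of that resolution step, detached from Span/BytePos; the resolve_bounds name is ours, and checked_add stands in for the explicit u32 overflow checks:

use std::ops::Bound;

// Turn (start, end) bounds into concrete half-open offsets within a literal of
// `length` bytes, rejecting empty, inverted, or out-of-range requests.
fn resolve_bounds(start: Bound<usize>, end: Bound<usize>, length: usize) -> Option<(usize, usize)> {
    let start = match start {
        Bound::Included(lo) => lo,
        Bound::Excluded(lo) => lo.checked_add(1)?,
        Bound::Unbounded => 0,
    };
    let end = match end {
        Bound::Included(hi) => hi.checked_add(1)?,
        Bound::Excluded(hi) => hi,
        Bound::Unbounded => length,
    };
    if start >= end || end > length {
        return None;
    }
    Some((start, end))
}

fn main() {
    // An unbounded end clamps to the literal's length.
    assert_eq!(resolve_bounds(Bound::Included(1), Bound::Unbounded, 3), Some((1, 3)));
    // Empty and out-of-range requests are rejected, mirroring `subspan`.
    assert_eq!(resolve_bounds(Bound::Included(2), Bound::Excluded(2), 3), None);
    assert_eq!(resolve_bounds(Bound::Unbounded, Bound::Included(5), 3), None);
}
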
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
deleted file mode 100644
index ae080c05eec..00000000000
--- a/src/libsyntax/ext/source_util.rs
+++ /dev/null
@@ -1,165 +0,0 @@
-use crate::ast;
-use crate::ext::base::{self, *};
-use crate::ext::build::AstBuilder;
-use crate::parse::{self, token, DirectoryOwnership};
-use crate::print::pprust;
-use crate::ptr::P;
-use crate::symbol::Symbol;
-use crate::tokenstream;
-
-use smallvec::SmallVec;
-use syntax_pos::{self, Pos, Span};
-
-use std::fs;
-use std::io::ErrorKind;
-use rustc_data_structures::sync::Lrc;
-
-// These macros all relate to the file system; they either return
-// the column/row/filename of the expression, or they include
-// a given file into the current one.
-
-/// line!(): expands to the current line number
-pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                   -> Box<dyn base::MacResult+'static> {
-    base::check_zero_tts(cx, sp, tts, "line!");
-
-    let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.source_map().lookup_char_pos(topmost.lo());
-
-    base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
-}
-
-/* column!(): expands to the current column number */
-pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                  -> Box<dyn base::MacResult+'static> {
-    base::check_zero_tts(cx, sp, tts, "column!");
-
-    let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.source_map().lookup_char_pos(topmost.lo());
-
-    base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
-}
-
-/// file!(): expands to the current filename */
-/// The source_file (`loc.file`) contains a bunch more information we could spit
-/// out if we wanted.
-pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                   -> Box<dyn base::MacResult+'static> {
-    base::check_zero_tts(cx, sp, tts, "file!");
-
-    let topmost = cx.expansion_cause().unwrap_or(sp);
-    let loc = cx.source_map().lookup_char_pos(topmost.lo());
-    base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
-}
-
-pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                        -> Box<dyn base::MacResult+'static> {
-    let s = pprust::tts_to_string(tts);
-    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
-}
-
-pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                  -> Box<dyn base::MacResult+'static> {
-    base::check_zero_tts(cx, sp, tts, "module_path!");
-    let mod_path = &cx.current_expansion.module.mod_path;
-    let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
-
-    base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
-}
-
-/// include! : parse the given file as an expr
-/// This is generally a bad idea because it's going to behave
-/// unhygienically.
-pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                           -> Box<dyn base::MacResult+'cx> {
-    let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
-        Some(f) => f,
-        None => return DummyResult::any(sp),
-    };
-    // The file will be added to the code map by the parser
-    let file = cx.resolve_path(file, sp);
-    let directory_ownership = DirectoryOwnership::Owned { relative: None };
-    let p = parse::new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);
-
-    struct ExpandResult<'a> {
-        p: parse::parser::Parser<'a>,
-    }
-    impl<'a> base::MacResult for ExpandResult<'a> {
-        fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
-            Some(panictry!(self.p.parse_expr()))
-        }
-
-        fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
-            let mut ret = SmallVec::new();
-            while self.p.token != token::Eof {
-                match panictry!(self.p.parse_item()) {
-                    Some(item) => ret.push(item),
-                    None => self.p.diagnostic().span_fatal(self.p.token.span,
-                                                           &format!("expected item, found `{}`",
-                                                                    self.p.this_token_to_string()))
-                                               .raise()
-                }
-            }
-            Some(ret)
-        }
-    }
-
-    Box::new(ExpandResult { p })
-}
-
-// include_str! : read the given file, insert it as a literal string expr
-pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                          -> Box<dyn base::MacResult+'static> {
-    let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
-        Some(f) => f,
-        None => return DummyResult::expr(sp)
-    };
-    let file = cx.resolve_path(file, sp);
-    match fs::read_to_string(&file) {
-        Ok(src) => {
-            let interned_src = Symbol::intern(&src);
-
-            // Add this input file to the code map to make it available as
-            // dependency information
-            cx.source_map().new_source_file(file.into(), src);
-
-            base::MacEager::expr(cx.expr_str(sp, interned_src))
-        },
-        Err(ref e) if e.kind() == ErrorKind::InvalidData => {
-            cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display()));
-            DummyResult::expr(sp)
-        }
-        Err(e) => {
-            cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
-            DummyResult::expr(sp)
-        }
-    }
-}
-
-pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
-                            -> Box<dyn base::MacResult+'static> {
-    let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
-        Some(f) => f,
-        None => return DummyResult::expr(sp)
-    };
-    let file = cx.resolve_path(file, sp);
-    match fs::read(&file) {
-        Ok(bytes) => {
-            // Add the contents to the source map if it contains UTF-8.
-            let (contents, bytes) = match String::from_utf8(bytes) {
-                Ok(s) => {
-                    let bytes = s.as_bytes().to_owned();
-                    (s, bytes)
-                },
-                Err(e) => (String::new(), e.into_bytes()),
-            };
-            cx.source_map().new_source_file(file.into(), contents);
-
-            base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
-        },
-        Err(e) => {
-            cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
-            DummyResult::expr(sp)
-        }
-    }
-}
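
expand_include_bytes in the removed source_util.rs registers the included file with the source map as text when the bytes are valid UTF-8, but returns the original bytes either way. A small sketch of that String::from_utf8 round-trip, with a hypothetical split_utf8 helper standing in for the ExtCtxt/source-map plumbing:

fn split_utf8(bytes: Vec<u8>) -> (String, Vec<u8>) {
    match String::from_utf8(bytes) {
        // Valid UTF-8: keep both the text view and a copy of the raw bytes.
        Ok(s) => {
            let bytes = s.as_bytes().to_owned();
            (s, bytes)
        }
        // Not UTF-8: recover the original bytes from the error, with no text view.
        Err(e) => (String::new(), e.into_bytes()),
    }
}

fn main() {
    let (text, bytes) = split_utf8(b"hello".to_vec());
    assert_eq!(text, "hello");
    assert_eq!(bytes, b"hello");

    let (text, bytes) = split_utf8(vec![0xff, 0xfe]);
    assert!(text.is_empty());
    assert_eq!(bytes, vec![0xff, 0xfe]);
}
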