about summary refs log tree commit diff
diff options
context:
space:
mode:
author      bors <bors@rust-lang.org>  2022-07-22 12:28:27 +0000
committer   bors <bors@rust-lang.org>  2022-07-22 12:28:27 +0000
commit      cb8a3be2a158466abe984c4d8448775983a6f388 (patch)
tree        4391662492ba2f98fa23f2bb6a08c0d24dc3430e
parent      84a6fac37ad61ff512993ee64b47deff9a52c560 (diff)
parent      e591ff32693b77ad3caf3e5b6d0301f55a6aa543 (diff)
download    rust-cb8a3be2a158466abe984c4d8448775983a6f388.tar.gz
            rust-cb8a3be2a158466abe984c4d8448775983a6f388.zip
Auto merge of #12835 - fasterthanlime:sysroot-abi, r=fasterthanlime
Introduce proc-macro-srv/sysroot-abi

Still a WIP.

This change is tracked by:

  * https://github.com/rust-lang/rust-analyzer/issues/12818
-rw-r--r--  Cargo.lock                                                        |  29
-rw-r--r--  crates/proc-macro-api/src/lib.rs                                  |   2
-rw-r--r--  crates/proc-macro-api/src/version.rs                              |   6
-rw-r--r--  crates/proc-macro-srv/Cargo.toml                                  |   4
-rw-r--r--  crates/proc-macro-srv/build.rs                                    |  25
-rw-r--r--  crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs                 | 102
-rw-r--r--  crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs           | 518
-rw-r--r--  crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs    |  46
-rw-r--r--  crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs | 179
-rw-r--r--  crates/proc-macro-srv/src/abis/mod.rs                             |  44
-rw-r--r--  crates/proc-macro-srv/src/lib.rs                                  |  27
-rw-r--r--  crates/proc-macro-srv/src/tests/mod.rs                            |  44
-rw-r--r--  crates/proc-macro-test/imp/src/lib.rs                             |  36
-rw-r--r--  crates/rust-analyzer/Cargo.toml                                   |   1
14 files changed, 1053 insertions, 10 deletions
diff --git a/Cargo.lock b/Cargo.lock
index a960012c8cf..67bfbf01096 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -248,6 +248,20 @@ dependencies = [
 ]
 
 [[package]]
+name = "crossbeam"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845"
+dependencies = [
+ "cfg-if",
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-epoch",
+ "crossbeam-queue",
+ "crossbeam-utils",
+]
+
+[[package]]
 name = "crossbeam-channel"
 version = "0.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -283,6 +297,16 @@ dependencies = [
 ]
 
 [[package]]
+name = "crossbeam-queue"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
 name = "crossbeam-utils"
 version = "0.8.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1022,9 +1046,9 @@ dependencies = [
 
 [[package]]
 name = "once_cell"
-version = "1.12.0"
+version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
 
 [[package]]
 name = "oorandom"
@@ -1162,6 +1186,7 @@ dependencies = [
 name = "proc-macro-srv"
 version = "0.0.0"
 dependencies = [
+ "crossbeam",
  "expect-test",
  "libloading",
  "mbe",
diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs
index dbf2fb37e75..d7010e825aa 100644
--- a/crates/proc-macro-api/src/lib.rs
+++ b/crates/proc-macro-api/src/lib.rs
@@ -26,7 +26,7 @@ use crate::{
     process::ProcMacroProcessSrv,
 };
 
-pub use version::{read_dylib_info, RustCInfo};
+pub use version::{read_dylib_info, read_version, RustCInfo};
 
 #[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
 pub enum ProcMacroKind {
diff --git a/crates/proc-macro-api/src/version.rs b/crates/proc-macro-api/src/version.rs
index 66fe16e94f7..030531b80d7 100644
--- a/crates/proc-macro-api/src/version.rs
+++ b/crates/proc-macro-api/src/version.rs
@@ -16,6 +16,8 @@ pub struct RustCInfo {
     pub channel: String,
     pub commit: Option<String>,
     pub date: Option<String>,
+    // something like "rustc 1.58.1 (db9d1b20b 2022-01-20)"
+    pub version_string: String,
 }
 
 /// Read rustc dylib information
@@ -68,7 +70,7 @@ pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
     }
     let version = (version_numbers[0], version_numbers[1], version_numbers[2]);
 
-    Ok(RustCInfo { version, channel, commit, date })
+    Ok(RustCInfo { version, channel, commit, date, version_string: ver_str })
 }
 
 /// This is used inside read_version() to locate the ".rustc" section
@@ -102,7 +104,7 @@ fn read_section<'a>(dylib_binary: &'a [u8], section_name: &str) -> io::Result<&'
 /// * [some more bytes that we don't really care but about still there] :-)
 /// Check this issue for more about the bytes layout:
 /// <https://github.com/rust-lang/rust-analyzer/issues/6174>
-fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
+pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> {
     let dylib_file = File::open(dylib_path)?;
     let dylib_mmaped = unsafe { Mmap::map(&dylib_file) }?;
 
diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml
index ce1fa0063f8..5746eac0b37 100644
--- a/crates/proc-macro-srv/Cargo.toml
+++ b/crates/proc-macro-srv/Cargo.toml
@@ -24,9 +24,13 @@ tt = { path = "../tt", version = "0.0.0" }
 mbe = { path = "../mbe", version = "0.0.0" }
 paths = { path = "../paths", version = "0.0.0" }
 proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+crossbeam = "0.8.1"
 
 [dev-dependencies]
 expect-test = "1.4.0"
 
 # used as proc macro test targets
 proc-macro-test = { path = "../proc-macro-test" }
+
+[features]
+sysroot-abi = []
diff --git a/crates/proc-macro-srv/build.rs b/crates/proc-macro-srv/build.rs
new file mode 100644
index 00000000000..a8c732f3154
--- /dev/null
+++ b/crates/proc-macro-srv/build.rs
@@ -0,0 +1,25 @@
+//! Determine rustc version `proc-macro-srv` (and thus the sysroot ABI) is
+//! build with and make it accessible at runtime for ABI selection.
+
+use std::{env, fs::File, io::Write, path::PathBuf, process::Command};
+
+fn main() {
+    let mut path = PathBuf::from(env::var_os("OUT_DIR").unwrap());
+    path.push("rustc_version.rs");
+    let mut f = File::create(&path).unwrap();
+
+    let rustc = env::var("RUSTC").expect("proc-macro-srv's build script expects RUSTC to be set");
+    let output = Command::new(rustc).arg("--version").output().expect("rustc --version must run");
+    let version_string = std::str::from_utf8(&output.stdout[..])
+        .expect("rustc --version output must be UTF-8")
+        .trim();
+
+    write!(
+        f,
+        "
+    #[allow(dead_code)]
+    pub(crate) const RUSTC_VERSION_STRING: &str = {version_string:?};
+    "
+    )
+    .unwrap();
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
new file mode 100644
index 00000000000..44712f41919
--- /dev/null
+++ b/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -0,0 +1,102 @@
+//! Proc macro ABI
+
+extern crate proc_macro;
+
+#[allow(dead_code)]
+#[doc(hidden)]
+mod ra_server;
+
+use libloading::Library;
+use proc_macro_api::ProcMacroKind;
+
+use super::PanicMessage;
+
+pub(crate) struct Abi {
+    exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
+}
+
+impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
+    fn from(p: proc_macro::bridge::PanicMessage) -> Self {
+        Self { message: p.as_str().map(|s| s.to_string()) }
+    }
+}
+
+impl Abi {
+    pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
+        let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
+            lib.get(symbol_name.as_bytes())?;
+        Ok(Self { exported_macros: macros.to_vec() })
+    }
+
+    pub fn expand(
+        &self,
+        macro_name: &str,
+        macro_body: &tt::Subtree,
+        attributes: Option<&tt::Subtree>,
+    ) -> Result<tt::Subtree, PanicMessage> {
+        let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
+
+        let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
+            ra_server::TokenStream::with_subtree(attr.clone())
+        });
+
+        for proc_macro in &self.exported_macros {
+            match proc_macro {
+                proc_macro::bridge::client::ProcMacro::CustomDerive {
+                    trait_name, client, ..
+                } if *trait_name == macro_name => {
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                proc_macro::bridge::client::ProcMacro::Bang { name, client }
+                    if *name == macro_name =>
+                {
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                proc_macro::bridge::client::ProcMacro::Attr { name, client }
+                    if *name == macro_name =>
+                {
+                    let res = client.run(
+                        &proc_macro::bridge::server::SameThread,
+                        ra_server::RustAnalyzer::default(),
+                        parsed_attributes,
+                        parsed_body,
+                        true,
+                    );
+                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
+                }
+                _ => continue,
+            }
+        }
+
+        Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
+    }
+
+    pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+        self.exported_macros
+            .iter()
+            .map(|proc_macro| match proc_macro {
+                proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
+                    (trait_name.to_string(), ProcMacroKind::CustomDerive)
+                }
+                proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
+                    (name.to_string(), ProcMacroKind::FuncLike)
+                }
+                proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
+                    (name.to_string(), ProcMacroKind::Attr)
+                }
+            })
+            .collect()
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
new file mode 100644
index 00000000000..46882845a80
--- /dev/null
+++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
@@ -0,0 +1,518 @@
+//! proc-macro server implementation
+//!
+//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
+//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
+//! we could provide any TokenStream implementation.
+//! The original idea from fedochet is using proc-macro2 as backend,
+//! we use tt instead for better integration with RA.
+//!
+//! FIXME: No span and source file information is implemented yet
+
+use super::proc_macro::{
+    self,
+    bridge::{self, server},
+};
+
+mod token_stream;
+pub use token_stream::TokenStream;
+use token_stream::TokenStreamBuilder;
+
+mod symbol;
+pub use symbol::*;
+
+use std::{iter::FromIterator, ops::Bound};
+
+type Group = tt::Subtree;
+type TokenTree = tt::TokenTree;
+type Punct = tt::Punct;
+type Spacing = tt::Spacing;
+type Literal = tt::Literal;
+type Span = tt::TokenId;
+
+#[derive(Clone)]
+pub struct SourceFile {
+    // FIXME stub
+}
+
+type Level = super::proc_macro::Level;
+type LineColumn = super::proc_macro::LineColumn;
+
+/// A structure representing a diagnostic message and associated children
+/// messages.
+#[derive(Clone, Debug)]
+pub struct Diagnostic {
+    level: Level,
+    message: String,
+    spans: Vec<Span>,
+    children: Vec<Diagnostic>,
+}
+
+impl Diagnostic {
+    /// Creates a new diagnostic with the given `level` and `message`.
+    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
+        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
+    }
+}
+
+pub struct FreeFunctions;
+
+#[derive(Default)]
+pub struct RustAnalyzer {
+    // FIXME: store span information here.
+}
+
+impl server::Types for RustAnalyzer {
+    type FreeFunctions = FreeFunctions;
+    type TokenStream = TokenStream;
+    type SourceFile = SourceFile;
+    type MultiSpan = Vec<Span>;
+    type Diagnostic = Diagnostic;
+    type Span = Span;
+    type Symbol = Symbol;
+}
+
+impl server::FreeFunctions for RustAnalyzer {
+    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
+        // FIXME: track env var accesses
+        // https://github.com/rust-lang/rust/pull/71858
+    }
+    fn track_path(&mut self, _path: &str) {}
+
+    fn literal_from_str(
+        &mut self,
+        s: &str,
+    ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
+        // FIXME: keep track of LitKind and Suffix
+        Ok(bridge::Literal {
+            kind: bridge::LitKind::Err,
+            symbol: Symbol::intern(s),
+            suffix: None,
+            span: tt::TokenId::unspecified(),
+        })
+    }
+}
+
+impl server::TokenStream for RustAnalyzer {
+    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
+        stream.is_empty()
+    }
+    fn from_str(&mut self, src: &str) -> Self::TokenStream {
+        use std::str::FromStr;
+
+        Self::TokenStream::from_str(src).expect("cannot parse string")
+    }
+    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
+        stream.to_string()
+    }
+    fn from_token_tree(
+        &mut self,
+        tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
+    ) -> Self::TokenStream {
+        match tree {
+            bridge::TokenTree::Group(group) => {
+                let group = Group {
+                    delimiter: delim_to_internal(group.delimiter),
+                    token_trees: match group.stream {
+                        Some(stream) => stream.into_iter().collect(),
+                        None => Vec::new(),
+                    },
+                };
+                let tree = TokenTree::from(group);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Ident(ident) => {
+                // FIXME: handle raw idents
+                let text = ident.sym.text();
+                let ident: tt::Ident = tt::Ident { text, id: ident.span };
+                let leaf = tt::Leaf::from(ident);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Literal(literal) => {
+                let literal = LiteralFormatter(literal);
+                let text = literal
+                    .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+
+                let literal = tt::Literal { text, id: literal.0.span };
+                let leaf = tt::Leaf::from(literal);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+
+            bridge::TokenTree::Punct(p) => {
+                let punct = tt::Punct {
+                    char: p.ch as char,
+                    spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
+                    id: p.span,
+                };
+                let leaf = tt::Leaf::from(punct);
+                let tree = TokenTree::from(leaf);
+                Self::TokenStream::from_iter(vec![tree])
+            }
+        }
+    }
+
+    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
+        Ok(self_.clone())
+    }
+
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(self.from_token_tree(tree));
+        }
+        builder.build()
+    }
+
+    fn concat_streams(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+
+    fn into_trees(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
+        stream
+            .into_iter()
+            .map(|tree| match tree {
+                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+                    bridge::TokenTree::Ident(bridge::Ident {
+                        sym: Symbol::intern(&ident.text),
+                        // FIXME: handle raw idents
+                        is_raw: false,
+                        span: ident.id,
+                    })
+                }
+                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
+                    bridge::TokenTree::Literal(bridge::Literal {
+                        // FIXME: handle literal kinds
+                        kind: bridge::LitKind::Err,
+                        symbol: Symbol::intern(&lit.text),
+                        // FIXME: handle suffixes
+                        suffix: None,
+                        span: lit.id,
+                    })
+                }
+                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
+                    bridge::TokenTree::Punct(bridge::Punct {
+                        ch: punct.char as u8,
+                        joint: punct.spacing == Spacing::Joint,
+                        span: punct.id,
+                    })
+                }
+                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
+                    delimiter: delim_to_external(subtree.delimiter),
+                    stream: if subtree.token_trees.is_empty() {
+                        None
+                    } else {
+                        Some(subtree.token_trees.into_iter().collect())
+                    },
+                    span: bridge::DelimSpan::from_single(
+                        subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
+                    ),
+                }),
+            })
+            .collect()
+    }
+}
+
+fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+    let kind = match d {
+        proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
+        proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
+        proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
+        proc_macro::Delimiter::None => return None,
+    };
+    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+}
+
+fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
+    match d.map(|it| it.kind) {
+        Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
+        Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
+        Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
+        None => proc_macro::Delimiter::None,
+    }
+}
+
+fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
+    match spacing {
+        proc_macro::Spacing::Alone => Spacing::Alone,
+        proc_macro::Spacing::Joint => Spacing::Joint,
+    }
+}
+
+fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
+    match spacing {
+        Spacing::Alone => proc_macro::Spacing::Alone,
+        Spacing::Joint => proc_macro::Spacing::Joint,
+    }
+}
+
+impl server::SourceFile for RustAnalyzer {
+    // FIXME these are all stubs
+    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
+        true
+    }
+    fn path(&mut self, _file: &Self::SourceFile) -> String {
+        String::new()
+    }
+    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
+        true
+    }
+}
+
+impl server::Diagnostic for RustAnalyzer {
+    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
+        let mut diag = Diagnostic::new(level, msg);
+        diag.spans = spans;
+        diag
+    }
+
+    fn sub(
+        &mut self,
+        _diag: &mut Self::Diagnostic,
+        _level: Level,
+        _msg: &str,
+        _spans: Self::MultiSpan,
+    ) {
+        // FIXME handle diagnostic
+        //
+    }
+
+    fn emit(&mut self, _diag: Self::Diagnostic) {
+        // FIXME handle diagnostic
+        // diag.emit()
+    }
+}
+
+impl server::Span for RustAnalyzer {
+    fn debug(&mut self, span: Self::Span) -> String {
+        format!("{:?}", span.0)
+    }
+    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
+        SourceFile {}
+    }
+    fn save_span(&mut self, _span: Self::Span) -> usize {
+        // FIXME stub
+        0
+    }
+    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
+        // FIXME stub
+        tt::TokenId::unspecified()
+    }
+    /// Recent feature, not yet in the proc_macro
+    ///
+    /// See PR:
+    /// https://github.com/rust-lang/rust/pull/55780
+    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
+        None
+    }
+
+    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
+        // FIXME handle span
+        None
+    }
+    fn source(&mut self, span: Self::Span) -> Self::Span {
+        // FIXME handle span
+        span
+    }
+    fn start(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn end(&mut self, _span: Self::Span) -> LineColumn {
+        // FIXME handle span
+        LineColumn { line: 0, column: 0 }
+    }
+    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
+        // Just return the first span again, because some macros will unwrap the result.
+        Some(first)
+    }
+    fn subspan(
+        &mut self,
+        span: Self::Span,
+        _start: Bound<usize>,
+        _end: Bound<usize>,
+    ) -> Option<Self::Span> {
+        // Just return the span again, because some macros will unwrap the result.
+        Some(span)
+    }
+    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
+        // FIXME handle span
+        tt::TokenId::unspecified()
+    }
+
+    fn after(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+
+    fn before(&mut self, _self_: Self::Span) -> Self::Span {
+        tt::TokenId::unspecified()
+    }
+}
+
+impl server::MultiSpan for RustAnalyzer {
+    fn new(&mut self) -> Self::MultiSpan {
+        // FIXME handle span
+        vec![]
+    }
+
+    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
+        //TODP
+        other.push(span)
+    }
+}
+
+impl server::Symbol for RustAnalyzer {
+    fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
+        // FIXME: nfc-normalize and validate idents
+        Ok(<Self as server::Server>::intern_symbol(string))
+    }
+}
+
+impl server::Server for RustAnalyzer {
+    fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
+        bridge::ExpnGlobals {
+            def_site: Span::unspecified(),
+            call_site: Span::unspecified(),
+            mixed_site: Span::unspecified(),
+        }
+    }
+
+    fn intern_symbol(ident: &str) -> Self::Symbol {
+        Symbol::intern(&tt::SmolStr::from(ident))
+    }
+
+    fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
+        f(symbol.text().as_str())
+    }
+}
+
+struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);
+
+impl LiteralFormatter {
+    /// Invokes the callback with a `&[&str]` consisting of each part of the
+    /// literal's representation. This is done to allow the `ToString` and
+    /// `Display` implementations to borrow references to symbol values, and
+    /// both be optimized to reduce overhead.
+    fn with_stringify_parts<R>(&self, f: impl FnOnce(&[&str]) -> R) -> R {
+        /// Returns a string containing exactly `num` '#' characters.
+        /// Uses a 256-character source string literal which is always safe to
+        /// index with a `u8` index.
+        fn get_hashes_str(num: u8) -> &'static str {
+            const HASHES: &str = "\
+                        ################################################################\
+                        ################################################################\
+                        ################################################################\
+                        ################################################################\
+                        ";
+            const _: () = assert!(HASHES.len() == 256);
+            &HASHES[..num as usize]
+        }
+
+        self.with_symbol_and_suffix(|symbol, suffix| match self.0.kind {
+            bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
+            bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
+            bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
+            bridge::LitKind::StrRaw(n) => {
+                let hashes = get_hashes_str(n);
+                f(&["r", hashes, "\"", symbol, "\"", hashes, suffix])
+            }
+            bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]),
+            bridge::LitKind::ByteStrRaw(n) => {
+                let hashes = get_hashes_str(n);
+                f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
+            }
+            _ => f(&[symbol, suffix]),
+        })
+    }
+
+    fn with_symbol_and_suffix<R>(&self, f: impl FnOnce(&str, &str) -> R) -> R {
+        let symbol = self.0.symbol.text();
+        let suffix = self.0.suffix.map(|s| s.text()).unwrap_or_default();
+        f(symbol.as_str(), suffix.as_str())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_ra_server_to_string() {
+        let s = TokenStream {
+            token_trees: vec![
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "struct".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                    text: "T".into(),
+                    id: tt::TokenId::unspecified(),
+                })),
+                tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: Some(tt::Delimiter {
+                        id: tt::TokenId::unspecified(),
+                        kind: tt::DelimiterKind::Brace,
+                    }),
+                    token_trees: vec![],
+                }),
+            ],
+        };
+
+        assert_eq!(s.to_string(), "struct T {}");
+    }
+
+    #[test]
+    fn test_ra_server_from_str() {
+        use std::str::FromStr;
+        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                id: tt::TokenId::unspecified(),
+                kind: tt::DelimiterKind::Parenthesis,
+            }),
+            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "a".into(),
+                id: tt::TokenId::unspecified(),
+            }))],
+        });
+
+        let t1 = TokenStream::from_str("(a)").unwrap();
+        assert_eq!(t1.token_trees.len(), 1);
+        assert_eq!(t1.token_trees[0], subtree_paren_a);
+
+        let t2 = TokenStream::from_str("(a);").unwrap();
+        assert_eq!(t2.token_trees.len(), 2);
+        assert_eq!(t2.token_trees[0], subtree_paren_a);
+
+        let underscore = TokenStream::from_str("_").unwrap();
+        assert_eq!(
+            underscore.token_trees[0],
+            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
+                text: "_".into(),
+                id: tt::TokenId::unspecified(),
+            }))
+        );
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
new file mode 100644
index 00000000000..51dfba2ea9f
--- /dev/null
+++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/symbol.rs
@@ -0,0 +1,46 @@
+//! Symbol interner for proc-macro-srv
+
+use std::{cell::RefCell, collections::HashMap};
+use tt::SmolStr;
+
+thread_local! {
+    static SYMBOL_INTERNER: RefCell<SymbolInterner> = Default::default();
+}
+
+// ID for an interned symbol.
+#[derive(Hash, Eq, PartialEq, Copy, Clone)]
+pub struct Symbol(u32);
+
+impl Symbol {
+    pub fn intern(data: &str) -> Symbol {
+        SYMBOL_INTERNER.with(|i| i.borrow_mut().intern(data))
+    }
+
+    pub fn text(&self) -> SmolStr {
+        SYMBOL_INTERNER.with(|i| i.borrow().get(self).clone())
+    }
+}
+
+#[derive(Default)]
+struct SymbolInterner {
+    idents: HashMap<SmolStr, u32>,
+    ident_data: Vec<SmolStr>,
+}
+
+impl SymbolInterner {
+    fn intern(&mut self, data: &str) -> Symbol {
+        if let Some(index) = self.idents.get(data) {
+            return Symbol(*index);
+        }
+
+        let index = self.idents.len() as u32;
+        let data = SmolStr::from(data);
+        self.ident_data.push(data.clone());
+        self.idents.insert(data, index);
+        Symbol(index)
+    }
+
+    fn get(&self, sym: &Symbol) -> &SmolStr {
+        &self.ident_data[sym.0 as usize]
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
new file mode 100644
index 00000000000..113bb52c1af
--- /dev/null
+++ b/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
@@ -0,0 +1,179 @@
+//! TokenStream implementation used by sysroot ABI
+
+use tt::TokenTree;
+
+#[derive(Debug, Default, Clone)]
+pub struct TokenStream {
+    pub token_trees: Vec<TokenTree>,
+}
+
+impl TokenStream {
+    pub fn new() -> Self {
+        TokenStream::default()
+    }
+
+    pub fn with_subtree(subtree: tt::Subtree) -> Self {
+        if subtree.delimiter.is_some() {
+            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
+        } else {
+            TokenStream { token_trees: subtree.token_trees }
+        }
+    }
+
+    pub fn into_subtree(self) -> tt::Subtree {
+        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.token_trees.is_empty()
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream { token_trees: vec![tree] }
+    }
+}
+
+/// Collects a number of token trees into a single stream.
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        trees.into_iter().map(TokenStream::from).collect()
+    }
+}
+
+/// A "flattening" operation on token streams, collects token trees
+/// from multiple token streams into a single stream.
+impl FromIterator<TokenStream> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
+        let mut builder = TokenStreamBuilder::new();
+        streams.into_iter().for_each(|stream| builder.push(stream));
+        builder.build()
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        for item in streams {
+            for tkn in item {
+                match tkn {
+                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                        self.token_trees.extend(subtree.token_trees);
+                    }
+                    _ => {
+                        self.token_trees.push(tkn);
+                    }
+                }
+            }
+        }
+    }
+}
+
+pub struct TokenStreamBuilder {
+    acc: TokenStream,
+}
+
+/// Public implementation details for the `TokenStream` type, such as iterators.
+pub mod token_stream {
+    use std::str::FromStr;
+
+    use super::{TokenStream, TokenTree};
+
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
+    /// and returns whole groups as token trees.
+    impl IntoIterator for TokenStream {
+        type Item = TokenTree;
+        type IntoIter = std::vec::IntoIter<TokenTree>;
+
+        fn into_iter(self) -> Self::IntoIter {
+            self.token_trees.into_iter()
+        }
+    }
+
+    type LexError = String;
+
+    /// Attempts to break the string into tokens and parse those tokens into a token stream.
+    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
+    /// or characters not existing in the language.
+    /// All tokens in the parsed stream get `Span::call_site()` spans.
+    ///
+    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
+    /// change these errors into `LexError`s later.
+    impl FromStr for TokenStream {
+        type Err = LexError;
+
+        fn from_str(src: &str) -> Result<TokenStream, LexError> {
+            let (subtree, _token_map) =
+                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+
+            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            Ok(TokenStream::with_subtree(subtree))
+        }
+    }
+
+    impl ToString for TokenStream {
+        fn to_string(&self) -> String {
+            tt::pretty(&self.token_trees)
+        }
+    }
+
+    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: subtree
+                .delimiter
+                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            token_trees: subtree
+                .token_trees
+                .into_iter()
+                .map(token_tree_replace_token_ids_with_unspecified)
+                .collect(),
+        }
+    }
+
+    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+        match tt {
+            tt::TokenTree::Leaf(leaf) => {
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+            }
+            tt::TokenTree::Subtree(subtree) => {
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+            }
+        }
+    }
+
+    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+        match leaf {
+            tt::Leaf::Literal(lit) => {
+                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+            }
+            tt::Leaf::Punct(punct) => {
+                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+            }
+            tt::Leaf::Ident(ident) => {
+                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+            }
+        }
+    }
+}
+
+impl TokenStreamBuilder {
+    pub(super) fn new() -> TokenStreamBuilder {
+        TokenStreamBuilder { acc: TokenStream::new() }
+    }
+
+    pub(super) fn push(&mut self, stream: TokenStream) {
+        self.acc.extend(stream.into_iter())
+    }
+
+    pub(super) fn build(self) -> TokenStream {
+        self.acc
+    }
+}
diff --git a/crates/proc-macro-srv/src/abis/mod.rs b/crates/proc-macro-srv/src/abis/mod.rs
index ae45b34b725..bcf3f1184cf 100644
--- a/crates/proc-macro-srv/src/abis/mod.rs
+++ b/crates/proc-macro-srv/src/abis/mod.rs
@@ -26,6 +26,11 @@
 mod abi_1_58;
 mod abi_1_63;
 mod abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
 
 // Used by `test/utils.rs`
 #[cfg(test)]
@@ -35,6 +40,8 @@ use super::dylib::LoadProcMacroDylibError;
 pub(crate) use abi_1_58::Abi as Abi_1_58;
 pub(crate) use abi_1_63::Abi as Abi_1_63;
 pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
 use libloading::Library;
 use proc_macro_api::{ProcMacroKind, RustCInfo};
 
@@ -52,6 +59,8 @@ pub(crate) enum Abi {
     Abi1_58(Abi_1_58),
     Abi1_63(Abi_1_63),
     Abi1_64(Abi_1_64),
+    #[cfg(feature = "sysroot-abi")]
+    AbiSysroot(Abi_Sysroot),
 }
 
 impl Abi {
@@ -69,6 +78,37 @@ impl Abi {
         symbol_name: String,
         info: RustCInfo,
     ) -> Result<Abi, LoadProcMacroDylibError> {
+        // the sysroot ABI relies on `extern proc_macro` with unstable features,
+        // instead of a snapshot of the proc macro bridge's source code. it's only
+        // enabled if we have an exact version match.
+        #[cfg(feature = "sysroot-abi")]
+        {
+            if info.version_string == RUSTC_VERSION_STRING {
+                let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+                return Ok(Abi::AbiSysroot(inner));
+            }
+
+            // if we reached this point, versions didn't match. in testing, we
+            // want that to panic - this could mean that the format of `rustc
+            // --version` no longer matches the format of the version string
+            // stored in the `.rustc` section, and we want to catch that in-tree
+            // with `x.py test`
+            #[cfg(test)]
+            {
+                let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+                if let Ok("1") = allow_mismatch.as_deref() {
+                    // only used by rust-analyzer developers, when working on the
+                    // sysroot ABI from the rust-analyzer repository - which should
+                    // only happen pre-subtree. this can be removed later.
+                } else {
+                    panic!(
+                        "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+                        info.version_string, RUSTC_VERSION_STRING
+                    );
+                }
+            }
+        }
+
         // FIXME: this should use exclusive ranges when they're stable
         // https://github.com/rust-lang/rust/issues/37854
         match (info.version.0, info.version.1) {
@@ -98,6 +138,8 @@ impl Abi {
             Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
             Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
             Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
         }
     }
 
@@ -106,6 +148,8 @@ impl Abi {
             Self::Abi1_58(abi) => abi.list_macros(),
             Self::Abi1_63(abi) => abi.list_macros(),
             Self::Abi1_64(abi) => abi.list_macros(),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.list_macros(),
         }
     }
 }
diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs
index ca7765082f7..4b1858b8ed8 100644
--- a/crates/proc-macro-srv/src/lib.rs
+++ b/crates/proc-macro-srv/src/lib.rs
@@ -11,6 +11,10 @@
 //!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(
+    feature = "sysroot-abi",
+    feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
 #![allow(unreachable_pub)]
 
 mod dylib;
@@ -59,9 +63,26 @@ impl ProcMacroSrv {
 
         let macro_body = task.macro_body.to_subtree();
         let attributes = task.attributes.map(|it| it.to_subtree());
-        let result = expander
-            .expand(&task.macro_name, &macro_body, attributes.as_ref())
-            .map(|it| FlatTree::new(&it));
+        // FIXME: replace this with std's scoped threads once they stabilize
+        // (then remove dependency on crossbeam)
+        let result = crossbeam::scope(|s| {
+            let res = s
+                .spawn(|_| {
+                    expander
+                        .expand(&task.macro_name, &macro_body, attributes.as_ref())
+                        .map(|it| FlatTree::new(&it))
+                })
+                .join();
+
+            match res {
+                Ok(res) => res,
+                Err(e) => std::panic::resume_unwind(e),
+            }
+        });
+        let result = match result {
+            Ok(result) => result,
+            Err(e) => std::panic::resume_unwind(e),
+        };
 
         prev_env.rollback();
 
diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs
index d4be992465c..07222907f08 100644
--- a/crates/proc-macro-srv/src/tests/mod.rs
+++ b/crates/proc-macro-srv/src/tests/mod.rs
@@ -57,6 +57,48 @@ fn test_fn_like_macro_clone_ident_subtree() {
 }
 
 #[test]
+fn test_fn_like_macro_clone_raw_ident() {
+    assert_expand(
+        "fn_like_clone_tokens",
+        "r#async",
+        expect![[r#"
+            SUBTREE $
+              IDENT   async 4294967295"#]],
+    );
+}
+
+#[test]
+fn test_fn_like_mk_literals() {
+    assert_expand(
+        "fn_like_mk_literals",
+        r#""#,
+        expect![[r#"
+            SUBTREE $
+              LITERAL b"byte_string" 4294967295
+              LITERAL 'c' 4294967295
+              LITERAL "string" 4294967295
+              LITERAL 3.14f64 4294967295
+              LITERAL 3.14 4294967295
+              LITERAL 123i64 4294967295
+              LITERAL 123 4294967295"#]],
+    );
+}
+
+#[test]
+fn test_fn_like_mk_idents() {
+    // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below
+    // simply ignore `is_raw` when implementing the `Ident` interface.
+    assert_expand(
+        "fn_like_mk_idents",
+        r#""#,
+        expect![[r#"
+            SUBTREE $
+              IDENT   standard 4294967295
+              IDENT   raw 4294967295"#]],
+    );
+}
+
+#[test]
 fn test_fn_like_macro_clone_literals() {
     assert_expand(
         "fn_like_clone_tokens",
@@ -105,6 +147,8 @@ fn list_test_macros() {
         fn_like_panic [FuncLike]
         fn_like_error [FuncLike]
         fn_like_clone_tokens [FuncLike]
+        fn_like_mk_literals [FuncLike]
+        fn_like_mk_idents [FuncLike]
         attr_noop [Attr]
         attr_panic [Attr]
         attr_error [Attr]
diff --git a/crates/proc-macro-test/imp/src/lib.rs b/crates/proc-macro-test/imp/src/lib.rs
index 0082eb7bdaf..feeacdb6407 100644
--- a/crates/proc-macro-test/imp/src/lib.rs
+++ b/crates/proc-macro-test/imp/src/lib.rs
@@ -2,7 +2,7 @@
 
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 
-use proc_macro::{Group, Ident, Literal, Punct, TokenStream, TokenTree};
+use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
 
 #[proc_macro]
 pub fn fn_like_noop(args: TokenStream) -> TokenStream {
@@ -24,6 +24,31 @@ pub fn fn_like_clone_tokens(args: TokenStream) -> TokenStream {
     clone_stream(args)
 }
 
+#[proc_macro]
+pub fn fn_like_mk_literals(_args: TokenStream) -> TokenStream {
+    let trees: Vec<TokenTree> = vec![
+        TokenTree::from(Literal::byte_string(b"byte_string")),
+        TokenTree::from(Literal::character('c')),
+        TokenTree::from(Literal::string("string")),
+        // as of 2022-07-21, there's no method on `Literal` to build a raw
+        // string or a raw byte string
+        TokenTree::from(Literal::f64_suffixed(3.14)),
+        TokenTree::from(Literal::f64_unsuffixed(3.14)),
+        TokenTree::from(Literal::i64_suffixed(123)),
+        TokenTree::from(Literal::i64_unsuffixed(123)),
+    ];
+    TokenStream::from_iter(trees)
+}
+
+#[proc_macro]
+pub fn fn_like_mk_idents(_args: TokenStream) -> TokenStream {
+    let trees: Vec<TokenTree> = vec![
+        TokenTree::from(Ident::new("standard", Span::call_site())),
+        TokenTree::from(Ident::new_raw("raw", Span::call_site())),
+    ];
+    TokenStream::from_iter(trees)
+}
+
 #[proc_macro_attribute]
 pub fn attr_noop(_args: TokenStream, item: TokenStream) -> TokenStream {
     item
@@ -65,7 +90,14 @@ fn clone_tree(t: TokenTree) -> TokenTree {
             new.set_span(orig.span());
             TokenTree::Group(new)
         }
-        TokenTree::Ident(orig) => TokenTree::Ident(Ident::new(&orig.to_string(), orig.span())),
+        TokenTree::Ident(orig) => {
+            let s = orig.to_string();
+            if let Some(rest) = s.strip_prefix("r#") {
+                TokenTree::Ident(Ident::new_raw(rest, orig.span()))
+            } else {
+                TokenTree::Ident(Ident::new(&s, orig.span()))
+            }
+        }
         TokenTree::Punct(orig) => {
             let mut new = Punct::new(orig.as_char(), orig.spacing());
             new.set_span(orig.span());
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index c37a0d17491..41205f2584a 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -84,3 +84,4 @@ mbe = { path = "../mbe" }
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]
 force-always-assert = ["always-assert/force"]
+in-rust-tree = ["proc-macro-srv/sysroot-abi"]