about summary refs log tree commit diff
diff options
context:
space:
mode:
author	bors <bors@rust-lang.org>	2023-07-13 07:39:30 +0000
committer	bors <bors@rust-lang.org>	2023-07-13 07:39:30 +0000
commit	132ba2cf771cf80373233074570ed3fdbc2dea1a (patch)
tree	437f894a3429b04c471044be7d0ac7cc22fdb374
parent	215b149a8b52f7cb0650ac8b96b1bc97cf5a58a0 (diff)
parent	6a7b905c86e53d242ddff13a8adbdeb539fa7e75 (diff)
download	rust-132ba2cf771cf80373233074570ed3fdbc2dea1a.tar.gz
rust-132ba2cf771cf80373233074570ed3fdbc2dea1a.zip
Auto merge of #15248 - Veykril:eager, r=Veykril
Fix eager token mapping panics
-rw-r--r--	crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs	10
-rw-r--r--	crates/hir-def/src/macro_expansion_tests/mbe.rs	36
-rw-r--r--	crates/hir-def/src/macro_expansion_tests/mod.rs	2
-rw-r--r--	crates/hir-expand/src/builtin_fn_macro.rs	2
-rw-r--r--	crates/hir-expand/src/db.rs	148
-rw-r--r--	crates/hir-expand/src/eager.rs	199
-rw-r--r--	crates/hir-expand/src/fixup.rs	4
-rw-r--r--	crates/hir-expand/src/hygiene.rs	16
-rw-r--r--	crates/hir-expand/src/lib.rs	166
-rw-r--r--	crates/hir/src/db.rs	2
-rw-r--r--	crates/hir/src/lib.rs	2
-rw-r--r--	crates/ide-db/src/apply_change.rs	2
-rw-r--r--	crates/ide-db/src/lib.rs	2
-rw-r--r--	crates/ide/src/syntax_highlighting.rs	13
-rw-r--r--	crates/ide/src/syntax_highlighting/test_data/highlight_strings.html	6
-rw-r--r--	crates/ide/src/syntax_highlighting/tests.rs	4
-rw-r--r--	crates/mbe/src/lib.rs	2
-rw-r--r--	crates/mbe/src/token_map.rs	7
-rw-r--r--	crates/syntax/src/lib.rs	103
-rw-r--r--	crates/tt/src/lib.rs	15
20 files changed, 535 insertions(+), 206 deletions(-)
diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
index 3f17f64f2a4..b232651db96 100644
--- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
+++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs
@@ -235,10 +235,10 @@ macro_rules! format_args {
 
 fn main() {
     /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */;
-    /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
-    /* error: no rule matches input tokens */;
+    /* error: expected expression */;
+    /* error: expected expression, expected COMMA */;
+    /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]);
+    /* error: expected expression, expected R_PAREN */;
     ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]);
 }
 "##]],
@@ -364,7 +364,7 @@ macro_rules! format_args {
 
 fn main() {
     let _ =
-        /* error: no rule matches input tokens *//* parse error: expected field name or number */
+        /* error: expected field name or number *//* parse error: expected field name or number */
 ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]);
 }
 "##]],
diff --git a/crates/hir-def/src/macro_expansion_tests/mbe.rs b/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 553ffe3d0b8..b26f9867580 100644
--- a/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -98,6 +98,42 @@ fn#19 main#20(#21)#21 {#22
 "##]],
     );
 }
+
+#[test]
+fn token_mapping_eager() {
+    check(
+        r#"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+    ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+    format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
+}
+
+"#,
+        expect![[r##"
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+
+macro_rules! identity {
+    ($expr:expr) => { $expr };
+}
+
+fn main(foo: ()) {
+    // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22)
+::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295
+}
+
+"##]],
+    );
+}
+
 #[test]
 fn float_field_access_macro_input() {
     check(
diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs
index 1c15c1b7f06..7a87e61c693 100644
--- a/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -187,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let range: Range<usize> = range.into();
 
         if show_token_ids {
-            if let Some((tree, map, _)) = arg.as_deref() {
+            if let Some((tree, map, _)) = arg.value.as_deref() {
                 let tt_range = call.token_tree().unwrap().syntax().text_range();
                 let mut ranges = Vec::new();
                 extract_id_ranges(&mut ranges, map, tree);
diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs
index 7370f07a52b..95c6baf42da 100644
--- a/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/crates/hir-expand/src/builtin_fn_macro.rs
@@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt(
     arg_id: MacroCallId,
 ) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
     let loc = db.lookup_intern_macro_call(arg_id);
-    let Some(EagerCallInfo { arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else {
+    let Some(EagerCallInfo { arg,arg_id, .. }) = loc.eager.as_deref() else {
         panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
     };
     let path = parse_string(&arg.0)?;
diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs
index a2b642cd114..309c0930d1a 100644
--- a/crates/hir-expand/src/db.rs
+++ b/crates/hir-expand/src/db.rs
@@ -3,7 +3,7 @@
 use base_db::{salsa, CrateId, Edition, SourceDatabase};
 use either::Either;
 use limit::Limit;
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
 use syntax::{
     ast::{self, HasAttrs, HasDocComments},
@@ -124,16 +124,21 @@ pub trait ExpandDatabase: SourceDatabase {
     fn macro_arg(
         &self,
         id: MacroCallId,
-    ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
+    ) -> ValueResult<
+        Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+        Arc<Box<[SyntaxError]>>,
+    >;
     /// Extracts syntax node, corresponding to a macro call. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
-    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
-    /// Gets the expander for this macro. This compiles declarative macros, and
-    /// just fetches procedural ones.
-    // FIXME: Rename this
+    fn macro_arg_node(
+        &self,
+        id: MacroCallId,
+    ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
+    /// Fetches the expander for this macro.
     #[salsa::transparent]
-    fn macro_def(&self, id: MacroDefId) -> TokenExpander;
+    fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+    /// Fetches (and compiles) the expander of this decl macro.
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,
@@ -335,14 +340,20 @@ fn parse_macro_expansion_error(
 fn macro_arg(
     db: &dyn ExpandDatabase,
     id: MacroCallId,
-) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
+) -> ValueResult<
+    Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
+    Arc<Box<[SyntaxError]>>,
+> {
     let loc = db.lookup_intern_macro_call(id);
 
-    if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() {
-        return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())));
+    if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
+        return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
     }
 
-    let arg = db.macro_arg_text(id)?;
+    let ValueResult { value, err } = db.macro_arg_node(id);
+    let Some(arg) = value else {
+        return ValueResult { value: None, err };
+    };
 
     let node = SyntaxNode::new_root(arg);
     let censor = censor_for_macro_input(&loc, &node);
@@ -360,7 +371,11 @@ fn macro_arg(
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
         tt.delimiter = tt::Delimiter::unspecified();
     }
-    Some(Arc::new((tt, tmap, fixups.undo_info)))
+    let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
+    match err {
+        Some(err) => ValueResult::new(val, err),
+        None => ValueResult::ok(val),
+    }
 }
 
 /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
@@ -402,9 +417,44 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
     .unwrap_or_default()
 }
 
-fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> {
+fn macro_arg_node(
+    db: &dyn ExpandDatabase,
+    id: MacroCallId,
+) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
+    let err = || -> Arc<Box<[_]>> {
+        Arc::new(Box::new([SyntaxError::new_at_offset(
+            "invalid macro call".to_owned(),
+            syntax::TextSize::from(0),
+        )]))
+    };
     let loc = db.lookup_intern_macro_call(id);
-    let arg = loc.kind.arg(db)?;
+    let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
+        let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
+            Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0)
+        } else {
+            loc.kind
+                .arg(db)
+                .and_then(|arg| ast::TokenTree::cast(arg.value))
+                .map(|tt| tt.reparse_as_expr().to_syntax())
+        };
+
+        match res {
+            Some(res) if res.errors().is_empty() => res.syntax_node(),
+            Some(res) => {
+                return ValueResult::new(
+                    Some(res.syntax_node().green().into()),
+                    // Box::<[_]>::from(res.errors()), not stable yet
+                    Arc::new(res.errors().to_vec().into_boxed_slice()),
+                );
+            }
+            None => return ValueResult::only_err(err()),
+        }
+    } else {
+        match loc.kind.arg(db) {
+            Some(res) => res.value,
+            None => return ValueResult::only_err(err()),
+        }
+    };
     if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
         let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
         let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
@@ -419,20 +469,13 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode>
             // Some day, we'll have explicit recursion counters for all
             // recursive things, at which point this code might be removed.
             cov_mark::hit!(issue9358_bad_macro_stack_overflow);
-            return None;
+            return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
+                "unbalanced token tree".to_owned(),
+                arg.text_range(),
+            )])));
         }
     }
-    if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
-        Some(
-            mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr)
-                .0
-                .syntax_node()
-                .green()
-                .into(),
-        )
-    } else {
-        Some(arg.green().into())
-    }
+    ValueResult::ok(Some(arg.green().into()))
 }
 
 fn decl_macro_expander(
@@ -474,7 +517,7 @@ fn decl_macro_expander(
     Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
 }
 
-fn macro_def(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
+fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
     match id.kind {
         MacroDefKind::Declarative(ast_id) => {
             TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id))
@@ -490,15 +533,11 @@ fn macro_def(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
 fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
     let loc = db.lookup_intern_macro_call(id);
-    if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() {
-        // This is an input expansion for an eager macro. These are already pre-expanded
-        return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() };
-    }
 
-    let (ExpandResult { value: mut tt, mut err }, tmap) = match loc.def.kind {
+    let ExpandResult { value: tt, mut err } = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
         MacroDefKind::BuiltInDerive(expander, ..) => {
-            let arg = db.macro_arg_text(id).unwrap();
+            let arg = db.macro_arg_node(id).value.unwrap();
 
             let node = SyntaxNode::new_root(arg);
             let censor = censor_for_macro_input(&loc, &node);
@@ -514,10 +553,13 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
 
             // this cast is a bit sus, can we avoid losing the typedness here?
             let adt = ast::Adt::cast(node).unwrap();
-            (expander.expand(db, id, &adt, &tmap), Some((tmap, fixups.undo_info)))
+            let mut res = expander.expand(db, id, &adt, &tmap);
+            fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
+            res
         }
         _ => {
-            let Some(macro_arg) = db.macro_arg(id) else {
+            let ValueResult { value, err } = db.macro_arg(id);
+            let Some(macro_arg) = value else {
                 return ExpandResult {
                     value: Arc::new(tt::Subtree {
                         delimiter: tt::Delimiter::UNSPECIFIED,
@@ -528,18 +570,43 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
                     err: Some(ExpandError::other("invalid token tree")),
                 };
             };
+
             let (arg, arg_tm, undo_info) = &*macro_arg;
             let mut res = match loc.def.kind {
                 MacroDefKind::Declarative(id) => {
                     db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
                 }
                 MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
+                // This might look a bit odd, but we do not expand the inputs to eager macros here.
+                // Eager macros inputs are expanded, well, eagerly when we collect the macro calls.
+                // That kind of expansion uses the ast id map of an eager macros input though which goes through
+                // the HirFileId machinery. As eager macro inputs are assigned a macro file id that query
+                // will end up going through here again, whereas we want to just want to inspect the raw input.
+                // As such we just return the input subtree here.
+                MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
+                    let mut arg = arg.clone();
+                    fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
+
+                    return ExpandResult {
+                        value: Arc::new(arg),
+                        err: err.map(|err| {
+                            let mut buf = String::new();
+                            for err in &**err {
+                                use std::fmt::Write;
+                                _ = write!(buf, "{}, ", err);
+                            }
+                            buf.pop();
+                            buf.pop();
+                            ExpandError::other(buf)
+                        }),
+                    };
+                }
                 MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
                 MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
                 _ => unreachable!(),
             };
             fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
-            (res, None)
+            res
         }
     };
 
@@ -553,24 +620,23 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
         return value;
     }
 
-    if let Some((arg_tm, undo_info)) = &tmap {
-        fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
-    }
-
     ExpandResult { value: Arc::new(tt), err }
 }
 
 fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
     let loc = db.lookup_intern_macro_call(id);
-    let Some(macro_arg) = db.macro_arg(id) else {
+    let Some(macro_arg) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
                 delimiter: tt::Delimiter::UNSPECIFIED,
                 token_trees: Vec::new(),
             }),
+            // FIXME: We should make sure to enforce an invariant that invalid macro
+            // calls do not reach this call path!
             err: Some(ExpandError::other("invalid token tree")),
         };
     };
+
     let (arg_tt, arg_tm, undo_info) = &*macro_arg;
 
     let expander = match loc.def.kind {
diff --git a/crates/hir-expand/src/eager.rs b/crates/hir-expand/src/eager.rs
index 3ccc3ab1e1d..876813eab5d 100644
--- a/crates/hir-expand/src/eager.rs
+++ b/crates/hir-expand/src/eager.rs
@@ -19,7 +19,8 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use syntax::{ted, Parse, SyntaxNode};
+use rustc_hash::FxHashMap;
+use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
 use triomphe::Arc;
 
 use crate::{
@@ -38,18 +39,8 @@ pub fn expand_eager_macro_input(
     def: MacroDefId,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
-    assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..)));
-    let token_tree = macro_call.value.token_tree();
-
-    let Some(token_tree) = token_tree else {
-        return Ok(ExpandResult {
-            value: None,
-            err: Some(ExpandError::other("invalid token tree")),
-        });
-    };
-    let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax());
-
     let ast_map = db.ast_id_map(macro_call.file_id);
+    // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace!
     let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
 
@@ -60,41 +51,69 @@ pub fn expand_eager_macro_input(
     let arg_id = db.intern_macro_call(MacroCallLoc {
         def,
         krate,
-        eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((parsed_args, arg_token_map)),
-            arg_id: None,
-            error: None,
-        })),
+        eager: None,
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
     });
-    let arg_as_expr = match db.macro_arg_text(arg_id) {
-        Some(it) => it,
-        None => {
-            return Ok(ExpandResult {
-                value: None,
-                err: Some(ExpandError::other("invalid token tree")),
-            })
-        }
+    let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
+        db.parse_macro_expansion(arg_id.as_macro_file());
+    // we need this map here as the expansion of the eager input fake file loses whitespace ...
+    let mut ws_mapping = FxHashMap::default();
+    if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
+        ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
+            Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
+        }));
+    }
+
+    let ExpandResult { value: expanded_eager_input, err } = {
+        eager_macro_recur(
+            db,
+            &Hygiene::new(db, macro_call.file_id),
+            InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
+            krate,
+            resolver,
+        )?
     };
-    let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur(
-        db,
-        &Hygiene::new(db, macro_call.file_id),
-        InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)),
-        krate,
-        resolver,
-    )?;
-    let Some(expanded_eager_input) = expanded_eager_input else {
+    let err = parse_err.or(err);
+
+    let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
         return Ok(ExpandResult { value: None, err });
     };
-    let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+    let og_tmap = mbe::syntax_node_to_token_map(
+        macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(),
+    );
+
+    let (mut subtree, expanded_eager_input_token_map) =
+        mbe::syntax_node_to_token_tree(&expanded_eager_input);
+
+    // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
+    // so we need to remap them to the original input of the eager macro.
+    subtree.visit_ids(&|id| {
+        // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
+
+        if let Some(range) =
+            expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
+        {
+            // remap from expanded eager input to eager input expansion
+            if let Some(og_range) = mapping.get(&range) {
+                // remap from eager input expansion to original eager input
+                if let Some(&og_range) = ws_mapping.get(og_range) {
+                    if let Some(og_token) = og_tmap.token_by_range(og_range) {
+                        return og_token;
+                    }
+                }
+            }
+        }
+        tt::TokenId::UNSPECIFIED
+    });
     subtree.delimiter = crate::tt::Delimiter::unspecified();
 
     let loc = MacroCallLoc {
         def,
         krate,
         eager: Some(Box::new(EagerCallInfo {
-            arg: Arc::new((subtree, token_map)),
-            arg_id: Some(arg_id),
+            arg: Arc::new((subtree, og_tmap)),
+            arg_id,
             error: err.clone(),
         })),
         kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
@@ -108,19 +127,16 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-) -> ExpandResult<InFile<Parse<SyntaxNode>>> {
+) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
 
     let expand_to = ExpandTo::from_call_site(&macro_call.value);
-    let id = def.as_lazy_macro(
-        db,
-        krate,
-        MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to },
-    );
-
+    let ast_id = macro_call.with_value(ast_id);
+    let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
     let macro_file = id.as_macro_file();
 
-    db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0))
+    db.parse_macro_expansion(macro_file)
+        .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1))
 }
 
 fn eager_macro_recur(
@@ -129,18 +145,43 @@ fn eager_macro_recur(
     curr: InFile<SyntaxNode>,
     krate: CrateId,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
-) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> {
+) -> Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> {
     let original = curr.value.clone_for_update();
+    let mut mapping = FxHashMap::default();
 
-    let children = original.descendants().filter_map(ast::MacroCall::cast);
     let mut replacements = Vec::new();
 
     // Note: We only report a single error inside of eager expansions
     let mut error = None;
+    let mut offset = 0i32;
+    let apply_offset = |it: TextSize, offset: i32| {
+        TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
+    };
+    let mut children = original.preorder_with_tokens();
 
     // Collect replacement
-    for child in children {
-        let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
+    while let Some(child) = children.next() {
+        let WalkEvent::Enter(child) = child else { continue };
+        let call = match child {
+            syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
+                Some(it) => {
+                    children.skip_subtree();
+                    it
+                }
+                None => continue,
+            },
+            syntax::NodeOrToken::Token(t) => {
+                mapping.insert(
+                    TextRange::new(
+                        apply_offset(t.text_range().start(), offset),
+                        apply_offset(t.text_range().end(), offset),
+                    ),
+                    t.text_range(),
+                );
+                continue;
+            }
+        };
+        let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
             Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?,
             None => {
                 error = Some(ExpandError::other("malformed macro invocation"));
@@ -152,7 +193,7 @@ fn eager_macro_recur(
                 let ExpandResult { value, err } = match expand_eager_macro_input(
                     db,
                     krate,
-                    curr.with_value(child.clone()),
+                    curr.with_value(call.clone()),
                     def,
                     macro_resolver,
                 ) {
@@ -160,9 +201,22 @@ fn eager_macro_recur(
                     Err(err) => return Err(err),
                 };
                 match value {
-                    Some(call) => {
+                    Some(call_id) => {
                         let ExpandResult { value, err: err2 } =
-                            db.parse_macro_expansion(call.as_macro_file());
+                            db.parse_macro_expansion(call_id.as_macro_file());
+
+                        let call_tt_start =
+                            call.token_tree().unwrap().syntax().text_range().start();
+                        let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                        if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
+                            mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                                value
+                                    .1
+                                    .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
+                                    .map(|r| (r + call_start, range + call_tt_start))
+                            }));
+                        };
+
                         ExpandResult {
                             value: Some(value.0.syntax_node().clone_for_update()),
                             err: err.or(err2),
@@ -176,36 +230,61 @@ fn eager_macro_recur(
             | MacroDefKind::BuiltInAttr(..)
             | MacroDefKind::BuiltInDerive(..)
             | MacroDefKind::ProcMacro(..) => {
-                let ExpandResult { value, err } =
-                    lazy_expand(db, &def, curr.with_value(child.clone()), krate);
+                let ExpandResult { value: (parse, tm), err } =
+                    lazy_expand(db, &def, curr.with_value(call.clone()), krate);
+                let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
+                    Some(db.decl_macro_expander(def.krate, ast_id))
+                } else {
+                    None
+                };
 
                 // replace macro inside
-                let hygiene = Hygiene::new(db, value.file_id);
+                let hygiene = Hygiene::new(db, parse.file_id);
                 let ExpandResult { value, err: error } = eager_macro_recur(
                     db,
                     &hygiene,
                     // FIXME: We discard parse errors here
-                    value.map(|it| it.syntax_node()),
+                    parse.as_ref().map(|it| it.syntax_node()),
                     krate,
                     macro_resolver,
                 )?;
                 let err = err.or(error);
-                ExpandResult { value, err }
+
+                let call_tt_start = call.token_tree().unwrap().syntax().text_range().start();
+                let call_start = apply_offset(call.syntax().text_range().start(), offset);
+                if let Some((_tt, arg_map, _)) = parse
+                    .file_id
+                    .macro_file()
+                    .and_then(|id| db.macro_arg(id.macro_call_id).value)
+                    .as_deref()
+                {
+                    mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
+                        tm.first_range_by_token(
+                            decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
+                            syntax::SyntaxKind::TOMBSTONE,
+                        )
+                        .map(|r| (r + call_start, range + call_tt_start))
+                    }));
+                };
+                // FIXME: Do we need to re-use _m here?
+                ExpandResult { value: value.map(|(n, _m)| n), err }
             }
         };
         if err.is_some() {
             error = err;
         }
         // check if the whole original syntax is replaced
-        if child.syntax() == &original {
-            return Ok(ExpandResult { value, err: error });
+        if call.syntax() == &original {
+            return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error });
         }
 
         if let Some(insert) = value {
-            replacements.push((child, insert));
+            offset += u32::from(insert.text_range().len()) as i32
+                - u32::from(call.syntax().text_range().len()) as i32;
+            replacements.push((call, insert));
         }
     }
 
     replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
-    Ok(ExpandResult { value: Some(original), err: error })
+    Ok(ExpandResult { value: Some((original, mapping)), err: error })
 }
diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs
index af9d2d78dd7..e6e8d8c0299 100644
--- a/crates/hir-expand/src/fixup.rs
+++ b/crates/hir-expand/src/fixup.rs
@@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups {
 /// This is the information needed to reverse the fixups.
 #[derive(Debug, Default, PartialEq, Eq)]
 pub struct SyntaxFixupUndoInfo {
-    original: Vec<Subtree>,
+    original: Box<[Subtree]>,
 }
 
 const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
@@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
         replace,
         token_map,
         next_id,
-        undo_info: SyntaxFixupUndoInfo { original },
+        undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
     }
 }
 
diff --git a/crates/hir-expand/src/hygiene.rs b/crates/hir-expand/src/hygiene.rs
index b2921bb173b..54e74d50c87 100644
--- a/crates/hir-expand/src/hygiene.rs
+++ b/crates/hir-expand/src/hygiene.rs
@@ -149,16 +149,12 @@ impl HygieneInfo {
                     token_id = unshifted;
                     (&attr_args.1, self.attr_input_or_mac_def_start?)
                 }
-                None => (
-                    &self.macro_arg.1,
-                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
-                ),
+                None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
             },
             _ => match origin {
-                mbe::Origin::Call => (
-                    &self.macro_arg.1,
-                    InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()),
-                ),
+                mbe::Origin::Call => {
+                    (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
+                }
                 mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
                     (TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
                         (&expander.def_site_token_map, *tt)
@@ -198,9 +194,9 @@ fn make_hygiene_info(
         _ => None,
     });
 
-    let macro_def = db.macro_def(loc.def);
+    let macro_def = db.macro_expander(loc.def);
     let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
-    let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
+    let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
         Arc::new((
             tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
             Default::default(),
diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs
index a92c17f4ed0..9ed6c31ddde 100644
--- a/crates/hir-expand/src/lib.rs
+++ b/crates/hir-expand/src/lib.rs
@@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId);
 pub struct MacroCallLoc {
     pub def: MacroDefId,
     pub(crate) krate: CrateId,
-    /// Some if `def` is a builtin eager macro.
+    /// Some if this is a macro call for an eager macro. Note that this is `None`
+    /// for the eager input macro file.
     eager: Option<Box<EagerCallInfo>>,
     pub kind: MacroCallKind,
 }
@@ -152,11 +153,10 @@ pub enum MacroDefKind {
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 struct EagerCallInfo {
-    /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+    /// The expanded argument of the eager macro.
     arg: Arc<(tt::Subtree, TokenMap)>,
-    /// call id of the eager macro's input file. If this is none, macro call containing this call info
-    /// is an eager macro's input, otherwise it is its output.
-    arg_id: Option<MacroCallId>,
+    /// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
+    arg_id: MacroCallId,
     error: Option<ExpandError>,
 }
 
@@ -221,11 +221,7 @@ impl HirFileId {
                 HirFileIdRepr::FileId(id) => break id,
                 HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
                     let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
-                    let is_include_expansion = loc.def.is_include()
-                        && matches!(
-                            loc.eager.as_deref(),
-                            Some(EagerCallInfo { arg_id: Some(_), .. })
-                        );
+                    let is_include_expansion = loc.def.is_include() && loc.eager.is_some();
                     file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) {
                         Some(Ok((_, file))) => file.into(),
                         _ => loc.kind.file_id(),
@@ -270,53 +266,7 @@ impl HirFileId {
     /// Return expansion information if it is a macro-expansion file
     pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> {
         let macro_file = self.macro_file()?;
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-
-        let arg_tt = loc.kind.arg(db)?;
-
-        let macro_def = db.macro_def(loc.def);
-        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
-        let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| {
-            Arc::new((
-                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
-                Default::default(),
-                Default::default(),
-            ))
-        });
-
-        let def = loc.def.ast_id().left().and_then(|id| {
-            let def_tt = match id.to_node(db) {
-                ast::Macro::MacroRules(mac) => mac.token_tree()?,
-                ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
-                    return None
-                }
-                ast::Macro::MacroDef(mac) => mac.body()?,
-            };
-            Some(InFile::new(id.file_id, def_tt))
-        });
-        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
-            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
-                // FIXME: handle `cfg_attr`
-                let tt = ast_id
-                    .to_node(db)
-                    .doc_comments_and_attrs()
-                    .nth(invoc_attr_index.ast_index())
-                    .and_then(Either::left)?
-                    .token_tree()?;
-                Some(InFile::new(ast_id.file_id, tt))
-            }
-            _ => None,
-        });
-
-        Some(ExpansionInfo {
-            expanded: InFile::new(self, parse.syntax_node()),
-            arg: InFile::new(loc.kind.file_id(), arg_tt),
-            attr_input_or_mac_def,
-            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
-            macro_arg,
-            macro_def,
-            exp_map,
-        })
+        ExpansionInfo::new(db, macro_file)
     }
 
     pub fn as_builtin_derive_attr_node(
@@ -370,7 +320,7 @@ impl HirFileId {
         match self.macro_file() {
             Some(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
-                matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. }))
+                matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
             }
             _ => false,
         }
@@ -603,13 +553,18 @@ impl MacroCallKind {
         FileRange { range, file_id }
     }
 
-    fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> {
+    fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> {
         match self {
-            MacroCallKind::FnLike { ast_id, .. } => {
-                Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+            MacroCallKind::FnLike { ast_id, .. } => ast_id
+                .to_in_file_node(db)
+                .map(|it| Some(it.token_tree()?.syntax().clone()))
+                .transpose(),
+            MacroCallKind::Derive { ast_id, .. } => {
+                Some(ast_id.to_in_file_node(db).syntax().cloned())
+            }
+            MacroCallKind::Attr { ast_id, .. } => {
+                Some(ast_id.to_in_file_node(db).syntax().cloned())
             }
-            MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
-            MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
         }
     }
 }
@@ -627,7 +582,7 @@ impl MacroCallId {
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    expanded: InFile<SyntaxNode>,
+    expanded: InMacroFile<SyntaxNode>,
     /// The argument TokenTree or item for attributes
     arg: InFile<SyntaxNode>,
     /// The `macro_rules!` or attribute input.
@@ -643,7 +598,7 @@ pub struct ExpansionInfo {
 
 impl ExpansionInfo {
     pub fn expanded(&self) -> InFile<SyntaxNode> {
-        self.expanded.clone()
+        self.expanded.clone().into()
     }
 
     pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
@@ -674,7 +629,7 @@ impl ExpansionInfo {
         let token_id_in_attr_input = if let Some(item) = item {
             // check if we are mapping down in an attribute input
             // this is a special case as attributes can have two inputs
-            let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+            let call_id = self.expanded.file_id.macro_call_id;
             let loc = db.lookup_intern_macro_call(call_id);
 
             let token_range = token.value.text_range();
@@ -720,7 +675,7 @@ impl ExpansionInfo {
                 let relative_range =
                     token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
                 let token_id = self.macro_arg.1.token_by_range(relative_range)?;
-                // conditionally shift the id by a declaratives macro definition
+                // conditionally shift the id by a declarative macro definition
                 self.macro_def.map_id_down(token_id)
             }
         };
@@ -730,7 +685,7 @@ impl ExpansionInfo {
             .ranges_by_token(token_id, token.value.kind())
             .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
 
-        Some(tokens.map(move |token| self.expanded.with_value(token)))
+        Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token)))
     }
 
     /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
@@ -739,18 +694,17 @@ impl ExpansionInfo {
         db: &dyn db::ExpandDatabase,
         token: InFile<&SyntaxToken>,
     ) -> Option<(InFile<SyntaxToken>, Origin)> {
+        assert_eq!(token.file_id, self.expanded.file_id.into());
         // Fetch the id through its text range,
         let token_id = self.exp_map.token_by_range(token.value.text_range())?;
         // conditionally unshifting the id to accommodate for macro-rules def site
         let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
 
-        let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+        let call_id = self.expanded.file_id.macro_call_id;
         let loc = db.lookup_intern_macro_call(call_id);
 
         // Special case: map tokens from `include!` expansions to the included file
-        if loc.def.is_include()
-            && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. }))
-        {
+        if loc.def.is_include() {
             if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) {
                 let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?;
                 let source = db.parse(file_id);
@@ -794,6 +748,58 @@ impl ExpansionInfo {
             tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
         Some((tt.with_value(token), origin))
     }
+
+    fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> {
+        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+        let arg_tt = loc.kind.arg(db)?;
+
+        let macro_def = db.macro_expander(loc.def);
+        let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
+        let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
+
+        let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
+            Arc::new((
+                tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
+                Default::default(),
+                Default::default(),
+            ))
+        });
+
+        let def = loc.def.ast_id().left().and_then(|id| {
+            let def_tt = match id.to_node(db) {
+                ast::Macro::MacroRules(mac) => mac.token_tree()?,
+                ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => {
+                    return None
+                }
+                ast::Macro::MacroDef(mac) => mac.body()?,
+            };
+            Some(InFile::new(id.file_id, def_tt))
+        });
+        let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+            MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                // FIXME: handle `cfg_attr`
+                let tt = ast_id
+                    .to_node(db)
+                    .doc_comments_and_attrs()
+                    .nth(invoc_attr_index.ast_index())
+                    .and_then(Either::left)?
+                    .token_tree()?;
+                Some(InFile::new(ast_id.file_id, tt))
+            }
+            _ => None,
+        });
+
+        Some(ExpansionInfo {
+            expanded,
+            arg: arg_tt,
+            attr_input_or_mac_def,
+            macro_arg_shift: mbe::Shift::new(&macro_arg.0),
+            macro_arg,
+            macro_def,
+            exp_map,
+        })
+    }
 }
 
 /// `AstId` points to an AST node in any file.
@@ -805,6 +811,9 @@ impl<N: AstIdNode> AstId<N> {
     pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
         self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
     }
+    pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> {
+        InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
+    }
     pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
         db.ast_id_map(self.file_id).get(self.value)
     }
@@ -820,6 +829,7 @@ impl ErasedAstId {
         db.ast_id_map(self.file_id).get_raw(self.value)
     }
 }
+
 /// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
 ///
 /// Typical usages are:
@@ -1038,6 +1048,18 @@ impl InFile<SyntaxToken> {
     }
 }
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InMacroFile<T> {
+    pub file_id: MacroFile,
+    pub value: T,
+}
+
+impl<T> From<InMacroFile<T>> for InFile<T> {
+    fn from(macro_file: InMacroFile<T>) -> Self {
+        InFile { file_id: macro_file.file_id.into(), value: macro_file.value }
+    }
+}
+
 fn ascend_node_border_tokens(
     db: &dyn db::ExpandDatabase,
     InFile { file_id, value: node }: InFile<&SyntaxNode>,
diff --git a/crates/hir/src/db.rs b/crates/hir/src/db.rs
index fa2deb72152..f3a0608944b 100644
--- a/crates/hir/src/db.rs
+++ b/crates/hir/src/db.rs
@@ -6,7 +6,7 @@
 pub use hir_def::db::*;
 pub use hir_expand::db::{
     AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgTextQuery,
+    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
     MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
 };
 pub use hir_ty::db::*;
diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs
index d09963d4d31..f8d9398ae2c 100644
--- a/crates/hir/src/lib.rs
+++ b/crates/hir/src/lib.rs
@@ -698,7 +698,7 @@ impl Module {
 
 fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) {
     let id = macro_id_to_def_id(db.upcast(), m.id);
-    if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_def(id) {
+    if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) {
         if let Some(e) = expander.mac.err() {
             let Some(ast) = id.ast_id().left() else {
                 never!("declarative expander for non decl-macro: {:?}", e);
diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs
index 7ba0bd73ec4..a0b05c87ae7 100644
--- a/crates/ide-db/src/apply_change.rs
+++ b/crates/ide-db/src/apply_change.rs
@@ -99,7 +99,7 @@ impl RootDatabase {
             hir::db::AstIdMapQuery
             hir::db::ParseMacroExpansionQuery
             hir::db::InternMacroCallQuery
-            hir::db::MacroArgTextQuery
+            hir::db::MacroArgNodeQuery
             hir::db::DeclMacroExpanderQuery
             hir::db::MacroExpandQuery
             hir::db::ExpandProcMacroQuery
diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs
index feced7e4bbb..f27ed485d81 100644
--- a/crates/ide-db/src/lib.rs
+++ b/crates/ide-db/src/lib.rs
@@ -200,7 +200,7 @@ impl RootDatabase {
             hir_db::AstIdMapQuery
             // hir_db::ParseMacroExpansionQuery
             // hir_db::InternMacroCallQuery
-            hir_db::MacroArgTextQuery
+            hir_db::MacroArgNodeQuery
             hir_db::DeclMacroExpanderQuery
             // hir_db::MacroExpandQuery
             hir_db::ExpandProcMacroQuery
diff --git a/crates/ide/src/syntax_highlighting.rs b/crates/ide/src/syntax_highlighting.rs
index dc06591ffea..577bd2bc1f8 100644
--- a/crates/ide/src/syntax_highlighting.rs
+++ b/crates/ide/src/syntax_highlighting.rs
@@ -265,10 +265,14 @@ fn traverse(
 
         // set macro and attribute highlighting states
         match event.clone() {
-            Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+            Enter(NodeOrToken::Node(node))
+                if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+            {
                 tt_level += 1;
             }
-            Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
+            Leave(NodeOrToken::Node(node))
+                if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) =>
+            {
                 tt_level -= 1;
             }
             Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
@@ -387,7 +391,7 @@ fn traverse(
             };
         let descended_element = if in_macro {
             // Attempt to descend tokens into macro-calls.
-            match element {
+            let res = match element {
                 NodeOrToken::Token(token) if token.kind() != COMMENT => {
                     let token = match attr_or_derive_item {
                         Some(AttrOrDerive::Attr(_)) => {
@@ -412,7 +416,8 @@ fn traverse(
                     }
                 }
                 e => e,
-            }
+            };
+            res
         } else {
             element
         };
diff --git a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
index fa374b04f19..f4f164aa1de 100644
--- a/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
+++ b/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html
@@ -161,7 +161,7 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 
     <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
     <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference">backslash</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span>
 
     <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
     <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
@@ -173,6 +173,6 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
     <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
     <span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
     <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
-    <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
-    <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="operator macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+    <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
+    <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">backslash</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="none macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="none macro">toho</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="none macro">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
 <span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/crates/ide/src/syntax_highlighting/tests.rs b/crates/ide/src/syntax_highlighting/tests.rs
index 497992f684c..1ee451a06d0 100644
--- a/crates/ide/src/syntax_highlighting/tests.rs
+++ b/crates/ide/src/syntax_highlighting/tests.rs
@@ -507,7 +507,7 @@ fn main() {
 
     let _ = "\x28\x28\x00\x63\n";
     let _ = b"\x28\x28\x00\x63\n";
-    let _ = r"\\";
+    let backslash = r"\\";
 
     println!("{\x41}", A = 92);
     println!("{ничоси}", ничоси = 92);
@@ -520,7 +520,7 @@ fn main() {
     toho!("{}fmt", 0);
     asm!("mov eax, {0}");
     format_args!(concat!("{}"), "{}");
-    format_args!("{}", format_args!("{}", 0));
+    format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash);
 }"#,
         expect_file!["./test_data/highlight_strings.html"],
         false,
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index c17ba1c58e2..665bce474a6 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -208,7 +208,7 @@ impl Shift {
     }
 }
 
-#[derive(Debug, Eq, PartialEq)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum Origin {
     Def,
     Call,
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
index c923e7a69a1..9b2df89f9c7 100644
--- a/crates/mbe/src/token_map.rs
+++ b/crates/mbe/src/token_map.rs
@@ -110,4 +110,11 @@ impl TokenMap {
         // FIXME: This could be accidentally quadratic
         self.entries.remove(idx);
     }
+
+    pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ {
+        self.entries.iter().filter_map(|&(tid, tr)| match tr {
+            TokenTextRange::Token(range) => Some((tid, range)),
+            TokenTextRange::Delimiter(_) => None,
+        })
+    }
 }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index efbf8796644..bed240a6d73 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -171,6 +171,109 @@ impl SourceFile {
     }
 }
 
+impl ast::TokenTree {
+    pub fn reparse_as_expr(self) -> Parse<ast::Expr> {
+        let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+
+        let mut parser_input = parser::Input::default();
+        let mut was_joint = false;
+        for t in tokens {
+            let kind = t.kind();
+            if kind.is_trivia() {
+                was_joint = false
+            } else {
+                if kind == SyntaxKind::IDENT {
+                    let token_text = t.text();
+                    let contextual_kw = SyntaxKind::from_contextual_keyword(token_text)
+                        .unwrap_or(SyntaxKind::IDENT);
+                    parser_input.push_ident(contextual_kw);
+                } else {
+                    if was_joint {
+                        parser_input.was_joint();
+                    }
+                    parser_input.push(kind);
+                    // Tag the token as joint if it is float with a fractional part
+                    // we use this jointness to inform the parser about what token split
+                    // event to emit when we encounter a float literal in a field access
+                    if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') {
+                        parser_input.was_joint();
+                    }
+                }
+                was_joint = true;
+            }
+        }
+
+        let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input);
+
+        let mut tokens =
+            self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token);
+        let mut text = String::new();
+        let mut pos = TextSize::from(0);
+        let mut builder = SyntaxTreeBuilder::default();
+        for event in parser_output.iter() {
+            match event {
+                parser::Step::Token { kind, n_input_tokens } => {
+                    let mut token = tokens.next().unwrap();
+                    while token.kind().is_trivia() {
+                        let text = token.text();
+                        pos += TextSize::from(text.len() as u32);
+                        builder.token(token.kind(), text);
+
+                        token = tokens.next().unwrap();
+                    }
+                    text.push_str(token.text());
+                    for _ in 1..n_input_tokens {
+                        let token = tokens.next().unwrap();
+                        text.push_str(token.text());
+                    }
+
+                    pos += TextSize::from(text.len() as u32);
+                    builder.token(kind, &text);
+                    text.clear();
+                }
+                parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+                    let token = tokens.next().unwrap();
+                    let text = token.text();
+
+                    match text.split_once('.') {
+                        Some((left, right)) => {
+                            assert!(!left.is_empty());
+                            builder.start_node(SyntaxKind::NAME_REF);
+                            builder.token(SyntaxKind::INT_NUMBER, left);
+                            builder.finish_node();
+
+                            // here we move the exit up, the original exit has been deleted in process
+                            builder.finish_node();
+
+                            builder.token(SyntaxKind::DOT, ".");
+
+                            if has_pseudo_dot {
+                                assert!(right.is_empty(), "{left}.{right}");
+                            } else {
+                                builder.start_node(SyntaxKind::NAME_REF);
+                                builder.token(SyntaxKind::INT_NUMBER, right);
+                                builder.finish_node();
+
+                                // the parser creates an unbalanced start node, we are required to close it here
+                                builder.finish_node();
+                            }
+                        }
+                        None => unreachable!(),
+                    }
+                    pos += TextSize::from(text.len() as u32);
+                }
+                parser::Step::Enter { kind } => builder.start_node(kind),
+                parser::Step::Exit => builder.finish_node(),
+                parser::Step::Error { msg } => builder.error(msg.to_owned(), pos),
+            }
+        }
+
+        let (green, errors) = builder.finish_raw();
+
+        Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+    }
+}
+
 /// Matches a `SyntaxNode` against an `ast` type.
 ///
 /// # Example:
diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs
index c2ebf03746a..1b8d4ba42a5 100644
--- a/crates/tt/src/lib.rs
+++ b/crates/tt/src/lib.rs
@@ -68,6 +68,21 @@ pub mod token_id {
             Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
         }
     }
+
+    impl Subtree {
+        pub fn visit_ids(&mut self, f: &impl Fn(TokenId) -> TokenId) {
+            self.delimiter.open = f(self.delimiter.open);
+            self.delimiter.close = f(self.delimiter.close);
+            self.token_trees.iter_mut().for_each(|tt| match tt {
+                crate::TokenTree::Leaf(leaf) => match leaf {
+                    crate::Leaf::Literal(it) => it.span = f(it.span),
+                    crate::Leaf::Punct(it) => it.span = f(it.span),
+                    crate::Leaf::Ident(it) => it.span = f(it.span),
+                },
+                crate::TokenTree::Subtree(s) => s.visit_ids(f),
+            })
+        }
+    }
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]