about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorDavid Barsky <me@davidbarsky.com>2024-11-05 12:24:41 -0500
committerDavid Barsky <me@davidbarsky.com>2025-03-10 13:30:51 -0400
commita5c071633be35085edff186cef7fcfb2d17c22af (patch)
treee077d825c4e8460b8a69b7d87af43a6f0d7125c9 /src
parent83c1c4c2bb7c5744fe7e39869c941f52d54d80b7 (diff)
downloadrust-a5c071633be35085edff186cef7fcfb2d17c22af.tar.gz
rust-a5c071633be35085edff186cef7fcfb2d17c22af.zip
internal: port rust-analyzer to new Salsa
Diffstat (limited to 'src')
-rw-r--r--src/tools/rust-analyzer/Cargo.lock855
-rw-r--r--src/tools/rust-analyzer/Cargo.toml3
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/change.rs15
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs338
-rw-r--r--src/tools/rust-analyzer/crates/edition/src/lib.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs184
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs19
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs27
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/import_map.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs100
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs30
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs22
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/test_db.rs134
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/change.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs131
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/files.rs20
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs137
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs41
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml3
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs26
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs219
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/drop.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs5
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs17
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs13
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs37
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs49
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs14
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs121
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/variance.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/db.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/has_source.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs20
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/symbols.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests.rs19
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/lib.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs361
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/helpers.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/lib.rs181
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs18
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs43
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs39
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt14
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt60
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/traits.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs16
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs49
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/search.rs12
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/expand_macro.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/interpret.rs17
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs68
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/parent_module.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/runnables.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/ssr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/status.rs237
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html12
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/test_explorer.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs13
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs10
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs9
-rw-r--r--src/tools/rust-analyzer/crates/load-cargo/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tests.rs160
-rw-r--r--src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs92
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs6
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs10
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs12
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs31
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs5
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs3
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs25
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs18
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs2
-rw-r--r--src/tools/rust-analyzer/crates/span/Cargo.toml5
-rw-r--r--src/tools/rust-analyzer/crates/span/src/hygiene.rs381
-rw-r--r--src/tools/rust-analyzer/crates/span/src/lib.rs135
-rw-r--r--src/tools/rust-analyzer/crates/test-fixture/src/lib.rs9
-rw-r--r--src/tools/rust-analyzer/xtask/src/tidy.rs5
161 files changed, 3042 insertions, 2298 deletions
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index a50e1e8f49a..efd7362594c 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -4,18 +4,18 @@ version = 3
 
 [[package]]
 name = "addr2line"
-version = "0.22.0"
+version = "0.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
 dependencies = [
  "gimli",
 ]
 
 [[package]]
-name = "adler"
-version = "1.0.2"
+name = "adler2"
+version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
 
 [[package]]
 name = "aho-corasick"
@@ -40,41 +40,41 @@ checksum = "a1078fa1ce1e34b1872d8611ad921196d76bdd7027e949fbe31231abde201892"
 
 [[package]]
 name = "anyhow"
-version = "1.0.86"
+version = "1.0.97"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
+checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f"
 
 [[package]]
 name = "arbitrary"
-version = "1.3.2"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"
+checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223"
 
 [[package]]
 name = "arrayvec"
-version = "0.7.4"
+version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 
 [[package]]
 name = "autocfg"
-version = "1.3.0"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
+checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26"
 
 [[package]]
 name = "backtrace"
-version = "0.3.73"
+version = "0.3.74"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
+checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a"
 dependencies = [
  "addr2line",
- "cc",
  "cfg-if",
  "libc",
  "miniz_oxide",
- "object 0.36.3",
+ "object 0.36.7",
  "rustc-demangle",
+ "windows-targets 0.52.6",
 ]
 
 [[package]]
@@ -82,11 +82,13 @@ name = "base-db"
 version = "0.0.0"
 dependencies = [
  "cfg",
+ "dashmap 5.5.3",
  "intern",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "lz4_flex",
- "rustc-hash 2.0.0",
- "salsa 0.0.0",
+ "query-group-macro",
+ "rustc-hash 2.1.1",
+ "salsa 0.19.0",
  "semver",
  "span",
  "stdx",
@@ -110,17 +112,17 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.7.0"
+version = "2.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be"
+checksum = "5c8214115b7bf84099f1309324e63141d4c5d7cc26862f97a0a857dbefe165bd"
 
 [[package]]
 name = "borsh"
-version = "1.5.1"
+version = "1.5.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed"
+checksum = "5430e3be710b68d984d1391c854eb431a9d548640711faa54eecb1df93db91cc"
 dependencies = [
- "cfg_aliases 0.2.1",
+ "cfg_aliases",
 ]
 
 [[package]]
@@ -140,18 +142,18 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
 
 [[package]]
 name = "camino"
-version = "1.1.7"
+version = "1.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239"
+checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "cargo-platform"
-version = "0.1.8"
+version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc"
+checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea"
 dependencies = [
  "serde",
 ]
@@ -172,9 +174,9 @@ dependencies = [
 
 [[package]]
 name = "cc"
-version = "1.1.22"
+version = "1.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0"
+checksum = "be714c154be609ec7f5dad223a33bf1482fff90472de28f7362806e6d4832b8c"
 dependencies = [
  "shlex",
 ]
@@ -188,7 +190,7 @@ dependencies = [
  "expect-test",
  "intern",
  "oorandom",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "syntax",
  "syntax-bridge",
  "tracing",
@@ -203,12 +205,6 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "cfg_aliases"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
-
-[[package]]
-name = "cfg_aliases"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
@@ -231,7 +227,7 @@ version = "0.100.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4f114996bda14c0213f014a4ef31a7867dcf5f539a3900477fc6b20138e7a17b"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "chalk-derive",
 ]
 
@@ -287,18 +283,18 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.13"
+version = "0.5.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
+checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471"
 dependencies = [
  "crossbeam-utils",
 ]
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
+checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
 dependencies = [
  "crossbeam-epoch",
  "crossbeam-utils",
@@ -324,18 +320,18 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.20"
+version = "0.8.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
 
 [[package]]
 name = "ctrlc"
-version = "3.4.4"
+version = "3.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345"
+checksum = "90eeab0aa92f3f9b4e87f258c72b139c207d251f9cbc1080a0086b86a8870dd3"
 dependencies = [
  "nix",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -376,9 +372,9 @@ dependencies = [
 
 [[package]]
 name = "derive_arbitrary"
-version = "1.3.2"
+version = "1.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
+checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -416,10 +412,21 @@ dependencies = [
 ]
 
 [[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
 name = "dissimilar"
-version = "1.0.9"
+version = "1.0.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59f8e79d1fbf76bdfbde321e902714bf6c49df88a7dda6fc682fc2979226962d"
+checksum = "8975ffdaa0ef3661bfe02dbdcc06c9f829dfafe6a3c474de366a8d5e44276921"
 
 [[package]]
 name = "dot"
@@ -439,9 +446,9 @@ version = "0.0.0"
 
 [[package]]
 name = "either"
-version = "1.13.0"
+version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
 
 [[package]]
 name = "ena"
@@ -454,15 +461,15 @@ dependencies = [
 
 [[package]]
 name = "equivalent"
-version = "1.0.1"
+version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
 
 [[package]]
 name = "expect-test"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9e0be0a561335815e06dab7c62e50353134c796e7a6155402a64bcff66b6a5e0"
+checksum = "63af43ff4431e848fb47472a920f14fa71c24de13255a5692e93d4e90302acb0"
 dependencies = [
  "dissimilar",
  "once_cell",
@@ -470,9 +477,9 @@ dependencies = [
 
 [[package]]
 name = "filetime"
-version = "0.2.24"
+version = "0.2.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf401df4a4e3872c4fe8151134cf483738e74b67fc934d6532c882b3d24a4550"
+checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586"
 dependencies = [
  "cfg-if",
  "libc",
@@ -488,9 +495,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
 [[package]]
 name = "flate2"
-version = "1.0.31"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f211bbe8e69bbd0cfdea405084f128ae8b4aaa6b0b522fc8f2b009084797920"
+checksum = "11faaf5a5236997af9848be0bef4db95824b1d534ebc64d0f0c6cf3e67bd38dc"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
@@ -552,9 +559,9 @@ dependencies = [
 
 [[package]]
 name = "gimli"
-version = "0.29.0"
+version = "0.31.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd"
+checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
 
 [[package]]
 name = "hashbrown"
@@ -609,7 +616,7 @@ dependencies = [
  "indexmap",
  "intern",
  "itertools",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "smallvec",
  "span",
  "stdx",
@@ -628,7 +635,7 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cfg",
  "cov-mark",
  "dashmap 5.5.3",
@@ -643,11 +650,13 @@ dependencies = [
  "itertools",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "mbe",
+ "query-group-macro",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_hashes",
  "ra-ap-rustc_parse_format",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
+ "salsa 0.19.0",
  "smallvec",
  "span",
  "stdx",
@@ -676,7 +685,9 @@ dependencies = [
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "mbe",
  "parser",
- "rustc-hash 2.0.0",
+ "query-group-macro",
+ "rustc-hash 2.1.1",
+ "salsa 0.19.0",
  "smallvec",
  "span",
  "stdx",
@@ -693,12 +704,13 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "chalk-derive",
  "chalk-ir",
  "chalk-recursive",
  "chalk-solve",
  "cov-mark",
+ "dashmap 5.5.3",
  "either",
  "ena",
  "expect-test",
@@ -711,11 +723,13 @@ dependencies = [
  "nohash-hasher",
  "oorandom",
  "project-model",
+ "query-group-macro",
  "ra-ap-rustc_abi",
  "ra-ap-rustc_index",
  "ra-ap-rustc_pattern_analysis",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
+ "salsa 0.19.0",
  "scoped-tls",
  "smallvec",
  "span",
@@ -732,11 +746,129 @@ dependencies = [
 
 [[package]]
 name = "home"
-version = "0.5.9"
+version = "0.5.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
+checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "icu_collections"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_locid_transform_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
+
+[[package]]
+name = "icu_normalizer"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "utf16_iter",
+ "utf8_iter",
+ "write16",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
+
+[[package]]
+name = "icu_properties"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_locid_transform",
+ "icu_properties_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
+
+[[package]]
+name = "icu_provider"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_provider_macros",
+ "stable_deref_trait",
+ "tinystr",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_provider_macros"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -816,9 +948,10 @@ version = "0.0.0"
 dependencies = [
  "arrayvec",
  "base-db",
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cov-mark",
  "crossbeam-channel",
+ "dashmap 5.5.3",
  "either",
  "expect-test",
  "fst",
@@ -830,8 +963,10 @@ dependencies = [
  "nohash-hasher",
  "parser",
  "profile",
+ "query-group-macro",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
+ "salsa 0.19.0",
  "span",
  "stdx",
  "syntax",
@@ -839,6 +974,7 @@ dependencies = [
  "test-utils",
  "tracing",
  "triomphe",
+ "vfs",
 ]
 
 [[package]]
@@ -881,22 +1017,33 @@ dependencies = [
 
 [[package]]
 name = "idna"
-version = "0.5.0"
+version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
 dependencies = [
- "unicode-bidi",
- "unicode-normalization",
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
 ]
 
 [[package]]
 name = "indexmap"
-version = "2.3.0"
+version = "2.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0"
+checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652"
 dependencies = [
  "equivalent",
- "hashbrown 0.14.5",
+ "hashbrown 0.15.2",
  "serde",
 ]
 
@@ -906,7 +1053,7 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "inotify-sys",
  "libc",
 ]
@@ -926,7 +1073,7 @@ version = "0.0.0"
 dependencies = [
  "dashmap 5.5.3",
  "hashbrown 0.14.5",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "triomphe",
 ]
 
@@ -941,9 +1088,9 @@ dependencies = [
 
 [[package]]
 name = "itoa"
-version = "1.0.11"
+version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
 
 [[package]]
 name = "jod-thread"
@@ -989,15 +1136,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "libc"
-version = "0.2.169"
+version = "0.2.170"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
+checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828"
 
 [[package]]
 name = "libloading"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
+checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
 dependencies = [
  "cfg-if",
  "windows-targets 0.52.6",
@@ -1019,7 +1166,7 @@ version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "libc",
  "redox_syscall",
 ]
@@ -1050,6 +1197,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
 
 [[package]]
+name = "litemap"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856"
+
+[[package]]
 name = "load-cargo"
 version = "0.0.0"
 dependencies = [
@@ -1081,9 +1234,9 @@ dependencies = [
 
 [[package]]
 name = "log"
-version = "0.4.22"
+version = "0.4.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
+checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
 
 [[package]]
 name = "loom"
@@ -1100,24 +1253,25 @@ dependencies = [
 
 [[package]]
 name = "lsp-server"
-version = "0.7.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
+version = "0.7.8"
 dependencies = [
  "crossbeam-channel",
+ "ctrlc",
  "log",
+ "lsp-types",
  "serde",
+ "serde_derive",
  "serde_json",
 ]
 
 [[package]]
 name = "lsp-server"
 version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9462c4dc73e17f971ec1f171d44bfffb72e65a130117233388a0ebc7ec5656f9"
 dependencies = [
  "crossbeam-channel",
- "ctrlc",
  "log",
- "lsp-types",
  "serde",
  "serde_derive",
  "serde_json",
@@ -1161,7 +1315,7 @@ dependencies = [
  "intern",
  "parser",
  "ra-ap-rustc_lexer",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "smallvec",
  "span",
  "stdx",
@@ -1207,11 +1361,11 @@ dependencies = [
 
 [[package]]
 name = "miniz_oxide"
-version = "0.7.4"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08"
+checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5"
 dependencies = [
- "adler",
+ "adler2",
 ]
 
 [[package]]
@@ -1237,13 +1391,13 @@ dependencies = [
 
 [[package]]
 name = "nix"
-version = "0.28.0"
+version = "0.29.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "cfg-if",
- "cfg_aliases 0.1.1",
+ "cfg_aliases",
  "libc",
 ]
 
@@ -1259,7 +1413,7 @@ version = "8.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2fee8403b3d66ac7b26aee6e40a897d85dc5ce26f44da36b8b73e987cc52e943"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "filetime",
  "fsevent-sys",
  "inotify",
@@ -1333,24 +1487,24 @@ dependencies = [
 
 [[package]]
 name = "object"
-version = "0.36.3"
+version = "0.36.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9"
+checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.19.0"
+version = "1.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
+checksum = "cde51589ab56b20a6f686b2c68f7a0bd6add753d697abf720d63f8db3ab7b1ad"
 
 [[package]]
 name = "oorandom"
-version = "11.1.4"
+version = "11.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b410bbe7e14ab526a0e86877eb47c6996a2bd7746f027ba551028c925390e4e9"
+checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
 
 [[package]]
 name = "option-ext"
@@ -1449,9 +1603,9 @@ dependencies = [
 
 [[package]]
 name = "pin-project-lite"
-version = "0.2.14"
+version = "0.2.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
 
 [[package]]
 name = "portable-atomic"
@@ -1467,9 +1621,9 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
 
 [[package]]
 name = "ppv-lite86"
-version = "0.2.20"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
 dependencies = [
  "zerocopy",
 ]
@@ -1481,7 +1635,7 @@ dependencies = [
  "indexmap",
  "intern",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "serde",
  "serde_derive",
  "serde_json",
@@ -1528,23 +1682,23 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.93"
+version = "1.0.94"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99"
+checksum = "a31971752e70b8b2686d7e46ec17fb38dad4051d94024c88df49b667caea9c84"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "process-wrap"
-version = "8.0.2"
+version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38ee68ae331824036479c84060534b18254c864fa73366c58d86db3b7b811619"
+checksum = "d35f4dc9988d1326b065b4def5e950c3ed727aa03e3151b86cc9e2aec6b03f54"
 dependencies = [
  "indexmap",
  "nix",
  "tracing",
- "windows 0.56.0",
+ "windows 0.59.0",
 ]
 
 [[package]]
@@ -1571,7 +1725,7 @@ dependencies = [
  "itertools",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "semver",
  "serde",
  "serde_derive",
@@ -1609,7 +1763,7 @@ version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "memchr",
  "unicase",
 ]
@@ -1637,9 +1791,9 @@ dependencies = [
 
 [[package]]
 name = "quote"
-version = "1.0.36"
+version = "1.0.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
+checksum = "c1f1914ce909e1658d9907913b4b91947430c7d9be598b15a1912935b8c04801"
 dependencies = [
  "proc-macro2",
 ]
@@ -1650,7 +1804,7 @@ version = "0.100.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f1651b0f7e8c3eb7c27a88f39d277e69c32bfe58e3be174d286c1a24d6a7a4d8"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
  "ra-ap-rustc_hashes",
  "ra-ap-rustc_index",
  "tracing",
@@ -1714,7 +1868,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bef3ff73fa4653252ffe1d1e9177a446f49ef46d97140e4816b7ff2dad59ed53"
 dependencies = [
  "ra-ap-rustc_index",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
  "smallvec",
  "tracing",
@@ -1772,18 +1926,18 @@ dependencies = [
 
 [[package]]
 name = "redox_syscall"
-version = "0.5.3"
+version = "0.5.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+checksum = "0b8c0c260b63a8219631167be35e6a988e9554dbd323f8bd08439c8ed1302bd1"
 dependencies = [
- "bitflags 2.7.0",
+ "bitflags 2.9.0",
 ]
 
 [[package]]
 name = "redox_users"
-version = "0.4.5"
+version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891"
+checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
 dependencies = [
  "getrandom",
  "libredox",
@@ -1871,7 +2025,7 @@ dependencies = [
  "intern",
  "itertools",
  "load-cargo",
- "lsp-server 0.7.7",
+ "lsp-server 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "lsp-types",
  "memchr",
  "mimalloc",
@@ -1886,7 +2040,7 @@ dependencies = [
  "profile",
  "project-model",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "scip",
  "semver",
  "serde",
@@ -1927,23 +2081,23 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
 [[package]]
 name = "rustc-hash"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
 
 [[package]]
 name = "rustc-stable-hash"
-version = "0.1.1"
+version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
+checksum = "781442f29170c5c93b7185ad559492601acdc71d5bb0706f5868094f45cfcd08"
 
 [[package]]
 name = "rustc_apfloat"
-version = "0.2.1+llvm-462a31f5a5ab"
+version = "0.2.2+llvm-462a31f5a5ab"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "886d94c63c812a8037c4faca2607453a0fa4cf82f734665266876b022244543f"
+checksum = "121e2195ff969977a4e2b5c9965ea867fce7e4cb5aee5b09dee698a7932d574f"
 dependencies = [
- "bitflags 1.3.2",
+ "bitflags 2.9.0",
  "smallvec",
 ]
 
@@ -1955,9 +2109,9 @@ checksum = "eded382c5f5f786b989652c49544c4877d9f015cc22e145a5ea8ea66c2921cd2"
 
 [[package]]
 name = "ryu"
-version = "1.0.18"
+version = "1.0.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 
 [[package]]
 name = "salsa"
@@ -1972,7 +2126,7 @@ dependencies = [
  "oorandom",
  "parking_lot",
  "rand",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "salsa-macros 0.0.0",
  "smallvec",
  "tracing",
@@ -1994,7 +2148,7 @@ dependencies = [
  "parking_lot",
  "portable-atomic",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "salsa-macro-rules",
  "salsa-macros 0.19.0",
  "smallvec",
@@ -2041,9 +2195,9 @@ dependencies = [
 
 [[package]]
 name = "scip"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8dfafd2fa14c6237fa1fc4310f739d02fa915d92977fa069426591f1de046f81"
+checksum = "fb2b449a5e4660ce817676a0871cd1b4e2ff1023e33a1ac046670fa594b543a2"
 dependencies = [
  "protobuf",
 ]
@@ -2062,27 +2216,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "semver"
-version = "1.0.23"
+version = "1.0.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "serde"
-version = "1.0.216"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
+checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.216"
+version = "1.0.219"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
+checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2091,9 +2245,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.124"
+version = "1.0.140"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "66ad62847a56b3dba58cc891acd13884b9c61138d330c0d7b6181713d4fce38d"
+checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
 dependencies = [
  "indexmap",
  "itoa",
@@ -2104,9 +2258,9 @@ dependencies = [
 
 [[package]]
 name = "serde_repr"
-version = "0.1.19"
+version = "0.1.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9"
+checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2115,9 +2269,9 @@ dependencies = [
 
 [[package]]
 name = "serde_spanned"
-version = "0.6.7"
+version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d"
+checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1"
 dependencies = [
  "serde",
 ]
@@ -2139,9 +2293,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
 
 [[package]]
 name = "smallvec"
-version = "1.13.2"
+version = "1.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
+checksum = "7fcf8323ef1faaee30a44a340193b1ac6814fd9b7b4e88e9d4519a3e4abe1cfd"
 
 [[package]]
 name = "smol_str"
@@ -2159,8 +2313,8 @@ version = "0.0.0"
 dependencies = [
  "hashbrown 0.14.5",
  "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-hash 2.0.0",
- "salsa 0.0.0",
+ "rustc-hash 2.1.1",
+ "salsa 0.19.0",
  "stdx",
  "syntax",
  "text-size",
@@ -2168,6 +2322,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
+[[package]]
 name = "stdx"
 version = "0.0.0"
 dependencies = [
@@ -2183,9 +2343,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "2.0.87"
+version = "2.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
+checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2216,7 +2376,7 @@ dependencies = [
  "ra-ap-rustc_lexer",
  "rayon",
  "rowan",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "rustc_apfloat",
  "smol_str",
  "stdx",
@@ -2231,7 +2391,7 @@ version = "0.0.0"
 dependencies = [
  "intern",
  "parser",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "span",
  "stdx",
  "syntax",
@@ -2254,7 +2414,7 @@ dependencies = [
  "cfg",
  "hir-expand",
  "intern",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "span",
  "stdx",
  "test-utils",
@@ -2268,7 +2428,7 @@ dependencies = [
  "dissimilar",
  "paths",
  "profile",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "text-size",
  "tracing",
@@ -2282,18 +2442,18 @@ checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233"
 
 [[package]]
 name = "thiserror"
-version = "1.0.63"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724"
+checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.63"
+version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261"
+checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2343,9 +2503,9 @@ dependencies = [
 
 [[package]]
 name = "time"
-version = "0.3.36"
+version = "0.3.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
+checksum = "dad298b01a40a23aac4580b67e3dbedb7cc8402f3592d7f49469de2ea4aecdd8"
 dependencies = [
  "deranged",
  "itoa",
@@ -2360,40 +2520,35 @@ dependencies = [
 
 [[package]]
 name = "time-core"
-version = "0.1.2"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
+checksum = "765c97a5b985b7c11d7bc27fa927dc4fe6af3a6dfb021d28deb60d3bf51e76ef"
 
 [[package]]
 name = "time-macros"
-version = "0.2.18"
+version = "0.2.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
+checksum = "e8093bc3e81c3bc5f7879de09619d06c9a5a5e45ca44dfeeb7225bae38005c5c"
 dependencies = [
  "num-conv",
  "time-core",
 ]
 
 [[package]]
-name = "tinyvec"
-version = "1.8.0"
+name = "tinystr"
+version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
+checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
 dependencies = [
- "tinyvec_macros",
+ "displaydoc",
+ "zerovec",
 ]
 
 [[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
-
-[[package]]
 name = "toml"
-version = "0.8.19"
+version = "0.8.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e"
+checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148"
 dependencies = [
  "serde",
  "serde_spanned",
@@ -2412,9 +2567,9 @@ dependencies = [
 
 [[package]]
 name = "toml_edit"
-version = "0.22.20"
+version = "0.22.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d"
+checksum = "17b4795ff5edd201c7cd6dca065ae59972ce77d1b80fa0a84d94950ece7d1474"
 dependencies = [
  "indexmap",
  "serde",
@@ -2433,9 +2588,9 @@ dependencies = [
 
 [[package]]
 name = "tracing"
-version = "0.1.40"
+version = "0.1.41"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
 dependencies = [
  "pin-project-lite",
  "tracing-attributes",
@@ -2444,9 +2599,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-attributes"
-version = "0.1.27"
+version = "0.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
+checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2455,9 +2610,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-core"
-version = "0.1.32"
+version = "0.1.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
+checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c"
 dependencies = [
  "once_cell",
  "valuable",
@@ -2476,9 +2631,9 @@ dependencies = [
 
 [[package]]
 name = "tracing-subscriber"
-version = "0.3.18"
+version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
+checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
 dependencies = [
  "matchers",
  "nu-ansi-term 0.46.0",
@@ -2536,51 +2691,33 @@ checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
 
 [[package]]
 name = "unicase"
-version = "2.7.0"
+version = "2.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89"
-dependencies = [
- "version_check",
-]
-
-[[package]]
-name = "unicode-bidi"
-version = "0.3.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
+checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
-
-[[package]]
-name = "unicode-normalization"
-version = "0.1.23"
+version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
-dependencies = [
- "tinyvec",
-]
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
 
 [[package]]
 name = "unicode-properties"
-version = "0.1.1"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291"
+checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0"
 
 [[package]]
 name = "unicode-xid"
-version = "0.2.4"
+version = "0.2.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
 
 [[package]]
 name = "url"
-version = "2.5.2"
+version = "2.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
+checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
 dependencies = [
  "form_urlencoded",
  "idna",
@@ -2589,16 +2726,22 @@ dependencies = [
 ]
 
 [[package]]
-name = "valuable"
-version = "0.1.0"
+name = "utf16_iter"
+version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"
 
 [[package]]
-name = "version_check"
-version = "0.9.5"
+name = "utf8_iter"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "valuable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
 
 [[package]]
 name = "vfs"
@@ -2609,7 +2752,7 @@ dependencies = [
  "indexmap",
  "nohash-hasher",
  "paths",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "tracing",
 ]
@@ -2622,7 +2765,7 @@ dependencies = [
  "notify",
  "paths",
  "rayon",
- "rustc-hash 2.0.0",
+ "rustc-hash 2.1.1",
  "stdx",
  "tracing",
  "vfs",
@@ -2678,16 +2821,6 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "windows"
-version = "0.56.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1de69df01bdf1ead2f4ac895dc77c9351aefff65b2f3db429a343f9cbf05e132"
-dependencies = [
- "windows-core 0.56.0",
- "windows-targets 0.52.6",
-]
-
-[[package]]
-name = "windows"
 version = "0.58.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6"
@@ -2697,15 +2830,13 @@ dependencies = [
 ]
 
 [[package]]
-name = "windows-core"
-version = "0.56.0"
+name = "windows"
+version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4698e52ed2d08f8658ab0c39512a7c00ee5fe2688c65f8c0a4f06750d729f2a6"
+checksum = "7f919aee0a93304be7f62e8e5027811bbba96bcb1de84d6618be56e43f8a32a1"
 dependencies = [
- "windows-implement 0.56.0",
- "windows-interface 0.56.0",
- "windows-result 0.1.2",
- "windows-targets 0.52.6",
+ "windows-core 0.59.0",
+ "windows-targets 0.53.0",
 ]
 
 [[package]]
@@ -2717,19 +2848,21 @@ dependencies = [
  "windows-implement 0.58.0",
  "windows-interface 0.58.0",
  "windows-result 0.2.0",
- "windows-strings",
+ "windows-strings 0.1.0",
  "windows-targets 0.52.6",
 ]
 
 [[package]]
-name = "windows-implement"
-version = "0.56.0"
+name = "windows-core"
+version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f6fc35f58ecd95a9b71c4f2329b911016e6bec66b3f2e6a4aad86bd2e99e2f9b"
+checksum = "810ce18ed2112484b0d4e15d022e5f598113e220c53e373fb31e67e21670c1ce"
 dependencies = [
- "proc-macro2",
- "quote",
- "syn",
+ "windows-implement 0.59.0",
+ "windows-interface 0.59.0",
+ "windows-result 0.3.1",
+ "windows-strings 0.3.1",
+ "windows-targets 0.53.0",
 ]
 
 [[package]]
@@ -2744,10 +2877,10 @@ dependencies = [
 ]
 
 [[package]]
-name = "windows-interface"
-version = "0.56.0"
+name = "windows-implement"
+version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08990546bf4edef8f431fa6326e032865f27138718c587dc21bc0265bbcb57cc"
+checksum = "83577b051e2f49a058c308f17f273b570a6a758386fc291b5f6a934dd84e48c1"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2766,15 +2899,23 @@ dependencies = [
 ]
 
 [[package]]
-name = "windows-result"
-version = "0.1.2"
+name = "windows-interface"
+version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
+checksum = "cb26fd936d991781ea39e87c3a27285081e3c0da5ca0fcbc02d368cc6f52ff01"
 dependencies = [
- "windows-targets 0.52.6",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
+name = "windows-link"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3"
+
+[[package]]
 name = "windows-result"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2784,6 +2925,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "windows-result"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06374efe858fab7e4f881500e6e86ec8bc28f9462c47e5a9941a0142ad86b189"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
 name = "windows-strings"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2794,6 +2944,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "windows-strings"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
 name = "windows-sys"
 version = "0.48.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2844,7 +3003,7 @@ dependencies = [
  "windows_aarch64_gnullvm 0.52.6",
  "windows_aarch64_msvc 0.52.6",
  "windows_i686_gnu 0.52.6",
- "windows_i686_gnullvm",
+ "windows_i686_gnullvm 0.52.6",
  "windows_i686_msvc 0.52.6",
  "windows_x86_64_gnu 0.52.6",
  "windows_x86_64_gnullvm 0.52.6",
@@ -2852,6 +3011,22 @@ dependencies = [
 ]
 
 [[package]]
+name = "windows-targets"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b"
+dependencies = [
+ "windows_aarch64_gnullvm 0.53.0",
+ "windows_aarch64_msvc 0.53.0",
+ "windows_i686_gnu 0.53.0",
+ "windows_i686_gnullvm 0.53.0",
+ "windows_i686_msvc 0.53.0",
+ "windows_x86_64_gnu 0.53.0",
+ "windows_x86_64_gnullvm 0.53.0",
+ "windows_x86_64_msvc 0.53.0",
+]
+
+[[package]]
 name = "windows_aarch64_gnullvm"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2864,6 +3039,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
 
 [[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
 name = "windows_aarch64_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2876,6 +3057,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
 
 [[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
 name = "windows_i686_gnu"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2888,12 +3075,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
 
 [[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
 name = "windows_i686_gnullvm"
 version = "0.52.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
 
 [[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
 name = "windows_i686_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2906,6 +3105,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
 
 [[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
 name = "windows_x86_64_gnu"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2918,6 +3123,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
 
 [[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
 name = "windows_x86_64_gnullvm"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2930,6 +3141,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
 
 [[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
 name = "windows_x86_64_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2942,10 +3159,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
 
 [[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
+[[package]]
 name = "winnow"
-version = "0.6.18"
+version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f"
+checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1"
 dependencies = [
  "memchr",
 ]
@@ -2957,6 +3180,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23f6174b2566cc4a74f95e1367ec343e7fa80c93cc8087f5c4a3d6a1088b2118"
 
 [[package]]
+name = "write16"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"
+
+[[package]]
+name = "writeable"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
+
+[[package]]
 name = "xflags"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3008,20 +3243,86 @@ dependencies = [
 ]
 
 [[package]]
+name = "yoke"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40"
+dependencies = [
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
 name = "zerocopy"
-version = "0.7.35"
+version = "0.8.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0"
+checksum = "fd97444d05a4328b90e75e503a34bad781f14e28a823ad3557f0750df1ebcbc6"
 dependencies = [
- "byteorder",
  "zerocopy-derive",
 ]
 
 [[package]]
 name = "zerocopy-derive"
-version = "0.7.35"
+version = "0.8.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6352c01d0edd5db859a63e2605f4ea3183ddbd15e2c4a9e7d32184df75e4f154"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
+checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 35afbdd1b5c..7dd5f2bb1fa 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -72,7 +72,7 @@ proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
 proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
 profile = { path = "./crates/profile", version = "0.0.0" }
 project-model = { path = "./crates/project-model", version = "0.0.0" }
-ra-salsa = { path = "./crates/ra-salsa", package = "salsa", version = "0.0.0" }
+query-group = { package = "query-group-macro", path = "./crates/query-group-macro", version = "0.0.0" }
 span = { path = "./crates/span", version = "0.0.0" }
 stdx = { path = "./crates/stdx", version = "0.0.0" }
 syntax = { path = "./crates/syntax", version = "0.0.0" }
@@ -135,6 +135,7 @@ process-wrap = { version = "8.0.2", features = ["std"] }
 pulldown-cmark-to-cmark = "10.0.4"
 pulldown-cmark = { version = "0.9.0", default-features = false }
 rayon = "1.8.0"
+salsa = "0.19"
 rustc-hash = "2.0.0"
 semver = "1.0.14"
 serde = { version = "1.0.192" }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index 042dd36488a..4780d5191be 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -15,7 +15,9 @@ rust-version.workspace = true
 lz4_flex = { version = "0.11", default-features = false }
 
 la-arena.workspace = true
-ra-salsa.workspace = true
+dashmap.workspace = true
+salsa.workspace = true
+query-group.workspace = true
 rustc-hash.workspace = true
 triomphe.workspace = true
 semver.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/change.rs b/src/tools/rust-analyzer/crates/base-db/src/change.rs
index 7e40f5408f1..90413a573ae 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/change.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/change.rs
@@ -3,15 +3,12 @@
 
 use std::fmt;
 
-use ra_salsa::Durability;
 use rustc_hash::FxHashMap;
+use salsa::Durability;
 use triomphe::Arc;
 use vfs::FileId;
 
-use crate::{
-    CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot,
-    SourceRootDatabase, SourceRootId,
-};
+use crate::{CrateGraph, CrateId, CrateWorkspaceData, RootQueryDb, SourceRoot, SourceRootId};
 
 /// Encapsulate a bunch of raw `.set` calls on the database.
 #[derive(Default)]
@@ -59,7 +56,7 @@ impl FileChange {
         self.ws_data = Some(data);
     }
 
-    pub fn apply(self, db: &mut dyn SourceRootDatabase) {
+    pub fn apply(self, db: &mut dyn RootQueryDb) {
         let _p = tracing::info_span!("FileChange::apply").entered();
         if let Some(roots) = self.roots {
             for (idx, root) in roots.into_iter().enumerate() {
@@ -68,14 +65,16 @@ impl FileChange {
                 for file_id in root.iter() {
                     db.set_file_source_root_with_durability(file_id, root_id, durability);
                 }
+
                 db.set_source_root_with_durability(root_id, Arc::new(root), durability);
             }
         }
 
         for (file_id, text) in self.files_changed {
             let source_root_id = db.file_source_root(file_id);
-            let source_root = db.source_root(source_root_id);
-            let durability = durability(&source_root);
+            let source_root = db.source_root(source_root_id.source_root_id(db));
+
+            let durability = durability(&source_root.source_root(db));
             // XXX: can't actually remove the file, just reset the text
             let text = text.unwrap_or_default();
             db.set_file_text_with_durability(file_id, &text, durability)
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index eed8c886839..9ec9100968d 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -3,14 +3,7 @@
 mod change;
 mod input;
 
-use std::panic;
-
-use ra_salsa::Durability;
-use rustc_hash::FxHashMap;
-use span::EditionedFileId;
-use syntax::{ast, Parse, SourceFile, SyntaxError};
-use triomphe::Arc;
-use vfs::FileId;
+use std::hash::BuildHasherDefault;
 
 pub use crate::{
     change::FileChange,
@@ -20,20 +13,30 @@ pub use crate::{
         TargetLayoutLoadResult,
     },
 };
-pub use ra_salsa::{self, Cancelled};
-pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, VfsPath};
-
+use dashmap::{mapref::entry::Entry, DashMap};
+pub use query_group::{self};
+use rustc_hash::{FxHashMap, FxHasher};
+pub use salsa::{self};
+use salsa::{Durability, Setter};
 pub use semver::{BuildMetadata, Prerelease, Version, VersionReq};
+use syntax::{ast, Parse, SyntaxError};
+use triomphe::Arc;
+pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
 
 #[macro_export]
 macro_rules! impl_intern_key {
-    ($name:ident) => {
-        impl $crate::ra_salsa::InternKey for $name {
-            fn from_intern_id(v: $crate::ra_salsa::InternId) -> Self {
-                $name(v)
-            }
-            fn as_intern_id(&self) -> $crate::ra_salsa::InternId {
-                self.0
+    ($id:ident, $loc:ident) => {
+        #[salsa::interned(no_debug, no_lifetime)]
+        pub struct $id {
+            pub loc: $loc,
+        }
+
+        // If we derive this, salsa prints the values recursively, and this causes us to blow the stack.
+        impl ::std::fmt::Debug for $id {
+            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
+                f.debug_tuple(stringify!($id))
+                    .field(&format_args!("{:04x}", self.0.as_u32()))
+                    .finish()
             }
         }
     };
@@ -47,39 +50,213 @@ pub const DEFAULT_FILE_TEXT_LRU_CAP: u16 = 16;
 pub const DEFAULT_PARSE_LRU_CAP: u16 = 128;
 pub const DEFAULT_BORROWCK_LRU_CAP: u16 = 2024;
 
-pub trait FileLoader {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
-    /// Crates whose root's source root is the same as the source root of `file_id`
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
+#[derive(Debug, Default)]
+pub struct Files {
+    files: Arc<DashMap<vfs::FileId, FileText, BuildHasherDefault<FxHasher>>>,
+    source_roots: Arc<DashMap<SourceRootId, SourceRootInput, BuildHasherDefault<FxHasher>>>,
+    file_source_roots: Arc<DashMap<vfs::FileId, FileSourceRootInput, BuildHasherDefault<FxHasher>>>,
 }
 
-/// Database which stores all significant input facts: source code and project
-/// model. Everything else in rust-analyzer is derived from these queries.
-#[ra_salsa::query_group(SourceDatabaseStorage)]
-pub trait SourceDatabase: FileLoader + std::fmt::Debug {
-    #[ra_salsa::input]
-    fn compressed_file_text(&self, file_id: FileId) -> Arc<[u8]>;
+impl Files {
+    pub fn file_text(&self, file_id: vfs::FileId) -> FileText {
+        *self.files.get(&file_id).expect("Unable to fetch file; this is a bug")
+    }
 
-    /// Text of the file.
-    #[ra_salsa::lru]
-    fn file_text(&self, file_id: FileId) -> Arc<str>;
+    pub fn set_file_text(&self, db: &mut dyn SourceDatabase, file_id: vfs::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        match files.entry(file_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_text(db).to(Arc::from(text));
+            }
+            Entry::Vacant(vacant) => {
+                let text = FileText::new(db, Arc::from(text), file_id);
+                vacant.insert(text);
+            }
+        };
+    }
 
+    pub fn set_file_text_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        file_id: vfs::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        match files.entry(file_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_text(db).to(Arc::from(text));
+            }
+            Entry::Vacant(vacant) => {
+                let text =
+                    FileText::builder(Arc::from(text), file_id).durability(durability).new(db);
+                vacant.insert(text);
+            }
+        };
+    }
+
+    /// Contents of the source root with the given id.
+    pub fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        let source_root = self
+            .source_roots
+            .get(&source_root_id)
+            .expect("Unable to fetch source root id; this is a bug");
+
+        *source_root
+    }
+
+    pub fn set_source_root_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let source_roots = Arc::clone(&self.source_roots);
+        match source_roots.entry(source_root_id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_source_root(db).to(source_root);
+            }
+            Entry::Vacant(vacant) => {
+                let source_root =
+                    SourceRootInput::builder(source_root).durability(durability).new(db);
+                vacant.insert(source_root);
+            }
+        };
+    }
+
+    pub fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
+        let file_source_root = self
+            .file_source_roots
+            .get(&id)
+            .expect("Unable to fetch FileSourceRootInput; this is a bug");
+        *file_source_root
+    }
+
+    pub fn set_file_source_root_with_durability(
+        &self,
+        db: &mut dyn SourceDatabase,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let file_source_roots = Arc::clone(&self.file_source_roots);
+        // let db = self;
+        match file_source_roots.entry(id) {
+            Entry::Occupied(mut occupied) => {
+                occupied.get_mut().set_source_root_id(db).to(source_root_id);
+            }
+            Entry::Vacant(vacant) => {
+                let file_source_root =
+                    FileSourceRootInput::builder(source_root_id).durability(durability).new(db);
+                vacant.insert(file_source_root);
+            }
+        };
+    }
+}
+
+#[salsa::interned(no_lifetime)]
+pub struct EditionedFileId {
+    pub editioned_file_id: span::EditionedFileId,
+}
+
+impl EditionedFileId {
+    pub fn file_id(&self, db: &dyn salsa::Database) -> vfs::FileId {
+        let id = self.editioned_file_id(db);
+        id.file_id()
+    }
+
+    fn unpack(&self, db: &dyn salsa::Database) -> (vfs::FileId, span::Edition) {
+        let id = self.editioned_file_id(db);
+        (id.file_id(), id.edition())
+    }
+}
+
+#[salsa::input]
+pub struct FileText {
+    pub text: Arc<str>,
+    pub file_id: vfs::FileId,
+}
+
+#[salsa::input]
+pub struct FileSourceRootInput {
+    pub source_root_id: SourceRootId,
+}
+
+#[salsa::input]
+pub struct SourceRootInput {
+    pub source_root: Arc<SourceRoot>,
+}
+
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[query_group::query_group]
+pub trait RootQueryDb: SourceDatabase + salsa::Database {
     /// Parses the file into the syntax tree.
-    #[ra_salsa::lru]
+    #[salsa::invoke_actual(parse)]
+    #[salsa::lru(128)]
     fn parse(&self, file_id: EditionedFileId) -> Parse<ast::SourceFile>;
 
     /// Returns the set of errors obtained from parsing the file including validation errors.
     fn parse_errors(&self, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>>;
 
     /// The crate graph.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn crate_graph(&self) -> Arc<CrateGraph>;
 
-    #[ra_salsa::input]
+    #[salsa::input]
     fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;
 
-    #[ra_salsa::transparent]
+    #[salsa::transparent]
     fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
+
+    /// Crates whose root file is in `id`.
+    fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
+
+    #[salsa::transparent]
+    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]>;
+}
+
+#[salsa::db]
+pub trait SourceDatabase: salsa::Database {
+    /// Text of the file.
+    fn file_text(&self, file_id: vfs::FileId) -> FileText;
+
+    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str);
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: vfs::FileId,
+        text: &str,
+        durability: Durability,
+    );
+
+    /// Contents of the source root.
+    fn source_root(&self, id: SourceRootId) -> SourceRootInput;
+
+    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput;
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    );
+
+    /// Sets the contents of the source root with the given durability.
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    );
+
+    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
+        // FIXME: this *somehow* should be platform agnostic...
+        let source_root = self.file_source_root(path.anchor);
+        let source_root = self.source_root(source_root.source_root_id(self));
+        source_root.source_root(self).resolve_path(path)
+    }
 }
 
 /// Crate related data shared by the whole workspace.
@@ -91,7 +268,7 @@ pub struct CrateWorkspaceData {
     pub toolchain: Option<Version>,
 }
 
-fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
+fn toolchain_channel(db: &dyn RootQueryDb, krate: CrateId) -> Option<ReleaseChannel> {
     db.crate_workspace_data()
         .get(&krate)?
         .toolchain
@@ -99,14 +276,14 @@ fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseC
         .and_then(|v| ReleaseChannel::from_str(&v.pre))
 }
 
-fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Parse<ast::SourceFile> {
     let _p = tracing::info_span!("parse", ?file_id).entered();
-    let (file_id, edition) = file_id.unpack();
-    let text = db.file_text(file_id);
-    SourceFile::parse(&text, edition)
+    let (file_id, edition) = file_id.unpack(db.as_dyn_database());
+    let text = db.file_text(file_id).text(db);
+    ast::SourceFile::parse(&text, edition)
 }
 
-fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
+fn parse_errors(db: &dyn RootQueryDb, file_id: EditionedFileId) -> Option<Arc<[SyntaxError]>> {
     let errors = db.parse(file_id).errors();
     match &*errors {
         [] => None,
@@ -114,67 +291,13 @@ fn parse_errors(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Option<Arc
     }
 }
 
-fn file_text(db: &dyn SourceDatabase, file_id: FileId) -> Arc<str> {
-    let bytes = db.compressed_file_text(file_id);
-    let bytes =
-        lz4_flex::decompress_size_prepended(&bytes).expect("lz4 decompression should not fail");
-    let text = std::str::from_utf8(&bytes).expect("file contents should be valid UTF-8");
-    Arc::from(text)
-}
-
-/// We don't want to give HIR knowledge of source roots, hence we extract these
-/// methods into a separate DB.
-#[ra_salsa::query_group(SourceRootDatabaseStorage)]
-pub trait SourceRootDatabase: SourceDatabase {
-    /// Path to a file, relative to the root of its source root.
-    /// Source root of the file.
-    #[ra_salsa::input]
-    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
-    /// Contents of the source root.
-    #[ra_salsa::input]
-    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
-
-    /// Crates whose root file is in `id`.
-    fn source_root_crates(&self, id: SourceRootId) -> Arc<[CrateId]>;
-}
-
-pub trait SourceDatabaseFileInputExt {
-    fn set_file_text(&mut self, file_id: FileId, text: &str) {
-        self.set_file_text_with_durability(file_id, text, Durability::LOW);
-    }
-
-    fn set_file_text_with_durability(
-        &mut self,
-        file_id: FileId,
-        text: &str,
-        durability: Durability,
-    );
-}
-
-impl<Db: ?Sized + SourceRootDatabase> SourceDatabaseFileInputExt for Db {
-    fn set_file_text_with_durability(
-        &mut self,
-        file_id: FileId,
-        text: &str,
-        durability: Durability,
-    ) {
-        let bytes = text.as_bytes();
-        let compressed = lz4_flex::compress_prepend_size(bytes);
-        self.set_compressed_file_text_with_durability(
-            file_id,
-            Arc::from(compressed.as_slice()),
-            durability,
-        )
-    }
-}
-
-fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[CrateId]> {
+fn source_root_crates(db: &dyn RootQueryDb, id: SourceRootId) -> Arc<[CrateId]> {
     let graph = db.crate_graph();
     let mut crates = graph
         .iter()
         .filter(|&krate| {
             let root_file = graph[krate].root_file_id;
-            db.file_source_root(root_file) == id
+            db.file_source_root(root_file).source_root_id(db) == id
         })
         .collect::<Vec<_>>();
     crates.sort();
@@ -182,22 +305,9 @@ fn source_root_crates(db: &dyn SourceRootDatabase, id: SourceRootId) -> Arc<[Cra
     crates.into_iter().collect()
 }
 
-// FIXME: Would be nice to get rid of this somehow
-/// Silly workaround for cyclic deps due to the SourceRootDatabase and SourceDatabase split
-/// regarding FileLoader
-pub struct FileLoaderDelegate<T>(pub T);
+fn relevant_crates(db: &dyn RootQueryDb, file_id: FileId) -> Arc<[CrateId]> {
+    let _p = tracing::info_span!("relevant_crates").entered();
 
-impl<T: SourceRootDatabase> FileLoader for FileLoaderDelegate<&'_ T> {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        // FIXME: this *somehow* should be platform agnostic...
-        let source_root = self.0.file_source_root(path.anchor);
-        let source_root = self.0.source_root(source_root);
-        source_root.resolve_path(path)
-    }
-
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        let _p = tracing::info_span!("relevant_crates").entered();
-        let source_root = self.0.file_source_root(file_id);
-        self.0.source_root_crates(source_root)
-    }
+    let source_root = db.file_source_root(file_id);
+    db.source_root_crates(source_root.source_root_id(db))
 }
diff --git a/src/tools/rust-analyzer/crates/edition/src/lib.rs b/src/tools/rust-analyzer/crates/edition/src/lib.rs
index 7e9c94af408..a2269bf54d4 100644
--- a/src/tools/rust-analyzer/crates/edition/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/edition/src/lib.rs
@@ -19,6 +19,16 @@ impl Edition {
     /// The current latest stable edition, note this is usually not the right choice in code.
     pub const CURRENT_FIXME: Edition = Edition::Edition2021;
 
+    pub fn from_u32(u32: u32) -> Edition {
+        match u32 {
+            0 => Edition::Edition2015,
+            1 => Edition::Edition2018,
+            2 => Edition::Edition2021,
+            3 => Edition::Edition2024,
+            _ => panic!("invalid edition"),
+        }
+    }
+
     pub fn at_least_2024(self) -> bool {
         self >= Edition::Edition2024
     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index a22961c26c8..1b1d430ffdf 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -29,6 +29,8 @@ hashbrown.workspace = true
 triomphe.workspace = true
 rustc_apfloat = "0.2.0"
 text-size.workspace = true
+salsa.workspace = true
+query-group.workspace = true
 
 ra-ap-rustc_hashes.workspace = true
 ra-ap-rustc_parse_format.workspace = true
@@ -45,7 +47,6 @@ cfg.workspace = true
 tt.workspace = true
 span.workspace = true
 
-
 [dev-dependencies]
 expect-test.workspace = true
 
@@ -53,6 +54,7 @@ expect-test.workspace = true
 test-utils.workspace = true
 test-fixture.workspace = true
 syntax-bridge.workspace = true
+
 [features]
 in-rust-tree = ["hir-expand/in-rust-tree"]
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index 710bffcefe9..52a72bce91d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -601,17 +601,14 @@ impl<'attr> AttrQuery<'attr> {
 
 fn any_has_attrs<'db>(
     db: &(dyn DefDatabase + 'db),
-    id: impl Lookup<
-        Database<'db> = dyn DefDatabase + 'db,
-        Data = impl HasSource<Value = impl ast::HasAttrs>,
-    >,
+    id: impl Lookup<Database = dyn DefDatabase, Data = impl HasSource<Value = impl ast::HasAttrs>>,
 ) -> InFile<ast::AnyHasAttrs> {
     id.lookup(db).source(db).map(ast::AnyHasAttrs::new)
 }
 
 fn attrs_from_item_tree_loc<'db, N: ItemTreeNode>(
     db: &(dyn DefDatabase + 'db),
-    lookup: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = impl ItemTreeLoc<Id = N>>,
+    lookup: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = N>>,
 ) -> RawAttrs {
     let id = lookup.lookup(db).item_tree_id();
     let tree = id.item_tree(db);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
index c94622016d3..28992ec600a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs
@@ -228,6 +228,7 @@ impl StructData {
             None,
         );
         let types_map = strukt.types_map.clone();
+
         (
             Arc::new(StructData {
                 name: strukt.name.clone(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 598a850898b..0772d00f03c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -1,5 +1,5 @@
 //! Defines database & queries for name resolution.
-use base_db::{ra_salsa, CrateId, SourceDatabase, Upcast};
+use base_db::{CrateId, RootQueryDb, SourceDatabase, Upcast};
 use either::Either;
 use hir_expand::{db::ExpandDatabase, HirFileId, MacroDefId};
 use intern::sym;
@@ -33,178 +33,204 @@ use crate::{
     UseId, UseLoc, VariantId,
 };
 
-#[ra_salsa::query_group(InternDatabaseStorage)]
-pub trait InternDatabase: SourceDatabase {
+use salsa::plumbing::AsId;
+
+#[query_group::query_group(InternDatabaseStorage)]
+pub trait InternDatabase: RootQueryDb {
     // region: items
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_use(&self, loc: UseLoc) -> UseId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_struct(&self, loc: StructLoc) -> StructId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_union(&self, loc: UnionLoc) -> UnionId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_enum(&self, loc: EnumLoc) -> EnumId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_enum_variant(&self, loc: EnumVariantLoc) -> EnumVariantId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_const(&self, loc: ConstLoc) -> ConstId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_static(&self, loc: StaticLoc) -> StaticId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_trait(&self, loc: TraitLoc) -> TraitId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_trait_alias(&self, loc: TraitAliasLoc) -> TraitAliasId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_impl(&self, loc: ImplLoc) -> ImplId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
-    // endregion: items
+    // endregion: items
 
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_block(&self, loc: BlockLoc) -> BlockId;
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_anonymous_const(&self, id: ConstBlockLoc) -> ConstBlockId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_in_type_const(&self, id: InTypeConstLoc) -> InTypeConstId;
 }
 
-#[ra_salsa::query_group(DefDatabaseStorage)]
-pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDatabase> {
+#[query_group::query_group]
+pub trait DefDatabase:
+    InternDatabase
+    + ExpandDatabase
+    + SourceDatabase
+    + Upcast<dyn ExpandDatabase>
+    + Upcast<dyn RootQueryDb>
+{
     /// Whether to expand procedural macros during name resolution.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn expand_proc_attr_macros(&self) -> bool;
 
     /// Computes an [`ItemTree`] for the given file or macro expansion.
-    #[ra_salsa::invoke(ItemTree::file_item_tree_query)]
+    #[salsa::invoke(ItemTree::file_item_tree_query)]
     fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
 
-    #[ra_salsa::invoke(ItemTree::block_item_tree_query)]
+    #[salsa::invoke_actual(ItemTree::block_item_tree_query)]
     fn block_item_tree(&self, block_id: BlockId) -> Arc<ItemTree>;
 
-    #[ra_salsa::invoke(ItemTree::file_item_tree_with_source_map_query)]
+    #[salsa::invoke(ItemTree::file_item_tree_with_source_map_query)]
     fn file_item_tree_with_source_map(
         &self,
         file_id: HirFileId,
     ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
 
-    #[ra_salsa::invoke(ItemTree::block_item_tree_with_source_map_query)]
+    #[salsa::invoke_actual(ItemTree::block_item_tree_with_source_map_query)]
     fn block_item_tree_with_source_map(
         &self,
         block_id: BlockId,
     ) -> (Arc<ItemTree>, Arc<ItemTreeSourceMaps>);
 
-    #[ra_salsa::invoke(DefMap::crate_def_map_query)]
+    #[salsa::invoke(DefMap::crate_def_map_query)]
     fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
 
     /// Computes the block-level `DefMap`.
-    #[ra_salsa::invoke(DefMap::block_def_map_query)]
+    #[salsa::invoke_actual(DefMap::block_def_map_query)]
     fn block_def_map(&self, block: BlockId) -> Arc<DefMap>;
 
     /// Turns a MacroId into a MacroDefId, describing the macro's definition post name resolution.
+    #[salsa::invoke_actual(macro_def)]
     fn macro_def(&self, m: MacroId) -> MacroDefId;
 
     // region:data
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(StructData::struct_data_query)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(StructData::struct_data_query)]
     fn struct_data(&self, id: StructId) -> Arc<StructData>;
 
-    #[ra_salsa::invoke(StructData::struct_data_with_diagnostics_query)]
+    #[salsa::invoke_actual(StructData::struct_data_with_diagnostics_query)]
     fn struct_data_with_diagnostics(&self, id: StructId) -> (Arc<StructData>, DefDiagnostics);
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(StructData::union_data_query)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(StructData::union_data_query)]
     fn union_data(&self, id: UnionId) -> Arc<StructData>;
 
-    #[ra_salsa::invoke(StructData::union_data_with_diagnostics_query)]
+    #[salsa::invoke_actual(StructData::union_data_with_diagnostics_query)]
     fn union_data_with_diagnostics(&self, id: UnionId) -> (Arc<StructData>, DefDiagnostics);
 
-    #[ra_salsa::invoke(EnumData::enum_data_query)]
+    #[salsa::invoke_actual(EnumData::enum_data_query)]
     fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(EnumVariantData::enum_variant_data_query)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(EnumVariantData::enum_variant_data_query)]
     fn enum_variant_data(&self, id: EnumVariantId) -> Arc<EnumVariantData>;
 
-    #[ra_salsa::invoke(EnumVariantData::enum_variant_data_with_diagnostics_query)]
+    #[salsa::invoke_actual(EnumVariantData::enum_variant_data_with_diagnostics_query)]
     fn enum_variant_data_with_diagnostics(
         &self,
         id: EnumVariantId,
     ) -> (Arc<EnumVariantData>, DefDiagnostics);
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(VariantData::variant_data)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(VariantData::variant_data)]
     fn variant_data(&self, id: VariantId) -> Arc<VariantData>;
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(ImplData::impl_data_query)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(ImplData::impl_data_query)]
     fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
 
-    #[ra_salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
+    #[salsa::invoke_actual(ImplData::impl_data_with_diagnostics_query)]
     fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, DefDiagnostics);
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(TraitData::trait_data_query)]
+    #[salsa::transparent]
+    #[salsa::invoke_actual(TraitData::trait_data_query)]
     fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
 
-    #[ra_salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
+    #[salsa::invoke_actual(TraitData::trait_data_with_diagnostics_query)]
     fn trait_data_with_diagnostics(&self, tr: TraitId) -> (Arc<TraitData>, DefDiagnostics);
 
-    #[ra_salsa::invoke(TraitAliasData::trait_alias_query)]
+    #[salsa::invoke_actual(TraitAliasData::trait_alias_query)]
     fn trait_alias_data(&self, e: TraitAliasId) -> Arc<TraitAliasData>;
 
-    #[ra_salsa::invoke(TypeAliasData::type_alias_data_query)]
+    #[salsa::invoke_actual(TypeAliasData::type_alias_data_query)]
     fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
 
-    #[ra_salsa::invoke(FunctionData::fn_data_query)]
+    #[salsa::invoke_actual(FunctionData::fn_data_query)]
     fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
 
-    #[ra_salsa::invoke(ConstData::const_data_query)]
+    #[salsa::invoke_actual(ConstData::const_data_query)]
     fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
 
-    #[ra_salsa::invoke(StaticData::static_data_query)]
+    #[salsa::invoke_actual(StaticData::static_data_query)]
     fn static_data(&self, statik: StaticId) -> Arc<StaticData>;
 
-    #[ra_salsa::invoke(Macro2Data::macro2_data_query)]
+    #[salsa::invoke_actual(Macro2Data::macro2_data_query)]
     fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
 
-    #[ra_salsa::invoke(MacroRulesData::macro_rules_data_query)]
+    #[salsa::invoke_actual(MacroRulesData::macro_rules_data_query)]
     fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
 
-    #[ra_salsa::invoke(ProcMacroData::proc_macro_data_query)]
+    #[salsa::invoke_actual(ProcMacroData::proc_macro_data_query)]
     fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
 
-    #[ra_salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)]
+    #[salsa::invoke_actual(ExternCrateDeclData::extern_crate_decl_data_query)]
     fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>;
 
     // endregion:data
 
-    #[ra_salsa::invoke(Body::body_with_source_map_query)]
-    #[ra_salsa::lru]
+    #[salsa::invoke(Body::body_with_source_map_query)]
+    #[salsa::lru(512)]
     fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
 
-    #[ra_salsa::invoke(Body::body_query)]
+    #[salsa::invoke(Body::body_query)]
     fn body(&self, def: DefWithBodyId) -> Arc<Body>;
 
-    #[ra_salsa::invoke(ExprScopes::expr_scopes_query)]
+    #[salsa::invoke_actual(ExprScopes::expr_scopes_query)]
     fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
 
-    #[ra_salsa::invoke(GenericParams::generic_params_query)]
+    #[salsa::invoke_actual(GenericParams::generic_params_query)]
     fn generic_params(&self, def: GenericDefId) -> Arc<GenericParams>;
 
     /// If this returns `None` for the source map, that means it is the same as with the item tree.
-    #[ra_salsa::invoke(GenericParams::generic_params_with_source_map_query)]
+    #[salsa::invoke_actual(GenericParams::generic_params_with_source_map_query)]
     fn generic_params_with_source_map(
         &self,
         def: GenericDefId,
@@ -212,51 +238,51 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba
 
     // region:attrs
 
-    #[ra_salsa::invoke(Attrs::fields_attrs_query)]
+    #[salsa::invoke_actual(Attrs::fields_attrs_query)]
     fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
 
     // should this really be a query?
-    #[ra_salsa::invoke(crate::attr::fields_attrs_source_map)]
+    #[salsa::invoke_actual(crate::attr::fields_attrs_source_map)]
     fn fields_attrs_source_map(
         &self,
         def: VariantId,
     ) -> Arc<ArenaMap<LocalFieldId, AstPtr<Either<ast::TupleField, ast::RecordField>>>>;
 
-    #[ra_salsa::invoke(AttrsWithOwner::attrs_query)]
+    #[salsa::invoke(AttrsWithOwner::attrs_query)]
     fn attrs(&self, def: AttrDefId) -> Attrs;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(lang_item::lang_attr)]
+    #[salsa::transparent]
+    #[salsa::invoke(lang_item::lang_attr)]
     fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>;
 
     // endregion:attrs
 
-    #[ra_salsa::invoke(LangItems::lang_item_query)]
+    #[salsa::invoke(LangItems::lang_item_query)]
     fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option<LangItemTarget>;
 
-    #[ra_salsa::invoke(ImportMap::import_map_query)]
+    #[salsa::invoke(ImportMap::import_map_query)]
     fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
 
     // region:visibilities
 
-    #[ra_salsa::invoke(visibility::field_visibilities_query)]
+    #[salsa::invoke(visibility::field_visibilities_query)]
     fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
 
     // FIXME: unify function_visibility and const_visibility?
-    #[ra_salsa::invoke(visibility::function_visibility_query)]
+    #[salsa::invoke_actual(visibility::function_visibility_query)]
     fn function_visibility(&self, def: FunctionId) -> Visibility;
 
-    #[ra_salsa::invoke(visibility::const_visibility_query)]
+    #[salsa::invoke_actual(visibility::const_visibility_query)]
     fn const_visibility(&self, def: ConstId) -> Visibility;
 
     // endregion:visibilities
 
-    #[ra_salsa::invoke(LangItems::crate_lang_items_query)]
+    #[salsa::invoke(LangItems::crate_lang_items_query)]
     fn crate_lang_items(&self, krate: CrateId) -> Option<Arc<LangItems>>;
 
-    #[ra_salsa::invoke(crate::lang_item::notable_traits_in_deps)]
+    #[salsa::invoke(crate::lang_item::notable_traits_in_deps)]
     fn notable_traits_in_deps(&self, krate: CrateId) -> Arc<[Arc<[TraitId]>]>;
-    #[ra_salsa::invoke(crate::lang_item::crate_notable_traits)]
+    #[salsa::invoke(crate::lang_item::crate_notable_traits)]
     fn crate_notable_traits(&self, krate: CrateId) -> Option<Arc<[TraitId]>>;
 
     fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
index 5ff6a7ffe56..616d2c9fd89 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs
@@ -16,7 +16,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{Edition, MacroFileId, SyntaxContextData};
+use span::{Edition, MacroFileId, SyntaxContext};
 use syntax::{ast, AstPtr, SyntaxNodePtr};
 use triomphe::Arc;
 use tt::TextRange;
@@ -49,8 +49,9 @@ impl HygieneId {
         Self(ctx)
     }
 
-    pub(crate) fn lookup(self, db: &dyn DefDatabase) -> SyntaxContextData {
-        db.lookup_intern_syntax_context(self.0)
+    // FIXME: Inline this
+    pub(crate) fn lookup(self) -> SyntaxContext {
+        self.0
     }
 
     pub(crate) fn is_root(self) -> bool {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
index 6e505a6b112..c3ca610faec 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs
@@ -1931,11 +1931,11 @@ impl ExprCollector<'_> {
             None => (HygieneId::ROOT, None),
             Some(span_map) => {
                 let span = span_map.span_at(lifetime.syntax().text_range().start());
-                let ctx = self.db.lookup_intern_syntax_context(span.ctx);
-                let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent);
-                let hygiene_info = ctx.outer_expn.map(|expansion| {
+                let ctx = span.ctx;
+                let hygiene_id = HygieneId::new(ctx.opaque_and_semitransparent(self.db));
+                let hygiene_info = ctx.outer_expn(self.db).map(|expansion| {
                     let expansion = self.db.lookup_intern_macro_call(expansion);
-                    (ctx.parent, expansion.def)
+                    (ctx.parent(self.db), expansion.def)
                 });
                 (hygiene_id, hygiene_info)
             }
@@ -1962,11 +1962,12 @@ impl ExprCollector<'_> {
                             // A macro is allowed to refer to labels from before its declaration.
                             // Therefore, if we got to the rib of its declaration, give up its hygiene
                             // and use its parent expansion.
-                            let parent_ctx = self.db.lookup_intern_syntax_context(parent_ctx);
-                            hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
-                            hygiene_info = parent_ctx.outer_expn.map(|expansion| {
+
+                            hygiene_id =
+                                HygieneId::new(parent_ctx.opaque_and_semitransparent(self.db));
+                            hygiene_info = parent_ctx.outer_expn(self.db).map(|expansion| {
                                 let expansion = self.db.lookup_intern_macro_call(expansion);
-                                (parent_ctx.parent, expansion.def)
+                                (parent_ctx.parent(self.db), expansion.def)
                             });
                         }
                     }
@@ -2593,7 +2594,7 @@ impl ExprCollector<'_> {
             None => HygieneId::ROOT,
             Some(span_map) => {
                 let ctx = span_map.span_at(span_start).ctx;
-                HygieneId::new(self.db.lookup_intern_syntax_context(ctx).opaque_and_semitransparent)
+                HygieneId::new(ctx.opaque_and_semitransparent(self.db))
             }
         }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
index 859a706177a..42a8eae4064 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs
@@ -324,8 +324,9 @@ fn compute_expr_scopes(
 
 #[cfg(test)]
 mod tests {
-    use base_db::SourceDatabase;
+    use base_db::RootQueryDb;
     use hir_expand::{name::AsName, InFile};
+    use salsa::AsDynDatabase;
     use span::FileId;
     use syntax::{algo::find_node_at_offset, ast, AstNode};
     use test_fixture::WithFixture;
@@ -357,18 +358,22 @@ mod tests {
         };
 
         let (db, position) = TestDB::with_position(&code);
-        let file_id = position.file_id;
+        let editioned_file_id = position.file_id;
         let offset = position.offset;
 
-        let file_syntax = db.parse(file_id).syntax_node();
+        let (file_id, _) = editioned_file_id.unpack();
+        let editioned_file_id_wrapper =
+            base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
+        let file_syntax = db.parse(editioned_file_id_wrapper).syntax_node();
         let marker: ast::PathExpr = find_node_at_offset(&file_syntax, offset).unwrap();
-        let function = find_function(&db, file_id.file_id());
+        let function = find_function(&db, file_id);
 
         let scopes = db.expr_scopes(function.into());
         let (_body, source_map) = db.body_with_source_map(function.into());
 
         let expr_id = source_map
-            .node_expr(InFile { file_id: file_id.into(), value: &marker.into() })
+            .node_expr(InFile { file_id: editioned_file_id.into(), value: &marker.into() })
             .unwrap()
             .as_expr()
             .unwrap();
@@ -511,15 +516,19 @@ fn foo() {
 
     fn do_check_local_name(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected_offset: u32) {
         let (db, position) = TestDB::with_position(ra_fixture);
-        let file_id = position.file_id;
+        let editioned_file_id = position.file_id;
         let offset = position.offset;
 
-        let file = db.parse(file_id).ok().unwrap();
+        let (file_id, _) = editioned_file_id.unpack();
+        let file_id_wrapper =
+            base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
+        let file = db.parse(file_id_wrapper).ok().unwrap();
         let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
             .expect("failed to find a name at the target offset");
         let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset).unwrap();
 
-        let function = find_function(&db, file_id.file_id());
+        let function = find_function(&db, file_id);
 
         let scopes = db.expr_scopes(function.into());
         let (_, source_map) = db.body_with_source_map(function.into());
@@ -527,7 +536,7 @@ fn foo() {
         let expr_scope = {
             let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
             let expr_id = source_map
-                .node_expr(InFile { file_id: file_id.into(), value: &expr_ast })
+                .node_expr(InFile { file_id: editioned_file_id.into(), value: &expr_ast })
                 .unwrap()
                 .as_expr()
                 .unwrap();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs
index e136dd18a55..df0bbe5c3ef 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/block.rs
@@ -189,8 +189,8 @@ fn f() {
 }
     "#,
         expect![[r#"
-            BlockId(1) in BlockRelativeModuleId { block: Some(BlockId(0)), local_id: Idx::<ModuleData>(1) }
-            BlockId(0) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
+            BlockId(4c01) in BlockRelativeModuleId { block: Some(BlockId(4c00)), local_id: Idx::<ModuleData>(1) }
+            BlockId(4c00) in BlockRelativeModuleId { block: None, local_id: Idx::<ModuleData>(0) }
             crate scope
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index e2b36da79b2..80820193796 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -362,10 +362,7 @@ impl GenericParams {
             };
         fn id_to_generics<Id: GenericsItemTreeNode>(
             db: &dyn DefDatabase,
-            id: impl for<'db> Lookup<
-                Database<'db> = dyn DefDatabase + 'db,
-                Data = impl ItemTreeLoc<Id = Id>,
-            >,
+            id: impl Lookup<Database = dyn DefDatabase, Data = impl ItemTreeLoc<Id = Id>>,
             enabled_params: impl Fn(
                 &Arc<GenericParams>,
                 &ItemTree,
@@ -378,6 +375,7 @@ impl GenericParams {
             let id = id.lookup(db).item_tree_id();
             let tree = id.item_tree(db);
             let item = &tree[id.value];
+
             (enabled_params(item.generic_params(), &tree, id.value.into()), None)
         }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
index d43776b8a66..64c1d0d274f 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs
@@ -475,7 +475,7 @@ fn search_maps(
 
 #[cfg(test)]
 mod tests {
-    use base_db::{SourceDatabase, Upcast};
+    use base_db::{RootQueryDb, Upcast};
     use expect_test::{expect, Expect};
     use test_fixture::WithFixture;
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 70bf2f13c88..d113a500ef7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -546,7 +546,7 @@ impl Printer<'_> {
                 let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
                 let _ = writeln!(
                     self,
-                    "// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
+                    "// AstId: {:?}, SyntaxContextId: {}, ExpandTo: {:?}",
                     ast_id.erase().into_raw(),
                     ctxt,
                     expand_to
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
index 80b699649fb..b442e877343 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs
@@ -270,7 +270,7 @@ m!();
             // AstId: 2
             pub macro m2 { ... }
 
-            // AstId: 3, SyntaxContext: 2, ExpandTo: Items
+            // AstId: 3, SyntaxContextId: 4294967037, ExpandTo: Items
             m!(...);
         "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 9c947df35e9..15ef8364ed2 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -69,16 +69,9 @@ mod pretty;
 #[cfg(test)]
 mod test_db;
 
-use std::{
-    hash::{Hash, Hasher},
-    panic::{RefUnwindSafe, UnwindSafe},
-};
+use std::hash::{Hash, Hasher};
 
-use base_db::{
-    impl_intern_key,
-    ra_salsa::{self, InternValueTrivial},
-    CrateId,
-};
+use base_db::{impl_intern_key, CrateId};
 use hir_expand::{
     builtin::{BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
@@ -192,8 +185,7 @@ pub trait ItemTreeLoc {
 
 macro_rules! impl_intern {
     ($id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
-        impl_intern_key!($id);
-        impl InternValueTrivial for $loc {}
+        impl_intern_key!($id, $loc);
         impl_intern_lookup!(DefDatabase, $id, $loc, $intern, $lookup);
     };
 }
@@ -213,88 +205,59 @@ macro_rules! impl_loc {
     };
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct FunctionId(ra_salsa::InternId);
 type FunctionLoc = AssocItemLoc<Function>;
 impl_intern!(FunctionId, FunctionLoc, intern_function, lookup_intern_function);
 impl_loc!(FunctionLoc, id: Function, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct StructId(ra_salsa::InternId);
 type StructLoc = ItemLoc<Struct>;
 impl_intern!(StructId, StructLoc, intern_struct, lookup_intern_struct);
 impl_loc!(StructLoc, id: Struct, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct UnionId(ra_salsa::InternId);
 pub type UnionLoc = ItemLoc<Union>;
 impl_intern!(UnionId, UnionLoc, intern_union, lookup_intern_union);
 impl_loc!(UnionLoc, id: Union, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct EnumId(ra_salsa::InternId);
 pub type EnumLoc = ItemLoc<Enum>;
 impl_intern!(EnumId, EnumLoc, intern_enum, lookup_intern_enum);
 impl_loc!(EnumLoc, id: Enum, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct ConstId(ra_salsa::InternId);
 type ConstLoc = AssocItemLoc<Const>;
 impl_intern!(ConstId, ConstLoc, intern_const, lookup_intern_const);
 impl_loc!(ConstLoc, id: Const, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct StaticId(ra_salsa::InternId);
 pub type StaticLoc = AssocItemLoc<Static>;
 impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
 impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct TraitId(ra_salsa::InternId);
 pub type TraitLoc = ItemLoc<Trait>;
 impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);
 impl_loc!(TraitLoc, id: Trait, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TraitAliasId(ra_salsa::InternId);
 pub type TraitAliasLoc = ItemLoc<TraitAlias>;
 impl_intern!(TraitAliasId, TraitAliasLoc, intern_trait_alias, lookup_intern_trait_alias);
 impl_loc!(TraitAliasLoc, id: TraitAlias, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct TypeAliasId(ra_salsa::InternId);
 type TypeAliasLoc = AssocItemLoc<TypeAlias>;
 impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias);
 impl_loc!(TypeAliasLoc, id: TypeAlias, container: ItemContainerId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ImplId(ra_salsa::InternId);
 type ImplLoc = ItemLoc<Impl>;
 impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl);
 impl_loc!(ImplLoc, id: Impl, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct UseId(ra_salsa::InternId);
 type UseLoc = ItemLoc<Use>;
 impl_intern!(UseId, UseLoc, intern_use, lookup_intern_use);
 impl_loc!(UseLoc, id: Use, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ExternCrateId(ra_salsa::InternId);
 type ExternCrateLoc = ItemLoc<ExternCrate>;
 impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate);
 impl_loc!(ExternCrateLoc, id: ExternCrate, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ExternBlockId(ra_salsa::InternId);
 type ExternBlockLoc = ItemLoc<ExternBlock>;
 impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block);
 impl_loc!(ExternBlockLoc, id: ExternBlock, container: ModuleId);
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct EnumVariantId(ra_salsa::InternId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct EnumVariantLoc {
     pub id: ItemTreeId<Variant>,
     pub parent: EnumId,
@@ -302,9 +265,6 @@ pub struct EnumVariantLoc {
 }
 impl_intern!(EnumVariantId, EnumVariantLoc, intern_enum_variant, lookup_intern_enum_variant);
 impl_loc!(EnumVariantLoc, id: Variant, parent: EnumId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct Macro2Id(ra_salsa::InternId);
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Macro2Loc {
     pub container: ModuleId,
@@ -316,8 +276,6 @@ pub struct Macro2Loc {
 impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
 impl_loc!(Macro2Loc, id: Macro2, container: ModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct MacroRulesId(ra_salsa::InternId);
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroRulesLoc {
     pub container: ModuleId,
@@ -345,8 +303,7 @@ pub enum MacroExpander {
     BuiltInDerive(BuiltinDeriveExpander),
     BuiltInEager(EagerExpander),
 }
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct ProcMacroId(ra_salsa::InternId);
+
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct ProcMacroLoc {
     pub container: CrateRootModuleId,
@@ -358,8 +315,6 @@ pub struct ProcMacroLoc {
 impl_intern!(ProcMacroId, ProcMacroLoc, intern_proc_macro, lookup_intern_proc_macro);
 impl_loc!(ProcMacroLoc, id: Function, container: CrateRootModuleId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct BlockId(ra_salsa::InternId);
 #[derive(Debug, Hash, PartialEq, Eq, Clone)]
 pub struct BlockLoc {
     pub ast_id: AstId<ast::BlockExpr>,
@@ -368,10 +323,8 @@ pub struct BlockLoc {
 }
 impl_intern!(BlockId, BlockLoc, intern_block, lookup_intern_block);
 
-/// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
-/// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct ConstBlockId(ra_salsa::InternId);
+// Id of the anonymous const block expression and patterns. This is very similar to `ClosureId` and
+// shouldn't be a `DefWithBodyId` since its type inference is dependent on its parent.
 impl_intern!(ConstBlockId, ConstBlockLoc, intern_anonymous_const, lookup_intern_anonymous_const);
 
 #[derive(Debug, Hash, PartialEq, Eq, Clone)]
@@ -536,12 +489,11 @@ pub struct TupleFieldId {
     pub index: u32,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct TypeOrConstParamId {
     pub parent: GenericDefId,
     pub local_id: LocalTypeOrConstParamId,
 }
-impl InternValueTrivial for TypeOrConstParamId {}
 
 /// A TypeOrConstParamId with an invariant that it actually belongs to a type
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -603,7 +555,6 @@ pub struct LifetimeParamId {
     pub local_id: LocalLifetimeParamId,
 }
 pub type LocalLifetimeParamId = Idx<generics::LifetimeParamData>;
-impl InternValueTrivial for LifetimeParamId {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum ItemContainerId {
@@ -615,7 +566,7 @@ pub enum ItemContainerId {
 impl_from!(ModuleId for ItemContainerId);
 
 /// A Data Type
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum AdtId {
     StructId(StructId),
     UnionId(UnionId),
@@ -624,7 +575,7 @@ pub enum AdtId {
 impl_from!(StructId, UnionId, EnumId for AdtId);
 
 /// A macro
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum MacroId {
     Macro2Id(Macro2Id),
     MacroRulesId(MacroRulesId),
@@ -760,9 +711,7 @@ impl From<GenericDefId> for TypeOwnerId {
 /// currently only used in `InTypeConstId` for storing the type (which has type `Ty` defined in
 /// the `hir-ty` crate) of the constant in its id, which is a temporary hack so we may want
 /// to remove this after removing that.
-pub trait OpaqueInternableThing:
-    std::any::Any + std::fmt::Debug + Sync + Send + UnwindSafe + RefUnwindSafe
-{
+pub trait OpaqueInternableThing: std::any::Any + std::fmt::Debug + Sync + Send {
     fn as_any(&self) -> &dyn std::any::Any;
     fn box_any(&self) -> Box<dyn std::any::Any>;
     fn dyn_hash(&self, state: &mut dyn Hasher);
@@ -809,11 +758,9 @@ impl Clone for Box<dyn OpaqueInternableThing> {
 //   and the name of the struct that contains this constant is resolved, so a query that only traverses the
 //   type owner by its syntax tree might have a hard time here.
 
-/// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
-/// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
-/// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub struct InTypeConstId(ra_salsa::InternId);
+// A constant in a type as a substitution for const generics (like `Foo<{ 2 + 2 }>`) or as an array
+// length (like `[u8; 2 + 2]`). These constants are body owner and are a variant of `DefWithBodyId`. These
+// are not called `AnonymousConstId` to prevent confusion with [`ConstBlockId`].
 impl_intern!(InTypeConstId, InTypeConstLoc, intern_in_type_const, lookup_intern_in_type_const);
 
 // We would like to set `derive(PartialEq)`
@@ -838,8 +785,8 @@ impl InTypeConstId {
     pub fn source(&self, db: &dyn DefDatabase) -> ast::ConstArg {
         let src = self.lookup(db).id;
         let file_id = src.file_id;
-        let root = &db.parse_or_expand(file_id);
-        db.ast_id_map(file_id).get(src.value).to_node(root)
+        let root = db.parse_or_expand(file_id);
+        db.ast_id_map(file_id).get(src.value).to_node(&root)
     }
 }
 
@@ -884,7 +831,7 @@ impl GeneralConstId {
 }
 
 /// The defs which have a body.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum DefWithBodyId {
     FunctionId(FunctionId),
     StaticId(StaticId),
@@ -892,7 +839,6 @@ pub enum DefWithBodyId {
     InTypeConstId(InTypeConstId),
     VariantId(EnumVariantId),
 }
-
 impl_from!(FunctionId, ConstId, StaticId, InTypeConstId for DefWithBodyId);
 
 impl From<EnumVariantId> for DefWithBodyId {
@@ -928,7 +874,7 @@ pub enum AssocItemId {
 // casting them, and somehow making the constructors private, which would be annoying.
 impl_from!(FunctionId, ConstId, TypeAliasId for AssocItemId);
 
-#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum GenericDefId {
     AdtId(AdtId),
     // consts can have type parameters from their parents (i.e. associated consts of traits)
@@ -962,7 +908,7 @@ impl GenericDefId {
     ) -> (HirFileId, Option<ast::GenericParamList>) {
         fn file_id_and_params_of_item_loc<Loc>(
             db: &dyn DefDatabase,
-            def: impl for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Loc>,
+            def: impl Lookup<Database = dyn DefDatabase, Data = Loc>,
         ) -> (HirFileId, Option<ast::GenericParamList>)
         where
             Loc: src::HasSource,
@@ -1017,15 +963,13 @@ impl From<AssocItemId> for GenericDefId {
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+#[derive(Debug, PartialOrd, Ord, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum CallableDefId {
     FunctionId(FunctionId),
     StructId(StructId),
     EnumVariantId(EnumVariantId),
 }
 
-impl InternValueTrivial for CallableDefId {}
-
 impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
 impl From<CallableDefId> for ModuleDefId {
     fn from(def: CallableDefId) -> ModuleDefId {
@@ -1135,7 +1079,7 @@ impl From<VariantId> for AttrDefId {
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum VariantId {
     EnumVariantId(EnumVariantId),
     StructId(StructId),
@@ -1197,7 +1141,7 @@ pub trait HasModule {
 impl<N, ItemId> HasModule for ItemId
 where
     N: ItemTreeNode,
-    ItemId: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = ItemLoc<N>> + Copy,
+    ItemId: Lookup<Database = dyn DefDatabase, Data = ItemLoc<N>> + Copy,
 {
     #[inline]
     fn module(&self, db: &dyn DefDatabase) -> ModuleId {
@@ -1222,7 +1166,7 @@ where
 #[inline]
 fn module_for_assoc_item_loc<'db>(
     db: &(dyn 'db + DefDatabase),
-    id: impl Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<impl ItemTreeNode>>,
+    id: impl Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<impl ItemTreeNode>>,
 ) -> ModuleId {
     id.lookup(db).container.module(db)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 8c5bd3b6d36..66e344d7245 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -35,9 +35,9 @@ macro_rules! f {
     };
 }
 
-struct#0:1@58..64#4# MyTraitMap2#0:2@31..42#2# {#0:1@72..73#4#
-    map#0:1@86..89#4#:#0:1@89..90#4# #0:1@89..90#4#::#0:1@91..93#4#std#0:1@93..96#4#::#0:1@96..98#4#collections#0:1@98..109#4#::#0:1@109..111#4#HashSet#0:1@111..118#4#<#0:1@118..119#4#(#0:1@119..120#4#)#0:1@120..121#4#>#0:1@121..122#4#,#0:1@122..123#4#
-}#0:1@132..133#4#
+struct#0:1@58..64#20480# MyTraitMap2#0:2@31..42#4294967037# {#0:1@72..73#20480#
+    map#0:1@86..89#20480#:#0:1@89..90#20480# #0:1@89..90#20480#::#0:1@91..93#20480#std#0:1@93..96#20480#::#0:1@96..98#20480#collections#0:1@98..109#20480#::#0:1@109..111#20480#HashSet#0:1@111..118#20480#<#0:1@118..119#20480#(#0:1@119..120#20480#)#0:1@120..121#20480#>#0:1@121..122#20480#,#0:1@122..123#20480#
+}#0:1@132..133#20480#
 "#]],
     );
 }
@@ -75,12 +75,12 @@ macro_rules! f {
     };
 }
 
-fn#0:2@30..32#2# main#0:2@33..37#2#(#0:2@37..38#2#)#0:2@38..39#2# {#0:2@40..41#2#
-    1#0:2@50..51#2#;#0:2@51..52#2#
-    1.0#0:2@61..64#2#;#0:2@64..65#2#
-    (#0:2@74..75#2#(#0:2@75..76#2#1#0:2@76..77#2#,#0:2@77..78#2# )#0:2@78..79#2#,#0:2@79..80#2# )#0:2@80..81#2#.#0:2@81..82#2#0#0:2@82..85#2#.#0:2@82..85#2#0#0:2@82..85#2#;#0:2@85..86#2#
-    let#0:2@95..98#2# x#0:2@99..100#2# =#0:2@101..102#2# 1#0:2@103..104#2#;#0:2@104..105#2#
-}#0:2@110..111#2#
+fn#0:2@30..32#4294967037# main#0:2@33..37#4294967037#(#0:2@37..38#4294967037#)#0:2@38..39#4294967037# {#0:2@40..41#4294967037#
+    1#0:2@50..51#4294967037#;#0:2@51..52#4294967037#
+    1.0#0:2@61..64#4294967037#;#0:2@64..65#4294967037#
+    (#0:2@74..75#4294967037#(#0:2@75..76#4294967037#1#0:2@76..77#4294967037#,#0:2@77..78#4294967037# )#0:2@78..79#4294967037#,#0:2@79..80#4294967037# )#0:2@80..81#4294967037#.#0:2@81..82#4294967037#0#0:2@82..85#4294967037#.#0:2@82..85#4294967037#0#0:2@82..85#4294967037#;#0:2@85..86#4294967037#
+    let#0:2@95..98#4294967037# x#0:2@99..100#4294967037# =#0:2@101..102#4294967037# 1#0:2@103..104#4294967037#;#0:2@104..105#4294967037#
+}#0:2@110..111#4294967037#
 
 
 "#]],
@@ -171,7 +171,7 @@ fn main(foo: ()) {
     }
 
     fn main(foo: ()) {
-        /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#2#;
+        /* error: unresolved macro unresolved */"helloworld!"#0:3@236..321#4294967037#;
     }
 }
 
@@ -197,7 +197,7 @@ macro_rules! mk_struct {
 #[macro_use]
 mod foo;
 
-struct#1:1@59..65#4# Foo#0:2@32..35#2#(#1:1@70..71#4#u32#0:2@41..44#2#)#1:1@74..75#4#;#1:1@75..76#4#
+struct#1:1@59..65#20480# Foo#0:2@32..35#4294967037#(#1:1@70..71#20480#u32#0:2@41..44#4294967037#)#1:1@74..75#20480#;#1:1@75..76#20480#
 "#]],
     );
 }
@@ -423,10 +423,10 @@ m! { foo, bar }
 macro_rules! m {
     ($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
 }
-impl#\4# Bar#\4# {#\4#
-    fn#\4# foo#\2#(#\4#)#\4# {#\4#}#\4#
-    fn#\4# bar#\2#(#\4#)#\4# {#\4#}#\4#
-}#\4#
+impl#\20480# Bar#\20480# {#\20480#
+    fn#\20480# foo#\4294967037#(#\20480#)#\20480# {#\20480#}#\20480#
+    fn#\20480# bar#\4294967037#(#\20480#)#\20480# {#\20480#}#\20480#
+}#\20480#
 "#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
index a2d0ba3deb8..d0678a40652 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs
@@ -16,7 +16,7 @@ mod proc_macros;
 
 use std::{iter, ops::Range, sync};
 
-use base_db::SourceDatabase;
+use base_db::RootQueryDb;
 use expect_test::Expect;
 use hir_expand::{
     db::ExpandDatabase,
@@ -26,6 +26,7 @@ use hir_expand::{
 };
 use intern::Symbol;
 use itertools::Itertools;
+use salsa::AsDynDatabase;
 use span::{Edition, Span};
 use stdx::{format_to, format_to_acc};
 use syntax::{
@@ -63,9 +64,13 @@ fn check_errors(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect)
                 MacroCallKind::Derive { ast_id, .. } => ast_id.map(|it| it.erase()),
                 MacroCallKind::Attr { ast_id, .. } => ast_id.map(|it| it.erase()),
             };
-            let ast = db
-                .parse(ast_id.file_id.file_id().expect("macros inside macros are not supported"))
-                .syntax_node();
+
+            let editioned_file_id =
+                ast_id.file_id.file_id().expect("macros inside macros are not supported");
+            let editioned_file_id =
+                base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
+            let ast = db.parse(editioned_file_id).syntax_node();
             let ast_id_map = db.ast_id_map(ast_id.file_id);
             let node = ast_id_map.get_erased(ast_id.value).to_node(&ast);
             Some((node.text_range(), errors))
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index a43c0eb9d70..650807fb4ac 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -181,9 +181,9 @@ fn foo(&self) {
     self.0. 1;
 }
 
-fn#0:1@45..47#2# foo#0:1@48..51#2#(#0:1@51..52#2#&#0:1@52..53#2#self#0:1@53..57#2# )#0:1@57..58#2# {#0:1@59..60#2#
-    self#0:1@65..69#2# .#0:1@69..70#2#0#0:1@70..71#2#.#0:1@71..72#2#1#0:1@73..74#2#;#0:1@74..75#2#
-}#0:1@76..77#2#"#]],
+fn#0:1@45..47#4294967037# foo#0:1@48..51#4294967037#(#0:1@51..52#4294967037#&#0:1@52..53#4294967037#self#0:1@53..57#4294967037# )#0:1@57..58#4294967037# {#0:1@59..60#4294967037#
+    self#0:1@65..69#4294967037# .#0:1@69..70#4294967037#0#0:1@70..71#4294967037#.#0:1@71..72#4294967037#1#0:1@73..74#4294967037#;#0:1@74..75#4294967037#
+}#0:1@76..77#4294967037#"#]],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
index 3b6e3c5916e..5b3d75c4ee6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs
@@ -295,9 +295,12 @@ impl ModuleOrigin {
     /// That is, a file or a `mod foo {}` with items.
     pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
         match self {
-            &ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
+            &ModuleOrigin::File { definition: editioned_file_id, .. }
+            | &ModuleOrigin::CrateRoot { definition: editioned_file_id } => {
+                let definition = base_db::EditionedFileId::new(db, editioned_file_id);
+
                 let sf = db.parse(definition).tree();
-                InFile::new(definition.into(), ModuleSource::SourceFile(sf))
+                InFile::new(editioned_file_id.into(), ModuleSource::SourceFile(sf))
             }
             &ModuleOrigin::Inline { definition, definition_tree_id } => InFile::new(
                 definition_tree_id.file_id(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 16f3fd56eb9..bf013c25ef5 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -376,7 +376,7 @@ impl DefCollector<'_> {
         'resolve_attr: loop {
             let _p = tracing::info_span!("resolve_macros loop").entered();
             'resolve_macros: loop {
-                self.db.unwind_if_cancelled();
+                self.db.unwind_if_revision_cancelled();
 
                 {
                     let _p = tracing::info_span!("resolve_imports loop").entered();
@@ -977,7 +977,7 @@ impl DefCollector<'_> {
         vis: Visibility,
         import: Option<ImportOrExternCrate>,
     ) {
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
         self.update_recursive(module_id, resolutions, vis, import, 0)
     }
 
@@ -2517,7 +2517,7 @@ impl ModCollector<'_, '_> {
 
 #[cfg(test)]
 mod tests {
-    use base_db::SourceDatabase;
+    use base_db::RootQueryDb;
     use test_fixture::WithFixture;
 
     use crate::{nameres::DefMapCrateData, test_db::TestDB};
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 17d09bcbd04..a012eb6ff7e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -1,6 +1,6 @@
 //! This module resolves `mod foo;` declaration to file.
 use arrayvec::ArrayVec;
-use base_db::AnchoredPath;
+use base_db::{AnchoredPath, RootQueryDb};
 use hir_expand::{name::Name, HirFileIdExt};
 use span::EditionedFileId;
 
@@ -80,7 +80,8 @@ impl ModDir {
         let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
         for candidate in candidate_files.iter() {
             let path = AnchoredPath { anchor: orig_file_id.file_id(), path: candidate.as_str() };
-            if let Some(file_id) = db.resolve_path(path) {
+            if let Some(file_id) = base_db::Upcast::<dyn RootQueryDb>::upcast(db).resolve_path(path)
+            {
                 let is_mod_rs = candidate.ends_with("/mod.rs");
 
                 let root_dir_owner = is_mod_rs || attr_path.is_some();
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index 73fc6787bfe..7c9fad91865 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -4,7 +4,7 @@ mod macros;
 mod mod_resolution;
 mod primitives;
 
-use base_db::SourceDatabase;
+use base_db::RootQueryDb;
 use expect_test::{expect, Expect};
 use test_fixture::WithFixture;
 use triomphe::Arc;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
index c8b7ec463a0..cd590205766 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/incremental.rs
@@ -1,4 +1,4 @@
-use base_db::SourceDatabaseFileInputExt as _;
+use base_db::SourceDatabase;
 use test_fixture::WithFixture;
 
 use crate::{db::DefDatabase, nameres::tests::TestDB, AdtId, ModuleDefId};
@@ -255,10 +255,10 @@ m!(Z);
             assert_eq!(module_data.scope.resolutions().count(), 4);
         });
         let n_recalculated_item_trees =
-            events.iter().filter(|it| it.contains("item_tree(")).count();
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
         assert_eq!(n_recalculated_item_trees, 6);
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
         assert_eq!(n_reparsed_macros, 3);
     }
 
@@ -276,10 +276,11 @@ m!(Z);
             let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
             assert_eq!(module_data.scope.resolutions().count(), 4);
         });
-        let n_recalculated_item_trees = events.iter().filter(|it| it.contains("item_tree")).count();
-        assert_eq!(n_recalculated_item_trees, 1);
+        let n_recalculated_item_trees =
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
+        assert_eq!(n_recalculated_item_trees, 0);
         let n_reparsed_macros =
-            events.iter().filter(|it| it.contains("parse_macro_expansion(")).count();
+            events.iter().filter(|it| it.contains("parse_macro_expansion_shim")).count();
         assert_eq!(n_reparsed_macros, 0);
     }
 }
@@ -310,14 +311,15 @@ pub type Ty = ();
         let events = db.log_executed(|| {
             db.file_item_tree(pos.file_id.into());
         });
-        let n_calculated_item_trees = events.iter().filter(|it| it.contains("item_tree(")).count();
+        let n_calculated_item_trees =
+            events.iter().filter(|it| it.contains("file_item_tree_shim")).count();
         assert_eq!(n_calculated_item_trees, 1);
-        let n_parsed_files = events.iter().filter(|it| it.contains("parse(")).count();
+        let n_parsed_files = events.iter().filter(|it| it.contains("parse")).count();
         assert_eq!(n_parsed_files, 1);
     }
 
-    // Delete the parse tree.
-    base_db::ParseQuery.in_db(&db).purge();
+    // FIXME(salsa-transition): bring this back
+    // base_db::ParseQuery.in_db(&db).purge();
 
     {
         let events = db.log_executed(|| {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index 3b7e7653fba..7a6d6973298 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -185,7 +185,7 @@ pub(super) fn lower_path(ctx: &mut LowerCtx<'_>, mut path: ast::Path) -> Option<
     if segments.len() == 1 && kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
             let syn_ctxt = ctx.span_map().span_for_range(path.segment()?.syntax().text_range()).ctx;
-            if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
+            if let Some(macro_call_id) = syn_ctxt.outer_expn(ctx.db) {
                 if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
                     kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
                         Some(crate_root) => PathKind::DollarCrate(crate_root),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index a2e6e4cc043..f4773de0855 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -207,7 +207,13 @@ impl Resolver {
             return self.module_scope.resolve_path_in_type_ns(db, path);
         }
 
-        let remaining_idx = || if path.segments().len() == 1 { None } else { Some(1) };
+        let remaining_idx = || {
+            if path.segments().len() == 1 {
+                None
+            } else {
+                Some(1)
+            }
+        };
 
         for scope in self.scopes() {
             match scope {
@@ -314,7 +320,7 @@ impl Resolver {
                         None,
                     ),
                     ResolvePathResultPrefixInfo::default(),
-                ))
+                ));
             }
             Path::LangItem(l, Some(_)) => {
                 let type_ns = match *l {
@@ -889,11 +895,10 @@ fn handle_macro_def_scope(
             // A macro is allowed to refer to variables from before its declaration.
             // Therefore, if we got to the rib of its declaration, give up its hygiene
             // and use its parent expansion.
-            let parent_ctx = db.lookup_intern_syntax_context(*parent_ctx);
-            *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent);
-            *hygiene_info = parent_ctx.outer_expn.map(|expansion| {
+            *hygiene_id = HygieneId::new(parent_ctx.opaque_and_semitransparent(db));
+            *hygiene_info = parent_ctx.outer_expn(db).map(|expansion| {
                 let expansion = db.lookup_intern_macro_call(expansion);
-                (parent_ctx.parent, expansion.def)
+                (parent_ctx.parent(db), expansion.def)
             });
         }
     }
@@ -905,10 +910,10 @@ fn hygiene_info(
     hygiene_id: HygieneId,
 ) -> Option<(SyntaxContextId, MacroDefId)> {
     if !hygiene_id.is_root() {
-        let ctx = hygiene_id.lookup(db);
-        ctx.outer_expn.map(|expansion| {
+        let ctx = hygiene_id.lookup();
+        ctx.outer_expn(db).map(|expansion| {
             let expansion = db.lookup_intern_macro_call(expansion);
-            (ctx.parent, expansion.def)
+            (ctx.parent(db), expansion.def)
         })
     } else {
         None
@@ -1438,7 +1443,7 @@ impl HasResolver for MacroRulesId {
 fn lookup_resolver<'db>(
     db: &(dyn DefDatabase + 'db),
     lookup: impl Lookup<
-        Database<'db> = dyn DefDatabase + 'db,
+        Database = dyn DefDatabase,
         Data = impl ItemTreeLoc<Container = impl HasResolver>,
     >,
 ) -> Resolver {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
index 54e6c1fd206..b6f08c0cafd 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
@@ -3,10 +3,11 @@
 use std::{fmt, panic, sync::Mutex};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+    CrateId, FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
+    SourceRootInput, Upcast,
 };
 use hir_expand::{db::ExpandDatabase, files::FilePosition, InFile};
+use salsa::{AsDynDatabase, Durability};
 use span::{EditionedFileId, FileId};
 use syntax::{algo, ast, AstNode};
 use triomphe::Arc;
@@ -18,43 +19,58 @@ use crate::{
     LocalModuleId, Lookup, ModuleDefId, ModuleId,
 };
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir_expand::db::ExpandDatabaseStorage,
-    crate::db::InternDatabaseStorage,
-    crate::db::DefDatabaseStorage
-)]
+#[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDB {
-    storage: ra_salsa::Storage<TestDB>,
-    events: Mutex<Option<Vec<ra_salsa::Event>>>,
+    storage: salsa::Storage<Self>,
+    files: Arc<base_db::Files>,
+    events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
 }
 
 impl Default for TestDB {
     fn default() -> Self {
-        let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.setup_syntax_context_root();
+        let mut this = Self {
+            storage: Default::default(),
+            events: Default::default(),
+            files: Default::default(),
+        };
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
 }
 
 impl Upcast<dyn ExpandDatabase> for TestDB {
+    #[inline]
     fn upcast(&self) -> &(dyn ExpandDatabase + 'static) {
         self
     }
 }
 
 impl Upcast<dyn DefDatabase> for TestDB {
+    #[inline]
     fn upcast(&self) -> &(dyn DefDatabase + 'static) {
         self
     }
 }
 
-impl ra_salsa::Database for TestDB {
-    fn salsa_event(&self, event: ra_salsa::Event) {
+impl Upcast<dyn RootQueryDb> for TestDB {
+    fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
+        self
+    }
+}
+
+impl Upcast<dyn SourceDatabase> for TestDB {
+    fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
+        self
+    }
+}
+
+#[salsa::db]
+impl salsa::Database for TestDB {
+    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
         let mut events = self.events.lock().unwrap();
         if let Some(events) = &mut *events {
+            let event = event();
             events.push(event);
         }
     }
@@ -68,12 +84,54 @@ impl fmt::Debug for TestDB {
 
 impl panic::RefUnwindSafe for TestDB {}
 
-impl FileLoader for TestDB {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+#[salsa::db]
+impl SourceDatabase for TestDB {
+    fn file_text(&self, file_id: base_db::FileId) -> FileText {
+        self.files.file_text(file_id)
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+
+    fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
+    }
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: base_db::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
+    }
+
+    /// Source root of the file.
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
+    }
+
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
+
+    fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
+    }
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: base_db::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
     }
 }
 
@@ -92,8 +150,10 @@ impl TestDB {
     }
 
     pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
+        let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
+
         for &krate in self.relevant_crates(file_id).iter() {
-            let crate_def_map = self.crate_def_map(krate);
+            let crate_def_map = db.crate_def_map(krate);
             for (local_id, data) in crate_def_map.modules() {
                 if data.origin.file_id().map(EditionedFileId::file_id) == Some(file_id) {
                     return crate_def_map.module_id(local_id);
@@ -104,8 +164,10 @@ impl TestDB {
     }
 
     pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
+        let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
+
         let file_module = self.module_for_file(position.file_id.file_id());
-        let mut def_map = file_module.def_map(self);
+        let mut def_map = file_module.def_map(db);
         let module = self.mod_at_position(&def_map, position);
 
         def_map = match self.block_at_position(&def_map, position) {
@@ -128,10 +190,11 @@ impl TestDB {
 
     /// Finds the smallest/innermost module in `def_map` containing `position`.
     fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
+        let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
         let mut size = None;
         let mut res = DefMap::ROOT;
         for (module, data) in def_map.modules() {
-            let src = data.definition_source(self);
+            let src = data.definition_source(db);
             if src.file_id != position.file_id {
                 continue;
             }
@@ -167,17 +230,18 @@ impl TestDB {
     }
 
     fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
+        let db = <TestDB as Upcast<dyn DefDatabase>>::upcast(self);
         // Find the smallest (innermost) function in `def_map` containing the cursor.
         let mut size = None;
         let mut fn_def = None;
         for (_, module) in def_map.modules() {
-            let file_id = module.definition_source(self).file_id;
+            let file_id = module.definition_source(db).file_id;
             if file_id != position.file_id {
                 continue;
             }
             for decl in module.scope.declarations() {
                 if let ModuleDefId::FunctionId(it) = decl {
-                    let range = it.lookup(self).source(self).value.syntax().text_range();
+                    let range = it.lookup(db).source(db).value.syntax().text_range();
 
                     if !range.contains(position.offset) {
                         continue;
@@ -203,10 +267,13 @@ impl TestDB {
 
         // Find the innermost block expression that has a `DefMap`.
         let def_with_body = fn_def?.into();
-        let (_, source_map) = self.body_with_source_map(def_with_body);
-        let scopes = self.expr_scopes(def_with_body);
-        let root = self.parse(position.file_id);
+        let source_map = db.body_with_source_map(def_with_body).1;
+        let scopes = db.expr_scopes(def_with_body);
+
+        let editioned_file_id_wrapper =
+            base_db::EditionedFileId::new(db.as_dyn_database(), position.file_id);
 
+        let root = db.parse(editioned_file_id_wrapper);
         let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
             .filter_map(|node| {
                 let block = ast::BlockExpr::cast(node)?;
@@ -223,7 +290,7 @@ impl TestDB {
             let mut containing_blocks =
                 scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
 
-            if let Some(block) = containing_blocks.next().map(|block| self.block_def_map(block)) {
+            if let Some(block) = containing_blocks.next().map(|block| db.block_def_map(block)) {
                 return Some(block);
             }
         }
@@ -231,7 +298,7 @@ impl TestDB {
         None
     }
 
-    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
+    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -244,8 +311,11 @@ impl TestDB {
             .filter_map(|e| match e.kind {
                 // This is pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
-                ra_salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key.debug(self)))
+                salsa::EventKind::WillExecute { database_key } => {
+                    let ingredient = self
+                        .as_dyn_database()
+                        .ingredient_debug_name(database_key.ingredient_index());
+                    Some(ingredient.to_string())
                 }
                 _ => None,
             })
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 7d561e0527d..607199a6244 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -21,6 +21,8 @@ itertools.workspace = true
 hashbrown.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true
+query-group.workspace = true
+salsa.workspace = true
 
 # local deps
 stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
index 55242ab3e57..d8b3f40e8f4 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs
@@ -3,9 +3,12 @@
 use base_db::AnchoredPath;
 use cfg::CfgExpr;
 use either::Either;
-use intern::{sym, Symbol};
+use intern::{
+    sym::{self},
+    Symbol,
+};
 use mbe::{expect_fragment, DelimiterKind};
-use span::{Edition, EditionedFileId, Span};
+use span::{Edition, EditionedFileId, FileId, Span};
 use stdx::format_to;
 use syntax::{
     format_smolstr,
@@ -401,7 +404,7 @@ fn use_panic_2021(db: &dyn ExpandDatabase, span: Span) -> bool {
     // stack that does not have #[allow_internal_unstable(edition_panic)].
     // (To avoid using the edition of e.g. the assert!() or debug_assert!() definition.)
     loop {
-        let Some(expn) = db.lookup_intern_syntax_context(span.ctx).outer_expn else {
+        let Some(expn) = span.ctx.outer_expn(db) else {
             break false;
         };
         let expn = db.lookup_intern_macro_call(expn);
@@ -656,10 +659,10 @@ fn relative_file(
     allow_recursion: bool,
     err_span: Span,
 ) -> Result<EditionedFileId, ExpandError> {
-    let lookup = call_id.lookup(db);
+    let lookup = db.lookup_intern_macro_call(call_id);
     let call_site = lookup.kind.file_id().original_file_respecting_includes(db).file_id();
     let path = AnchoredPath { anchor: call_site, path: path_str };
-    let res = db
+    let res: FileId = db
         .resolve_path(path)
         .ok_or_else(|| ExpandError::other(err_span, format!("failed to load file `{path_str}`")))?;
     // Prevent include itself
@@ -725,8 +728,10 @@ fn include_expand(
     tt: &tt::TopSubtree,
     span: Span,
 ) -> ExpandResult<tt::TopSubtree> {
-    let file_id = match include_input_to_file_id(db, arg_id, tt) {
-        Ok(it) => it,
+    let (file_id_wrapper, editioned_file_id) = match include_input_to_file_id(db, arg_id, tt) {
+        Ok(editioned_file_id) => {
+            (base_db::EditionedFileId::new(db, editioned_file_id), editioned_file_id)
+        }
         Err(e) => {
             return ExpandResult::new(
                 tt::TopSubtree::empty(DelimSpan { open: span, close: span }),
@@ -734,10 +739,10 @@ fn include_expand(
             )
         }
     };
-    let span_map = db.real_span_map(file_id);
+    let span_map = db.real_span_map(editioned_file_id);
     // FIXME: Parse errors
     ExpandResult::ok(syntax_node_to_token_tree(
-        &db.parse(file_id).syntax_node(),
+        &db.parse(file_id_wrapper).syntax_node(),
         SpanMap::RealSpanMap(span_map),
         span,
         syntax_bridge::DocCommentDesugarMode::ProcMacro,
@@ -800,7 +805,7 @@ fn include_str_expand(
     };
 
     let text = db.file_text(file_id.file_id());
-    let text = &*text;
+    let text = &*text.text(db);
 
     ExpandResult::ok(quote!(span =>#text))
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
index 9b637fc7684..a961df181db 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs
@@ -277,8 +277,8 @@ mod tests {
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:#?}");
         expect![[r#"
-            SUBTREE $$ 937550:0@0..0#2 937550:0@0..0#2
-              IDENT   hello 937550:0@0..0#2"#]]
+            SUBTREE $$ 937550:0@0..0#4294967037 937550:0@0..0#4294967037
+              IDENT   hello 937550:0@0..0#4294967037"#]]
         .assert_eq(&t);
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
index 1fdf251ba52..7a42d662601 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/change.rs
@@ -1,10 +1,8 @@
 //! Defines a unit of change that can applied to the database to get the next
 //! state. Changes are transactional.
-use base_db::{
-    ra_salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot,
-    SourceRootDatabase,
-};
+use base_db::{CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot};
 use rustc_hash::FxHashMap;
+use salsa::Durability;
 use span::FileId;
 use triomphe::Arc;
 
@@ -21,7 +19,7 @@ impl ChangeWithProcMacros {
         Self::default()
     }
 
-    pub fn apply(self, db: &mut (impl ExpandDatabase + SourceRootDatabase)) {
+    pub fn apply(self, db: &mut impl ExpandDatabase) {
         self.source_change.apply(db);
         if let Some(proc_macros) = self.proc_macros {
             db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 8ca8bf1ba4a..2f97cceab55 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -1,10 +1,14 @@
 //! Defines database & queries for macro expansion.
 
-use base_db::{ra_salsa, CrateId, SourceDatabase};
+use base_db::{CrateId, RootQueryDb};
 use either::Either;
 use mbe::MatchedArmIndex;
 use rustc_hash::FxHashSet;
-use span::{AstIdMap, Edition, EditionedFileId, Span, SyntaxContextData, SyntaxContextId};
+use salsa::plumbing::AsId;
+use span::{
+    AstIdMap, Edition, EditionedFileId, HirFileId, HirFileIdRepr, MacroCallId, MacroFileId, Span,
+    SyntaxContextId,
+};
 use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
 use syntax_bridge::{syntax_node_to_token_tree, DocCommentDesugarMode};
 use triomphe::Arc;
@@ -19,12 +23,11 @@ use crate::{
         span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
         SyntaxContextExt as _,
     },
-    proc_macro::ProcMacros,
-    span_map::{RealSpanMap, SpanMap, SpanMapRef},
-    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
-    CustomProcMacroExpander, EagerCallInfo, EagerExpander, ExpandError, ExpandResult, ExpandTo,
-    ExpansionSpanMap, HirFileId, HirFileIdRepr, Lookup, MacroCallId, MacroCallKind, MacroCallLoc,
-    MacroDefId, MacroDefKind, MacroFileId,
+    proc_macro::{CustomProcMacroExpander, ProcMacros},
+    span_map::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+    tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
+    EagerExpander, ExpandError, ExpandResult, ExpandTo, MacroCallKind, MacroCallLoc, MacroDefId,
+    MacroDefKind,
 };
 /// This is just to ensure the types of smart_macro_arg and macro_arg are the same
 type MacroArgResult = (Arc<tt::TopSubtree>, SyntaxFixupUndoInfo, Span);
@@ -52,32 +55,32 @@ pub enum TokenExpander {
     ProcMacro(CustomProcMacroExpander),
 }
 
-#[ra_salsa::query_group(ExpandDatabaseStorage)]
-pub trait ExpandDatabase: SourceDatabase {
+#[query_group::query_group]
+pub trait ExpandDatabase: RootQueryDb {
     /// The proc macros.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn proc_macros(&self) -> Arc<ProcMacros>;
 
     fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
 
-    /// Main public API -- parses a hir file, not caring whether it's a real
-    /// file or a macro expansion.
-    #[ra_salsa::transparent]
+    #[salsa::transparent]
     fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode;
+
     /// Implementation for the macro case.
-    #[ra_salsa::lru]
+    #[salsa::lru(512)]
     fn parse_macro_expansion(
         &self,
-        macro_file: MacroFileId,
+        macro_file: span::MacroFileId,
     ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(SpanMap::new)]
+
+    #[salsa::transparent]
+    #[salsa::invoke(SpanMap::new)]
     fn span_map(&self, file_id: HirFileId) -> SpanMap;
 
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(crate::span_map::expansion_span_map)]
+    #[salsa::transparent]
+    #[salsa::invoke(crate::span_map::expansion_span_map)]
     fn expansion_span_map(&self, file_id: MacroFileId) -> Arc<ExpansionSpanMap>;
-    #[ra_salsa::invoke(crate::span_map::real_span_map)]
+    #[salsa::invoke(crate::span_map::real_span_map)]
     fn real_span_map(&self, file_id: EditionedFileId) -> Arc<RealSpanMap>;
 
     /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
@@ -85,43 +88,47 @@ pub trait ExpandDatabase: SourceDatabase {
     ///
     /// We encode macro definitions into ids of macro calls, this what allows us
     /// to be incremental.
-    #[ra_salsa::interned]
+    #[salsa::transparent]
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
-    #[ra_salsa::interned]
-    fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
+    #[salsa::transparent]
+    fn lookup_intern_macro_call(&self, macro_call: MacroCallId) -> MacroCallLoc;
 
-    #[ra_salsa::transparent]
-    fn setup_syntax_context_root(&self) -> ();
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(crate::hygiene::dump_syntax_contexts)]
+    #[salsa::transparent]
+    #[salsa::invoke(crate::hygiene::dump_syntax_contexts)]
     fn dump_syntax_contexts(&self) -> String;
 
     /// Lowers syntactic macro call to a token tree representation. That's a firewall
     /// query, only typing in the macro call itself changes the returned
     /// subtree.
     #[deprecated = "calling this is incorrect, call `macro_arg_considering_derives` instead"]
+    #[salsa::invoke(macro_arg)]
     fn macro_arg(&self, id: MacroCallId) -> MacroArgResult;
-    #[ra_salsa::transparent]
+
+    #[salsa::transparent]
     fn macro_arg_considering_derives(
         &self,
         id: MacroCallId,
         kind: &MacroCallKind,
     ) -> MacroArgResult;
+
     /// Fetches the expander for this macro.
-    #[ra_salsa::transparent]
-    #[ra_salsa::invoke(TokenExpander::macro_expander)]
+    #[salsa::transparent]
+    #[salsa::invoke(TokenExpander::macro_expander)]
     fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
+
     /// Fetches (and compiles) the expander of this decl macro.
-    #[ra_salsa::invoke(DeclarativeMacroExpander::expander)]
+    #[salsa::invoke(DeclarativeMacroExpander::expander)]
     fn decl_macro_expander(
         &self,
         def_crate: CrateId,
         id: AstId<ast::Macro>,
     ) -> Arc<DeclarativeMacroExpander>;
+
     /// Special case of the previous query for procedural macros. We can't LRU
     /// proc macros, since they are not deterministic in general, and
     /// non-determinism breaks salsa in a very, very, very bad way.
     /// @edwin0cheng heroically debugged this once! See #4315 for details
+    #[salsa::invoke(expand_proc_macro)]
     fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::TopSubtree>>;
     /// Retrieves the span to be used for a proc-macro expansions spans.
     /// This is a firewall query as it requires parsing the file, which we don't want proc-macros to
@@ -129,22 +136,42 @@ pub trait ExpandDatabase: SourceDatabase {
     /// parse queries being LRU cached. If they weren't the invalidations would only happen if the
     /// user wrote in the file that defines the proc-macro.
     fn proc_macro_span(&self, fun: AstId<ast::Fn>) -> Span;
+
     /// Firewall query that returns the errors from the `parse_macro_expansion` query.
+    #[salsa::invoke(parse_macro_expansion_error)]
     fn parse_macro_expansion_error(
         &self,
         macro_call: MacroCallId,
     ) -> Option<Arc<ExpandResult<Arc<[SyntaxError]>>>>;
-    #[ra_salsa::transparent]
+
+    #[salsa::transparent]
     fn syntax_context(&self, file: HirFileId, edition: Edition) -> SyntaxContextId;
 }
 
+#[salsa::interned(no_lifetime, id = span::MacroCallId)]
+pub struct MacroCallWrapper {
+    pub loc: MacroCallLoc,
+}
+
+fn intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallLoc) -> MacroCallId {
+    MacroCallWrapper::new(db, macro_call).0
+}
+
+fn lookup_intern_macro_call(db: &dyn ExpandDatabase, macro_call: MacroCallId) -> MacroCallLoc {
+    MacroCallWrapper::ingredient(db).data(db.as_dyn_database(), macro_call.as_id()).0.clone()
+}
+
+#[salsa::interned(no_lifetime, id = span::SyntaxContextId)]
+pub struct SyntaxContextWrapper {
+    pub data: SyntaxContextId,
+}
+
 fn syntax_context(db: &dyn ExpandDatabase, file: HirFileId, edition: Edition) -> SyntaxContextId {
     match file.repr() {
         HirFileIdRepr::FileId(_) => SyntaxContextId::root(edition),
         HirFileIdRepr::MacroFile(m) => {
-            db.macro_arg_considering_derives(m.macro_call_id, &m.macro_call_id.lookup(db).kind)
-                .2
-                .ctx
+            let kind = db.lookup_intern_macro_call(m.macro_call_id).kind;
+            db.macro_arg_considering_derives(m.macro_call_id, &kind).2.ctx
         }
     }
 }
@@ -322,9 +349,15 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: span::HirFileId) -> triomphe::Ar
     triomphe::Arc::new(AstIdMap::from_source(&db.parse_or_expand(file_id)))
 }
 
+/// Main public API -- parses a hir file, not caring whether it's a real
+/// file or a macro expansion.
 fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
+        HirFileIdRepr::FileId(editioned_file_id) => {
+            let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+            db.parse(file_id).syntax_node()
+        }
+
         HirFileIdRepr::MacroFile(macro_file) => {
             db.parse_macro_expansion(macro_file).value.0.syntax_node()
         }
@@ -376,8 +409,13 @@ pub(crate) fn parse_with_map(
     file_id: HirFileId,
 ) -> (Parse<SyntaxNode>, SpanMap) {
     match file_id.repr() {
-        HirFileIdRepr::FileId(file_id) => {
-            (db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
+        HirFileIdRepr::FileId(editioned_file_id) => {
+            let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
+            (
+                db.parse(file_id).to_syntax(),
+                SpanMap::RealSpanMap(db.real_span_map(editioned_file_id)),
+            )
         }
         HirFileIdRepr::MacroFile(macro_file) => {
             let (parse, map) = db.parse_macro_expansion(macro_file).value;
@@ -597,7 +635,7 @@ fn macro_expand(
 
     let (ExpandResult { value: (tt, matched_arm), err }, span) = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => {
-            return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None)
+            return db.expand_proc_macro(macro_call_id).map(CowArc::Arc).zip_val(None);
         }
         _ => {
             let (macro_arg, undo_info, span) =
@@ -728,12 +766,7 @@ pub(crate) fn token_tree_to_syntax_node(
         ExpandTo::Type => syntax_bridge::TopEntryPoint::Type,
         ExpandTo::Expr => syntax_bridge::TopEntryPoint::Expr,
     };
-    syntax_bridge::token_tree_to_syntax_node(
-        tt,
-        entry_point,
-        &mut |ctx| ctx.lookup(db).edition,
-        edition,
-    )
+    syntax_bridge::token_tree_to_syntax_node(tt, entry_point, &mut |ctx| ctx.edition(db), edition)
 }
 
 fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
@@ -754,9 +787,3 @@ fn check_tt_count(tt: &tt::TopSubtree) -> Result<(), ExpandResult<()>> {
         })
     }
 }
-
-fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
-    for edition in Edition::iter() {
-        db.intern_syntax_context(SyntaxContextData::root(edition));
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
index fef77acb7bb..91cbbc37364 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/declarative.rs
@@ -102,12 +102,13 @@ impl DeclarativeMacroExpander {
         };
         let ctx_edition = |ctx: SyntaxContextId| {
             let crate_graph = db.crate_graph();
+
             if ctx.is_root() {
                 crate_graph[def_crate].edition
             } else {
-                let data = db.lookup_intern_syntax_context(ctx);
                 // UNWRAP-SAFETY: Only the root context has no outer expansion
-                crate_graph[data.outer_expn.unwrap().lookup(db).def.krate].edition
+                let krate = db.lookup_intern_macro_call(ctx.outer_expn(db).unwrap()).def.krate;
+                crate_graph[krate].edition
             }
         };
         let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index f476d1b564c..4a98b455cab 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -28,7 +28,7 @@ use crate::{
     ast::{self, AstNode},
     db::ExpandDatabase,
     mod_path::ModPath,
-    AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, Intern,
+    AstId, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile,
     MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
 };
 
@@ -47,13 +47,13 @@ pub fn expand_eager_macro_input(
     // When `lazy_expand` is called, its *parent* file must already exist.
     // Here we store an eager macro id for the argument expanded subtree
     // for that purpose.
-    let arg_id = MacroCallLoc {
+    let loc = MacroCallLoc {
         def,
         krate,
         kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
         ctxt: call_site,
-    }
-    .intern(db);
+    };
+    let arg_id = db.intern_macro_call(loc);
     #[allow(deprecated)] // builtin eager macros are never derives
     let (_, _, span) = db.macro_arg(arg_id);
     let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
@@ -107,7 +107,7 @@ pub fn expand_eager_macro_input(
         ctxt: call_site,
     };
 
-    ExpandResult { value: Some(loc.intern(db)), err }
+    ExpandResult { value: Some(db.intern_macro_call(loc)), err }
 }
 
 fn lazy_expand(
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
index f3bcc772682..5810d11338c 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/files.rs
@@ -159,7 +159,9 @@ trait FileIdToSyntax: Copy {
 
 impl FileIdToSyntax for EditionedFileId {
     fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
-        db.parse(self).syntax_node()
+        let file_id = base_db::EditionedFileId::new(db, self);
+
+        db.parse(file_id).syntax_node()
     }
 }
 impl FileIdToSyntax for MacroFileId {
@@ -274,7 +276,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
         let file_id = match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value.borrow().clone() })
+                return Some(InRealFile { file_id, value: self.value.borrow().clone() });
             }
             HirFileIdRepr::MacroFile(m)
                 if matches!(m.kind(db), MacroKind::Attr | MacroKind::AttrBuiltIn) =>
@@ -284,12 +286,14 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
             _ => return None,
         };
 
-        let FileRange { file_id, range } = map_node_range_up_rooted(
+        let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
             db,
             &db.expansion_span_map(file_id),
             self.value.borrow().text_range(),
         )?;
 
+        let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
         let kind = self.kind();
         let value = db
             .parse(file_id)
@@ -298,7 +302,7 @@ impl<SN: Borrow<SyntaxNode>> InFile<SN> {
             .ancestors()
             .take_while(|it| it.text_range() == range)
             .find(|it| it.kind() == kind)?;
-        Some(InRealFile::new(file_id, value))
+        Some(InRealFile::new(editioned_file_id, value))
     }
 }
 
@@ -453,7 +457,7 @@ impl<N: AstNode> InFile<N> {
         // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
         let file_id = match self.file_id.repr() {
             HirFileIdRepr::FileId(file_id) => {
-                return Some(InRealFile { file_id, value: self.value })
+                return Some(InRealFile { file_id, value: self.value });
             }
             HirFileIdRepr::MacroFile(m) => m,
         };
@@ -461,16 +465,18 @@ impl<N: AstNode> InFile<N> {
             return None;
         }
 
-        let FileRange { file_id, range } = map_node_range_up_rooted(
+        let FileRange { file_id: editioned_file_id, range } = map_node_range_up_rooted(
             db,
             &db.expansion_span_map(file_id),
             self.value.syntax().text_range(),
         )?;
 
+        let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
         // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
         let anc = db.parse(file_id).syntax_node().covering_element(range);
         let value = anc.ancestors().find_map(N::cast)?;
-        Some(InRealFile::new(file_id, value))
+        Some(InRealFile::new(editioned_file_id, value))
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index fe05af0ac9d..b53468ccacd 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -22,11 +22,11 @@
 // FIXME: Move this into the span crate? Not quite possible today as that depends on `MacroCallLoc`
 // which contains a bunch of unrelated things
 
-use std::iter;
+use std::{convert::identity, iter};
 
-use span::{Edition, MacroCallId, Span, SyntaxContextData, SyntaxContextId};
+use span::{Edition, MacroCallId, Span, SyntaxContextId};
 
-use crate::db::{ExpandDatabase, InternSyntaxContextQuery};
+use crate::db::{ExpandDatabase, MacroCallWrapper};
 
 pub use span::Transparency;
 
@@ -72,8 +72,8 @@ fn span_with_ctxt_from_mark(
 
 pub(super) fn apply_mark(
     db: &dyn ExpandDatabase,
-    ctxt: SyntaxContextId,
-    call_id: MacroCallId,
+    ctxt: span::SyntaxContextId,
+    call_id: span::MacroCallId,
     transparency: Transparency,
     edition: Edition,
 ) -> SyntaxContextId {
@@ -114,92 +114,75 @@ fn apply_mark_internal(
     transparency: Transparency,
     edition: Edition,
 ) -> SyntaxContextId {
-    use base_db::ra_salsa;
-
     let call_id = Some(call_id);
 
-    let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
-    let mut opaque = syntax_context_data.opaque;
-    let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
+    let mut opaque = ctxt.opaque(db);
+    let mut opaque_and_semitransparent = ctxt.opaque_and_semitransparent(db);
 
     if transparency >= Transparency::Opaque {
         let parent = opaque;
-        opaque = ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
-            (parent, call_id, transparency, edition),
-            |new_opaque| SyntaxContextData {
-                outer_expn: call_id,
-                outer_transparency: transparency,
-                parent,
-                opaque: new_opaque,
-                opaque_and_semitransparent: new_opaque,
-                edition,
-            },
-        );
+        opaque =
+            SyntaxContextId::new(db, call_id, transparency, edition, parent, identity, identity);
     }
 
     if transparency >= Transparency::SemiTransparent {
         let parent = opaque_and_semitransparent;
         opaque_and_semitransparent =
-            ra_salsa::plumbing::get_query_table::<InternSyntaxContextQuery>(db).get_or_insert(
-                (parent, call_id, transparency, edition),
-                |new_opaque_and_semitransparent| SyntaxContextData {
-                    outer_expn: call_id,
-                    outer_transparency: transparency,
-                    parent,
-                    opaque,
-                    opaque_and_semitransparent: new_opaque_and_semitransparent,
-                    edition,
-                },
-            );
+            SyntaxContextId::new(db, call_id, transparency, edition, parent, |_| opaque, identity);
     }
 
     let parent = ctxt;
-    db.intern_syntax_context(SyntaxContextData {
-        outer_expn: call_id,
-        outer_transparency: transparency,
-        parent,
-        opaque,
-        opaque_and_semitransparent,
+    SyntaxContextId::new(
+        db,
+        call_id,
+        transparency,
         edition,
-    })
+        parent,
+        |_| opaque,
+        |_| opaque_and_semitransparent,
+    )
 }
 
 pub trait SyntaxContextExt {
-    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
-    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
-    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
-    fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
-    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
-    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId;
+    fn remove_mark(&mut self, db: &dyn ExpandDatabase)
+        -> (Option<span::MacroCallId>, Transparency);
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency);
+    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)>;
     fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
 }
 
 impl SyntaxContextExt for SyntaxContextId {
-    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).opaque_and_semitransparent
+    fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
+        self.opaque_and_semitransparent(db)
     }
-    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).opaque
+    fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
+        self.opaque(db)
     }
-    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
-        db.lookup_intern_syntax_context(self).parent
+    fn parent_ctxt(self, db: &dyn ExpandDatabase) -> span::SyntaxContextId {
+        self.parent(db)
     }
-    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
-        let data = db.lookup_intern_syntax_context(self);
-        (data.outer_expn, data.outer_transparency)
+    fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<span::MacroCallId>, Transparency) {
+        let data = self;
+        (data.outer_expn(db), data.outer_transparency(db))
     }
-    fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
-        let data = db.lookup_intern_syntax_context(*self);
-        *self = data.parent;
-        (data.outer_expn, data.outer_transparency)
+    fn remove_mark(
+        &mut self,
+        db: &dyn ExpandDatabase,
+    ) -> (Option<span::MacroCallId>, Transparency) {
+        let data = *self;
+        *self = data.parent(db);
+        (data.outer_expn(db), data.outer_transparency(db))
     }
-    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)> {
+    fn marks(self, db: &dyn ExpandDatabase) -> Vec<(span::MacroCallId, Transparency)> {
         let mut marks = marks_rev(self, db).collect::<Vec<_>>();
         marks.reverse();
         marks
     }
     fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
-        !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+        !self.is_root() && self.outer_transparency(db).is_opaque()
     }
 }
 
@@ -207,7 +190,7 @@ impl SyntaxContextExt for SyntaxContextId {
 pub fn marks_rev(
     ctxt: SyntaxContextId,
     db: &dyn ExpandDatabase,
-) -> impl Iterator<Item = (MacroCallId, Transparency)> + '_ {
+) -> impl Iterator<Item = (span::MacroCallId, Transparency)> + '_ {
     iter::successors(Some(ctxt), move |&mark| Some(mark.parent_ctxt(db)))
         .take_while(|&it| !it.is_root())
         .map(|ctx| {
@@ -219,18 +202,14 @@ pub fn marks_rev(
 }
 
 pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
-    use crate::db::{InternMacroCallLookupQuery, InternSyntaxContextLookupQuery};
-    use base_db::ra_salsa::debug::DebugQueryTable;
-
     let mut s = String::from("Expansions:");
-    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
-    for e in entries {
-        let id = e.key;
-        let expn_data = e.value.as_ref().unwrap();
+    let entries =
+        MacroCallWrapper::ingredient(db).entries(db.as_dyn_database()).collect::<Vec<_>>();
+    for loc in entries {
+        let expn_data = &loc.fields().0;
+
         s.push_str(&format!(
-            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
-            id,
+            "parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
             expn_data.kind.file_id(),
             expn_data.ctxt,
             expn_data.kind.descr(),
@@ -238,28 +217,25 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
     }
 
     s.push_str("\n\nSyntaxContexts:\n");
-    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
-    entries.sort_by_key(|e| e.key);
+    let entries = SyntaxContextId::ingredient(db).entries(db.as_dyn_database()).collect::<Vec<_>>();
     for e in entries {
         struct SyntaxContextDebug<'a>(
             &'a dyn ExpandDatabase,
-            SyntaxContextId,
-            &'a SyntaxContextData,
+            &'a span::SyntaxContextUnderlyingData,
         );
 
         impl std::fmt::Debug for SyntaxContextDebug<'_> {
             fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-                fancy_debug(self.2, self.1, self.0, f)
+                fancy_debug(self.1, self.0, f)
             }
         }
 
         fn fancy_debug(
-            this: &SyntaxContextData,
-            self_id: SyntaxContextId,
+            this: &span::SyntaxContextUnderlyingData,
             db: &dyn ExpandDatabase,
             f: &mut std::fmt::Formatter<'_>,
         ) -> std::fmt::Result {
-            write!(f, "#{self_id} parent: #{}, outer_mark: (", this.parent)?;
+            write!(f, "parent: #{}, outer_mark: (", this.parent)?;
             match this.outer_expn {
                 Some(id) => {
                     write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
@@ -269,7 +245,8 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
             write!(f, ", {:?})", this.outer_transparency)
         }
 
-        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+        let dbg = SyntaxContextDebug(db, e.fields());
+        stdx::format_to!(s, "{:?}\n", dbg);
     }
     s
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index c1d808cbf2c..f8c83dce55a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -33,11 +33,11 @@ use triomphe::Arc;
 use core::fmt;
 use std::hash::Hash;
 
-use base_db::{ra_salsa::InternValueTrivial, CrateId};
+use base_db::CrateId;
 use either::Either;
 use span::{
     Edition, EditionedFileId, ErasedFileAstId, FileAstId, HirFileIdRepr, Span, SpanAnchor,
-    SyntaxContextData, SyntaxContextId,
+    SyntaxContextId,
 };
 use syntax::{
     ast::{self, AstNode},
@@ -89,17 +89,17 @@ pub mod tt {
 macro_rules! impl_intern_lookup {
     ($db:ident, $id:ident, $loc:ident, $intern:ident, $lookup:ident) => {
         impl $crate::Intern for $loc {
-            type Database<'db> = dyn $db + 'db;
+            type Database = dyn $db;
             type ID = $id;
-            fn intern(self, db: &Self::Database<'_>) -> $id {
+            fn intern(self, db: &Self::Database) -> Self::ID {
                 db.$intern(self)
             }
         }
 
         impl $crate::Lookup for $id {
-            type Database<'db> = dyn $db + 'db;
+            type Database = dyn $db;
             type Data = $loc;
-            fn lookup(&self, db: &Self::Database<'_>) -> $loc {
+            fn lookup(&self, db: &Self::Database) -> Self::Data {
                 db.$lookup(*self)
             }
         }
@@ -108,15 +108,15 @@ macro_rules! impl_intern_lookup {
 
 // ideally these would be defined in base-db, but the orphan rule doesn't let us
 pub trait Intern {
-    type Database<'db>: ?Sized;
+    type Database: ?Sized;
     type ID;
-    fn intern(self, db: &Self::Database<'_>) -> Self::ID;
+    fn intern(self, db: &Self::Database) -> Self::ID;
 }
 
 pub trait Lookup {
-    type Database<'db>: ?Sized;
+    type Database: ?Sized;
     type Data;
-    fn lookup(&self, db: &Self::Database<'_>) -> Self::Data;
+    fn lookup(&self, db: &Self::Database) -> Self::Data;
 }
 
 impl_intern_lookup!(
@@ -127,14 +127,6 @@ impl_intern_lookup!(
     lookup_intern_macro_call
 );
 
-impl_intern_lookup!(
-    ExpandDatabase,
-    SyntaxContextId,
-    SyntaxContextData,
-    intern_syntax_context,
-    lookup_intern_syntax_context
-);
-
 pub type ExpandResult<T> = ValueResult<T, ExpandError>;
 
 #[derive(Debug, PartialEq, Eq, Clone, Hash)]
@@ -262,7 +254,6 @@ pub struct MacroCallLoc {
     pub kind: MacroCallKind,
     pub ctxt: SyntaxContextId,
 }
-impl InternValueTrivial for MacroCallLoc {}
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDefId {
@@ -357,7 +348,7 @@ impl HirFileIdExt for HirFileId {
     fn edition(self, db: &dyn ExpandDatabase) -> Edition {
         match self.repr() {
             HirFileIdRepr::FileId(file_id) => file_id.edition(),
-            HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition,
+            HirFileIdRepr::MacroFile(m) => db.lookup_intern_macro_call(m.macro_call_id).def.edition,
         }
     }
     fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
@@ -366,7 +357,7 @@ impl HirFileIdExt for HirFileId {
             match file_id.repr() {
                 HirFileIdRepr::FileId(id) => break id,
                 HirFileIdRepr::MacroFile(MacroFileId { macro_call_id }) => {
-                    file_id = macro_call_id.lookup(db).kind.file_id();
+                    file_id = db.lookup_intern_macro_call(macro_call_id).kind.file_id()
                 }
             }
         }
@@ -409,7 +400,7 @@ impl HirFileIdExt for HirFileId {
 
     fn as_builtin_derive_attr_node(&self, db: &dyn ExpandDatabase) -> Option<InFile<ast::Attr>> {
         let macro_file = self.macro_file()?;
-        let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
         let attr = match loc.def.kind {
             MacroDefKind::BuiltInDerive(..) => loc.to_node(db),
             _ => return None,
@@ -467,7 +458,7 @@ impl MacroFileIdExt for MacroFileId {
         let mut level = 0;
         let mut macro_file = self;
         loop {
-            let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+            let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
 
             level += 1;
             macro_file = match loc.kind.file_id().repr() {
@@ -477,7 +468,7 @@ impl MacroFileIdExt for MacroFileId {
         }
     }
     fn parent(self, db: &dyn ExpandDatabase) -> HirFileId {
-        self.macro_call_id.lookup(db).kind.file_id()
+        db.lookup_intern_macro_call(self.macro_call_id).kind.file_id()
     }
 
     /// Return expansion information if it is a macro-expansion file
@@ -538,7 +529,7 @@ impl MacroDefId {
         kind: MacroCallKind,
         ctxt: SyntaxContextId,
     ) -> MacroCallId {
-        MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
+        db.intern_macro_call(MacroCallLoc { def: self, krate, kind, ctxt })
     }
 
     pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
index 75b58614540..6f6f41f322a 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs
@@ -277,7 +277,7 @@ fn convert_path(
     if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
             let syn_ctx = span_for_range(segment.syntax().text_range());
-            if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+            if let Some(macro_call_id) = syn_ctx.outer_expn(db) {
                 if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
                     mod_path.kind = match resolve_crate_root(db, syn_ctx) {
                         Some(crate_root) => PathKind::DollarCrate(crate_root),
@@ -336,7 +336,7 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: tt::TokenTreesView<'_>) -> Optio
 pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
     // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
     // we don't want to pretend that the `macro_rules!` definition is in the `macro`
-    // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+    // as described in `SyntaxContextId::apply_mark`, so we ignore prepended opaque marks.
     // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
     // definitions actually produced by `macro` and `macro` definitions produced by
     // `macro_rules!`, but at least such configurations are not stable yet.
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
index c744fbce77b..944341ec3f2 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/prettify_macro_expansion_.rs
@@ -25,9 +25,8 @@ pub fn prettify_macro_expansion(
         let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
         let replacement =
             syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
-                let ctx_data = db.lookup_intern_syntax_context(ctx);
                 let macro_call_id =
-                    ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
+                    ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
                 let macro_call = db.lookup_intern_macro_call(macro_call_id);
                 let macro_def_crate = macro_call.def.krate;
                 // First, if this is the same crate as the macro, nothing will work but `crate`.
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
index 740c27b89ce..ab99cb14f95 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/span_map.rs
@@ -79,10 +79,16 @@ impl SpanMapRef<'_> {
     }
 }
 
-pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -> Arc<RealSpanMap> {
+pub(crate) fn real_span_map(
+    db: &dyn ExpandDatabase,
+    editioned_file_id: EditionedFileId,
+) -> Arc<RealSpanMap> {
     use syntax::ast::HasModuleItem;
     let mut pairs = vec![(syntax::TextSize::new(0), span::ROOT_ERASED_FILE_AST_ID)];
-    let ast_id_map = db.ast_id_map(file_id.into());
+    let ast_id_map = db.ast_id_map(editioned_file_id.into());
+
+    let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
     let tree = db.parse(file_id).tree();
     // This is an incrementality layer. Basically we can't use absolute ranges for our spans as that
     // would mean we'd invalidate everything whenever we type. So instead we make the text ranges
@@ -134,7 +140,7 @@ pub(crate) fn real_span_map(db: &dyn ExpandDatabase, file_id: EditionedFileId) -
     });
 
     Arc::new(RealSpanMap::from_file(
-        file_id,
+        editioned_file_id,
         pairs.into_boxed_slice(),
         tree.syntax().text_range().end(),
     ))
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index 1d12bee646c..30074105331 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -31,8 +31,11 @@ la-arena.workspace = true
 triomphe.workspace = true
 nohash-hasher.workspace = true
 typed-arena = "2.0.1"
+dashmap.workspace = true
 indexmap.workspace = true
 rustc_apfloat = "0.2.0"
+query-group.workspace = true
+salsa.workspace = true
 
 ra-ap-rustc_abi.workspace = true
 ra-ap-rustc_index.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 67fb73696f7..2be32173307 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -451,10 +451,10 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
         self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
     }
     fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
-        format!("Opaque_{}", opaque_ty_id.0)
+        format!("Opaque_{:?}", opaque_ty_id.0)
     }
     fn fn_def_name(&self, fn_def_id: chalk_ir::FnDefId<Interner>) -> String {
-        format!("fn_{}", fn_def_id.0)
+        format!("fn_{:?}", fn_def_id.0)
     }
     fn coroutine_datum(
         &self,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
index fb604569f43..b3c604015a0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs
@@ -1,6 +1,6 @@
 //! Constant evaluation details
 
-use base_db::{ra_salsa::Cycle, CrateId};
+use base_db::CrateId;
 use chalk_ir::{cast::Cast, BoundVar, DebruijnIndex};
 use hir_def::{
     expr_store::{Body, HygieneId},
@@ -11,14 +11,19 @@ use hir_def::{
     ConstBlockLoc, EnumVariantId, GeneralConstId, HasModule as _, StaticId,
 };
 use hir_expand::Lookup;
+use salsa::Cycle;
 use stdx::never;
 use triomphe::Arc;
 
 use crate::{
-    db::HirDatabase, display::DisplayTarget, generics::Generics, infer::InferenceContext,
-    lower::ParamLoweringMode, mir::monomorphize_mir_body_bad, to_placeholder_idx, Const, ConstData,
-    ConstScalar, ConstValue, GenericArg, Interner, MemoryMap, Substitution, TraitEnvironment, Ty,
-    TyBuilder,
+    db::{HirDatabase, HirDatabaseData},
+    display::DisplayTarget,
+    generics::Generics,
+    infer::InferenceContext,
+    lower::ParamLoweringMode,
+    mir::monomorphize_mir_body_bad,
+    to_placeholder_idx, Const, ConstData, ConstScalar, ConstValue, GenericArg, Interner, MemoryMap,
+    Substitution, TraitEnvironment, Ty, TyBuilder,
 };
 
 use super::mir::{interpret_mir, lower_to_mir, pad16, MirEvalError, MirLowerError};
@@ -224,9 +229,10 @@ pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
 pub(crate) fn const_eval_recover(
     _: &dyn HirDatabase,
     _: &Cycle,
-    _: &GeneralConstId,
-    _: &Substitution,
-    _: &Option<Arc<TraitEnvironment>>,
+    _: HirDatabaseData,
+    _: GeneralConstId,
+    _: Substitution,
+    _: Option<Arc<TraitEnvironment>>,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
@@ -234,7 +240,7 @@ pub(crate) fn const_eval_recover(
 pub(crate) fn const_eval_static_recover(
     _: &dyn HirDatabase,
     _: &Cycle,
-    _: &StaticId,
+    _: StaticId,
 ) -> Result<Const, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
@@ -242,7 +248,7 @@ pub(crate) fn const_eval_static_recover(
 pub(crate) fn const_eval_discriminant_recover(
     _: &dyn HirDatabase,
     _: &Cycle,
-    _: &EnumVariantId,
+    _: EnumVariantId,
 ) -> Result<i128, ConstEvalError> {
     Err(ConstEvalError::MirLowerError(MirLowerError::Loop))
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
index 26a3b702297..f2673dc58fa 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs
@@ -1,4 +1,4 @@
-use base_db::SourceDatabase;
+use base_db::RootQueryDb;
 use chalk_ir::Substitution;
 use hir_def::db::DefDatabase;
 use rustc_apfloat::{
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 76031491d9a..5817ed2ef20 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -3,11 +3,7 @@
 
 use std::sync;
 
-use base_db::{
-    impl_intern_key,
-    ra_salsa::{self, InternValueTrivial},
-    CrateId, Upcast,
-};
+use base_db::{impl_intern_key, CrateId, Upcast};
 use hir_def::{
     db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId,
     ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
@@ -15,6 +11,7 @@ use hir_def::{
 };
 use hir_expand::name::Name;
 use la_arena::ArenaMap;
+use salsa::plumbing::AsId;
 use smallvec::SmallVec;
 use triomphe::Arc;
 
@@ -31,22 +28,22 @@ use crate::{
     PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
 };
 
-#[ra_salsa::query_group(HirDatabaseStorage)]
-pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
-    #[ra_salsa::invoke(crate::infer::infer_query)]
+#[query_group::query_group]
+pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> + std::fmt::Debug {
+    #[salsa::invoke_actual(crate::infer::infer_query)]
     fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
 
     // region:mir
 
-    #[ra_salsa::invoke(crate::mir::mir_body_query)]
-    #[ra_salsa::cycle(crate::mir::mir_body_recover)]
+    #[salsa::invoke_actual(crate::mir::mir_body_query)]
+    #[salsa::cycle(crate::mir::mir_body_recover)]
     fn mir_body(&self, def: DefWithBodyId) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::mir_body_for_closure_query)]
+    #[salsa::invoke(crate::mir::mir_body_for_closure_query)]
     fn mir_body_for_closure(&self, def: ClosureId) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_query)]
-    #[ra_salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
+    #[salsa::invoke(crate::mir::monomorphized_mir_body_query)]
+    #[salsa::cycle(crate::mir::monomorphized_mir_body_recover)]
     fn monomorphized_mir_body(
         &self,
         def: DefWithBodyId,
@@ -54,7 +51,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
+    #[salsa::invoke(crate::mir::monomorphized_mir_body_for_closure_query)]
     fn monomorphized_mir_body_for_closure(
         &self,
         def: ClosureId,
@@ -62,12 +59,12 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<MirBody>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::mir::borrowck_query)]
-    #[ra_salsa::lru]
+    #[salsa::invoke(crate::mir::borrowck_query)]
+    #[salsa::lru(2024)]
     fn borrowck(&self, def: DefWithBodyId) -> Result<Arc<[BorrowckResult]>, MirLowerError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_query)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_recover)]
+    #[salsa::invoke(crate::consteval::const_eval_query)]
+    #[salsa::cycle(crate::consteval::const_eval_recover)]
     fn const_eval(
         &self,
         def: GeneralConstId,
@@ -75,15 +72,15 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         trait_env: Option<Arc<TraitEnvironment>>,
     ) -> Result<Const, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_static_query)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_static_recover)]
+    #[salsa::invoke_actual(crate::consteval::const_eval_static_query)]
+    #[salsa::cycle(crate::consteval::const_eval_static_recover)]
     fn const_eval_static(&self, def: StaticId) -> Result<Const, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::consteval::const_eval_discriminant_variant)]
-    #[ra_salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
+    #[salsa::invoke_actual(crate::consteval::const_eval_discriminant_variant)]
+    #[salsa::cycle(crate::consteval::const_eval_discriminant_recover)]
     fn const_eval_discriminant(&self, def: EnumVariantId) -> Result<i128, ConstEvalError>;
 
-    #[ra_salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
+    #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)]
     fn lookup_impl_method(
         &self,
         env: Arc<TraitEnvironment>,
@@ -93,8 +90,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 
     // endregion:mir
 
-    #[ra_salsa::invoke(crate::layout::layout_of_adt_query)]
-    #[ra_salsa::cycle(crate::layout::layout_of_adt_recover)]
+    #[salsa::invoke(crate::layout::layout_of_adt_query)]
+    #[salsa::cycle(crate::layout::layout_of_adt_recover)]
     fn layout_of_adt(
         &self,
         def: AdtId,
@@ -102,63 +99,67 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: Arc<TraitEnvironment>,
     ) -> Result<Arc<Layout>, LayoutError>;
 
-    #[ra_salsa::invoke(crate::layout::layout_of_ty_query)]
-    #[ra_salsa::cycle(crate::layout::layout_of_ty_recover)]
+    #[salsa::invoke(crate::layout::layout_of_ty_query)]
+    #[salsa::cycle(crate::layout::layout_of_ty_recover)]
     fn layout_of_ty(&self, ty: Ty, env: Arc<TraitEnvironment>) -> Result<Arc<Layout>, LayoutError>;
 
-    #[ra_salsa::invoke(crate::layout::target_data_layout_query)]
+    #[salsa::invoke(crate::layout::target_data_layout_query)]
     fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
 
-    #[ra_salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+    #[salsa::invoke_actual(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
     fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
 
-    #[ra_salsa::invoke(crate::lower::ty_query)]
-    #[ra_salsa::cycle(crate::lower::ty_recover)]
+    #[salsa::invoke(crate::lower::ty_query)]
+    #[salsa::cycle(crate::lower::ty_recover)]
     fn ty(&self, def: TyDefId) -> Binders<Ty>;
 
-    #[ra_salsa::invoke(crate::lower::type_for_type_alias_with_diagnostics_query)]
+    #[salsa::invoke_actual(crate::lower::type_for_type_alias_with_diagnostics_query)]
     fn type_for_type_alias_with_diagnostics(&self, def: TypeAliasId) -> (Binders<Ty>, Diagnostics);
 
     /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is
     /// a `StructId` or `EnumVariantId` with a record constructor.
-    #[ra_salsa::invoke(crate::lower::value_ty_query)]
+    #[salsa::invoke_actual(crate::lower::value_ty_query)]
     fn value_ty(&self, def: ValueTyDefId) -> Option<Binders<Ty>>;
 
-    #[ra_salsa::invoke(crate::lower::impl_self_ty_with_diagnostics_query)]
-    #[ra_salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
+    #[salsa::invoke_actual(crate::lower::impl_self_ty_with_diagnostics_query)]
+    #[salsa::cycle(crate::lower::impl_self_ty_with_diagnostics_recover)]
     fn impl_self_ty_with_diagnostics(&self, def: ImplId) -> (Binders<Ty>, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::impl_self_ty_query)]
+
+    #[salsa::invoke_actual(crate::lower::impl_self_ty_query)]
     fn impl_self_ty(&self, def: ImplId) -> Binders<Ty>;
 
-    #[ra_salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
+    #[salsa::invoke(crate::lower::const_param_ty_with_diagnostics_query)]
     fn const_param_ty_with_diagnostics(&self, def: ConstParamId) -> (Ty, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::const_param_ty_query)]
+
+    #[salsa::invoke(crate::lower::const_param_ty_query)]
     fn const_param_ty(&self, def: ConstParamId) -> Ty;
 
-    #[ra_salsa::invoke(crate::lower::impl_trait_with_diagnostics_query)]
+    #[salsa::invoke_actual(crate::lower::impl_trait_with_diagnostics_query)]
     fn impl_trait_with_diagnostics(&self, def: ImplId) -> Option<(Binders<TraitRef>, Diagnostics)>;
-    #[ra_salsa::invoke(crate::lower::impl_trait_query)]
+
+    #[salsa::invoke_actual(crate::lower::impl_trait_query)]
     fn impl_trait(&self, def: ImplId) -> Option<Binders<TraitRef>>;
 
-    #[ra_salsa::invoke(crate::lower::field_types_with_diagnostics_query)]
+    #[salsa::invoke_actual(crate::lower::field_types_with_diagnostics_query)]
     fn field_types_with_diagnostics(
         &self,
         var: VariantId,
     ) -> (Arc<ArenaMap<LocalFieldId, Binders<Ty>>>, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::field_types_query)]
+
+    #[salsa::invoke_actual(crate::lower::field_types_query)]
     fn field_types(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>>;
 
-    #[ra_salsa::invoke(crate::lower::callable_item_sig)]
+    #[salsa::invoke_actual(crate::lower::callable_item_sig)]
     fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
 
-    #[ra_salsa::invoke(crate::lower::return_type_impl_traits)]
+    #[salsa::invoke_actual(crate::lower::return_type_impl_traits)]
     fn return_type_impl_traits(&self, def: FunctionId) -> Option<Arc<Binders<ImplTraits>>>;
 
-    #[ra_salsa::invoke(crate::lower::type_alias_impl_traits)]
+    #[salsa::invoke_actual(crate::lower::type_alias_impl_traits)]
     fn type_alias_impl_traits(&self, def: TypeAliasId) -> Option<Arc<Binders<ImplTraits>>>;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_for_param_query)]
-    #[ra_salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
+    #[salsa::invoke(crate::lower::generic_predicates_for_param_query)]
+    #[salsa::cycle(crate::lower::generic_predicates_for_param_recover)]
     fn generic_predicates_for_param(
         &self,
         def: GenericDefId,
@@ -166,132 +167,139 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         assoc_name: Option<Name>,
     ) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_query)]
+    #[salsa::invoke_actual(crate::lower::generic_predicates_query)]
     fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
+    #[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_with_diagnostics_query)]
     fn generic_predicates_without_parent_with_diagnostics(
         &self,
         def: GenericDefId,
     ) -> (GenericPredicates, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
+
+    #[salsa::invoke_actual(crate::lower::generic_predicates_without_parent_query)]
     fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
 
-    #[ra_salsa::invoke(crate::lower::trait_environment_for_body_query)]
-    #[ra_salsa::transparent]
+    #[salsa::invoke_actual(crate::lower::trait_environment_for_body_query)]
+    #[salsa::transparent]
     fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;
 
-    #[ra_salsa::invoke(crate::lower::trait_environment_query)]
+    #[salsa::invoke_actual(crate::lower::trait_environment_query)]
     fn trait_environment(&self, def: GenericDefId) -> Arc<TraitEnvironment>;
 
-    #[ra_salsa::invoke(crate::lower::generic_defaults_with_diagnostics_query)]
-    #[ra_salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
+    #[salsa::invoke_actual(crate::lower::generic_defaults_with_diagnostics_query)]
+    #[salsa::cycle(crate::lower::generic_defaults_with_diagnostics_recover)]
     fn generic_defaults_with_diagnostics(
         &self,
         def: GenericDefId,
     ) -> (GenericDefaults, Diagnostics);
-    #[ra_salsa::invoke(crate::lower::generic_defaults_query)]
+
+    #[salsa::invoke_actual(crate::lower::generic_defaults_query)]
     fn generic_defaults(&self, def: GenericDefId) -> GenericDefaults;
 
-    #[ra_salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
+    #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)]
     fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>;
 
-    #[ra_salsa::invoke(InherentImpls::inherent_impls_in_block_query)]
+    #[salsa::invoke_actual(InherentImpls::inherent_impls_in_block_query)]
     fn inherent_impls_in_block(&self, block: BlockId) -> Option<Arc<InherentImpls>>;
 
     /// Collects all crates in the dependency graph that have impls for the
     /// given fingerprint. This is only used for primitive types and types
     /// annotated with `rustc_has_incoherent_inherent_impls`; for other types
     /// we just look at the crate where the type is defined.
-    #[ra_salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
+    #[salsa::invoke(crate::method_resolution::incoherent_inherent_impl_crates)]
     fn incoherent_inherent_impl_crates(
         &self,
         krate: CrateId,
         fp: TyFingerprint,
     ) -> SmallVec<[CrateId; 2]>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
+    #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)]
     fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_block_query)]
+    #[salsa::invoke_actual(TraitImpls::trait_impls_in_block_query)]
     fn trait_impls_in_block(&self, block: BlockId) -> Option<Arc<TraitImpls>>;
 
-    #[ra_salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
+    #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)]
     fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<[Arc<TraitImpls>]>;
 
     // Interned IDs for Chalk integration
-    #[ra_salsa::interned]
+    #[salsa::interned]
     fn intern_callable_def(&self, callable_def: CallableDefId) -> InternedCallableDefId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_type_or_const_param_id(
         &self,
         param_id: TypeOrConstParamId,
     ) -> InternedTypeOrConstParamId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_lifetime_param_id(&self, param_id: LifetimeParamId) -> InternedLifetimeParamId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_impl_trait_id(&self, id: ImplTraitId) -> InternedOpaqueTyId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_closure(&self, id: InternedClosure) -> InternedClosureId;
-    #[ra_salsa::interned]
+
+    #[salsa::interned]
     fn intern_coroutine(&self, id: InternedCoroutine) -> InternedCoroutineId;
 
-    #[ra_salsa::invoke(chalk_db::associated_ty_data_query)]
+    #[salsa::invoke(chalk_db::associated_ty_data_query)]
     fn associated_ty_data(
         &self,
         id: chalk_db::AssocTypeId,
     ) -> sync::Arc<chalk_db::AssociatedTyDatum>;
 
-    #[ra_salsa::invoke(chalk_db::trait_datum_query)]
+    #[salsa::invoke(chalk_db::trait_datum_query)]
     fn trait_datum(
         &self,
         krate: CrateId,
         trait_id: chalk_db::TraitId,
     ) -> sync::Arc<chalk_db::TraitDatum>;
 
-    #[ra_salsa::invoke(chalk_db::adt_datum_query)]
+    #[salsa::invoke(chalk_db::adt_datum_query)]
     fn adt_datum(
         &self,
         krate: CrateId,
         struct_id: chalk_db::AdtId,
     ) -> sync::Arc<chalk_db::AdtDatum>;
 
-    #[ra_salsa::invoke(chalk_db::impl_datum_query)]
+    #[salsa::invoke(chalk_db::impl_datum_query)]
     fn impl_datum(
         &self,
         krate: CrateId,
         impl_id: chalk_db::ImplId,
     ) -> sync::Arc<chalk_db::ImplDatum>;
 
-    #[ra_salsa::invoke(chalk_db::fn_def_datum_query)]
+    #[salsa::invoke(chalk_db::fn_def_datum_query)]
     fn fn_def_datum(&self, fn_def_id: FnDefId) -> sync::Arc<chalk_db::FnDefDatum>;
 
-    #[ra_salsa::invoke(chalk_db::fn_def_variance_query)]
+    #[salsa::invoke(chalk_db::fn_def_variance_query)]
     fn fn_def_variance(&self, fn_def_id: FnDefId) -> chalk_db::Variances;
 
-    #[ra_salsa::invoke(chalk_db::adt_variance_query)]
+    #[salsa::invoke(chalk_db::adt_variance_query)]
     fn adt_variance(&self, adt_id: chalk_db::AdtId) -> chalk_db::Variances;
 
-    #[ra_salsa::invoke(crate::variance::variances_of)]
-    #[ra_salsa::cycle(crate::variance::variances_of_cycle)]
+    #[salsa::invoke_actual(crate::variance::variances_of)]
+    #[salsa::cycle(crate::variance::variances_of_cycle)]
     fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>;
 
-    #[ra_salsa::invoke(chalk_db::associated_ty_value_query)]
+    #[salsa::invoke(chalk_db::associated_ty_value_query)]
     fn associated_ty_value(
         &self,
         krate: CrateId,
         id: chalk_db::AssociatedTyValueId,
     ) -> sync::Arc<chalk_db::AssociatedTyValue>;
 
-    #[ra_salsa::invoke(crate::traits::normalize_projection_query)]
-    #[ra_salsa::transparent]
+    #[salsa::invoke(crate::traits::normalize_projection_query)]
+    #[salsa::transparent]
     fn normalize_projection(
         &self,
         projection: crate::ProjectionTy,
         env: Arc<TraitEnvironment>,
     ) -> Ty;
 
-    #[ra_salsa::invoke(crate::traits::trait_solve_query)]
+    #[salsa::invoke(crate::traits::trait_solve_query)]
     fn trait_solve(
         &self,
         krate: CrateId,
@@ -299,7 +307,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         goal: crate::Canonical<crate::InEnvironment<crate::Goal>>,
     ) -> Option<crate::Solution>;
 
-    #[ra_salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
+    #[salsa::invoke(chalk_db::program_clauses_for_chalk_env_query)]
     fn program_clauses_for_chalk_env(
         &self,
         krate: CrateId,
@@ -307,9 +315,9 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         env: chalk_ir::Environment<Interner>,
     ) -> chalk_ir::ProgramClauses<Interner>;
 
-    #[ra_salsa::invoke(crate::drop::has_drop_glue)]
-    #[ra_salsa::cycle(crate::drop::has_drop_glue_recover)]
-    fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue {}
+    #[salsa::invoke(crate::drop::has_drop_glue)]
+    #[salsa::cycle(crate::drop::has_drop_glue_recover)]
+    fn has_drop_glue(&self, ty: Ty, env: Arc<TraitEnvironment>) -> DropGlue;
 }
 
 #[test]
@@ -317,41 +325,22 @@ fn hir_database_is_dyn_compatible() {
     fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedTypeOrConstParamId(ra_salsa::InternId);
-impl_intern_key!(InternedTypeOrConstParamId);
+impl_intern_key!(InternedTypeOrConstParamId, TypeOrConstParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedLifetimeParamId(ra_salsa::InternId);
-impl_intern_key!(InternedLifetimeParamId);
+impl_intern_key!(InternedLifetimeParamId, LifetimeParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedConstParamId(ra_salsa::InternId);
-impl_intern_key!(InternedConstParamId);
+impl_intern_key!(InternedConstParamId, ConstParamId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedOpaqueTyId(ra_salsa::InternId);
-impl_intern_key!(InternedOpaqueTyId);
+impl_intern_key!(InternedOpaqueTyId, ImplTraitId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedClosureId(ra_salsa::InternId);
-impl_intern_key!(InternedClosureId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct InternedClosure(pub DefWithBodyId, pub ExprId);
+impl_intern_key!(InternedClosureId, InternedClosure);
 
-impl InternValueTrivial for InternedClosure {}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct InternedCoroutineId(ra_salsa::InternId);
-impl_intern_key!(InternedCoroutineId);
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 pub struct InternedCoroutine(pub DefWithBodyId, pub ExprId);
-impl InternValueTrivial for InternedCoroutine {}
+impl_intern_key!(InternedCoroutineId, InternedCoroutine);
 
-/// This exists just for Chalk, because Chalk just has a single `FnDefId` where
-/// we have different IDs for struct and enum variant constructors.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
-pub struct InternedCallableDefId(ra_salsa::InternId);
-impl_intern_key!(InternedCallableDefId);
+// This exists just for Chalk, because Chalk just has a single `FnDefId` where
+// we have different IDs for struct and enum variant constructors.
+impl_intern_key!(InternedCallableDefId, CallableDefId);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 774991560e9..eed74e1eee3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -251,7 +251,7 @@ impl<'a> DeclValidator<'a> {
             return;
         }
 
-        let (_, source_map) = self.db.body_with_source_map(func.into());
+        let source_map = self.db.body_with_source_map(func.into()).1;
         for (id, replacement) in pats_replacements {
             let Ok(source_ptr) = source_map.pat_syntax(id) else {
                 continue;
@@ -597,7 +597,7 @@ impl<'a> DeclValidator<'a> {
     ) where
         N: AstNode + HasName + fmt::Debug,
         S: HasSource<Value = N>,
-        L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a> + HasModule + Copy,
+        L: Lookup<Data = S, Database = dyn DefDatabase> + HasModule + Copy,
     {
         let to_expected_case_type = match expected_case {
             CaseType::LowerSnakeCase => to_lower_snake_case,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index cc6f4d9e52e..975143b29f2 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -434,7 +434,7 @@ impl ExprValidator {
                     let last_then_expr_ty = &self.infer[last_then_expr];
                     if last_then_expr_ty.is_never() {
                         // Only look at sources if the then branch diverges and we have an else branch.
-                        let (_, source_map) = db.body_with_source_map(self.owner);
+                        let source_map = db.body_with_source_map(self.owner).1;
                         let Ok(source_ptr) = source_map.expr_syntax(id) else {
                             return;
                         };
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
index 351926c86c4..4ad16cf8bcf 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/drop.rs
@@ -1,6 +1,5 @@
 //! Utilities for computing drop info about types.
 
-use base_db::ra_salsa;
 use chalk_ir::cast::Cast;
 use hir_def::data::adt::StructFlags;
 use hir_def::lang_item::LangItem;
@@ -8,6 +7,7 @@ use hir_def::AdtId;
 use stdx::never;
 use triomphe::Arc;
 
+use crate::db::HirDatabaseData;
 use crate::{
     db::HirDatabase, method_resolution::TyFingerprint, AliasTy, Canonical, CanonicalVarKinds,
     InEnvironment, Interner, ProjectionTy, TraitEnvironment, Ty, TyBuilder, TyKind,
@@ -201,9 +201,10 @@ fn is_copy(db: &dyn HirDatabase, ty: Ty, env: Arc<TraitEnvironment>) -> bool {
 
 pub(crate) fn has_drop_glue_recover(
     _db: &dyn HirDatabase,
-    _cycle: &ra_salsa::Cycle,
-    _ty: &Ty,
-    _env: &Arc<TraitEnvironment>,
+    _cycle: &salsa::Cycle,
+    _: HirDatabaseData,
+    _ty: Ty,
+    _env: Arc<TraitEnvironment>,
 ) -> DropGlue {
     DropGlue::None
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 80e3ca1fa28..131e98d7296 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -289,7 +289,7 @@ impl InferenceContext<'_> {
         expected: &Expectation,
         is_read: ExprIsRead,
     ) -> Ty {
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
 
         let ty = match &self.body[tgt_expr] {
             Expr::Missing => self.err_ty(),
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
index 804c3aea3a5..832a00e1e5e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -8,7 +8,6 @@ use crate::{
     ProgramClauseData, ProgramClauses, ProjectionTy, QuantifiedWhereClause, QuantifiedWhereClauses,
     Substitution, Ty, TyData, TyKind, VariableKind, VariableKinds,
 };
-use base_db::ra_salsa::InternId;
 use chalk_ir::{ProgramClauseImplication, SeparatorTraitRef, Variance};
 use hir_def::TypeAliasId;
 use intern::{impl_internable, Interned};
@@ -68,7 +67,7 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedCanonicalVarKinds = Interned<InternedWrapper<Vec<CanonicalVarKind>>>;
     type InternedConstraints = Vec<InEnvironment<Constraint>>;
     type InternedVariances = SmallVec<[Variance; 16]>;
-    type DefId = InternId;
+    type DefId = salsa::Id;
     type InternedAdtId = hir_def::AdtId;
     type Identifier = TypeAliasId;
     type FnAbi = FnAbi;
@@ -98,7 +97,7 @@ impl chalk_ir::interner::Interner for Interner {
         opaque_ty_id: OpaqueTyId,
         fmt: &mut fmt::Formatter<'_>,
     ) -> Option<fmt::Result> {
-        Some(write!(fmt, "OpaqueTy#{}", opaque_ty_id.0))
+        Some(write!(fmt, "OpaqueTy#{:?}", opaque_ty_id.0))
     }
 
     fn debug_fn_def_id(fn_def_id: FnDefId, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index bbd419d9659..167dcec3bb3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -2,7 +2,6 @@
 
 use std::fmt;
 
-use base_db::ra_salsa::Cycle;
 use chalk_ir::{AdtId, FloatTy, IntTy, TyKind, UintTy};
 use hir_def::{
     layout::{
@@ -14,21 +13,20 @@ use hir_def::{
 use la_arena::{Idx, RawIdx};
 use rustc_abi::AddressSpace;
 use rustc_index::IndexVec;
+use salsa::Cycle;
 
 use triomphe::Arc;
 
 use crate::{
     consteval::try_const_usize,
-    db::{HirDatabase, InternedClosure},
+    db::{HirDatabase, HirDatabaseData, InternedClosure},
     infer::normalize,
     utils::ClosureSubst,
     Interner, ProjectionTy, Substitution, TraitEnvironment, Ty,
 };
 
-pub use self::{
-    adt::{layout_of_adt_query, layout_of_adt_recover},
-    target::target_data_layout_query,
-};
+pub(crate) use self::adt::layout_of_adt_recover;
+pub use self::{adt::layout_of_adt_query, target::target_data_layout_query};
 
 mod adt;
 mod target;
@@ -367,11 +365,12 @@ pub fn layout_of_ty_query(
     Ok(Arc::new(result))
 }
 
-pub fn layout_of_ty_recover(
+pub(crate) fn layout_of_ty_recover(
     _: &dyn HirDatabase,
     _: &Cycle,
-    _: &Ty,
-    _: &Arc<TraitEnvironment>,
+    _: HirDatabaseData,
+    _: Ty,
+    _: Arc<TraitEnvironment>,
 ) -> Result<Arc<Layout>, LayoutError> {
     Err(LayoutError::RecursiveTypeWithoutIndirection)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index eb4729fab84..ab9c07779c0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -2,7 +2,6 @@
 
 use std::{cmp, ops::Bound};
 
-use base_db::ra_salsa::Cycle;
 use hir_def::{
     data::adt::VariantData,
     layout::{Integer, ReprOptions, TargetDataLayout},
@@ -10,6 +9,7 @@ use hir_def::{
 };
 use intern::sym;
 use rustc_index::IndexVec;
+use salsa::Cycle;
 use smallvec::SmallVec;
 use triomphe::Arc;
 
@@ -20,7 +20,7 @@ use crate::{
     Substitution, TraitEnvironment,
 };
 
-use super::LayoutCx;
+use super::{HirDatabaseData, LayoutCx};
 
 pub fn layout_of_adt_query(
     db: &dyn HirDatabase,
@@ -131,12 +131,13 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>,
     )
 }
 
-pub fn layout_of_adt_recover(
+pub(crate) fn layout_of_adt_recover(
     _: &dyn HirDatabase,
     _: &Cycle,
-    _: &AdtId,
-    _: &Substitution,
-    _: &Arc<TraitEnvironment>,
+    _: HirDatabaseData,
+    _: AdtId,
+    _: Substitution,
+    _: Arc<TraitEnvironment>,
 ) -> Result<Arc<Layout>, LayoutError> {
     Err(LayoutError::RecursiveTypeWithoutIndirection)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 624767cedf8..f35298846a8 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -56,7 +56,6 @@ mod variance;
 
 use std::hash::Hash;
 
-use base_db::ra_salsa::InternValueTrivial;
 use chalk_ir::{
     fold::{Shift, TypeFoldable},
     interner::HasInterner,
@@ -610,7 +609,6 @@ pub enum ImplTraitId {
     TypeAliasImplTrait(hir_def::TypeAliasId, ImplTraitIdx),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
-impl InternValueTrivial for ImplTraitId {}
 
 #[derive(PartialEq, Eq, Debug, Hash)]
 pub struct ImplTraits {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index af73b5ed9a7..ff7f0349638 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -14,7 +14,7 @@ use std::{
     ops::{self, Not as _},
 };
 
-use base_db::{ra_salsa::Cycle, CrateId};
+use base_db::CrateId;
 use chalk_ir::{
     cast::Cast,
     fold::{Shift, TypeFoldable},
@@ -47,6 +47,7 @@ use hir_expand::{name::Name, ExpandResult};
 use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashSet;
 use rustc_pattern_analysis::Captures;
+use salsa::Cycle;
 use stdx::{impl_from, never};
 use syntax::ast;
 use triomphe::{Arc, ThinArc};
@@ -57,7 +58,7 @@ use crate::{
         intern_const_ref, intern_const_scalar, path_to_const, unknown_const,
         unknown_const_as_generic,
     },
-    db::HirDatabase,
+    db::{HirDatabase, HirDatabaseData},
     error_lifetime,
     generics::{generics, trait_self_param_idx, Generics},
     lower::{
@@ -1111,10 +1112,11 @@ pub(crate) fn generic_predicates_for_param_query(
 
 pub(crate) fn generic_predicates_for_param_recover(
     _db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    _def: &GenericDefId,
-    _param_id: &TypeOrConstParamId,
-    _assoc_name: &Option<Name>,
+    _cycle: &salsa::Cycle,
+    _: HirDatabaseData,
+    _def: GenericDefId,
+    _param_id: TypeOrConstParamId,
+    _assoc_name: Option<Name>,
 ) -> GenericPredicates {
     GenericPredicates(None)
 }
@@ -1271,6 +1273,7 @@ where
             );
         };
     }
+
     (
         GenericPredicates(predicates.is_empty().not().then(|| predicates.into())),
         create_diagnostics(ctx.diagnostics),
@@ -1414,9 +1417,9 @@ pub(crate) fn generic_defaults_with_diagnostics_query(
 pub(crate) fn generic_defaults_with_diagnostics_recover(
     db: &dyn HirDatabase,
     _cycle: &Cycle,
-    def: &GenericDefId,
+    def: GenericDefId,
 ) -> (GenericDefaults, Diagnostics) {
-    let generic_params = generics(db.upcast(), *def);
+    let generic_params = generics(db.upcast(), def);
     if generic_params.len() == 0 {
         return (GenericDefaults(None), None);
     }
@@ -1591,6 +1594,7 @@ pub(crate) fn type_for_type_alias_with_diagnostics_query(
             .map(|type_ref| ctx.lower_ty(type_ref))
             .unwrap_or_else(|| TyKind::Error.intern(Interner))
     };
+
     (make_binders(db, &generics, inner), create_diagnostics(ctx.diagnostics))
 }
 
@@ -1602,7 +1606,7 @@ pub enum TyDefId {
 }
 impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Supertype)]
 pub enum ValueTyDefId {
     FunctionId(FunctionId),
     StructId(StructId),
@@ -1638,8 +1642,13 @@ pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
     }
 }
 
-pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &Cycle, def: &TyDefId) -> Binders<Ty> {
-    let generics = match *def {
+pub(crate) fn ty_recover(
+    db: &dyn HirDatabase,
+    _cycle: &salsa::Cycle,
+    _: HirDatabaseData,
+    def: TyDefId,
+) -> Binders<Ty> {
+    let generics = match def {
         TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
         TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
         TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
@@ -1703,10 +1712,10 @@ pub(crate) fn const_param_ty_with_diagnostics_query(
 
 pub(crate) fn impl_self_ty_with_diagnostics_recover(
     db: &dyn HirDatabase,
-    _cycle: &Cycle,
-    impl_id: &ImplId,
+    _cycle: &salsa::Cycle,
+    impl_id: ImplId,
 ) -> (Binders<Ty>, Diagnostics) {
-    let generics = generics(db.upcast(), (*impl_id).into());
+    let generics = generics(db.upcast(), (impl_id).into());
     (make_binders(db, &generics, TyKind::Error.intern(Interner)), None)
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
index 2f38e8fa14c..cfa2a49b79c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mapping.rs
@@ -5,8 +5,11 @@
 
 use chalk_solve::rust_ir;
 
-use base_db::ra_salsa::{self, InternKey};
 use hir_def::{LifetimeParamId, TraitId, TypeAliasId, TypeOrConstParamId};
+use salsa::{
+    plumbing::{AsId, FromId},
+    Id,
+};
 
 use crate::{
     chalk_db, db::HirDatabase, AssocTypeId, CallableDefId, ChalkTraitId, FnDefId, ForeignDefId,
@@ -30,11 +33,11 @@ impl ToChalk for hir_def::ImplId {
     type Chalk = chalk_db::ImplId;
 
     fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::ImplId {
-        chalk_ir::ImplId(self.as_intern_id())
+        chalk_ir::ImplId(self.as_id())
     }
 
     fn from_chalk(_db: &dyn HirDatabase, impl_id: chalk_db::ImplId) -> hir_def::ImplId {
-        InternKey::from_intern_id(impl_id.0)
+        FromId::from_id(impl_id.0.as_id())
     }
 }
 
@@ -56,84 +59,84 @@ impl ToChalk for TypeAliasAsValue {
     type Chalk = chalk_db::AssociatedTyValueId;
 
     fn to_chalk(self, _db: &dyn HirDatabase) -> chalk_db::AssociatedTyValueId {
-        rust_ir::AssociatedTyValueId(self.0.as_intern_id())
+        rust_ir::AssociatedTyValueId(self.0.as_id())
     }
 
     fn from_chalk(
         _db: &dyn HirDatabase,
         assoc_ty_value_id: chalk_db::AssociatedTyValueId,
     ) -> TypeAliasAsValue {
-        TypeAliasAsValue(TypeAliasId::from_intern_id(assoc_ty_value_id.0))
+        TypeAliasAsValue(TypeAliasId::from_id(assoc_ty_value_id.0))
     }
 }
 
 impl From<FnDefId> for crate::db::InternedCallableDefId {
     fn from(fn_def_id: FnDefId) -> Self {
-        InternKey::from_intern_id(fn_def_id.0)
+        Self::from_id(fn_def_id.0)
     }
 }
 
 impl From<crate::db::InternedCallableDefId> for FnDefId {
     fn from(callable_def_id: crate::db::InternedCallableDefId) -> Self {
-        chalk_ir::FnDefId(callable_def_id.as_intern_id())
+        chalk_ir::FnDefId(callable_def_id.as_id())
     }
 }
 
 impl From<OpaqueTyId> for crate::db::InternedOpaqueTyId {
     fn from(id: OpaqueTyId) -> Self {
-        InternKey::from_intern_id(id.0)
+        FromId::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedOpaqueTyId> for OpaqueTyId {
     fn from(id: crate::db::InternedOpaqueTyId) -> Self {
-        chalk_ir::OpaqueTyId(id.as_intern_id())
+        chalk_ir::OpaqueTyId(id.as_id())
     }
 }
 
 impl From<chalk_ir::ClosureId<Interner>> for crate::db::InternedClosureId {
     fn from(id: chalk_ir::ClosureId<Interner>) -> Self {
-        Self::from_intern_id(id.0)
+        FromId::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedClosureId> for chalk_ir::ClosureId<Interner> {
     fn from(id: crate::db::InternedClosureId) -> Self {
-        chalk_ir::ClosureId(id.as_intern_id())
+        chalk_ir::ClosureId(id.as_id())
     }
 }
 
 impl From<chalk_ir::CoroutineId<Interner>> for crate::db::InternedCoroutineId {
     fn from(id: chalk_ir::CoroutineId<Interner>) -> Self {
-        Self::from_intern_id(id.0)
+        Self::from_id(id.0)
     }
 }
 
 impl From<crate::db::InternedCoroutineId> for chalk_ir::CoroutineId<Interner> {
     fn from(id: crate::db::InternedCoroutineId) -> Self {
-        chalk_ir::CoroutineId(id.as_intern_id())
+        chalk_ir::CoroutineId(id.as_id())
     }
 }
 
 pub fn to_foreign_def_id(id: TypeAliasId) -> ForeignDefId {
-    chalk_ir::ForeignDefId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::ForeignDefId(id.as_id())
 }
 
 pub fn from_foreign_def_id(id: ForeignDefId) -> TypeAliasId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
 
 pub fn to_assoc_type_id(id: TypeAliasId) -> AssocTypeId {
-    chalk_ir::AssocTypeId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::AssocTypeId(id.as_id())
 }
 
 pub fn from_assoc_type_id(id: AssocTypeId) -> TypeAliasId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
 
 pub fn from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> TypeOrConstParamId {
     assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
-    let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
+    let interned_id = FromId::from_id(Id::from_u32(idx.idx.try_into().unwrap()));
     db.lookup_intern_type_or_const_param_id(interned_id)
 }
 
@@ -141,13 +144,13 @@ pub fn to_placeholder_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Place
     let interned_id = db.intern_type_or_const_param_id(id);
     PlaceholderIndex {
         ui: chalk_ir::UniverseIndex::ROOT,
-        idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+        idx: interned_id.as_id().as_u32() as usize,
     }
 }
 
 pub fn lt_from_placeholder_idx(db: &dyn HirDatabase, idx: PlaceholderIndex) -> LifetimeParamId {
     assert_eq!(idx.ui, chalk_ir::UniverseIndex::ROOT);
-    let interned_id = ra_salsa::InternKey::from_intern_id(ra_salsa::InternId::from(idx.idx));
+    let interned_id = FromId::from_id(Id::from_u32(idx.idx.try_into().unwrap()));
     db.lookup_intern_lifetime_param_id(interned_id)
 }
 
@@ -155,14 +158,14 @@ pub fn lt_to_placeholder_idx(db: &dyn HirDatabase, id: LifetimeParamId) -> Place
     let interned_id = db.intern_lifetime_param_id(id);
     PlaceholderIndex {
         ui: chalk_ir::UniverseIndex::ROOT,
-        idx: ra_salsa::InternKey::as_intern_id(&interned_id).as_usize(),
+        idx: interned_id.as_id().as_u32() as usize,
     }
 }
 
 pub fn to_chalk_trait_id(id: TraitId) -> ChalkTraitId {
-    chalk_ir::TraitId(ra_salsa::InternKey::as_intern_id(&id))
+    chalk_ir::TraitId(id.as_id())
 }
 
 pub fn from_chalk_trait_id(id: ChalkTraitId) -> TraitId {
-    ra_salsa::InternKey::from_intern_id(id.0)
+    FromId::from_id(id.0)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index db94351dcc9..c5ad808accd 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -714,6 +714,7 @@ pub(crate) fn lookup_impl_method_query(
     else {
         return (func, fn_subst);
     };
+
     (
         impl_fn,
         Substitution::from_iter(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
index 56c431ef8da..ae454fbe528 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs
@@ -32,17 +32,18 @@ pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason};
 pub use eval::{
     interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap,
 };
-pub use lower::{
-    lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError,
-};
+pub use lower::{lower_to_mir, mir_body_for_closure_query, mir_body_query, MirLowerError};
 pub use monomorphization::{
     monomorphize_mir_body_bad, monomorphized_mir_body_for_closure_query,
-    monomorphized_mir_body_query, monomorphized_mir_body_recover,
+    monomorphized_mir_body_query,
 };
 use rustc_hash::FxHashMap;
 use smallvec::{smallvec, SmallVec};
 use stdx::{impl_from, never};
 
+pub(crate) use lower::mir_body_recover;
+pub(crate) use monomorphization::monomorphized_mir_body_recover;
+
 use super::consteval::{intern_const_scalar, try_const_usize};
 
 pub type BasicBlockId = Idx<BasicBlock>;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index fbcca388e78..fd1e724ee88 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -145,7 +145,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef>
         Operand::Constant(_) | Operand::Static(_) => (),
     };
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             match &statement.kind {
                 StatementKind::Assign(_, r) => match r {
@@ -235,7 +235,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
         Operand::Constant(_) | Operand::Static(_) => (),
     };
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             match &statement.kind {
                 StatementKind::Assign(_, r) => match r {
@@ -306,7 +306,7 @@ fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec<PartiallyMoved>
 fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec<BorrowRegion> {
     let mut borrows = FxHashMap::default();
     for (_, block) in body.basic_blocks.iter() {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         for statement in &block.statements {
             if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind {
                 borrows
@@ -477,7 +477,7 @@ fn ever_initialized_map(
         dfs(db, body, l, &mut stack, &mut result);
     }
     for l in body.locals.iter().map(|it| it.0) {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         if !result[body.start_block].contains_idx(l) {
             result[body.start_block].insert(l, false);
             stack.clear();
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index 74a34e29817..c9d62f566c1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -2558,6 +2558,7 @@ impl Evaluator<'_> {
         } else {
             let (imp, generic_args) =
                 self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args.clone());
+
             let mir_body = self
                 .db
                 .monomorphized_mir_body(imp.into(), generic_args, self.trait_env.clone())
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
index 520717e7995..17f1da0c9f3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs
@@ -2,7 +2,7 @@
 
 use std::{fmt::Write, iter, mem};
 
-use base_db::{ra_salsa::Cycle, CrateId};
+use base_db::{salsa::Cycle, CrateId};
 use chalk_ir::{BoundVar, ConstData, DebruijnIndex, TyKind};
 use hir_def::{
     data::adt::{StructKind, VariantData},
@@ -2149,10 +2149,10 @@ pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<Mi
     Ok(Arc::new(result))
 }
 
-pub fn mir_body_recover(
+pub(crate) fn mir_body_recover(
     _db: &dyn HirDatabase,
     _cycle: &Cycle,
-    _def: &DefWithBodyId,
+    _def: DefWithBodyId,
 ) -> Result<Arc<MirBody>> {
     Err(MirLowerError::Loop)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
index 92132fa0473..6d1e9a1ea19 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs
@@ -9,7 +9,6 @@
 
 use std::mem;
 
-use base_db::ra_salsa::Cycle;
 use chalk_ir::{
     fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable},
     ConstData, DebruijnIndex,
@@ -19,7 +18,7 @@ use triomphe::Arc;
 
 use crate::{
     consteval::{intern_const_scalar, unknown_const},
-    db::{HirDatabase, InternedClosure},
+    db::{HirDatabase, HirDatabaseData, InternedClosure},
     from_placeholder_idx,
     generics::{generics, Generics},
     infer::normalize,
@@ -314,12 +313,13 @@ pub fn monomorphized_mir_body_query(
     Ok(Arc::new(body))
 }
 
-pub fn monomorphized_mir_body_recover(
+pub(crate) fn monomorphized_mir_body_recover(
     _: &dyn HirDatabase,
-    _: &Cycle,
-    _: &DefWithBodyId,
-    _: &Substitution,
-    _: &Arc<crate::TraitEnvironment>,
+    _: &salsa::Cycle,
+    _: HirDatabaseData,
+    _: DefWithBodyId,
+    _: Substitution,
+    _: Arc<crate::TraitEnvironment>,
 ) -> Result<Arc<MirBody>, MirLowerError> {
     Err(MirLowerError::Loop)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
index f37dd91d8e9..b18a057ba0b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
@@ -3,34 +3,34 @@
 use std::{fmt, panic, sync::Mutex};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
+    FileSourceRootInput, FileText, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
+    SourceRootInput, Upcast,
 };
+
 use hir_def::{db::DefDatabase, ModuleId};
 use hir_expand::db::ExpandDatabase;
 use rustc_hash::FxHashMap;
+use salsa::{AsDynDatabase, Durability};
 use span::{EditionedFileId, FileId};
 use syntax::TextRange;
 use test_utils::extract_annotations;
 use triomphe::Arc;
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir_expand::db::ExpandDatabaseStorage,
-    hir_def::db::InternDatabaseStorage,
-    hir_def::db::DefDatabaseStorage,
-    crate::db::HirDatabaseStorage
-)]
+#[salsa::db]
+#[derive(Clone)]
 pub(crate) struct TestDB {
-    storage: ra_salsa::Storage<TestDB>,
-    events: Mutex<Option<Vec<ra_salsa::Event>>>,
+    storage: salsa::Storage<Self>,
+    files: Arc<base_db::Files>,
+    events: Arc<Mutex<Option<Vec<salsa::Event>>>>,
 }
 
 impl Default for TestDB {
     fn default() -> Self {
-        let mut this = Self { storage: Default::default(), events: Default::default() };
-        this.setup_syntax_context_root();
+        let mut this = Self {
+            storage: Default::default(),
+            events: Default::default(),
+            files: Default::default(),
+        };
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }
@@ -54,35 +54,81 @@ impl Upcast<dyn DefDatabase> for TestDB {
     }
 }
 
-impl ra_salsa::Database for TestDB {
-    fn salsa_event(&self, event: ra_salsa::Event) {
-        let mut events = self.events.lock().unwrap();
-        if let Some(events) = &mut *events {
-            events.push(event);
-        }
+impl Upcast<dyn RootQueryDb> for TestDB {
+    fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
+        self
     }
 }
 
-impl ra_salsa::ParallelDatabase for TestDB {
-    fn snapshot(&self) -> ra_salsa::Snapshot<TestDB> {
-        ra_salsa::Snapshot::new(TestDB {
-            storage: self.storage.snapshot(),
-            events: Default::default(),
-        })
+impl Upcast<dyn SourceDatabase> for TestDB {
+    fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
+        self
     }
 }
 
-impl panic::RefUnwindSafe for TestDB {}
+#[salsa::db]
+impl SourceDatabase for TestDB {
+    fn file_text(&self, file_id: base_db::FileId) -> FileText {
+        self.files.file_text(file_id)
+    }
 
-impl FileLoader for TestDB {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+    fn set_file_text(&mut self, file_id: base_db::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: base_db::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
+    }
+
+    /// Source root of the file.
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
+    }
+
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
+
+    fn file_source_root(&self, id: base_db::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
+    }
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: base_db::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
     }
 }
 
+#[salsa::db]
+impl salsa::Database for TestDB {
+    fn salsa_event(&self, event: &dyn std::ops::Fn() -> salsa::Event) {
+        let mut events = self.events.lock().unwrap();
+        if let Some(events) = &mut *events {
+            events.push(event());
+        }
+    }
+}
+
+impl panic::RefUnwindSafe for TestDB {}
+
 impl TestDB {
     pub(crate) fn module_for_file_opt(&self, file_id: impl Into<FileId>) -> Option<ModuleId> {
         let file_id = file_id.into();
@@ -117,7 +163,7 @@ impl TestDB {
             .into_iter()
             .filter_map(|file_id| {
                 let text = self.file_text(file_id.file_id());
-                let annotations = extract_annotations(&text);
+                let annotations = extract_annotations(&text.text(self));
                 if annotations.is_empty() {
                     return None;
                 }
@@ -128,7 +174,7 @@ impl TestDB {
 }
 
 impl TestDB {
-    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<ra_salsa::Event> {
+    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
         *self.events.lock().unwrap() = Some(Vec::new());
         f();
         self.events.lock().unwrap().take().unwrap()
@@ -141,8 +187,11 @@ impl TestDB {
             .filter_map(|e| match e.kind {
                 // This is pretty horrible, but `Debug` is the only way to inspect
                 // QueryDescriptor at the moment.
-                ra_salsa::EventKind::WillExecute { database_key } => {
-                    Some(format!("{:?}", database_key.debug(self)))
+                salsa::EventKind::WillExecute { database_key } => {
+                    let ingredient = self
+                        .as_dyn_database()
+                        .ingredient_debug_name(database_key.ingredient_index());
+                    Some(ingredient.to_string())
                 }
                 _ => None,
             })
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
index 81e38be2285..26229040582 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs
@@ -15,7 +15,7 @@ mod type_alias_impl_traits;
 use std::env;
 use std::sync::LazyLock;
 
-use base_db::{CrateId, SourceDatabaseFileInputExt as _};
+use base_db::{CrateId, SourceDatabase};
 use expect_test::Expect;
 use hir_def::{
     db::DefDatabase,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
index 6f7bfc4ea7a..efb1728d056 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/closure_captures.rs
@@ -1,8 +1,8 @@
-use base_db::ra_salsa::InternKey;
 use expect_test::{expect, Expect};
 use hir_def::db::DefDatabase;
 use hir_expand::files::InFileWrapper;
 use itertools::Itertools;
+use salsa::plumbing::FromId;
 use span::{HirFileId, TextRange};
 use syntax::{AstNode, AstPtr};
 use test_fixture::WithFixture;
@@ -34,8 +34,8 @@ fn check_closure_captures(#[rust_analyzer::rust_fixture] ra_fixture: &str, expec
         let infer = db.infer(def);
         let db = &db;
         captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
-            let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
-            let (_, source_map) = db.body_with_source_map(closure.0);
+            let closure = db.lookup_intern_closure(InternedClosureId::from_id(closure_id.0));
+            let source_map = db.body_with_source_map(closure.0).1;
             let closure_text_range = source_map
                 .expr_syntax(closure.1)
                 .expect("failed to map closure to SyntaxNode")
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
index 3757d722ac8..d54c6937bc3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/incremental.rs
@@ -1,4 +1,4 @@
-use base_db::SourceDatabaseFileInputExt as _;
+use base_db::SourceDatabase;
 use hir_def::ModuleDefId;
 use test_fixture::WithFixture;
 
@@ -25,7 +25,7 @@ fn foo() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").contains("infer"))
+        assert!(format!("{events:?}").contains("infer_shim"))
     }
 
     let new_text = "
@@ -47,7 +47,7 @@ fn foo() -> i32 {
                 }
             });
         });
-        assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
+        assert!(!format!("{events:?}").contains("infer_shim"), "{events:#?}")
     }
 }
 
@@ -76,7 +76,7 @@ fn baz() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").contains("infer"))
+        assert!(format!("{events:?}").contains("infer_shim"))
     }
 
     let new_text = "
@@ -103,6 +103,6 @@ fn baz() -> i32 {
                 }
             });
         });
-        assert!(format!("{events:?}").matches("infer").count() == 1, "{events:#?}")
+        assert!(format!("{events:?}").matches("infer_shim").count() == 1, "{events:#?}")
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 8cb7dbf60f3..0135e0a409b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -160,7 +160,7 @@ fn solve(
     let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
 
     let should_continue = || {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
         let remaining = fuel.get();
         fuel.set(remaining - 1);
         if remaining == 0 {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
index 3a22158ce6f..425196d92f7 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs
@@ -19,7 +19,7 @@ use crate::{
     AliasTy, Const, ConstScalar, DynTyExt, GenericArg, GenericArgData, Interner, Lifetime,
     LifetimeData, Ty, TyKind,
 };
-use base_db::ra_salsa::Cycle;
+use base_db::salsa::Cycle;
 use chalk_ir::Mutability;
 use hir_def::data::adt::StructFlags;
 use hir_def::{AdtId, GenericDefId, GenericParamId, VariantId};
@@ -58,9 +58,9 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar
 pub(crate) fn variances_of_cycle(
     db: &dyn HirDatabase,
     _cycle: &Cycle,
-    def: &GenericDefId,
+    def: GenericDefId,
 ) -> Option<Arc<[Variance]>> {
-    let generics = generics(db.upcast(), *def);
+    let generics = generics(db.upcast(), def);
     let count = generics.len();
 
     if count == 0 {
diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs
index 22760c41aae..9e8e87ecff0 100644
--- a/src/tools/rust-analyzer/crates/hir/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/db.rs
@@ -3,44 +3,43 @@
 //! we didn't do that.
 //!
 //! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::{
-    AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
-    BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
-    CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
-    DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
-    ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
-    FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
-    FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
-    GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery,
-    IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
-    InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
-    InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
-    InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
-    InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
-    InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
-    MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
-    StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery,
-    TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
-};
-pub use hir_expand::db::{
-    AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
-    ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
-    RealSpanMapQuery,
-};
-pub use hir_ty::db::{
-    AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
-    CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
-    ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
-    FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
-    GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
-    ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
-    InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
-    InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
-    InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
-    MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
-    MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
-    TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
-    TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
-    TypeAliasImplTraitsQuery, ValueTyQuery,
-};
+pub use hir_def::db::DefDatabase;
+//     AttrsQuery, BlockDefMapQuery, BlockItemTreeQuery, BlockItemTreeWithSourceMapQuery, BodyQuery,
+//     BodyWithSourceMapQuery, ConstDataQuery, ConstVisibilityQuery, CrateDefMapQuery,
+//     CrateLangItemsQuery, CrateNotableTraitsQuery, CrateSupportsNoStdQuery, DefDatabase,
+//     DefDatabaseStorage, EnumDataQuery, EnumVariantDataWithDiagnosticsQuery,
+//     ExpandProcAttrMacrosQuery, ExprScopesQuery, ExternCrateDeclDataQuery, FieldVisibilitiesQuery,
+//     FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery, FileItemTreeWithSourceMapQuery,
+//     FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery,
+//     GenericParamsWithSourceMapQuery, ImplDataWithDiagnosticsQuery, ImportMapQuery,
+//     IncludeMacroInvocQuery, InternAnonymousConstQuery, InternBlockQuery, InternConstQuery,
+//     InternDatabase, InternDatabaseStorage, InternEnumQuery, InternExternBlockQuery,
+//     InternExternCrateQuery, InternFunctionQuery, InternImplQuery, InternInTypeConstQuery,
+//     InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery, InternStaticQuery,
+//     InternStructQuery, InternTraitAliasQuery, InternTraitQuery, InternTypeAliasQuery,
+//     InternUnionQuery, InternUseQuery, LangItemQuery, Macro2DataQuery, MacroDefQuery,
+//     MacroRulesDataQuery, NotableTraitsInDepsQuery, ProcMacroDataQuery, StaticDataQuery,
+//     StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataWithDiagnosticsQuery,
+//     TypeAliasDataQuery, UnionDataWithDiagnosticsQuery,
+// };
+pub use hir_expand::db::ExpandDatabase;
+// AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
+// ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+// ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, ProcMacroSpanQuery, ProcMacrosQuery,
+// RealSpanMapQuery,
+pub use hir_ty::db::HirDatabase;
+//     AdtDatumQuery, AdtVarianceQuery, AssociatedTyDataQuery, AssociatedTyValueQuery, BorrowckQuery,
+//     CallableItemSignatureQuery, ConstEvalDiscriminantQuery, ConstEvalQuery, ConstEvalStaticQuery,
+//     ConstParamTyQuery, DynCompatibilityOfTraitQuery, FieldTypesQuery, FnDefDatumQuery,
+//     FnDefVarianceQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery,
+//     GenericPredicatesQuery, GenericPredicatesWithoutParentQuery, HirDatabase, HirDatabaseStorage,
+//     ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, IncoherentInherentImplCratesQuery, InferQuery,
+//     InherentImplsInBlockQuery, InherentImplsInCrateQuery, InternCallableDefQuery,
+//     InternClosureQuery, InternCoroutineQuery, InternImplTraitIdQuery, InternLifetimeParamIdQuery,
+//     InternTypeOrConstParamIdQuery, LayoutOfAdtQuery, LayoutOfTyQuery, LookupImplMethodQuery,
+//     MirBodyForClosureQuery, MirBodyQuery, MonomorphizedMirBodyForClosureQuery,
+//     MonomorphizedMirBodyQuery, ProgramClausesForChalkEnvQuery, ReturnTypeImplTraitsQuery,
+//     TargetDataLayoutQuery, TraitDatumQuery, TraitEnvironmentQuery, TraitImplsInBlockQuery,
+//     TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery,
+//     TypeAliasImplTraitsQuery, ValueTyQuery,
+// };
diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
index 372c7252934..b4468178fbe 100644
--- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs
@@ -306,7 +306,7 @@ impl HasSource for ExternCrateDecl {
 impl HasSource for InlineAsmOperand {
     type Ast = ast::AsmOperandNamed;
     fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
-        let (_body, source_map) = db.body_with_source_map(self.owner);
+        let source_map = db.body_with_source_map(self.owner).1;
         if let Ok(src) = source_map.expr_syntax(self.expr) {
             let root = src.file_syntax(db.upcast());
             return src
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index a8075509474..dbe743e7e2f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -662,6 +662,7 @@ impl Module {
                                 db.field_types_with_diagnostics(s.id.into()).1,
                                 tree_source_maps.strukt(tree_id.value).item(),
                             );
+
                             for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
                                 emit_def_diagnostic(db, acc, diag, edition);
                             }
@@ -675,6 +676,7 @@ impl Module {
                                 db.field_types_with_diagnostics(u.id.into()).1,
                                 tree_source_maps.union(tree_id.value).item(),
                             );
+
                             for diag in db.union_data_with_diagnostics(u.id).1.iter() {
                                 emit_def_diagnostic(db, acc, diag, edition);
                             }
@@ -1906,6 +1908,7 @@ impl DefWithBody {
         let krate = self.module(db).id.krate();
 
         let (body, source_map) = db.body_with_source_map(self.into());
+
         let item_tree_source_maps;
         let outer_types_source_map = match self {
             DefWithBody::Function(function) => {
@@ -1955,7 +1958,7 @@ impl DefWithBody {
                         None
                     };
                     MacroError {
-                        node: (*node).map(|it| it.into()),
+                        node: (node).map(|it| it.into()),
                         precise_location,
                         message,
                         error,
@@ -3346,7 +3349,7 @@ fn as_assoc_item<'db, ID, DEF, LOC>(
     id: ID,
 ) -> Option<AssocItem>
 where
-    ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
+    ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
     DEF: From<ID>,
     LOC: ItemTreeNode,
 {
@@ -3362,7 +3365,7 @@ fn as_extern_assoc_item<'db, ID, DEF, LOC>(
     id: ID,
 ) -> Option<ExternAssocItem>
 where
-    ID: Lookup<Database<'db> = dyn DefDatabase + 'db, Data = AssocItemLoc<LOC>>,
+    ID: Lookup<Database = dyn DefDatabase, Data = AssocItemLoc<LOC>>,
     DEF: From<ID>,
     LOC: ItemTreeNode,
 {
@@ -4656,6 +4659,7 @@ pub struct CaptureUsages {
 impl CaptureUsages {
     pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
         let (body, source_map) = db.body_with_source_map(self.parent);
+
         let mut result = Vec::with_capacity(self.spans.len());
         for &span in self.spans.iter() {
             let is_ref = span.is_ref_span(&body);
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index 5e2eebcd13c..aeeb3f97909 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -307,9 +307,10 @@ impl<'db> SemanticsImpl<'db> {
         SemanticsImpl { db, s2d_cache: Default::default(), macro_call_cache: Default::default() }
     }
 
-    pub fn parse(&self, file_id: EditionedFileId) -> ast::SourceFile {
+    pub fn parse(&self, file_id: base_db::EditionedFileId) -> ast::SourceFile {
+        let hir_file_id = file_id.editioned_file_id(self.db).into();
         let tree = self.db.parse(file_id).tree();
-        self.cache(tree.syntax().clone(), file_id.into());
+        self.cache(tree.syntax().clone(), hir_file_id);
         tree
     }
 
@@ -329,11 +330,14 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     pub fn parse_guess_edition(&self, file_id: FileId) -> ast::SourceFile {
-        let file_id = self
+        let editioned_file_id = self
             .attach_first_edition(file_id)
             .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+
+        let file_id = base_db::EditionedFileId::new(self.db, editioned_file_id);
+
         let tree = self.db.parse(file_id).tree();
-        self.cache(tree.syntax().clone(), file_id.into());
+        self.cache(tree.syntax().clone(), editioned_file_id.into());
         tree
     }
 
@@ -1903,7 +1907,9 @@ fn macro_call_to_macro_id(
     match loc.def.ast_id() {
         Either::Left(it) => {
             let node = match it.file_id.repr() {
-                HirFileIdRepr::FileId(file_id) => {
+                HirFileIdRepr::FileId(editioned_file_id) => {
+                    let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
                     it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                 }
                 HirFileIdRepr::MacroFile(macro_file) => {
@@ -1915,7 +1921,9 @@ fn macro_call_to_macro_id(
         }
         Either::Right(it) => {
             let node = match it.file_id.repr() {
-                HirFileIdRepr::FileId(file_id) => {
+                HirFileIdRepr::FileId(editioned_file_id) => {
+                    let file_id = base_db::EditionedFileId::new(db, editioned_file_id);
+
                     it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
                 }
                 HirFileIdRepr::MacroFile(macro_file) => {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
index d0fdf5cbdf7..da9bb8b15c8 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/child_by_source.rs
@@ -254,7 +254,7 @@ fn insert_item_loc<ID, N, Data>(
     id: ID,
     key: Key<N::Source, ID>,
 ) where
-    ID: for<'db> Lookup<Database<'db> = dyn DefDatabase + 'db, Data = Data> + 'static,
+    ID: Lookup<Database = dyn DefDatabase, Data = Data> + 'static,
     Data: ItemTreeLoc<Id = N>,
     N: ItemTreeNode,
     N::Source: 'static,
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index 18cbaa15aea..4ec07396560 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -85,6 +85,7 @@
 //! active crate for a given position, and then provide an API to resolve all
 //! syntax nodes against this specific crate.
 
+use base_db::{RootQueryDb, Upcast};
 use either::Either;
 use hir_def::{
     dyn_map::{
@@ -99,11 +100,11 @@ use hir_def::{
 };
 use hir_expand::{
     attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
-    MacroFileIdExt,
+    MacroFileId, MacroFileIdExt,
 };
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::{EditionedFileId, FileId, MacroFileId};
+use span::{EditionedFileId, FileId};
 use stdx::impl_from;
 use syntax::{
     ast::{self, HasName},
@@ -142,7 +143,7 @@ impl SourceToDefCache {
             return m;
         }
         self.included_file_cache.insert(file, None);
-        for &crate_id in db.relevant_crates(file.into()).iter() {
+        for &crate_id in Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file.into()).iter() {
             db.include_macro_invoc(crate_id).iter().for_each(|&(macro_call_id, file_id)| {
                 self.included_file_cache.insert(file_id, Some(MacroFileId { macro_call_id }));
             });
@@ -176,7 +177,9 @@ impl SourceToDefCtx<'_, '_> {
         let _p = tracing::info_span!("SourceToDefCtx::file_to_def").entered();
         self.cache.file_to_def_cache.entry(file).or_insert_with(|| {
             let mut mods = SmallVec::new();
-            for &crate_id in self.db.relevant_crates(file).iter() {
+
+            for &crate_id in Upcast::<dyn RootQueryDb>::upcast(self.db).relevant_crates(file).iter()
+            {
                 // Note: `mod` declarations in block modules cannot be supported here
                 let crate_def_map = self.db.crate_def_map(crate_id);
                 let n_mods = mods.len();
@@ -344,7 +347,7 @@ impl SourceToDefCtx<'_, '_> {
             })
             .position(|it| it == *src.value)?;
         let container = self.find_pat_or_label_container(src.syntax_ref())?;
-        let (_, source_map) = self.db.body_with_source_map(container);
+        let source_map = self.db.body_with_source_map(container).1;
         let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?.as_expr()?;
         Some(InlineAsmOperand { owner: container, expr, index })
     }
@@ -377,7 +380,8 @@ impl SourceToDefCtx<'_, '_> {
         src: InFile<&ast::Label>,
     ) -> Option<(DefWithBodyId, LabelId)> {
         let container = self.find_pat_or_label_container(src.syntax_ref())?;
-        let (_body, source_map) = self.db.body_with_source_map(container);
+        let source_map = self.db.body_with_source_map(container).1;
+
         let label_id = source_map.node_label(src)?;
         Some((container, label_id))
     }
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index d1245f5f7d6..aa0eac9478a 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -1635,8 +1635,7 @@ pub(crate) fn name_hygiene(db: &dyn HirDatabase, name: InFile<&SyntaxNode>) -> H
     };
     let span_map = db.expansion_span_map(macro_file);
     let ctx = span_map.span_at(name.value.text_range().start()).ctx;
-    let ctx = db.lookup_intern_syntax_context(ctx);
-    HygieneId::new(ctx.opaque_and_semitransparent)
+    HygieneId::new(ctx.opaque_and_semitransparent(db))
 }
 
 fn type_of_expr_including_adjust(infer: &InferenceResult, id: ExprId) -> Option<&Ty> {
diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
index 81eb6a70ad7..fa8153ad216 100644
--- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs
@@ -111,7 +111,7 @@ impl<'a> SymbolCollector<'a> {
     fn do_work(&mut self, work: SymbolCollectorWork) {
         let _p = tracing::info_span!("SymbolCollector::do_work", ?work).entered();
         tracing::info!(?work, "SymbolCollector::do_work");
-        self.db.unwind_if_cancelled();
+        self.db.unwind_if_revision_cancelled();
 
         let parent_name = work.parent.map(|name| name.as_str().to_smolstr());
         self.with_container_name(parent_name, |s| s.collect_from_module(work.module_id));
@@ -346,9 +346,9 @@ impl<'a> SymbolCollector<'a> {
         }
     }
 
-    fn push_decl<'db, L>(&mut self, id: L, name: &Name, is_assoc: bool)
+    fn push_decl<L>(&mut self, id: L, name: &Name, is_assoc: bool)
     where
-        L: Lookup<Database<'db> = dyn DefDatabase + 'db> + Into<ModuleDefId>,
+        L: Lookup<Database = dyn DefDatabase> + Into<ModuleDefId>,
         <L as Lookup>::Data: HasSource,
         <<L as Lookup>::Data as HasSource>::Value: HasName,
     {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
index b1189f0d0b0..c4e98c07423 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/assist_context.rs
@@ -1,6 +1,7 @@
 //! See [`AssistContext`].
 
 use hir::{FileRange, Semantics};
+use ide_db::base_db::salsa::AsDynDatabase;
 use ide_db::EditionedFileId;
 use ide_db::{label::Label, FileId, RootDatabase};
 use syntax::Edition;
@@ -64,7 +65,10 @@ impl<'a> AssistContext<'a> {
         config: &'a AssistConfig,
         frange: FileRange,
     ) -> AssistContext<'a> {
-        let source_file = sema.parse(frange.file_id);
+        let editioned_file_id =
+            ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), frange.file_id);
+
+        let source_file = sema.parse(editioned_file_id);
 
         let start = frange.range.start();
         let end = frange.range.end();
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 37f5f44dfa0..fee7662a784 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -2,6 +2,7 @@ use std::iter::{self, Peekable};
 
 use either::Either;
 use hir::{sym, Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics};
+use ide_db::base_db::salsa::AsDynDatabase;
 use ide_db::syntax_helpers::suggest_name;
 use ide_db::RootDatabase;
 use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
@@ -256,7 +257,12 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
             // Just replace the element that the original range came from
             let old_place = {
                 // Find the original element
-                let file = ctx.sema.parse(arm_list_range.file_id);
+                let editioned_file_id = ide_db::base_db::EditionedFileId::new(
+                    ctx.sema.db.as_dyn_database(),
+                    arm_list_range.file_id,
+                );
+
+                let file = ctx.sema.parse(editioned_file_id);
                 let old_place = file.syntax().covering_element(arm_list_range.range);
 
                 match old_place {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
index 7716e99e604..9cb14e8d9a0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs
@@ -1136,7 +1136,7 @@ fn foo() {
 }
 
 //- /main.rs
-use foo::Foo;
+use foo::{Bool, Foo};
 
 mod foo;
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
index d34cf895cd9..54826f03884 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_closure_to_fn.rs
@@ -590,7 +590,7 @@ fn handle_call(
     let indent =
         if insert_newlines { first_arg_indent.unwrap().to_string() } else { String::new() };
     // FIXME: This text manipulation seems risky.
-    let text = ctx.db().file_text(file_id.file_id());
+    let text = ctx.db().file_text(file_id.file_id()).text(ctx.db());
     let mut text = text[..u32::from(range.end()).try_into().unwrap()].trim_end();
     if !text.ends_with(')') {
         return None;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
index 6e3be0ce692..b94422b13c1 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_module.rs
@@ -2,6 +2,7 @@ use std::iter;
 
 use either::Either;
 use hir::{HasSource, HirFileIdExt, ModuleSource};
+use ide_db::base_db::salsa::AsDynDatabase;
 use ide_db::{
     assists::{AssistId, AssistKind},
     defs::{Definition, NameClass, NameRefClass},
@@ -331,7 +332,10 @@ impl Module {
         let mut use_stmts_set = FxHashSet::default();
 
         for (file_id, refs) in node_def.usages(&ctx.sema).all() {
-            let source_file = ctx.sema.parse(file_id);
+            let editioned_file_id =
+                ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
+
+            let source_file = ctx.sema.parse(editioned_file_id);
             let usages = refs.into_iter().filter_map(|FileReference { range, .. }| {
                 // handle normal usages
                 let name_ref = find_node_at_range::<ast::NameRef>(source_file.syntax(), range)?;
@@ -457,7 +461,11 @@ impl Module {
         let selection_range = ctx.selection_trimmed();
         let file_id = ctx.file_id();
         let usage_res = def.usages(&ctx.sema).in_scope(&SearchScope::single_file(file_id)).all();
-        let file = ctx.sema.parse(file_id);
+
+        let editioned_file_id =
+            ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
+
+        let file = ctx.sema.parse(editioned_file_id);
 
         // track uses which does not exists in `Use`
         let mut uses_exist_in_sel = false;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index 7af2a2e1e6a..29bd8cf0d1a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -2,6 +2,7 @@ use hir::{
     Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics,
     StructKind, Type, TypeInfo,
 };
+use ide_db::base_db::salsa::AsDynDatabase;
 use ide_db::{
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
@@ -205,7 +206,11 @@ fn get_adt_source(
     fn_name: &str,
 ) -> Option<(Option<ast::Impl>, FileId)> {
     let range = adt.source(ctx.sema.db)?.syntax().original_file_range_rooted(ctx.sema.db);
-    let file = ctx.sema.parse(range.file_id);
+
+    let editioned_file_id =
+        ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), range.file_id);
+
+    let file = ctx.sema.parse(editioned_file_id);
     let adt_source =
         ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
     find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()])
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
index 5ddb17b2072..2d7722a654e 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_unused_param.rs
@@ -1,4 +1,6 @@
-use ide_db::{defs::Definition, search::FileReference, EditionedFileId};
+use ide_db::{
+    base_db::salsa::AsDynDatabase, defs::Definition, search::FileReference, EditionedFileId,
+};
 use syntax::{
     algo::{find_node_at_range, least_common_ancestor_element},
     ast::{self, HasArgList},
@@ -102,7 +104,11 @@ fn process_usages(
     arg_to_remove: usize,
     is_self_present: bool,
 ) {
-    let source_file = ctx.sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(ctx.sema.db.as_dyn_database(), file_id);
+
+    let source_file = ctx.sema.parse(editioned_file_id_wrapper);
+    builder.edit_file(file_id);
     let possible_ranges = references
         .into_iter()
         .filter_map(|usage| process_usage(&source_file, usage, arg_to_remove, is_self_present));
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
index b4042abf5d6..4f751b68e7f 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests.rs
@@ -3,7 +3,7 @@ mod generated;
 use expect_test::expect;
 use hir::{FileRange, Semantics};
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase},
+    base_db::SourceDatabase,
     imports::insert_use::{ImportGranularity, InsertUseConfig},
     source_change::FileSystemEdit,
     EditionedFileId, RootDatabase, SnippetCap,
@@ -222,7 +222,7 @@ pub(crate) fn check_assist_unresolved(
 fn check_doc_test(assist_id: &str, before: &str, after: &str) {
     let after = trim_indent(after);
     let (db, file_id, selection) = RootDatabase::with_range_or_offset(before);
-    let before = db.file_text(file_id.file_id()).to_string();
+    let before = db.file_text(file_id.file_id()).text(&db).to_string();
     let frange = FileRange { file_id, range: selection.into() };
 
     let assist = assists(&db, &TEST_CONFIG, AssistResolveStrategy::All, frange.into())
@@ -281,7 +281,7 @@ fn check_with_config(
 ) {
     let (mut db, file_with_caret_id, range_or_offset) = RootDatabase::with_range_or_offset(before);
     db.enable_proc_attr_macros();
-    let text_without_caret = db.file_text(file_with_caret_id.into()).to_string();
+    let text_without_caret = db.file_text(file_with_caret_id.into()).text(&db).to_string();
 
     let frange = FileRange { file_id: file_with_caret_id, range: range_or_offset.into() };
 
@@ -311,14 +311,14 @@ fn check_with_config(
 
             let mut buf = String::new();
             for (file_id, (edit, snippet_edit)) in source_change.source_file_edits {
-                let mut text = db.file_text(file_id).as_ref().to_owned();
+                let mut text = db.file_text(file_id).text(&db).as_ref().to_owned();
                 edit.apply(&mut text);
                 if let Some(snippet_edit) = snippet_edit {
                     snippet_edit.apply(&mut text);
                 }
                 if !skip_header {
-                    let sr = db.file_source_root(file_id);
-                    let sr = db.source_root(sr);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(&db);
+                    let sr = db.source_root(source_root_id).source_root(&db);
                     let path = sr.path_for_file(&file_id).unwrap();
                     format_to!(buf, "//- {}\n", path)
                 }
@@ -329,15 +329,16 @@ fn check_with_config(
                 let (dst, contents) = match file_system_edit {
                     FileSystemEdit::CreateFile { dst, initial_contents } => (dst, initial_contents),
                     FileSystemEdit::MoveFile { src, dst } => {
-                        (dst, db.file_text(src).as_ref().to_owned())
+                        (dst, db.file_text(src).text(&db).as_ref().to_owned())
                     }
                     FileSystemEdit::MoveDir { src, src_id, dst } => {
                         // temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
                         (dst, format!("{src_id:?}\n{src:?}"))
                     }
                 };
-                let sr = db.file_source_root(dst.anchor);
-                let sr = db.source_root(sr);
+
+                let source_root_id = db.file_source_root(dst.anchor).source_root_id(&db);
+                let sr = db.source_root(source_root_id).source_root(&db);
                 let mut base = sr.path_for_file(&dst.anchor).unwrap().clone();
                 base.pop();
                 let created_file_path = base.join(&dst.path).unwrap();
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
index cca6a22f290..fad7c92d8ae 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs
@@ -4,7 +4,7 @@ use std::iter;
 
 use hir::{HirFileIdExt, Module};
 use ide_db::{
-    base_db::{SourceRootDatabase, VfsPath},
+    base_db::{SourceDatabase, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,
 };
 use syntax::{ast, AstNode, SyntaxKind};
@@ -43,7 +43,10 @@ pub(crate) fn complete_mod(
 
     let module_definition_file =
         current_module.definition_source_file_id(ctx.db).original_file(ctx.db);
-    let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file.file_id()));
+    let source_root_id =
+        ctx.db.file_source_root(module_definition_file.file_id()).source_root_id(ctx.db);
+    let source_root = ctx.db.source_root(source_root_id).source_root(ctx.db);
+
     let directory_to_look_for_submodules = directory_to_look_for_submodules(
         current_module,
         ctx.db,
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index 28e2853096e..611f8a2873d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -277,7 +277,7 @@ fn get_receiver_text(
         range.range = TextRange::at(range.range.start(), range.range.len() - TextSize::of('.'))
     }
     let file_text = sema.db.file_text(range.file_id.file_id());
-    let mut text = file_text[range.range].to_owned();
+    let mut text = file_text.text(sema.db)[range.range].to_owned();
 
     // The receiver texts should be interpreted as-is, as they are expected to be
     // normal Rust expressions.
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index e686a293094..54620bbad1b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -6,13 +6,14 @@ mod tests;
 
 use std::{iter, ops::ControlFlow};
 
+use base_db::{salsa::AsDynDatabase, RootQueryDb as _};
 use hir::{
     DisplayTarget, HasAttrs, Local, ModPath, ModuleDef, ModuleSource, Name, PathResolution,
     ScopeDef, Semantics, SemanticsScope, Symbol, Type, TypeInfo,
 };
 use ide_db::{
-    base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
-    FxHashMap, FxHashSet, RootDatabase,
+    famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition, FxHashMap, FxHashSet,
+    RootDatabase,
 };
 use syntax::{
     ast::{self, AttrKind, NameOrNameRef},
@@ -706,15 +707,19 @@ impl<'a> CompletionContext<'a> {
         let _p = tracing::info_span!("CompletionContext::new").entered();
         let sema = Semantics::new(db);
 
-        let file_id = sema.attach_first_edition(file_id)?;
-        let original_file = sema.parse(file_id);
+        let editioned_file_id = sema.attach_first_edition(file_id)?;
+        let editioned_file_id_wrapper =
+            ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), editioned_file_id);
+
+        let original_file = sema.parse(editioned_file_id_wrapper);
 
         // Insert a fake ident to get a valid parse tree. We will use this file
         // to determine context, though the original_file will be used for
         // actual completion.
         let file_with_fake_ident = {
-            let parse = db.parse(file_id);
-            parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, file_id.edition()).tree()
+            let (_, edition) = editioned_file_id.unpack();
+            let parse = db.parse(editioned_file_id_wrapper);
+            parse.reparse(TextRange::empty(offset), COMPLETION_MARKER, edition).tree()
         };
 
         // always pick the token to the immediate left of the cursor, as that is what we are actually
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index a990b39481a..c9fc5ae1524 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -11,6 +11,7 @@ mod snippet;
 mod tests;
 
 use ide_db::{
+    base_db::salsa::AsDynDatabase,
     imports::insert_use::{self, ImportScope},
     syntax_helpers::tree_diff::diff,
     text_edit::TextEdit,
@@ -275,7 +276,11 @@ pub fn resolve_completion_edits(
     let _p = tracing::info_span!("resolve_completion_edits").entered();
     let sema = hir::Semantics::new(db);
 
-    let original_file = sema.parse(sema.attach_first_edition(file_id)?);
+    let editioned_file_id = sema.attach_first_edition(file_id)?;
+    let editioned_file_id =
+        ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
+    let original_file = sema.parse(editioned_file_id);
     let original_token =
         syntax::AstNode::syntax(&original_file).token_at_offset(offset).left_biased()?;
     let position_for_import = &original_token.parent()?;
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index 0735be38cd9..0f0fa115af0 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -2042,8 +2042,8 @@ fn f() { A { bar: b$0 }; }
             expect![[r#"
                 fn bar() fn() -> u8 [type+name]
                 fn baz() fn() -> u8 [type]
-                ex bar()  [type]
                 ex baz()  [type]
+                ex bar()  [type]
                 st A A []
                 fn f() fn() []
             "#]],
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 9d91f95eb65..2984348a327 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -246,7 +246,7 @@ pub(crate) fn check_edit_with_config(
         .filter(|it| it.lookup() == what)
         .collect_tuple()
         .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
-    let mut actual = db.file_text(position.file_id).to_string();
+    let mut actual = db.file_text(position.file_id).text(&db).to_string();
 
     let mut combined_edit = completion.text_edit.clone();
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
index 9ab66243b5c..823cc8c3d8c 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/raw_identifiers.rs
@@ -7,7 +7,7 @@ use crate::tests::{completion_list_with_config_raw, position, TEST_CONFIG};
 fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let completions = completion_list_with_config_raw(TEST_CONFIG, ra_fixture, true, None);
     let (db, position) = position(ra_fixture);
-    let mut actual = db.file_text(position.file_id).to_string();
+    let mut actual = db.file_text(position.file_id).text(&db).to_string();
     completions
         .into_iter()
         .exactly_one()
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index 641998c3dac..84855652338 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -15,6 +15,7 @@ rust-version.workspace = true
 cov-mark = "2.0.0-pre.1"
 crossbeam-channel.workspace = true
 tracing.workspace = true
+dashmap.workspace = true
 rayon.workspace = true
 fst = { version = "0.4.7", default-features = false }
 rustc-hash.workspace = true
@@ -23,6 +24,8 @@ itertools.workspace = true
 arrayvec.workspace = true
 indexmap.workspace = true
 memchr = "2.6.4"
+salsa.workspace = true
+query-group.workspace = true
 triomphe.workspace = true
 nohash-hasher.workspace = true
 bitflags.workspace = true
@@ -34,6 +37,7 @@ profile.workspace = true
 stdx.workspace = true
 syntax.workspace = true
 span.workspace = true
+vfs.workspace = true
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir-xxx` subpackage, reexport the API via `hir`.
 hir.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
index 46ff4fbf9e9..d8b26567827 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs
@@ -1,14 +1,9 @@
 //! Applies changes to the IDE state transactionally.
 
-use base_db::{
-    ra_salsa::{
-        debug::{DebugQueryTable, TableEntry},
-        Database, Durability, Query, QueryTable,
-    },
-    SourceRootId,
-};
-use profile::{memory_usage, Bytes};
+use base_db::SourceRootId;
+use profile::Bytes;
 use rustc_hash::FxHashSet;
+use salsa::{Database as _, Durability};
 use triomphe::Arc;
 
 use crate::{symbol_index::SymbolsDatabase, ChangeWithProcMacros, RootDatabase};
@@ -52,23 +47,23 @@ impl RootDatabase {
     pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes, usize)> {
         let mut acc: Vec<(String, Bytes, usize)> = vec![];
 
-        fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
-        where
-            QueryTable<'q, Q>: DebugQueryTable,
-            Q: Query,
-            <Q as Query>::Storage: 'q,
-        {
-            struct EntryCounter(usize);
-            impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
-                fn from_iter<T>(iter: T) -> EntryCounter
-                where
-                    T: IntoIterator<Item = TableEntry<K, V>>,
-                {
-                    EntryCounter(iter.into_iter().count())
-                }
-            }
-            table.entries::<EntryCounter>().0
-        }
+        // fn collect_query_count<'q, Q>(table: &QueryTable<'q, Q>) -> usize
+        // where
+        //     QueryTable<'q, Q>: DebugQueryTable,
+        //     Q: Query,
+        //     <Q as Query>::Storage: 'q,
+        // {
+        //     struct EntryCounter(usize);
+        //     impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
+        //         fn from_iter<T>(iter: T) -> EntryCounter
+        //         where
+        //             T: IntoIterator<Item = TableEntry<K, V>>,
+        //         {
+        //             EntryCounter(iter.into_iter().count())
+        //         }
+        //     }
+        //     table.entries::<EntryCounter>().0
+        // }
 
         macro_rules! purge_each_query {
             ($($q:path)*) => {$(
@@ -83,170 +78,170 @@ impl RootDatabase {
             )*}
         }
         purge_each_query![
-            // SymbolsDatabase
-            crate::symbol_index::ModuleSymbolsQuery
-            crate::symbol_index::LibrarySymbolsQuery
-            crate::symbol_index::LocalRootsQuery
-            crate::symbol_index::LibraryRootsQuery
-            // HirDatabase
-            hir::db::AdtDatumQuery
-            hir::db::AdtVarianceQuery
-            hir::db::AssociatedTyDataQuery
-            hir::db::AssociatedTyValueQuery
-            hir::db::BorrowckQuery
-            hir::db::CallableItemSignatureQuery
-            hir::db::ConstEvalDiscriminantQuery
-            hir::db::ConstEvalQuery
-            hir::db::ConstEvalStaticQuery
-            hir::db::ConstParamTyQuery
-            hir::db::DynCompatibilityOfTraitQuery
-            hir::db::FieldTypesQuery
-            hir::db::FnDefDatumQuery
-            hir::db::FnDefVarianceQuery
-            hir::db::GenericDefaultsQuery
-            hir::db::GenericPredicatesForParamQuery
-            hir::db::GenericPredicatesQuery
-            hir::db::GenericPredicatesWithoutParentQuery
-            hir::db::ImplDatumQuery
-            hir::db::ImplSelfTyQuery
-            hir::db::ImplTraitQuery
-            hir::db::IncoherentInherentImplCratesQuery
-            hir::db::InferQuery
-            hir::db::InherentImplsInBlockQuery
-            hir::db::InherentImplsInCrateQuery
-            hir::db::InternCallableDefQuery
-            hir::db::InternClosureQuery
-            hir::db::InternCoroutineQuery
-            hir::db::InternImplTraitIdQuery
-            hir::db::InternLifetimeParamIdQuery
-            hir::db::InternTypeOrConstParamIdQuery
-            hir::db::LayoutOfAdtQuery
-            hir::db::LayoutOfTyQuery
-            hir::db::LookupImplMethodQuery
-            hir::db::MirBodyForClosureQuery
-            hir::db::MirBodyQuery
-            hir::db::MonomorphizedMirBodyForClosureQuery
-            hir::db::MonomorphizedMirBodyQuery
-            hir::db::ProgramClausesForChalkEnvQuery
-            hir::db::ReturnTypeImplTraitsQuery
-            hir::db::TargetDataLayoutQuery
-            hir::db::TraitDatumQuery
-            hir::db::TraitEnvironmentQuery
-            hir::db::TraitImplsInBlockQuery
-            hir::db::TraitImplsInCrateQuery
-            hir::db::TraitImplsInDepsQuery
-            hir::db::TraitSolveQuery
-            hir::db::TyQuery
-            hir::db::TypeAliasImplTraitsQuery
-            hir::db::ValueTyQuery
+            // // SymbolsDatabase
+            // crate::symbol_index::ModuleSymbolsQuery
+            // crate::symbol_index::LibrarySymbolsQuery
+            // crate::symbol_index::LocalRootsQuery
+            // crate::symbol_index::LibraryRootsQuery
+            // // HirDatabase
+            // hir::db::AdtDatumQuery
+            // hir::db::AdtVarianceQuery
+            // hir::db::AssociatedTyDataQuery
+            // hir::db::AssociatedTyValueQuery
+            // hir::db::BorrowckQuery
+            // hir::db::CallableItemSignatureQuery
+            // hir::db::ConstEvalDiscriminantQuery
+            // hir::db::ConstEvalQuery
+            // hir::db::ConstEvalStaticQuery
+            // hir::db::ConstParamTyQuery
+            // hir::db::DynCompatibilityOfTraitQuery
+            // hir::db::FieldTypesQuery
+            // hir::db::FnDefDatumQuery
+            // hir::db::FnDefVarianceQuery
+            // hir::db::GenericDefaultsQuery
+            // hir::db::GenericPredicatesForParamQuery
+            // hir::db::GenericPredicatesQuery
+            // hir::db::GenericPredicatesWithoutParentQuery
+            // hir::db::ImplDatumQuery
+            // hir::db::ImplSelfTyQuery
+            // hir::db::ImplTraitQuery
+            // hir::db::IncoherentInherentImplCratesQuery
+            // hir::db::InferQuery
+            // hir::db::InherentImplsInBlockQuery
+            // hir::db::InherentImplsInCrateQuery
+            // hir::db::InternCallableDefQuery
+            // hir::db::InternClosureQuery
+            // hir::db::InternCoroutineQuery
+            // hir::db::InternImplTraitIdQuery
+            // hir::db::InternLifetimeParamIdQuery
+            // hir::db::InternTypeOrConstParamIdQuery
+            // hir::db::LayoutOfAdtQuery
+            // hir::db::LayoutOfTyQuery
+            // hir::db::LookupImplMethodQuery
+            // hir::db::MirBodyForClosureQuery
+            // hir::db::MirBodyQuery
+            // hir::db::MonomorphizedMirBodyForClosureQuery
+            // hir::db::MonomorphizedMirBodyQuery
+            // hir::db::ProgramClausesForChalkEnvQuery
+            // hir::db::ReturnTypeImplTraitsQuery
+            // hir::db::TargetDataLayoutQuery
+            // hir::db::TraitDatumQuery
+            // hir::db::TraitEnvironmentQuery
+            // hir::db::TraitImplsInBlockQuery
+            // hir::db::TraitImplsInCrateQuery
+            // hir::db::TraitImplsInDepsQuery
+            // hir::db::TraitSolveQuery
+            // hir::db::TyQuery
+            // hir::db::TypeAliasImplTraitsQuery
+            // hir::db::ValueTyQuery
 
-            // DefDatabase
-            hir::db::AttrsQuery
-            hir::db::BlockDefMapQuery
-            hir::db::BlockItemTreeQuery
-            hir::db::BlockItemTreeWithSourceMapQuery
-            hir::db::BodyQuery
-            hir::db::BodyWithSourceMapQuery
-            hir::db::ConstDataQuery
-            hir::db::ConstVisibilityQuery
-            hir::db::CrateDefMapQuery
-            hir::db::CrateLangItemsQuery
-            hir::db::CrateNotableTraitsQuery
-            hir::db::CrateSupportsNoStdQuery
-            hir::db::EnumDataQuery
-            hir::db::EnumVariantDataWithDiagnosticsQuery
-            hir::db::ExpandProcAttrMacrosQuery
-            hir::db::ExprScopesQuery
-            hir::db::ExternCrateDeclDataQuery
-            hir::db::FieldVisibilitiesQuery
-            hir::db::FieldsAttrsQuery
-            hir::db::FieldsAttrsSourceMapQuery
-            hir::db::FileItemTreeQuery
-            hir::db::FileItemTreeWithSourceMapQuery
-            hir::db::FunctionDataQuery
-            hir::db::FunctionVisibilityQuery
-            hir::db::GenericParamsQuery
-            hir::db::GenericParamsWithSourceMapQuery
-            hir::db::ImplDataWithDiagnosticsQuery
-            hir::db::ImportMapQuery
-            hir::db::IncludeMacroInvocQuery
-            hir::db::InternAnonymousConstQuery
-            hir::db::InternBlockQuery
-            hir::db::InternConstQuery
-            hir::db::InternEnumQuery
-            hir::db::InternExternBlockQuery
-            hir::db::InternExternCrateQuery
-            hir::db::InternFunctionQuery
-            hir::db::InternImplQuery
-            hir::db::InternInTypeConstQuery
-            hir::db::InternMacro2Query
-            hir::db::InternMacroRulesQuery
-            hir::db::InternProcMacroQuery
-            hir::db::InternStaticQuery
-            hir::db::InternStructQuery
-            hir::db::InternTraitAliasQuery
-            hir::db::InternTraitQuery
-            hir::db::InternTypeAliasQuery
-            hir::db::InternUnionQuery
-            hir::db::InternUseQuery
-            hir::db::LangItemQuery
-            hir::db::Macro2DataQuery
-            hir::db::MacroDefQuery
-            hir::db::MacroRulesDataQuery
-            hir::db::NotableTraitsInDepsQuery
-            hir::db::ProcMacroDataQuery
-            hir::db::StaticDataQuery
-            hir::db::StructDataWithDiagnosticsQuery
-            hir::db::TraitAliasDataQuery
-            hir::db::TraitDataWithDiagnosticsQuery
-            hir::db::TypeAliasDataQuery
-            hir::db::UnionDataWithDiagnosticsQuery
+            // // DefDatabase
+            // hir::db::AttrsQuery
+            // hir::db::BlockDefMapQuery
+            // hir::db::BlockItemTreeQuery
+            // hir::db::BlockItemTreeWithSourceMapQuery
+            // hir::db::BodyQuery
+            // hir::db::BodyWithSourceMapQuery
+            // hir::db::ConstDataQuery
+            // hir::db::ConstVisibilityQuery
+            // hir::db::CrateDefMapQuery
+            // hir::db::CrateLangItemsQuery
+            // hir::db::CrateNotableTraitsQuery
+            // hir::db::CrateSupportsNoStdQuery
+            // hir::db::EnumDataQuery
+            // hir::db::EnumVariantDataWithDiagnosticsQuery
+            // hir::db::ExpandProcAttrMacrosQuery
+            // hir::db::ExprScopesQuery
+            // hir::db::ExternCrateDeclDataQuery
+            // hir::db::FieldVisibilitiesQuery
+            // hir::db::FieldsAttrsQuery
+            // hir::db::FieldsAttrsSourceMapQuery
+            // hir::db::FileItemTreeQuery
+            // hir::db::FileItemTreeWithSourceMapQuery
+            // hir::db::FunctionDataQuery
+            // hir::db::FunctionVisibilityQuery
+            // hir::db::GenericParamsQuery
+            // hir::db::GenericParamsWithSourceMapQuery
+            // hir::db::ImplDataWithDiagnosticsQuery
+            // hir::db::ImportMapQuery
+            // hir::db::IncludeMacroInvocQuery
+            // hir::db::InternAnonymousConstQuery
+            // hir::db::InternBlockQuery
+            // hir::db::InternConstQuery
+            // hir::db::InternEnumQuery
+            // hir::db::InternExternBlockQuery
+            // hir::db::InternExternCrateQuery
+            // hir::db::InternFunctionQuery
+            // hir::db::InternImplQuery
+            // hir::db::InternInTypeConstQuery
+            // hir::db::InternMacro2Query
+            // hir::db::InternMacroRulesQuery
+            // hir::db::InternProcMacroQuery
+            // hir::db::InternStaticQuery
+            // hir::db::InternStructQuery
+            // hir::db::InternTraitAliasQuery
+            // hir::db::InternTraitQuery
+            // hir::db::InternTypeAliasQuery
+            // hir::db::InternUnionQuery
+            // hir::db::InternUseQuery
+            // hir::db::LangItemQuery
+            // hir::db::Macro2DataQuery
+            // hir::db::MacroDefQuery
+            // hir::db::MacroRulesDataQuery
+            // hir::db::NotableTraitsInDepsQuery
+            // hir::db::ProcMacroDataQuery
+            // hir::db::StaticDataQuery
+            // hir::db::StructDataWithDiagnosticsQuery
+            // hir::db::TraitAliasDataQuery
+            // hir::db::TraitDataWithDiagnosticsQuery
+            // hir::db::TypeAliasDataQuery
+            // hir::db::UnionDataWithDiagnosticsQuery
 
-            // InternDatabase
-            hir::db::InternFunctionQuery
-            hir::db::InternStructQuery
-            hir::db::InternUnionQuery
-            hir::db::InternEnumQuery
-            hir::db::InternConstQuery
-            hir::db::InternStaticQuery
-            hir::db::InternTraitQuery
-            hir::db::InternTraitAliasQuery
-            hir::db::InternTypeAliasQuery
-            hir::db::InternImplQuery
-            hir::db::InternExternBlockQuery
-            hir::db::InternBlockQuery
-            hir::db::InternMacro2Query
-            hir::db::InternProcMacroQuery
-            hir::db::InternMacroRulesQuery
+            // // InternDatabase
+            // hir::db::InternFunctionQuery
+            // hir::db::InternStructQuery
+            // hir::db::InternUnionQuery
+            // hir::db::InternEnumQuery
+            // hir::db::InternConstQuery
+            // hir::db::InternStaticQuery
+            // hir::db::InternTraitQuery
+            // hir::db::InternTraitAliasQuery
+            // hir::db::InternTypeAliasQuery
+            // hir::db::InternImplQuery
+            // hir::db::InternExternBlockQuery
+            // hir::db::InternBlockQuery
+            // hir::db::InternMacro2Query
+            // hir::db::InternProcMacroQuery
+            // hir::db::InternMacroRulesQuery
 
-            // ExpandDatabase
-            hir::db::AstIdMapQuery
-            hir::db::DeclMacroExpanderQuery
-            hir::db::ExpandProcMacroQuery
-            hir::db::InternMacroCallQuery
-            hir::db::InternSyntaxContextQuery
-            hir::db::MacroArgQuery
-            hir::db::ParseMacroExpansionErrorQuery
-            hir::db::ParseMacroExpansionQuery
-            hir::db::ProcMacroSpanQuery
-            hir::db::ProcMacrosQuery
-            hir::db::RealSpanMapQuery
+            // // ExpandDatabase
+            // hir::db::AstIdMapQuery
+            // hir::db::DeclMacroExpanderQuery
+            // hir::db::ExpandProcMacroQuery
+            // hir::db::InternMacroCallQuery
+            // hir::db::InternSyntaxContextQuery
+            // hir::db::MacroArgQuery
+            // hir::db::ParseMacroExpansionErrorQuery
+            // hir::db::ParseMacroExpansionQuery
+            // hir::db::ProcMacroSpanQuery
+            // hir::db::ProcMacrosQuery
+            // hir::db::RealSpanMapQuery
 
-            // LineIndexDatabase
-            crate::LineIndexQuery
+            // // LineIndexDatabase
+            // crate::LineIndexQuery
 
-            // SourceDatabase
-            base_db::ParseQuery
-            base_db::ParseErrorsQuery
-            base_db::CrateGraphQuery
-            base_db::CrateWorkspaceDataQuery
+            // // SourceDatabase
+            // base_db::ParseQuery
+            // base_db::ParseErrorsQuery
+            // base_db::CrateGraphQuery
+            // base_db::CrateWorkspaceDataQuery
 
-            // SourceDatabaseExt
-            base_db::FileTextQuery
-            base_db::CompressedFileTextQuery
-            base_db::FileSourceRootQuery
-            base_db::SourceRootQuery
-            base_db::SourceRootCratesQuery
+            // // SourceDatabaseExt
+            // base_db::FileTextQuery
+            // base_db::CompressedFileTextQuery
+            // base_db::FileSourceRootQuery
+            // base_db::SourceRootQuery
+            // base_db::SourceRootCratesQuery
         ];
 
         acc.sort_by_key(|it| std::cmp::Reverse(it.1));
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
index 2f4d07446f2..af4c10f8ea6 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/famous_defs.rs
@@ -1,6 +1,6 @@
 //! See [`FamousDefs`].
 
-use base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase};
+use base_db::{CrateOrigin, LangCrateOrigin, RootQueryDb as _};
 use hir::{Crate, Enum, Function, Macro, Module, ScopeDef, Semantics, Trait};
 
 use crate::RootDatabase;
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
index 84fa58d743b..ebafc8876f2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs
@@ -2,7 +2,7 @@
 
 use std::collections::VecDeque;
 
-use base_db::SourceRootDatabase;
+use base_db::SourceDatabase;
 use hir::{Crate, ItemInNs, ModuleDef, Name, Semantics};
 use span::{Edition, FileId};
 use syntax::{
@@ -108,8 +108,8 @@ pub fn lint_eq_or_in_group(lint: &str, lint_is: &str) -> bool {
 
 pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool {
     let root_file = krate.root_file(db);
-    let source_root_id = db.file_source_root(root_file);
-    !db.source_root(source_root_id).is_library
+    let source_root_id = db.file_source_root(root_file).source_root_id(db);
+    !db.source_root(source_root_id).source_root(db).is_library
 }
 
 // FIXME: This is a weird function
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
index decb0ea9d8a..39810c615bb 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use/tests.rs
@@ -1,3 +1,4 @@
+use salsa::AsDynDatabase;
 use stdx::trim_indent;
 use test_fixture::WithFixture;
 use test_utils::{assert_eq_text, CURSOR_MARKER};
@@ -1250,9 +1251,15 @@ fn check_with_config(
 ) {
     let (db, file_id, pos) = if ra_fixture_before.contains(CURSOR_MARKER) {
         let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture_before);
+
+        let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
+
         (db, file_id, Some(range_or_offset))
     } else {
         let (db, file_id) = RootDatabase::with_single_file(ra_fixture_before);
+
+        let file_id = crate::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
+
         (db, file_id, None)
     };
     let sema = &Semantics::new(&db);
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
index 96115eee6dc..2516a9d0aa3 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs
@@ -46,13 +46,14 @@ pub mod syntax_helpers {
 }
 
 pub use hir::ChangeWithProcMacros;
+use salsa::Durability;
 
 use std::{fmt, mem::ManuallyDrop};
 
 use base_db::{
-    ra_salsa::{self, Durability},
-    AnchoredPath, CrateId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
-    DEFAULT_FILE_TEXT_LRU_CAP,
+    query_group::{self},
+    FileSourceRootInput, FileText, Files, RootQueryDb, SourceDatabase, SourceRoot, SourceRootId,
+    SourceRootInput, Upcast,
 };
 use hir::{
     db::{DefDatabase, ExpandDatabase, HirDatabase},
@@ -76,22 +77,21 @@ pub type FxIndexMap<K, V> =
 pub type FilePosition = FilePositionWrapper<FileId>;
 pub type FileRange = FileRangeWrapper<FileId>;
 
-#[ra_salsa::database(
-    base_db::SourceRootDatabaseStorage,
-    base_db::SourceDatabaseStorage,
-    hir::db::ExpandDatabaseStorage,
-    hir::db::DefDatabaseStorage,
-    hir::db::HirDatabaseStorage,
-    hir::db::InternDatabaseStorage,
-    LineIndexDatabaseStorage,
-    symbol_index::SymbolsDatabaseStorage
-)]
+#[salsa::db]
 pub struct RootDatabase {
     // We use `ManuallyDrop` here because every codegen unit that contains a
     // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
     // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes
     // compile times of all `ide_*` and downstream crates suffer greatly.
-    storage: ManuallyDrop<ra_salsa::Storage<RootDatabase>>,
+    storage: ManuallyDrop<salsa::Storage<Self>>,
+    files: Arc<Files>,
+}
+
+impl std::panic::RefUnwindSafe for RootDatabase {}
+
+#[salsa::db]
+impl salsa::Database for RootDatabase {
+    fn salsa_event(&self, _event: &dyn Fn() -> salsa::Event) {}
 }
 
 impl Drop for RootDatabase {
@@ -100,6 +100,12 @@ impl Drop for RootDatabase {
     }
 }
 
+impl Clone for RootDatabase {
+    fn clone(&self) -> Self {
+        Self { storage: self.storage.clone(), files: self.files.clone() }
+    }
+}
+
 impl fmt::Debug for RootDatabase {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("RootDatabase").finish()
@@ -127,16 +133,68 @@ impl Upcast<dyn HirDatabase> for RootDatabase {
     }
 }
 
-impl FileLoader for RootDatabase {
-    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
-        FileLoaderDelegate(self).resolve_path(path)
+impl Upcast<dyn RootQueryDb> for RootDatabase {
+    fn upcast(&self) -> &(dyn RootQueryDb + 'static) {
+        self
     }
-    fn relevant_crates(&self, file_id: FileId) -> Arc<[CrateId]> {
-        FileLoaderDelegate(self).relevant_crates(file_id)
+}
+
+impl Upcast<dyn SourceDatabase> for RootDatabase {
+    fn upcast(&self) -> &(dyn SourceDatabase + 'static) {
+        self
     }
 }
 
-impl ra_salsa::Database for RootDatabase {}
+#[salsa::db]
+impl SourceDatabase for RootDatabase {
+    fn file_text(&self, file_id: vfs::FileId) -> FileText {
+        self.files.file_text(file_id)
+    }
+
+    fn set_file_text(&mut self, file_id: vfs::FileId, text: &str) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text(self, file_id, text);
+    }
+
+    fn set_file_text_with_durability(
+        &mut self,
+        file_id: vfs::FileId,
+        text: &str,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_text_with_durability(self, file_id, text, durability);
+    }
+
+    /// Source root of the file.
+    fn source_root(&self, source_root_id: SourceRootId) -> SourceRootInput {
+        self.files.source_root(source_root_id)
+    }
+
+    fn set_source_root_with_durability(
+        &mut self,
+        source_root_id: SourceRootId,
+        source_root: Arc<SourceRoot>,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_source_root_with_durability(self, source_root_id, source_root, durability);
+    }
+
+    fn file_source_root(&self, id: vfs::FileId) -> FileSourceRootInput {
+        self.files.file_source_root(id)
+    }
+
+    fn set_file_source_root_with_durability(
+        &mut self,
+        id: vfs::FileId,
+        source_root_id: SourceRootId,
+        durability: Durability,
+    ) {
+        let files = Arc::clone(&self.files);
+        files.set_file_source_root_with_durability(self, id, source_root_id, durability);
+    }
+}
 
 impl Default for RootDatabase {
     fn default() -> RootDatabase {
@@ -146,14 +204,16 @@ impl Default for RootDatabase {
 
 impl RootDatabase {
     pub fn new(lru_capacity: Option<u16>) -> RootDatabase {
-        let mut db = RootDatabase { storage: ManuallyDrop::new(ra_salsa::Storage::default()) };
+        let mut db = RootDatabase {
+            storage: ManuallyDrop::new(salsa::Storage::default()),
+            files: Default::default(),
+        };
         db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
         db.set_proc_macros_with_durability(Default::default(), Durability::HIGH);
         db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
         db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
         db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
         db.update_base_query_lru_capacities(lru_capacity);
-        db.setup_syntax_context_root();
         db
     }
 
@@ -161,57 +221,54 @@ impl RootDatabase {
         self.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
     }
 
-    pub fn update_base_query_lru_capacities(&mut self, lru_capacity: Option<u16>) {
-        let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
-        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
-        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
-        // macro expansions are usually rather small, so we can afford to keep more of them alive
-        hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
-        hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
-        hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
+    pub fn update_base_query_lru_capacities(&mut self, _lru_capacity: Option<u16>) {
+        // let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP);
+        // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
+        // base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
+        // // macro expansions are usually rather small, so we can afford to keep more of them alive
+        // hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
+        // hir::db::BorrowckQuery.in_db_mut(self).set_lru_capacity(base_db::DEFAULT_BORROWCK_LRU_CAP);
+        // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
     }
 
-    pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, u16>) {
-        use hir::db as hir_db;
-
-        base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
-        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(ParseQuery))
-                .copied()
-                .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
-        );
-        hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(ParseMacroExpansionQuery))
-                .copied()
-                .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
-        );
-        hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
-            lru_capacities
-                .get(stringify!(BorrowckQuery))
-                .copied()
-                .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
-        );
-        hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
+    pub fn update_lru_capacities(&mut self, _lru_capacities: &FxHashMap<Box<str>, u16>) {
+        // FIXME(salsa-transition): bring this back; allow changing LRU settings at runtime.
+        // use hir::db as hir_db;
+
+        // base_db::FileTextQuery.in_db_mut(self).set_lru_capacity(DEFAULT_FILE_TEXT_LRU_CAP);
+        // base_db::ParseQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(ParseQuery))
+        //         .copied()
+        //         .unwrap_or(base_db::DEFAULT_PARSE_LRU_CAP),
+        // );
+        // hir_db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(ParseMacroExpansionQuery))
+        //         .copied()
+        //         .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
+        // );
+        // hir_db::BorrowckQuery.in_db_mut(self).set_lru_capacity(
+        //     lru_capacities
+        //         .get(stringify!(BorrowckQuery))
+        //         .copied()
+        //         .unwrap_or(base_db::DEFAULT_BORROWCK_LRU_CAP),
+        // );
+        // hir::db::BodyWithSourceMapQuery.in_db_mut(self).set_lru_capacity(2048);
     }
-}
 
-impl ra_salsa::ParallelDatabase for RootDatabase {
-    fn snapshot(&self) -> ra_salsa::Snapshot<RootDatabase> {
-        ra_salsa::Snapshot::new(RootDatabase {
-            storage: ManuallyDrop::new(self.storage.snapshot()),
-        })
+    pub fn snapshot(&self) -> Self {
+        Self { storage: self.storage.clone(), files: self.files.clone() }
     }
 }
 
-#[ra_salsa::query_group(LineIndexDatabaseStorage)]
-pub trait LineIndexDatabase: base_db::SourceDatabase {
+#[query_group::query_group]
+pub trait LineIndexDatabase: base_db::RootQueryDb {
     fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
 }
 
 fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
-    let text = db.file_text(file_id);
+    let text = db.file_text(file_id).text(db);
     Arc::new(LineIndex::new(&text))
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
index 22dc3d9e29d..74d79cd6955 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
@@ -8,12 +8,10 @@ use std::time::Duration;
 
 use hir::{db::DefDatabase, Symbol};
 use itertools::Itertools;
+use salsa::{Cancelled, Database};
 
 use crate::{
-    base_db::{
-        ra_salsa::{Database, ParallelDatabase, Snapshot},
-        Cancelled, CrateId, SourceDatabase,
-    },
+    base_db::{CrateId, RootQueryDb},
     symbol_index::SymbolsDatabase,
     FxIndexMap, RootDatabase,
 };
@@ -66,7 +64,7 @@ pub fn parallel_prime_caches(
     let (work_sender, progress_receiver) = {
         let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
         let (work_sender, work_receiver) = crossbeam_channel::unbounded();
-        let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
+        let prime_caches_worker = move |db: RootDatabase| {
             while let Ok((crate_id, crate_name, kind)) = work_receiver.recv() {
                 progress_sender
                     .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
@@ -90,7 +88,7 @@ pub fn parallel_prime_caches(
             stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker)
                 .allow_leak(true)
                 .name(format!("PrimeCaches#{id}"))
-                .spawn(move || Cancelled::catch(|| worker(db)))
+                .spawn(move || Cancelled::catch(|| worker(db.snapshot())))
                 .expect("failed to spawn thread");
         }
 
@@ -108,7 +106,7 @@ pub fn parallel_prime_caches(
     let mut additional_phases = vec![];
 
     while crates_done < crates_total {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
 
         for crate_id in &mut crates_to_prime {
             let krate = &graph[crate_id];
@@ -145,7 +143,7 @@ pub fn parallel_prime_caches(
             }
             Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
                 // our workers may have died from a cancelled task, so we'll check and re-raise here.
-                db.unwind_if_cancelled();
+                db.unwind_if_revision_cancelled();
                 break;
             }
         };
@@ -177,7 +175,7 @@ pub fn parallel_prime_caches(
     }
 
     while crates_done < crates_total {
-        db.unwind_if_cancelled();
+        db.unwind_if_revision_cancelled();
 
         // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
         // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
@@ -189,7 +187,7 @@ pub fn parallel_prime_caches(
             }
             Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
                 // our workers may have died from a cancelled task, so we'll check and re-raise here.
-                db.unwind_if_cancelled();
+                db.unwind_if_revision_cancelled();
                 break;
             }
         };
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 02cd8b8bdf5..81df0c0f0f8 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -7,7 +7,7 @@
 use std::mem;
 use std::{cell::LazyCell, cmp::Reverse};
 
-use base_db::{ra_salsa::Database, SourceDatabase, SourceRootDatabase};
+use base_db::{RootQueryDb, SourceDatabase};
 use either::Either;
 use hir::{
     sym, Adt, AsAssocItem, DefWithBody, FileRange, FileRangeWrapper, HasAttrs, HasContainer,
@@ -17,6 +17,7 @@ use hir::{
 use memchr::memmem::Finder;
 use parser::SyntaxKind;
 use rustc_hash::{FxHashMap, FxHashSet};
+use salsa::Database;
 use span::EditionedFileId;
 use syntax::{
     ast::{self, HasName, Rename},
@@ -164,8 +165,8 @@ impl SearchScope {
         let graph = db.crate_graph();
         for krate in graph.iter() {
             let root_file = graph[krate].root_file_id;
-            let source_root_id = db.file_source_root(root_file);
-            let source_root = db.source_root(source_root_id);
+            let source_root = db.file_source_root(root_file).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             entries.extend(
                 source_root.iter().map(|id| (EditionedFileId::new(id, graph[krate].edition), None)),
             );
@@ -178,8 +179,9 @@ impl SearchScope {
         let mut entries = FxHashMap::default();
         for rev_dep in of.transitive_reverse_dependencies(db) {
             let root_file = rev_dep.root_file(db);
-            let source_root_id = db.file_source_root(root_file);
-            let source_root = db.source_root(source_root_id);
+
+            let source_root = db.file_source_root(root_file).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             entries.extend(
                 source_root.iter().map(|id| (EditionedFileId::new(id, rev_dep.edition(db)), None)),
             );
@@ -190,8 +192,9 @@ impl SearchScope {
     /// Build a search scope spanning the given crate.
     fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
         let root_file = of.root_file(db);
-        let source_root_id = db.file_source_root(root_file);
-        let source_root = db.source_root(source_root_id);
+
+        let source_root_id = db.file_source_root(root_file).source_root_id(db);
+        let source_root = db.source_root(source_root_id).source_root(db);
         SearchScope {
             entries: source_root
                 .iter()
@@ -483,7 +486,7 @@ impl<'a> FindUsages<'a> {
         scope: &'b SearchScope,
     ) -> impl Iterator<Item = (Arc<str>, EditionedFileId, TextRange)> + 'b {
         scope.entries.iter().map(|(&file_id, &search_range)| {
-            let text = db.file_text(file_id.file_id());
+            let text = db.file_text(file_id.file_id()).text(db);
             let search_range =
                 search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
 
@@ -649,6 +652,8 @@ impl<'a> FindUsages<'a> {
                 for (file_text, file_id, search_range) in
                     FindUsages::scope_files(db, &current_to_process_search_scope)
                 {
+                    let file_id = crate::base_db::EditionedFileId::new(db, file_id);
+
                     let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
 
                     for offset in FindUsages::match_indices(&file_text, &finder, search_range) {
@@ -808,7 +813,9 @@ impl<'a> FindUsages<'a> {
             sink: &mut dyn FnMut(EditionedFileId, FileReference) -> bool,
         ) {
             for (file_text, file_id, search_range) in files {
-                let tree = LazyCell::new(move || this.sema.parse(file_id).syntax().clone());
+                let file_id_wrapper = crate::base_db::EditionedFileId::new(this.sema.db, file_id);
+
+                let tree = LazyCell::new(move || this.sema.parse(file_id_wrapper).syntax().clone());
 
                 for offset in FindUsages::match_indices(&file_text, finder, search_range) {
                     let usages = FindUsages::find_nodes(this.sema, name, &tree, offset)
@@ -853,7 +860,7 @@ impl<'a> FindUsages<'a> {
                 name,
                 is_possibly_self.into_iter().map(|position| {
                     (
-                        self.sema.db.file_text(position.file_id.file_id()),
+                        self.sema.db.file_text(position.file_id.file_id()).text(self.sema.db),
                         position.file_id,
                         position.range,
                     )
@@ -947,8 +954,9 @@ impl<'a> FindUsages<'a> {
         let include_self_kw_refs =
             self.include_self_kw_refs.as_ref().map(|ty| (ty, Finder::new("Self")));
         for (text, file_id, search_range) in Self::scope_files(sema.db, &search_scope) {
-            self.sema.db.unwind_if_cancelled();
-            let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
+            let file_id_wrapper = crate::base_db::EditionedFileId::new(sema.db, file_id);
+
+            let tree = LazyCell::new(move || sema.parse(file_id_wrapper).syntax().clone());
 
             // Search for occurrences of the items name
             for offset in Self::match_indices(&text, finder, search_range) {
@@ -1001,8 +1009,10 @@ impl<'a> FindUsages<'a> {
             let finder = &Finder::new("super");
 
             for (text, file_id, search_range) in Self::scope_files(sema.db, &scope) {
-                self.sema.db.unwind_if_cancelled();
-                let tree = LazyCell::new(move || sema.parse(file_id).syntax().clone());
+                self.sema.db.unwind_if_revision_cancelled();
+
+                let file_id_wrapper = crate::base_db::EditionedFileId::new(sema.db, file_id);
+                let tree = LazyCell::new(move || sema.parse(file_id_wrapper).syntax().clone());
 
                 for offset in Self::match_indices(&text, finder, search_range) {
                     for name_ref in Self::find_nodes(sema, "super", &tree, offset)
@@ -1050,10 +1060,13 @@ impl<'a> FindUsages<'a> {
                     return;
                 };
 
-                let text = sema.db.file_text(file_id.file_id());
+                let file_text = sema.db.file_text(file_id.file_id());
+                let text = file_text.text(sema.db);
                 let search_range =
                     search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(&*text)));
 
+                let file_id = crate::base_db::EditionedFileId::new(sema.db, file_id);
+
                 let tree = LazyCell::new(|| sema.parse(file_id).syntax().clone());
                 let finder = &Finder::new("self");
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index 2737436993d..5fea97b32db 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -27,10 +27,7 @@ use std::{
     ops::ControlFlow,
 };
 
-use base_db::{
-    ra_salsa::{self, ParallelDatabase},
-    SourceRootDatabase, SourceRootId, Upcast,
-};
+use base_db::{RootQueryDb, SourceDatabase, SourceRootId, Upcast};
 use fst::{raw::IndexedValue, Automaton, Streamer};
 use hir::{
     db::HirDatabase,
@@ -99,8 +96,8 @@ impl Query {
     }
 }
 
-#[ra_salsa::query_group(SymbolsDatabaseStorage)]
-pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirDatabase> {
+#[query_group::query_group]
+pub trait SymbolsDatabase: HirDatabase + SourceDatabase + Upcast<dyn HirDatabase> {
     /// The symbol index for a given module. These modules should only be in source roots that
     /// are inside local_roots.
     fn module_symbols(&self, module: Module) -> Arc<SymbolIndex>;
@@ -108,18 +105,18 @@ pub trait SymbolsDatabase: HirDatabase + SourceRootDatabase + Upcast<dyn HirData
     /// The symbol index for a given source root within library_roots.
     fn library_symbols(&self, source_root_id: SourceRootId) -> Arc<SymbolIndex>;
 
-    #[ra_salsa::transparent]
+    #[salsa::transparent]
     /// The symbol indices of modules that make up a given crate.
     fn crate_symbols(&self, krate: Crate) -> Box<[Arc<SymbolIndex>]>;
 
     /// The set of "local" (that is, from the current workspace) roots.
     /// Files in local roots are assumed to change frequently.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn local_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
 
     /// The set of roots for crates.io libraries.
     /// Files in libraries are assumed to never change.
-    #[ra_salsa::input]
+    #[salsa::input]
     fn library_roots(&self) -> Arc<FxHashSet<SourceRootId>>;
 }
 
@@ -150,26 +147,6 @@ pub fn crate_symbols(db: &dyn SymbolsDatabase, krate: Crate) -> Box<[Arc<SymbolI
     krate.modules(db.upcast()).into_iter().map(|module| db.module_symbols(module)).collect()
 }
 
-/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
-struct Snap<DB>(DB);
-impl<DB: ParallelDatabase> Snap<ra_salsa::Snapshot<DB>> {
-    fn new(db: &DB) -> Self {
-        Self(db.snapshot())
-    }
-}
-impl<DB: ParallelDatabase> Clone for Snap<ra_salsa::Snapshot<DB>> {
-    fn clone(&self) -> Snap<ra_salsa::Snapshot<DB>> {
-        Snap(self.0.snapshot())
-    }
-}
-impl<DB> std::ops::Deref for Snap<DB> {
-    type Target = DB;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
-
 // Feature: Workspace Symbol
 //
 // Uses fuzzy-search to find types, modules and functions by name across your
@@ -201,7 +178,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
     let indices: Vec<_> = if query.libs {
         db.library_roots()
             .par_iter()
-            .map_with(Snap::new(db), |snap, &root| snap.library_symbols(root))
+            .map_with(db.clone(), |snap, &root| snap.library_symbols(root))
             .collect()
     } else {
         let mut crates = Vec::new();
@@ -211,7 +188,7 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> {
         }
         let indices: Vec<_> = crates
             .into_par_iter()
-            .map_with(Snap::new(db), |snap, krate| snap.crate_symbols(krate.into()))
+            .map_with(db.clone(), |snap, krate| snap.crate_symbols(krate.into()))
             .collect();
         indices.iter().flat_map(|indices| indices.iter().cloned()).collect()
     };
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
index e085bf15cb9..21dd098781a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/syntax_helpers/suggest_name.rs
@@ -457,9 +457,11 @@ mod tests {
     fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: &str) {
         let (db, file_id, range_or_offset) = RootDatabase::with_range_or_offset(ra_fixture);
         let frange = FileRange { file_id, range: range_or_offset.into() };
-
         let sema = Semantics::new(&db);
-        let source_file = sema.parse(frange.file_id);
+
+        let file_id = crate::base_db::EditionedFileId::new(sema.db, frange.file_id);
+        let source_file = sema.parse(file_id);
+
         let element = source_file.syntax().covering_element(frange.range);
         let expr =
             element.ancestors().find_map(ast::Expr::cast).expect("selection is not an expression");
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
index efcf53ded64..0cae7f367c2 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_doc_alias.txt
@@ -14,7 +14,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                4401,
                             ),
                         },
                     ),
@@ -47,7 +47,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
@@ -80,7 +80,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
@@ -113,7 +113,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
@@ -146,7 +146,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
@@ -179,7 +179,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                4401,
                             ),
                         },
                     ),
@@ -212,7 +212,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                4400,
                             ),
                         },
                     ),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
index 7dce95592b8..48de1fb837a 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
+++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt
@@ -13,7 +13,7 @@
                 def: TypeAlias(
                     TypeAlias {
                         id: TypeAliasId(
-                            0,
+                            8400,
                         ),
                     },
                 ),
@@ -44,7 +44,7 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            0,
+                            7c00,
                         ),
                     },
                 ),
@@ -75,7 +75,7 @@
                 def: Const(
                     Const {
                         id: ConstId(
-                            2,
+                            7c02,
                         ),
                     },
                 ),
@@ -107,7 +107,7 @@
                     Enum(
                         Enum {
                             id: EnumId(
-                                0,
+                                6400,
                             ),
                         },
                     ),
@@ -140,7 +140,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                6000,
                             ),
                         ),
                     },
@@ -173,7 +173,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                6000,
                             ),
                         ),
                     },
@@ -205,7 +205,7 @@
                 def: Static(
                     Static {
                         id: StaticId(
-                            0,
+                            8000,
                         ),
                     },
                 ),
@@ -237,7 +237,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                1,
+                                5c01,
                             ),
                         },
                     ),
@@ -270,14 +270,14 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                0,
+                                5c00,
                             ),
                         },
                     ),
                 ),
                 loc: DeclarationLocation {
                     hir_file_id: MacroFile(
-                        0,
+                        Id(4800),
                     ),
                     ptr: SyntaxNodePtr {
                         kind: STRUCT,
@@ -300,7 +300,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                5,
+                                5c05,
                             ),
                         },
                     ),
@@ -335,7 +335,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                6,
+                                5c06,
                             ),
                         },
                     ),
@@ -370,7 +370,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                7,
+                                5c07,
                             ),
                         },
                     ),
@@ -403,7 +403,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                2,
+                                5c02,
                             ),
                         },
                     ),
@@ -435,7 +435,7 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            0,
+                            7400,
                         ),
                     },
                 ),
@@ -467,7 +467,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                6000,
                             ),
                         ),
                     },
@@ -500,7 +500,7 @@
                     Union(
                         Union {
                             id: UnionId(
-                                0,
+                                6c00,
                             ),
                         },
                     ),
@@ -599,7 +599,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                1,
+                                4401,
                             ),
                         ),
                     },
@@ -631,7 +631,7 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            2,
+                            7802,
                         ),
                     },
                 ),
@@ -664,7 +664,7 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            1,
+                            7801,
                         ),
                     },
                 ),
@@ -698,7 +698,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                0,
+                                4400,
                             ),
                         ),
                     },
@@ -730,7 +730,7 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            0,
+                            7800,
                         ),
                     },
                 ),
@@ -762,7 +762,7 @@
                     Macro {
                         id: MacroRulesId(
                             MacroRulesId(
-                                1,
+                                4401,
                             ),
                         ),
                     },
@@ -794,7 +794,7 @@
                 def: Function(
                     Function {
                         id: FunctionId(
-                            3,
+                            7803,
                         ),
                     },
                 ),
@@ -839,7 +839,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                3,
+                                5c03,
                             ),
                         },
                     ),
@@ -882,7 +882,7 @@
                 def: Trait(
                     Trait {
                         id: TraitId(
-                            0,
+                            7400,
                         ),
                     },
                 ),
@@ -914,7 +914,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                6000,
                             ),
                         ),
                     },
@@ -947,7 +947,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4,
+                                5c04,
                             ),
                         },
                     ),
@@ -980,7 +980,7 @@
                     Macro {
                         id: Macro2Id(
                             Macro2Id(
-                                0,
+                                6000,
                             ),
                         ),
                     },
@@ -1013,7 +1013,7 @@
                     Struct(
                         Struct {
                             id: StructId(
-                                4,
+                                5c04,
                             ),
                         },
                     ),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
index 0f67496d098..fb231393a47 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/traits.rs
@@ -116,6 +116,7 @@ mod tests {
     use expect_test::{expect, Expect};
     use hir::FilePosition;
     use hir::Semantics;
+    use salsa::AsDynDatabase;
     use span::Edition;
     use syntax::ast::{self, AstNode};
     use test_fixture::ChangeFixture;
@@ -138,7 +139,11 @@ mod tests {
     fn check_trait(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         let (db, position) = position(ra_fixture);
         let sema = Semantics::new(&db);
-        let file = sema.parse(position.file_id);
+
+        let editioned_file_id =
+            crate::base_db::EditionedFileId::new(sema.db.as_dyn_database(), position.file_id);
+
+        let file = sema.parse(editioned_file_id);
         let impl_block: ast::Impl =
             sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
         let trait_ = crate::traits::resolve_target_trait(&sema, &impl_block);
@@ -152,7 +157,11 @@ mod tests {
     fn check_missing_assoc(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         let (db, position) = position(ra_fixture);
         let sema = Semantics::new(&db);
-        let file = sema.parse(position.file_id);
+
+        let editioned_file_id =
+            crate::base_db::EditionedFileId::new(sema.db.as_dyn_database(), position.file_id);
+
+        let file = sema.parse(editioned_file_id);
         let impl_block: ast::Impl =
             sema.find_node_at_offset_with_descend(file.syntax(), position.offset).unwrap();
         let items = crate::traits::get_missing_assoc_items(&sema, &impl_block);
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
index c726a3bcd3c..f17e9e64ab7 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs
@@ -1053,7 +1053,7 @@ fn test() -> String {
 fn f() {
     let mut x = (|| 1, 2);
     x = (|| 3, 4);
-       //^^^^ error: expected {closure#0}, found {closure#1}
+       //^^^^ error: expected {closure#23552}, found {closure#23553}
 }
             "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index f3109b9bb73..550751b6c01 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -3,9 +3,10 @@
 use std::iter;
 
 use hir::{db::DefDatabase, DefMap, InFile, ModuleSource};
+use ide_db::base_db::RootQueryDb;
 use ide_db::text_edit::TextEdit;
 use ide_db::{
-    base_db::{FileLoader, SourceDatabase, SourceRootDatabase},
+    base_db::{SourceDatabase, Upcast},
     source_change::SourceChange,
     FileId, FileRange, LineIndexDatabase,
 };
@@ -48,6 +49,7 @@ pub(crate) fn unlinked_file(
         // Only show this diagnostic on the first three characters of
         // the file, to avoid overwhelming the user during startup.
         range = SourceDatabase::file_text(ctx.sema.db, file_id)
+            .text(ctx.sema.db)
             .char_indices()
             .take(3)
             .last()
@@ -78,7 +80,11 @@ fn fixes(
     // If there's an existing module that could add `mod` or `pub mod` items to include the unlinked file,
     // suggest that as a fix.
 
-    let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
+    let db = ctx.sema.db;
+
+    let source_root = ctx.sema.db.file_source_root(file_id).source_root_id(db);
+    let source_root = ctx.sema.db.source_root(source_root).source_root(db);
+
     let our_path = source_root.path_for_file(&file_id)?;
     let parent = our_path.parent()?;
     let (module_name, _) = our_path.name_and_extension()?;
@@ -93,7 +99,8 @@ fn fixes(
     };
 
     // check crate roots, i.e. main.rs, lib.rs, ...
-    'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) {
+    let relevant_crates = Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(file_id);
+    'crates: for &krate in &*relevant_crates {
         let crate_def_map = ctx.sema.db.crate_def_map(krate);
 
         let root_module = &crate_def_map[DefMap::ROOT];
@@ -141,7 +148,8 @@ fn fixes(
             paths.into_iter().find_map(|path| source_root.file_for_path(&path))
         })?;
     stack.pop();
-    'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
+    let relevant_crates = Upcast::<dyn RootQueryDb>::upcast(db).relevant_crates(parent_id);
+    'crates: for &krate in relevant_crates.iter() {
         let crate_def_map = ctx.sema.db.crate_def_map(krate);
         let Some((_, module)) = crate_def_map.modules().find(|(_, module)| {
             module.origin.file_id().map(Into::into) == Some(parent_id) && !module.origin.is_inline()
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
index e15d3495789..5ce3336eb41 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs
@@ -85,9 +85,10 @@ use hir::{
     db::ExpandDatabase, diagnostics::AnyDiagnostic, Crate, DisplayTarget, HirFileId, InFile,
     Semantics,
 };
+use ide_db::base_db::salsa::AsDynDatabase;
 use ide_db::{
     assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
-    base_db::{ReleaseChannel, SourceDatabase},
+    base_db::{ReleaseChannel, RootQueryDb as _},
     generated::lints::{Lint, LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINTS, DEFAULT_LINT_GROUPS},
     imports::insert_use::InsertUseConfig,
     label::Label,
@@ -319,12 +320,17 @@ pub fn syntax_diagnostics(
     }
 
     let sema = Semantics::new(db);
-    let file_id = sema
+    let editioned_file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
 
+    let (file_id, _) = editioned_file_id.unpack();
+
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
     // [#3434] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
-    db.parse_errors(file_id)
+    db.parse_errors(editioned_file_id_wrapper)
         .as_deref()
         .into_iter()
         .flatten()
@@ -333,7 +339,7 @@ pub fn syntax_diagnostics(
             Diagnostic::new(
                 DiagnosticCode::SyntaxError,
                 format!("Syntax Error: {err}"),
-                FileRange { file_id: file_id.into(), range: err.range() },
+                FileRange { file_id, range: err.range() },
             )
         })
         .collect()
@@ -349,26 +355,31 @@ pub fn semantic_diagnostics(
 ) -> Vec<Diagnostic> {
     let _p = tracing::info_span!("semantic_diagnostics").entered();
     let sema = Semantics::new(db);
-    let file_id = sema
+    let editioned_file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
+
+    let (file_id, edition) = editioned_file_id.unpack();
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
     let mut res = Vec::new();
 
-    let parse = sema.parse(file_id);
+    let parse = sema.parse(editioned_file_id_wrapper);
 
     // FIXME: This iterates the entire file which is a rather expensive operation.
     // We should implement these differently in some form?
     // Salsa caching + incremental re-parse would be better here
     for node in parse.syntax().descendants() {
-        handlers::useless_braces::useless_braces(&mut res, file_id, &node);
-        handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
+        handlers::useless_braces::useless_braces(&mut res, editioned_file_id, &node);
+        handlers::field_shorthand::field_shorthand(&mut res, editioned_file_id, &node);
         handlers::json_is_not_rust::json_in_items(
             &sema,
             &mut res,
-            file_id,
+            editioned_file_id,
             &node,
             config,
-            file_id.edition(),
+            edition,
         );
     }
 
@@ -382,25 +393,19 @@ pub fn semantic_diagnostics(
         (*db.crate_graph().crates_in_topological_order().last().unwrap()).into()
     });
     let display_target = krate.to_display_target(db);
-    let ctx = DiagnosticsContext {
-        config,
-        sema,
-        resolve,
-        edition: file_id.edition(),
-        is_nightly,
-        display_target,
-    };
+    let ctx = DiagnosticsContext { config, sema, resolve, edition, is_nightly, display_target };
 
     let mut diags = Vec::new();
     match module {
         // A bunch of parse errors in a file indicate some bigger structural parse changes in the
         // file, so we skip semantic diagnostics so we can show these faster.
         Some(m) => {
-            if db.parse_errors(file_id).as_deref().is_none_or(|es| es.len() < 16) {
+            if db.parse_errors(editioned_file_id_wrapper).as_deref().is_none_or(|es| es.len() < 16)
+            {
                 m.diagnostics(db, &mut diags, config.style_lints);
             }
         }
-        None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id.file_id()),
+        None => handlers::unlinked_file::unlinked_file(&ctx, &mut res, editioned_file_id.file_id()),
     }
 
     for diag in diags {
@@ -517,7 +522,7 @@ pub fn semantic_diagnostics(
         &mut FxHashMap::default(),
         &mut lints,
         &mut Vec::new(),
-        file_id.edition(),
+        editioned_file_id.edition(),
     );
 
     res.retain(|d| d.severity != Severity::Allow);
@@ -559,7 +564,7 @@ fn handle_diag_from_macros(
     let span_map = sema.db.expansion_span_map(macro_file);
     let mut spans = span_map.spans_for_range(node.text_range());
     if spans.any(|span| {
-        sema.db.lookup_intern_syntax_context(span.ctx).outer_expn.is_some_and(|expansion| {
+        span.ctx.outer_expn(sema.db).is_some_and(|expansion| {
             let macro_call =
                 sema.db.lookup_intern_macro_call(expansion.as_macro_file().macro_call_id);
             // We don't want to show diagnostics for non-local macros at all, but proc macros authors
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
index fc2a7db7174..7b33bbdaa22 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs
@@ -85,7 +85,7 @@ fn check_nth_fix_with_config(
     let actual = {
         let source_change = fix.source_change.as_ref().unwrap();
         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-        let mut actual = db.file_text(file_id).to_string();
+        let mut actual = db.file_text(file_id).text(&db).to_string();
 
         for (edit, snippet_edit) in source_change.source_file_edits.values() {
             edit.apply(&mut actual);
@@ -142,7 +142,7 @@ pub(crate) fn check_has_fix(
                     let actual = {
                         let source_change = fix.source_change.as_ref().unwrap();
                         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-                        let mut actual = db.file_text(file_id).to_string();
+                        let mut actual = db.file_text(file_id).text(&db).to_string();
 
                         for (edit, snippet_edit) in source_change.source_file_edits.values() {
                             edit.apply(&mut actual);
@@ -190,7 +190,7 @@ pub(crate) fn check_has_single_fix(
                     let actual = {
                         let source_change = fix.source_change.as_ref().unwrap();
                         let file_id = *source_change.source_file_edits.keys().next().unwrap();
-                        let mut actual = db.file_text(file_id).to_string();
+                        let mut actual = db.file_text(file_id).text(&db).to_string();
 
                         for (edit, snippet_edit) in source_change.source_file_edits.values() {
                             edit.apply(&mut actual);
@@ -276,7 +276,7 @@ pub(crate) fn check_diagnostics_with_config(
         let line_index = db.line_index(file_id);
 
         let mut actual = annotations.remove(&file_id).unwrap_or_default();
-        let expected = extract_annotations(&db.file_text(file_id));
+        let expected = extract_annotations(&db.file_text(file_id).text(&db));
         actual.sort_by_key(|(range, _)| range.start());
         // FIXME: We should panic on duplicates instead, but includes currently cause us to report
         // diagnostics twice for the calling module when both files are queried.
@@ -289,7 +289,7 @@ pub(crate) fn check_diagnostics_with_config(
             for (e, _) in &actual {
                 eprintln!(
                     "Code in range {e:?} = {}",
-                    &db.file_text(file_id)[usize::from(e.start())..usize::from(e.end())]
+                    &db.file_text(file_id).text(&db)[usize::from(e.start())..usize::from(e.end())]
                 )
             }
         }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
index a14e69030e3..f6fe705a987 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/from_comment.rs
@@ -1,7 +1,10 @@
 //! This module allows building an SSR MatchFinder by parsing the SSR rule
 //! from a comment.
 
-use ide_db::{base_db::SourceDatabase, EditionedFileId, FilePosition, FileRange, RootDatabase};
+use ide_db::{
+    base_db::{salsa::AsDynDatabase, RootQueryDb},
+    EditionedFileId, FilePosition, FileRange, RootDatabase,
+};
 use syntax::{
     ast::{self, AstNode, AstToken},
     TextRange,
@@ -17,7 +20,11 @@ pub fn ssr_from_comment(
     frange: FileRange,
 ) -> Option<(MatchFinder<'_>, TextRange)> {
     let comment = {
-        let file = db.parse(EditionedFileId::current_edition(frange.file_id));
+        let editioned_file_id = EditionedFileId::current_edition(frange.file_id);
+        let file_id =
+            ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), editioned_file_id);
+
+        let file = db.parse(file_id);
         file.tree().syntax().token_at_offset(frange.range.start()).find_map(ast::Comment::cast)
     }?;
     let comment_text_without_prefix = comment.text().strip_prefix(comment.prefix()).unwrap();
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
index 889258c94c5..971547ca1f8 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
@@ -80,8 +80,12 @@ pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Matc
 
 use crate::{errors::bail, matching::MatchFailureReason};
 use hir::{FileRange, Semantics};
+use ide_db::symbol_index::SymbolsDatabase;
 use ide_db::text_edit::TextEdit;
-use ide_db::{base_db::SourceDatabase, EditionedFileId, FileId, FxHashMap, RootDatabase};
+use ide_db::{
+    base_db::{salsa::AsDynDatabase, SourceDatabase},
+    EditionedFileId, FileId, FxHashMap, RootDatabase,
+};
 use resolving::ResolvedRule;
 use syntax::{ast, AstNode, SyntaxNode, TextRange};
 
@@ -137,10 +141,11 @@ impl<'db> MatchFinder<'db> {
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
     pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
-        use ide_db::base_db::SourceRootDatabase;
-        use ide_db::symbol_index::SymbolsDatabase;
-        if let Some(first_file_id) =
-            db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
+        if let Some(first_file_id) = db
+            .local_roots()
+            .iter()
+            .next()
+            .and_then(|root| db.source_root(*root).source_root(db).iter().next())
         {
             MatchFinder::in_context(
                 db,
@@ -184,7 +189,7 @@ impl<'db> MatchFinder<'db> {
                     replacing::matches_to_edit(
                         self.sema.db,
                         &matches,
-                        &self.sema.db.file_text(file_id),
+                        &self.sema.db.file_text(file_id).text(self.sema.db),
                         &self.rules,
                     ),
                 )
@@ -223,9 +228,12 @@ impl<'db> MatchFinder<'db> {
         file_id: EditionedFileId,
         snippet: &str,
     ) -> Vec<MatchDebugInfo> {
-        let file = self.sema.parse(file_id);
+        let editioned_file_id_wrapper =
+            ide_db::base_db::EditionedFileId::new(self.sema.db.as_dyn_database(), file_id);
+
+        let file = self.sema.parse(editioned_file_id_wrapper);
         let mut res = Vec::new();
-        let file_text = self.sema.db.file_text(file_id.into());
+        let file_text = self.sema.db.file_text(file_id.into()).text(self.sema.db);
         let mut remaining_text = &*file_text;
         let mut base = 0;
         let len = snippet.len() as u32;
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
index e219ba4bf63..d32ba06f1eb 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/matching.rs
@@ -7,7 +7,7 @@ use crate::{
     SsrMatches,
 };
 use hir::{FileRange, ImportPathConfig, Semantics};
-use ide_db::{base_db::SourceDatabase, FxHashMap};
+use ide_db::{base_db::RootQueryDb, FxHashMap};
 use std::{cell::Cell, iter::Peekable};
 use syntax::{
     ast::{self, AstNode, AstToken, HasGenericArgs},
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
index 270ee0b3ec9..8c98d8de918 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/resolving.rs
@@ -1,7 +1,7 @@
 //! This module is responsible for resolving paths within rules.
 
 use hir::AsAssocItem;
-use ide_db::FxHashMap;
+use ide_db::{base_db::salsa::AsDynDatabase, FxHashMap};
 use parsing::Placeholder;
 use syntax::{
     ast::{self, HasGenericArgs},
@@ -198,7 +198,12 @@ impl<'db> ResolutionScope<'db> {
         resolve_context: hir::FilePosition,
     ) -> Option<ResolutionScope<'db>> {
         use syntax::ast::AstNode;
-        let file = sema.parse(resolve_context.file_id);
+        let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+            sema.db.as_dyn_database(),
+            resolve_context.file_id,
+        );
+
+        let file = sema.parse(editioned_file_id_wrapper);
         // Find a node at the requested position, falling back to the whole file.
         let node = file
             .syntax()
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
index b1cade39266..b094712e1bd 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs
@@ -7,6 +7,7 @@ use crate::{
 };
 use hir::FileRange;
 use ide_db::{
+    base_db::salsa::AsDynDatabase,
     defs::Definition,
     search::{SearchScope, UsageSearchResult},
     EditionedFileId, FileId, FxHashSet,
@@ -74,7 +75,12 @@ impl MatchFinder<'_> {
         resolved_path: &ResolvedPath,
         file_range: FileRange,
     ) -> Vec<SyntaxNode> {
-        let file = self.sema.parse(file_range.file_id);
+        let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+            self.sema.db.as_dyn_database(),
+            file_range.file_id,
+        );
+
+        let file = self.sema.parse(editioned_file_id_wrapper);
         let depth = resolved_path.depth as usize;
         let offset = file_range.range.start();
 
@@ -156,10 +162,10 @@ impl MatchFinder<'_> {
     fn search_files_do(&self, mut callback: impl FnMut(FileId)) {
         if self.restrict_ranges.is_empty() {
             // Unrestricted search.
-            use ide_db::base_db::SourceRootDatabase;
+            use ide_db::base_db::SourceDatabase;
             use ide_db::symbol_index::SymbolsDatabase;
             for &root in self.sema.db.local_roots().iter() {
-                let sr = self.sema.db.source_root(root);
+                let sr = self.sema.db.source_root(root).source_root(self.sema.db);
                 for file_id in sr.iter() {
                     callback(file_id);
                 }
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
index d783e195252..0b510c9c6b2 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-ssr/src/tests.rs
@@ -1,7 +1,7 @@
 use expect_test::{expect, Expect};
 use hir::{FilePosition, FileRange};
 use ide_db::{
-    base_db::{ra_salsa::Durability, SourceDatabase},
+    base_db::{salsa::Durability, SourceDatabase},
     EditionedFileId, FxHashSet,
 };
 use test_utils::RangeOrOffset;
@@ -114,7 +114,7 @@ fn assert_ssr_transforms(rules: &[&str], input: &str, expected: Expect) {
     }
     // Note, db.file_text is not necessarily the same as `input`, since fixture parsing alters
     // stuff.
-    let mut actual = db.file_text(position.file_id.into()).to_string();
+    let mut actual = db.file_text(position.file_id.into()).text(&db).to_string();
     edits[&position.file_id.into()].apply(&mut actual);
     expected.assert_eq(&actual);
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
index 8d2ca33bf25..0acb129e939 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs
@@ -12,7 +12,7 @@ use url::Url;
 
 use hir::{db::HirDatabase, sym, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
 use ide_db::{
-    base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, SourceDatabase},
+    base_db::{CrateOrigin, LangCrateOrigin, ReleaseChannel, RootQueryDb},
     defs::{Definition, NameClass, NameRefClass},
     documentation::{docs_with_rangemap, Documentation, HasDocs},
     helpers::pick_best_token,
diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
index b09e3a3c804..d70a3f9706c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/doc_links/tests.rs
@@ -43,7 +43,7 @@ fn check_external_docs(
 
 fn check_rewrite(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
     let (analysis, position) = fixture::position(ra_fixture);
-    let sema = &Semantics::new(&*analysis.db);
+    let sema = &Semantics::new(&analysis.db);
     let (cursor_def, docs) = def_under_cursor(sema, &position);
     let res = rewrite_links(sema.db, docs.as_str(), cursor_def);
     expect.assert_eq(&res)
@@ -54,7 +54,7 @@ fn check_doc_links(#[rust_analyzer::rust_fixture] ra_fixture: &str) {
 
     let (analysis, position, mut expected) = fixture::annotations(ra_fixture);
     expected.sort_by_key(key_fn);
-    let sema = &Semantics::new(&*analysis.db);
+    let sema = &Semantics::new(&analysis.db);
     let (cursor_def, docs) = def_under_cursor(sema, &position);
     let defs = extract_definitions_from_docs(&docs);
     let actual: Vec<_> = defs
diff --git a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
index ad4308e06a1..2347e2e8a36 100644
--- a/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/expand_macro.rs
@@ -234,7 +234,6 @@ fn _format(
     file_id: FileId,
     expansion: &str,
 ) -> Option<String> {
-    use ide_db::base_db::{FileLoader, SourceDatabase};
     // hack until we get hygiene working (same character amount to preserve formatting as much as possible)
     const DOLLAR_CRATE_REPLACE: &str = "__r_a_";
     const BUILTIN_REPLACE: &str = "builtin__POUND";
@@ -248,8 +247,9 @@ fn _format(
     };
     let expansion = format!("{prefix}{expansion}{suffix}");
 
-    let &crate_id = db.relevant_crates(file_id).iter().next()?;
-    let edition = db.crate_graph()[crate_id].edition;
+    let upcast_db = ide_db::base_db::Upcast::<dyn ide_db::base_db::RootQueryDb>::upcast(db);
+    let &crate_id = upcast_db.relevant_crates(file_id).iter().next()?;
+    let edition = upcast_db.crate_graph()[crate_id].edition;
 
     #[allow(clippy::disallowed_methods)]
     let mut cmd = std::process::Command::new(toolchain::Tool::Rustfmt.path());
diff --git a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
index 5ed21444307..0e5bb89b6b7 100644
--- a/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/fetch_crates.rs
@@ -1,5 +1,5 @@
 use ide_db::{
-    base_db::{CrateOrigin, SourceDatabase},
+    base_db::{CrateOrigin, RootQueryDb},
     FileId, FxIndexSet, RootDatabase,
 };
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 60a904233a9..84138986f66 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -10,7 +10,7 @@ use hir::{
     ModuleDef, Semantics,
 };
 use ide_db::{
-    base_db::{AnchoredPath, FileLoader, SourceDatabase},
+    base_db::{AnchoredPath, RootQueryDb, SourceDatabase, Upcast},
     defs::{Definition, IdentClass},
     famous_defs::FamousDefs,
     helpers::pick_best_token,
@@ -216,8 +216,9 @@ fn try_lookup_include_path(
     }
     let path = token.value().ok()?;
 
-    let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
-    let size = sema.db.file_text(file_id).len().try_into().ok()?;
+    let file_id = Upcast::<dyn RootQueryDb>::upcast(sema.db)
+        .resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
+    let size = sema.db.file_text(file_id).text(sema.db).len().try_into().ok()?;
     Some(NavigationTarget {
         file_id,
         full_range: TextRange::new(0.into(), size),
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 6463206596a..0ef7eb503f6 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -2,6 +2,7 @@ use std::iter;
 
 use hir::{db, FilePosition, FileRange, HirFileId, InFile, Semantics};
 use ide_db::{
+    base_db::salsa::AsDynDatabase,
     defs::{Definition, IdentClass},
     helpers::pick_best_token,
     search::{FileReference, ReferenceCategory, SearchScope},
@@ -60,7 +61,10 @@ pub(crate) fn highlight_related(
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    let syntax = sema.parse(file_id).syntax().clone();
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
+
+    let syntax = sema.parse(editioned_file_id_wrapper).syntax().clone();
 
     let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
         T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 31ef89a07cd..5d888ceb5ed 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -8,7 +8,7 @@ use hir::{
     MethodViolationCode, Name, Semantics, Symbol, Trait, Type, TypeInfo, VariantDef,
 };
 use ide_db::{
-    base_db::SourceDatabase,
+    base_db::RootQueryDb,
     defs::Definition,
     documentation::HasDocs,
     famous_defs::FamousDefs,
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index 6b470d921f7..736d355ef22 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -47,7 +47,7 @@ fn check(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect: Expect) {
         .unwrap();
 
     let content = analysis.db.file_text(position.file_id);
-    let hovered_element = &content[hover.range];
+    let hovered_element = &content.text(&analysis.db)[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
     expect.assert_eq(&actual)
@@ -72,7 +72,7 @@ fn check_hover_fields_limit(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -98,7 +98,7 @@ fn check_hover_enum_variants_limit(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -124,7 +124,7 @@ fn check_assoc_count(
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -141,7 +141,7 @@ fn check_hover_no_links(#[rust_analyzer::rust_fixture] ra_fixture: &str, expect:
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -158,7 +158,7 @@ fn check_hover_no_memory_layout(#[rust_analyzer::rust_fixture] ra_fixture: &str,
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
@@ -179,7 +179,7 @@ fn check_hover_no_markdown(#[rust_analyzer::rust_fixture] ra_fixture: &str, expe
         .unwrap()
         .unwrap();
 
-    let content = analysis.db.file_text(position.file_id);
+    let content = analysis.db.file_text(position.file_id).text(&analysis.db);
     let hovered_element = &content[hover.range];
 
     let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index 6babdff52a2..3a709e71fbe 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -8,7 +8,7 @@ use hir::{
     sym, ClosureStyle, DisplayTarget, HasVisibility, HirDisplay, HirDisplayError, HirWrite,
     ModuleDef, ModuleDefId, Semantics,
 };
-use ide_db::{famous_defs::FamousDefs, FileRange, RootDatabase};
+use ide_db::{base_db::salsa::AsDynDatabase, famous_defs::FamousDefs, FileRange, RootDatabase};
 use ide_db::{text_edit::TextEdit, FxHashSet};
 use itertools::Itertools;
 use smallvec::{smallvec, SmallVec};
@@ -86,7 +86,9 @@ pub(crate) fn inlay_hints(
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    let file = sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
+    let file = sema.parse(editioned_file_id_wrapper);
     let file = file.syntax();
 
     let mut acc = Vec::new();
@@ -137,7 +139,9 @@ pub(crate) fn inlay_hints_resolve(
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    let file = sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
+    let file = sema.parse(editioned_file_id_wrapper);
     let file = file.syntax();
 
     let scope = sema.scope(file)?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index 4379153acaa..592c8603964 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -868,15 +868,15 @@ fn main() {
 //- minicore: fn
 fn main() {
     let x = || 2;
-      //^ {closure#0}
+      //^ {closure#26624}
     let y = |t: i32| x() + t;
-      //^ {closure#1}
+      //^ {closure#26625}
     let mut t = 5;
           //^ i32
     let z = |k: i32| { t += k; };
-      //^ {closure#2}
+      //^ {closure#26626}
     let p = (y, z);
-      //^ ({closure#1}, {closure#2})
+      //^ ({closure#26625}, {closure#26626})
 }
             "#,
         );
diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret.rs b/src/tools/rust-analyzer/crates/ide/src/interpret.rs
index 74dad488b4d..0499d8a447f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/interpret.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/interpret.rs
@@ -1,5 +1,5 @@
 use hir::{ConstEvalError, DefWithBody, DisplayTarget, Semantics};
-use ide_db::{base_db::SourceRootDatabase, FilePosition, LineIndexDatabase, RootDatabase};
+use ide_db::{base_db::SourceDatabase, FilePosition, LineIndexDatabase, RootDatabase};
 use std::time::{Duration, Instant};
 use stdx::format_to;
 use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
@@ -35,10 +35,10 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<(Dura
         _ => return None,
     };
     let span_formatter = |file_id, text_range: TextRange| {
-        let path = &db
-            .source_root(db.file_source_root(file_id))
-            .path_for_file(&file_id)
-            .map(|x| x.to_string());
+        let source_root = db.file_source_root(file_id).source_root_id(db);
+        let source_root = db.source_root(source_root).source_root(db);
+
+        let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
         let path = path.as_deref().unwrap_or("<unknown file>");
         match db.line_index(file_id).try_line_col(text_range.start()) {
             Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
@@ -64,10 +64,9 @@ pub(crate) fn render_const_eval_error(
     display_target: DisplayTarget,
 ) -> String {
     let span_formatter = |file_id, text_range: TextRange| {
-        let path = &db
-            .source_root(db.file_source_root(file_id))
-            .path_for_file(&file_id)
-            .map(|x| x.to_string());
+        let source_root = db.file_source_root(file_id).source_root_id(db);
+        let source_root = db.source_root(source_root).source_root(db);
+        let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
         let path = path.as_deref().unwrap_or("<unknown file>");
         match db.line_index(file_id).try_line_col(text_range.start()) {
             Some(line_col) => format!("file://{path}:{}:{}", line_col.line + 1, line_col.col),
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index 8ac1a96cc65..dcb170f3f7b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -64,9 +64,9 @@ use fetch_crates::CrateInfo;
 use hir::{sym, ChangeWithProcMacros};
 use ide_db::{
     base_db::{
-        ra_salsa::{self, ParallelDatabase},
-        CrateOrigin, CrateWorkspaceData, Env, FileLoader, FileSet, SourceDatabase,
-        SourceRootDatabase, VfsPath,
+        salsa::{AsDynDatabase, Cancelled},
+        CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, Upcast,
+        VfsPath,
     },
     prime_caches, symbol_index, FxHashMap, FxIndexSet, LineIndexDatabase,
 };
@@ -125,7 +125,7 @@ pub use ide_completion::{
 };
 pub use ide_db::text_edit::{Indel, TextEdit};
 pub use ide_db::{
-    base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
+    base_db::{CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
     documentation::Documentation,
     label::Label,
     line_index::{LineCol, LineIndex},
@@ -217,7 +217,7 @@ impl Default for AnalysisHost {
 /// `Analysis` are canceled (most method return `Err(Canceled)`).
 #[derive(Debug)]
 pub struct Analysis {
-    db: ra_salsa::Snapshot<RootDatabase>,
+    db: RootDatabase,
 }
 
 // As a general design guideline, `Analysis` API are intended to be independent
@@ -276,12 +276,12 @@ impl Analysis {
     }
 
     pub fn source_root_id(&self, file_id: FileId) -> Cancellable<SourceRootId> {
-        self.with_db(|db| db.file_source_root(file_id))
+        self.with_db(|db| db.file_source_root(file_id).source_root_id(db))
     }
 
     pub fn is_local_source_root(&self, source_root_id: SourceRootId) -> Cancellable<bool> {
         self.with_db(|db| {
-            let sr = db.source_root(source_root_id);
+            let sr = db.source_root(source_root_id).source_root(db);
             !sr.is_library
         })
     }
@@ -295,18 +295,28 @@ impl Analysis {
 
     /// Gets the text of the source file.
     pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<str>> {
-        self.with_db(|db| SourceDatabase::file_text(db, file_id))
+        self.with_db(|db| SourceDatabase::file_text(db, file_id).text(db))
     }
 
     /// Gets the syntax tree of the file.
     pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
         // FIXME edition
-        self.with_db(|db| db.parse(EditionedFileId::current_edition(file_id)).tree())
+        self.with_db(|db| {
+            let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+                self.db.as_dyn_database(),
+                EditionedFileId::current_edition(file_id),
+            );
+
+            db.parse(editioned_file_id_wrapper).tree()
+        })
     }
 
     /// Returns true if this file belongs to an immutable library.
     pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
-        self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
+        self.with_db(|db| {
+            let source_root = db.file_source_root(file_id).source_root_id(db);
+            db.source_root(source_root).source_root(db).is_library
+        })
     }
 
     /// Gets the file's `LineIndex`: data structure to convert between absolute
@@ -324,7 +334,11 @@ impl Analysis {
     /// supported).
     pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
         self.with_db(|db| {
-            let parse = db.parse(EditionedFileId::current_edition(position.file_id));
+            let file_id = ide_db::base_db::EditionedFileId::new(
+                self.db.as_dyn_database(),
+                EditionedFileId::current_edition(position.file_id),
+            );
+            let parse = db.parse(file_id);
             let file = parse.tree();
             matching_brace::matching_brace(&file, position.offset)
         })
@@ -383,7 +397,11 @@ impl Analysis {
     /// stuff like trailing commas.
     pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
         self.with_db(|db| {
-            let parse = db.parse(EditionedFileId::current_edition(frange.file_id));
+            let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+                self.db.as_dyn_database(),
+                EditionedFileId::current_edition(frange.file_id),
+            );
+            let parse = db.parse(editioned_file_id_wrapper);
             join_lines::join_lines(config, &parse.tree(), frange.range)
         })
     }
@@ -419,9 +437,12 @@ impl Analysis {
     pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
         // FIXME: Edition
         self.with_db(|db| {
-            file_structure::file_structure(
-                &db.parse(EditionedFileId::current_edition(file_id)).tree(),
-            )
+            let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+                self.db.as_dyn_database(),
+                EditionedFileId::current_edition(file_id),
+            );
+
+            file_structure::file_structure(&db.parse(editioned_file_id_wrapper).tree())
         })
     }
 
@@ -450,9 +471,12 @@ impl Analysis {
     /// Returns the set of folding ranges.
     pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
         self.with_db(|db| {
-            folding_ranges::folding_ranges(
-                &db.parse(EditionedFileId::current_edition(file_id)).tree(),
-            )
+            let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+                self.db.as_dyn_database(),
+                EditionedFileId::current_edition(file_id),
+            );
+
+            folding_ranges::folding_ranges(&db.parse(editioned_file_id_wrapper).tree())
         })
     }
 
@@ -589,7 +613,10 @@ impl Analysis {
 
     /// Returns crates that this file *might* belong to.
     pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
-        self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
+        self.with_db(|db| {
+            let db = Upcast::<dyn RootQueryDb>::upcast(db);
+            db.relevant_crates(file_id).iter().copied().collect()
+        })
     }
 
     /// Returns the edition of the given crate.
@@ -828,7 +855,8 @@ impl Analysis {
     where
         F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
     {
-        Cancelled::catch(|| f(&self.db))
+        let snap = self.db.snapshot();
+        Cancelled::catch(|| f(&snap))
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
index 6d82f9b0634..90cccca5e80 100644
--- a/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/parent_module.rs
@@ -1,6 +1,6 @@
 use hir::{db::DefDatabase, Semantics};
 use ide_db::{
-    base_db::{CrateId, FileLoader},
+    base_db::{CrateId, RootQueryDb, Upcast},
     FileId, FilePosition, RootDatabase,
 };
 use itertools::Itertools;
@@ -54,7 +54,9 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
 
 /// This returns `Vec` because a module may be included from several places.
 pub(crate) fn crates_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
-    db.relevant_crates(file_id)
+    let root_db = Upcast::<dyn RootQueryDb>::upcast(db);
+    root_db
+        .relevant_crates(file_id)
         .iter()
         .copied()
         .filter(|&crate_id| db.crate_def_map(crate_id).modules_for_file(file_id).next().is_some())
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index 08e25dde606..57d297700ad 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -6,6 +6,7 @@
 
 use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
 use ide_db::{
+    base_db::salsa::AsDynDatabase,
     defs::{Definition, NameClass, NameRefClass},
     rename::{bail, format_err, source_edit_from_references, IdentifierKind},
     source_change::SourceChangeBuilder,
@@ -85,7 +86,9 @@ pub(crate) fn rename(
     let file_id = sema
         .attach_first_edition(position.file_id)
         .ok_or_else(|| format_err!("No references found at position"))?;
-    let source_file = sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
+    let source_file = sema.parse(editioned_file_id_wrapper);
     let syntax = source_file.syntax();
 
     let defs = find_definitions(&sema, syntax, position)?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
index b8deed01fb7..33eef6d75c2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs
@@ -9,7 +9,7 @@ use hir::{
 };
 use ide_assists::utils::{has_test_related_attribute, test_related_attribute_syn};
 use ide_db::{
-    base_db::SourceDatabase,
+    base_db::RootQueryDb,
     defs::Definition,
     documentation::docs_from_attrs,
     helpers::visit_file_defs,
diff --git a/src/tools/rust-analyzer/crates/ide/src/ssr.rs b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
index 5812a92b389..81b59881265 100644
--- a/src/tools/rust-analyzer/crates/ide/src/ssr.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/ssr.rs
@@ -59,7 +59,7 @@ mod tests {
     use expect_test::expect;
     use ide_assists::{Assist, AssistResolveStrategy};
     use ide_db::{
-        base_db::ra_salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet,
+        base_db::salsa::Durability, symbol_index::SymbolsDatabase, FileRange, FxHashSet,
         RootDatabase,
     };
     use test_fixture::WithFixture;
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index 332aecf1e3c..ceea34b7213 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -3,7 +3,7 @@
 
 use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics};
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase, VfsPath},
+    base_db::{RootQueryDb, SourceDatabase, VfsPath},
     defs::Definition,
     documentation::Documentation,
     famous_defs::FamousDefs,
@@ -267,11 +267,11 @@ impl StaticIndex<'_> {
         analysis: &'a Analysis,
         vendored_libs_config: VendoredLibrariesConfig<'_>,
     ) -> StaticIndex<'a> {
-        let db = &*analysis.db;
+        let db = &analysis.db;
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             let is_vendored = match vendored_libs_config {
                 VendoredLibrariesConfig::Included { workspace_root } => source_root
                     .path_for_file(&file_id.into())
diff --git a/src/tools/rust-analyzer/crates/ide/src/status.rs b/src/tools/rust-analyzer/crates/ide/src/status.rs
index a44be67668c..47ac4ebf20e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/status.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/status.rs
@@ -1,29 +1,8 @@
-use std::{fmt, marker::PhantomData};
-
-use hir::{
-    db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
-    Attr, Attrs, ExpandResult, MacroFileId, Module,
-};
-use ide_db::{
-    base_db::{
-        ra_salsa::{
-            debug::{DebugQueryTable, TableEntry},
-            Query, QueryTable,
-        },
-        CompressedFileTextQuery, CrateData, ParseQuery, SourceDatabase, SourceRootId,
-    },
-    symbol_index::ModuleSymbolsQuery,
-};
-use ide_db::{
-    symbol_index::{LibrarySymbolsQuery, SymbolIndex},
-    RootDatabase,
-};
+use ide_db::base_db::{CrateData, RootQueryDb, Upcast};
+use ide_db::RootDatabase;
 use itertools::Itertools;
-use profile::{memory_usage, Bytes};
-use span::{EditionedFileId, FileId};
+use span::FileId;
 use stdx::format_to;
-use syntax::{ast, Parse, SyntaxNode};
-use triomphe::Arc;
 
 // Feature: Status
 //
@@ -37,17 +16,17 @@ use triomphe::Arc;
 pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
     let mut buf = String::new();
 
-    format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
-    format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
-    format_to!(buf, "{} in total\n", memory_usage());
+    // format_to!(buf, "{}\n", collect_query(CompressedFileTextQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ParseQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ParseMacroExpansionQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(LibrarySymbolsQuery.in_db(db)));
+    // format_to!(buf, "{}\n", collect_query(ModuleSymbolsQuery.in_db(db)));
+    // format_to!(buf, "{} in total\n", memory_usage());
 
-    format_to!(buf, "\nDebug info:\n");
-    format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
-    format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
-    format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
+    // format_to!(buf, "\nDebug info:\n");
+    // format_to!(buf, "{}\n", collect_query(AttrsQuery.in_db(db)));
+    // format_to!(buf, "{} ast id maps\n", collect_query_count(AstIdMapQuery.in_db(db)));
+    // format_to!(buf, "{} block def maps\n", collect_query_count(BlockDefMapQuery.in_db(db)));
 
     if let Some(file_id) = file_id {
         format_to!(buf, "\nCrates for file {}:\n", file_id.index());
@@ -55,7 +34,8 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
         if crates.is_empty() {
             format_to!(buf, "Does not belong to any crate");
         }
-        let crate_graph = db.crate_graph();
+
+        let crate_graph = Upcast::<dyn RootQueryDb>::upcast(db).crate_graph();
         for crate_id in crates {
             let CrateData {
                 root_file_id,
@@ -97,190 +77,3 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
 
     buf.trim().to_owned()
 }
-
-fn collect_query<'q, Q>(table: QueryTable<'q, Q>) -> <Q as QueryCollect>::Collector
-where
-    QueryTable<'q, Q>: DebugQueryTable,
-    Q: QueryCollect,
-    <Q as Query>::Storage: 'q,
-    <Q as QueryCollect>::Collector: StatCollect<
-        <QueryTable<'q, Q> as DebugQueryTable>::Key,
-        <QueryTable<'q, Q> as DebugQueryTable>::Value,
-    >,
-{
-    struct StatCollectorWrapper<C>(C);
-    impl<C: StatCollect<K, V>, K, V> FromIterator<TableEntry<K, V>> for StatCollectorWrapper<C> {
-        fn from_iter<T>(iter: T) -> StatCollectorWrapper<C>
-        where
-            T: IntoIterator<Item = TableEntry<K, V>>,
-        {
-            let mut res = C::default();
-            for entry in iter {
-                res.collect_entry(entry.key, entry.value);
-            }
-            StatCollectorWrapper(res)
-        }
-    }
-    table.entries::<StatCollectorWrapper<<Q as QueryCollect>::Collector>>().0
-}
-
-fn collect_query_count<'q, Q>(table: QueryTable<'q, Q>) -> usize
-where
-    QueryTable<'q, Q>: DebugQueryTable,
-    Q: Query,
-    <Q as Query>::Storage: 'q,
-{
-    struct EntryCounter(usize);
-    impl<K, V> FromIterator<TableEntry<K, V>> for EntryCounter {
-        fn from_iter<T>(iter: T) -> EntryCounter
-        where
-            T: IntoIterator<Item = TableEntry<K, V>>,
-        {
-            EntryCounter(iter.into_iter().count())
-        }
-    }
-    table.entries::<EntryCounter>().0
-}
-
-trait QueryCollect: Query {
-    type Collector;
-}
-
-impl QueryCollect for LibrarySymbolsQuery {
-    type Collector = SymbolsStats<SourceRootId>;
-}
-
-impl QueryCollect for ParseQuery {
-    type Collector = SyntaxTreeStats<false>;
-}
-
-impl QueryCollect for ParseMacroExpansionQuery {
-    type Collector = SyntaxTreeStats<true>;
-}
-
-impl QueryCollect for CompressedFileTextQuery {
-    type Collector = FilesStats;
-}
-
-impl QueryCollect for ModuleSymbolsQuery {
-    type Collector = SymbolsStats<Module>;
-}
-
-impl QueryCollect for AttrsQuery {
-    type Collector = AttrsStats;
-}
-
-trait StatCollect<K, V>: Default {
-    fn collect_entry(&mut self, key: K, value: Option<V>);
-}
-
-#[derive(Default)]
-struct FilesStats {
-    total: usize,
-    size: Bytes,
-}
-
-impl fmt::Display for FilesStats {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of files", self.size)
-    }
-}
-
-impl StatCollect<FileId, Arc<[u8]>> for FilesStats {
-    fn collect_entry(&mut self, _: FileId, value: Option<Arc<[u8]>>) {
-        self.total += 1;
-        self.size += value.unwrap().len();
-    }
-}
-
-#[derive(Default)]
-pub(crate) struct SyntaxTreeStats<const MACROS: bool> {
-    total: usize,
-    pub(crate) retained: usize,
-}
-
-impl<const MACROS: bool> fmt::Display for SyntaxTreeStats<MACROS> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            fmt,
-            "{} trees, {} preserved{}",
-            self.total,
-            self.retained,
-            if MACROS { " (macros)" } else { "" }
-        )
-    }
-}
-
-impl StatCollect<EditionedFileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
-    fn collect_entry(&mut self, _: EditionedFileId, value: Option<Parse<ast::SourceFile>>) {
-        self.total += 1;
-        self.retained += value.is_some() as usize;
-    }
-}
-
-impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
-    fn collect_entry(
-        &mut self,
-        _: MacroFileId,
-        value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
-    ) {
-        self.total += 1;
-        self.retained += value.is_some() as usize;
-    }
-}
-
-struct SymbolsStats<Key> {
-    total: usize,
-    size: Bytes,
-    phantom: PhantomData<Key>,
-}
-
-impl<Key> Default for SymbolsStats<Key> {
-    fn default() -> Self {
-        Self { total: Default::default(), size: Default::default(), phantom: PhantomData }
-    }
-}
-
-impl fmt::Display for SymbolsStats<Module> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of module index symbols ({})", self.size, self.total)
-    }
-}
-impl fmt::Display for SymbolsStats<SourceRootId> {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{} of library index symbols ({})", self.size, self.total)
-    }
-}
-impl<Key> StatCollect<Key, Arc<SymbolIndex>> for SymbolsStats<Key> {
-    fn collect_entry(&mut self, _: Key, value: Option<Arc<SymbolIndex>>) {
-        if let Some(symbols) = value {
-            self.total += symbols.len();
-            self.size += symbols.memory_size();
-        }
-    }
-}
-
-#[derive(Default)]
-struct AttrsStats {
-    entries: usize,
-    total: usize,
-}
-
-impl fmt::Display for AttrsStats {
-    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let size = self.entries * size_of::<Attrs>() + self.total * size_of::<Attr>();
-        let size = Bytes::new(size as _);
-        write!(
-            fmt,
-            "{} attribute query entries, {} total attributes ({} for storing entries)",
-            self.entries, self.total, size
-        )
-    }
-}
-
-impl<Key> StatCollect<Key, Attrs> for AttrsStats {
-    fn collect_entry(&mut self, _: Key, value: Option<Attrs>) {
-        self.entries += 1;
-        self.total += value.map_or(0, |it| it.len());
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 83082496d5b..ef5d480b49c 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -18,7 +18,9 @@ use either::Either;
 use hir::{
     DefWithBody, HirFileIdExt, InFile, InRealFile, MacroFileIdExt, MacroKind, Name, Semantics,
 };
-use ide_db::{FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind};
+use ide_db::{
+    base_db::salsa::AsDynDatabase, FxHashMap, FxHashSet, Ranker, RootDatabase, SymbolKind,
+};
 use span::EditionedFileId;
 use syntax::{
     ast::{self, IsString},
@@ -203,7 +205,9 @@ pub(crate) fn highlight(
 
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
-        let file = sema.parse(file_id);
+        let editioned_file_id_wrapper =
+            ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
+        let file = sema.parse(editioned_file_id_wrapper);
         let source_file = file.syntax();
         match range_to_highlight {
             Some(range) => {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
index 07d40bafeba..6a03da6a6ea 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
@@ -1,6 +1,7 @@
 //! Renders a bit of code as HTML.
 
 use hir::Semantics;
+use ide_db::base_db::salsa::AsDynDatabase;
 use oorandom::Rand32;
 use span::EditionedFileId;
 use stdx::format_to;
@@ -16,7 +17,9 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    let file = sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(db.as_dyn_database(), file_id);
+    let file = sema.parse(editioned_file_id_wrapper);
     let file = file.syntax();
     fn rainbowify(seed: u64) -> String {
         let mut rng = Rand32::new(seed);
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
index 7f6b4c2c880..e1a8d876c41 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_rainbow.html
@@ -41,14 +41,14 @@ pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 .unresolved_reference    { color: #FC5555; text-decoration: wavy underline; }
 </style>
 <pre><code><span class="keyword">fn</span> <span class="function declaration">main</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="10753541418856619067" style="color: hsl(51,52%,47%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="9865812862466303869" style="color: hsl(329,86%,55%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="5697120079570210533" style="color: hsl(268,86%,80%);">x</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4222724691718692706" style="color: hsl(156,71%,51%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 
-    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="4002942168268782293" style="color: hsl(114,87%,67%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="4890670724659097491" style="color: hsl(330,46%,45%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span> <span class="operator">=</span> <span class="string_literal">"other color please!"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="16380625810977895757" style="color: hsl(262,75%,75%);">y</span> <span class="operator">=</span> <span class="variable reference" data-binding-hash="17855021198829413584" style="color: hsl(230,76%,79%);">x</span><span class="operator">.</span><span class="unresolved_reference">to_string</span><span class="parenthesis">(</span><span class="parenthesis">)</span><span class="semicolon">;</span>
 <span class="brace">}</span>
 
 <span class="keyword">fn</span> <span class="function declaration">bar</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span>
-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="16711699953829236520" style="color: hsl(345,54%,46%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable reference" data-binding-hash="18084384843626695225" style="color: hsl(154,95%,53%);">hello</span> <span class="operator">=</span> <span class="string_literal">"hello"</span><span class="semicolon">;</span>
 <span class="brace">}</span></code></pre>
\ No newline at end of file
diff --git a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
index 30b1d4c39b3..68f5c72d205 100644
--- a/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/test_explorer.rs
@@ -2,7 +2,7 @@
 
 use hir::{Crate, Module, ModuleDef, Semantics};
 use ide_db::{
-    base_db::{CrateGraph, CrateId, SourceDatabase},
+    base_db::{CrateGraph, CrateId, RootQueryDb},
     FileId, RootDatabase,
 };
 use syntax::TextRange;
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index 8c9dd051452..f583aa801ea 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -15,10 +15,13 @@
 
 mod on_enter;
 
+use ide_db::{
+    base_db::{salsa::AsDynDatabase, RootQueryDb},
+    FilePosition, RootDatabase,
+};
+use span::{Edition, EditionedFileId};
 use std::iter;
 
-use ide_db::{base_db::SourceDatabase, FilePosition, RootDatabase};
-use span::{Edition, EditionedFileId};
 use syntax::{
     algo::{ancestors_at_offset, find_node_at_offset},
     ast::{self, edit::IndentLevel, AstToken},
@@ -73,7 +76,11 @@ pub(crate) fn on_char_typed(
     // FIXME: We are hitting the database here, if we are unlucky this call might block momentarily
     // causing the editor to feel sluggish!
     let edition = Edition::CURRENT_FIXME;
-    let file = &db.parse(EditionedFileId::new(position.file_id, edition));
+    let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+        db.as_dyn_database(),
+        EditionedFileId::new(position.file_id, edition),
+    );
+    let file = &db.parse(editioned_file_id_wrapper);
     let char_matches_position =
         file.tree().syntax().text().char_at(position.offset) == Some(char_typed);
     if !stdx::always!(char_matches_position) {
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
index c6d1c283f4e..8cadb610403 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing/on_enter.rs
@@ -1,8 +1,8 @@
 //! Handles the `Enter` key press. At the momently, this only continues
 //! comments, but should handle indent some time in the future as well.
 
-use ide_db::RootDatabase;
-use ide_db::{base_db::SourceDatabase, FilePosition};
+use ide_db::base_db::RootQueryDb;
+use ide_db::{base_db::salsa::AsDynDatabase, FilePosition, RootDatabase};
 use span::EditionedFileId;
 use syntax::{
     algo::find_node_at_offset,
@@ -51,7 +51,11 @@ use ide_db::text_edit::TextEdit;
 //
 // ![On Enter](https://user-images.githubusercontent.com/48062697/113065578-04c21800-91b1-11eb-82b8-22b8c481e645.gif)
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<TextEdit> {
-    let parse = db.parse(EditionedFileId::current_edition(position.file_id));
+    let editioned_file_id_wrapper = ide_db::base_db::EditionedFileId::new(
+        db.as_dyn_database(),
+        EditionedFileId::current_edition(position.file_id),
+    );
+    let parse = db.parse(editioned_file_id_wrapper);
     let file = parse.tree();
     let token = file.syntax().token_at_offset(position.offset).left_biased()?;
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
index eb6eb7da1e9..eeb65ac0389 100644
--- a/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
@@ -1,6 +1,6 @@
 use dot::{Id, LabelText};
 use ide_db::{
-    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceRootDatabase},
+    base_db::{CrateGraph, CrateId, Dependency, RootQueryDb, SourceDatabase, Upcast},
     FxHashSet, RootDatabase,
 };
 use triomphe::Arc;
@@ -16,7 +16,7 @@ use triomphe::Arc;
 // |---------|-------------|
 // | VS Code | **rust-analyzer: View Crate Graph** |
 pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
-    let crate_graph = db.crate_graph();
+    let crate_graph = Upcast::<dyn RootQueryDb>::upcast(db).crate_graph();
     let crates_to_render = crate_graph
         .iter()
         .filter(|krate| {
@@ -24,8 +24,9 @@ pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String,
                 true
             } else {
                 // Only render workspace crates
-                let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
-                !db.source_root(root_id).is_library
+                let root_id =
+                    db.file_source_root(crate_graph[*krate].root_file_id).source_root_id(db);
+                !db.source_root(root_id).source_root(db).is_library
             }
         })
         .collect();
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index 72ca85c6a2f..fbb0043ad2d 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -526,7 +526,7 @@ impl ProcMacroExpander for Expander {
 
 #[cfg(test)]
 mod tests {
-    use ide_db::base_db::SourceDatabase;
+    use ide_db::base_db::RootQueryDb;
     use vfs::file_set::FileSetConfigBuilder;
 
     use super::*;
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
index 4a73b6fa05a..462c206bebf 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tests.rs
@@ -109,8 +109,8 @@ fn unbalanced_brace() {
 "#,
         r#""#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..0#2 1:0@0..0#2
-              SUBTREE {} 0:0@9..10#2 0:0@11..12#2
+            SUBTREE $$ 1:0@0..0#4294967037 1:0@0..0#4294967037
+              SUBTREE {} 0:0@9..10#4294967037 0:0@11..12#4294967037
 
             {}"#]],
     );
@@ -132,25 +132,25 @@ fn token_mapping_smoke_test() {
 struct MyTraitMap2
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..20#2 1:0@0..20#2
-              IDENT   struct 0:0@34..40#2
-              IDENT   MyTraitMap2 1:0@8..19#2
-              SUBTREE {} 0:0@48..49#2 0:0@100..101#2
-                IDENT   map 0:0@58..61#2
-                PUNCH   : [alone] 0:0@61..62#2
-                PUNCH   : [joint] 0:0@63..64#2
-                PUNCH   : [alone] 0:0@64..65#2
-                IDENT   std 0:0@65..68#2
-                PUNCH   : [joint] 0:0@68..69#2
-                PUNCH   : [alone] 0:0@69..70#2
-                IDENT   collections 0:0@70..81#2
-                PUNCH   : [joint] 0:0@81..82#2
-                PUNCH   : [alone] 0:0@82..83#2
-                IDENT   HashSet 0:0@83..90#2
-                PUNCH   < [alone] 0:0@90..91#2
-                SUBTREE () 0:0@91..92#2 0:0@92..93#2
-                PUNCH   > [joint] 0:0@93..94#2
-                PUNCH   , [alone] 0:0@94..95#2
+            SUBTREE $$ 1:0@0..20#4294967037 1:0@0..20#4294967037
+              IDENT   struct 0:0@34..40#4294967037
+              IDENT   MyTraitMap2 1:0@8..19#4294967037
+              SUBTREE {} 0:0@48..49#4294967037 0:0@100..101#4294967037
+                IDENT   map 0:0@58..61#4294967037
+                PUNCH   : [alone] 0:0@61..62#4294967037
+                PUNCH   : [joint] 0:0@63..64#4294967037
+                PUNCH   : [alone] 0:0@64..65#4294967037
+                IDENT   std 0:0@65..68#4294967037
+                PUNCH   : [joint] 0:0@68..69#4294967037
+                PUNCH   : [alone] 0:0@69..70#4294967037
+                IDENT   collections 0:0@70..81#4294967037
+                PUNCH   : [joint] 0:0@81..82#4294967037
+                PUNCH   : [alone] 0:0@82..83#4294967037
+                IDENT   HashSet 0:0@83..90#4294967037
+                PUNCH   < [alone] 0:0@90..91#4294967037
+                SUBTREE () 0:0@91..92#4294967037 0:0@92..93#4294967037
+                PUNCH   > [joint] 0:0@93..94#4294967037
+                PUNCH   , [alone] 0:0@94..95#4294967037
 
             struct MyTraitMap2 {
                 map: ::std::collections::HashSet<()>,
@@ -179,28 +179,28 @@ fn main() {
 }
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..63#2 1:0@0..63#2
-              IDENT   fn 1:0@1..3#2
-              IDENT   main 1:0@4..8#2
-              SUBTREE () 1:0@8..9#2 1:0@9..10#2
-              SUBTREE {} 1:0@11..12#2 1:0@61..62#2
-                LITERAL Integer 1 1:0@17..18#2
-                PUNCH   ; [alone] 1:0@18..19#2
-                LITERAL Float 1.0 1:0@24..27#2
-                PUNCH   ; [alone] 1:0@27..28#2
-                SUBTREE () 1:0@33..34#2 1:0@39..40#2
-                  SUBTREE () 1:0@34..35#2 1:0@37..38#2
-                    LITERAL Integer 1 1:0@35..36#2
-                    PUNCH   , [alone] 1:0@36..37#2
-                  PUNCH   , [alone] 1:0@38..39#2
-                PUNCH   . [alone] 1:0@40..41#2
-                LITERAL Float 0.0 1:0@41..44#2
-                PUNCH   ; [alone] 1:0@44..45#2
-                IDENT   let 1:0@50..53#2
-                IDENT   x 1:0@54..55#2
-                PUNCH   = [alone] 1:0@56..57#2
-                LITERAL Integer 1 1:0@58..59#2
-                PUNCH   ; [alone] 1:0@59..60#2
+            SUBTREE $$ 1:0@0..63#4294967037 1:0@0..63#4294967037
+              IDENT   fn 1:0@1..3#4294967037
+              IDENT   main 1:0@4..8#4294967037
+              SUBTREE () 1:0@8..9#4294967037 1:0@9..10#4294967037
+              SUBTREE {} 1:0@11..12#4294967037 1:0@61..62#4294967037
+                LITERAL Integer 1 1:0@17..18#4294967037
+                PUNCH   ; [alone] 1:0@18..19#4294967037
+                LITERAL Float 1.0 1:0@24..27#4294967037
+                PUNCH   ; [alone] 1:0@27..28#4294967037
+                SUBTREE () 1:0@33..34#4294967037 1:0@39..40#4294967037
+                  SUBTREE () 1:0@34..35#4294967037 1:0@37..38#4294967037
+                    LITERAL Integer 1 1:0@35..36#4294967037
+                    PUNCH   , [alone] 1:0@36..37#4294967037
+                  PUNCH   , [alone] 1:0@38..39#4294967037
+                PUNCH   . [alone] 1:0@40..41#4294967037
+                LITERAL Float 0.0 1:0@41..44#4294967037
+                PUNCH   ; [alone] 1:0@44..45#4294967037
+                IDENT   let 1:0@50..53#4294967037
+                IDENT   x 1:0@54..55#4294967037
+                PUNCH   = [alone] 1:0@56..57#4294967037
+                LITERAL Integer 1 1:0@58..59#4294967037
+                PUNCH   ; [alone] 1:0@59..60#4294967037
 
             fn main(){
                 1;
@@ -226,14 +226,14 @@ fn expr_2021() {
     const { 1 },
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..25#2 1:0@0..25#2
-              IDENT   _ 1:0@5..6#2
-              PUNCH   ; [joint] 0:0@36..37#2
-              SUBTREE () 0:0@34..35#2 0:0@34..35#2
-                IDENT   const 1:0@12..17#2
-                SUBTREE {} 1:0@18..19#2 1:0@22..23#2
-                  LITERAL Integer 1 1:0@20..21#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..25#4294967037 1:0@0..25#4294967037
+              IDENT   _ 1:0@5..6#4294967037
+              PUNCH   ; [joint] 0:0@36..37#4294967037
+              SUBTREE () 0:0@34..35#4294967037 0:0@34..35#4294967037
+                IDENT   const 1:0@12..17#4294967037
+                SUBTREE {} 1:0@18..19#4294967037 1:0@22..23#4294967037
+                  LITERAL Integer 1 1:0@20..21#4294967037
+              PUNCH   ; [alone] 0:0@39..40#4294967037
 
             _;
             (const  {
@@ -254,13 +254,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..6#2,
+                    1:0@5..6#4294967037,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..8#4294967037 1:0@0..8#4294967037
+              PUNCH   ; [alone] 0:0@39..40#4294967037
 
             ;"#]],
     );
@@ -278,13 +278,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..10#2,
+                    1:0@5..10#4294967037,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..18#2 1:0@0..18#2
-              PUNCH   ; [alone] 0:0@39..40#2
+            SUBTREE $$ 1:0@0..18#4294967037 1:0@0..18#4294967037
+              PUNCH   ; [alone] 0:0@39..40#4294967037
 
             ;"#]],
     );
@@ -304,26 +304,26 @@ fn expr_2021() {
     break 'foo bar,
 "#,
         expect![[r#"
-            SUBTREE $$ 1:0@0..76#2 1:0@0..76#2
-              LITERAL Integer 4 1:0@5..6#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              LITERAL Str literal 1:0@12..21#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   funcall 1:0@27..34#2
-                SUBTREE () 1:0@34..35#2 1:0@35..36#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   future 1:0@42..48#2
-                PUNCH   . [alone] 1:0@48..49#2
-                IDENT   await 1:0@49..54#2
-              PUNCH   ; [joint] 0:0@41..42#2
-              SUBTREE () 0:0@39..40#2 0:0@39..40#2
-                IDENT   break 1:0@60..65#2
-                PUNCH   ' [joint] 1:0@66..67#2
-                IDENT   foo 1:0@67..70#2
-                IDENT   bar 1:0@71..74#2
-              PUNCH   ; [alone] 0:0@44..45#2
+            SUBTREE $$ 1:0@0..76#4294967037 1:0@0..76#4294967037
+              LITERAL Integer 4 1:0@5..6#4294967037
+              PUNCH   ; [joint] 0:0@41..42#4294967037
+              LITERAL Str literal 1:0@12..21#4294967037
+              PUNCH   ; [joint] 0:0@41..42#4294967037
+              SUBTREE () 0:0@39..40#4294967037 0:0@39..40#4294967037
+                IDENT   funcall 1:0@27..34#4294967037
+                SUBTREE () 1:0@34..35#4294967037 1:0@35..36#4294967037
+              PUNCH   ; [joint] 0:0@41..42#4294967037
+              SUBTREE () 0:0@39..40#4294967037 0:0@39..40#4294967037
+                IDENT   future 1:0@42..48#4294967037
+                PUNCH   . [alone] 1:0@48..49#4294967037
+                IDENT   await 1:0@49..54#4294967037
+              PUNCH   ; [joint] 0:0@41..42#4294967037
+              SUBTREE () 0:0@39..40#4294967037 0:0@39..40#4294967037
+                IDENT   break 1:0@60..65#4294967037
+                PUNCH   ' [joint] 1:0@66..67#4294967037
+                IDENT   foo 1:0@67..70#4294967037
+                IDENT   bar 1:0@71..74#4294967037
+              PUNCH   ; [alone] 0:0@44..45#4294967037
 
             4;
             "literal";
@@ -345,13 +345,13 @@ fn expr_2021() {
         expect![[r#"
             ExpandError {
                 inner: (
-                    1:0@5..6#2,
+                    1:0@5..6#4294967037,
                     NoMatchingRule,
                 ),
             }
 
-            SUBTREE $$ 1:0@0..8#2 1:0@0..8#2
-              PUNCH   ; [alone] 0:0@44..45#2
+            SUBTREE $$ 1:0@0..8#4294967037 1:0@0..8#4294967037
+              PUNCH   ; [alone] 0:0@44..45#4294967037
 
             ;"#]],
     );
diff --git a/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml b/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml
index 24e059f40be..99d1bf7e2c7 100644
--- a/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/query-group-macro/Cargo.toml
@@ -17,7 +17,7 @@ heck = "0.5.0"
 proc-macro2 = "1.0"
 quote = "1.0"
 syn = { version = "2.0", features = ["full", "extra-traits"] }
-salsa = { version = "0.19.0"  }
+salsa.workspace = true
 
 [dev-dependencies]
 expect-test = "1.5.0"
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index dee76ee15c3..a391a01fae7 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -22,21 +22,15 @@ use ide::{
     Analysis, AnalysisHost, AnnotationConfig, DiagnosticsConfig, Edition, InlayFieldsToResolve,
     InlayHintsConfig, LineCol, RootDatabase,
 };
-use ide_db::{
-    base_db::{
-        ra_salsa::{self, debug::DebugQueryTable, ParallelDatabase},
-        SourceDatabase, SourceRootDatabase,
-    },
-    EditionedFileId, LineIndexDatabase, SnippetCap,
-};
+use ide_db::{base_db::SourceDatabase, EditionedFileId, LineIndexDatabase, SnippetCap};
 use itertools::Itertools;
 use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice};
 use oorandom::Rand32;
-use profile::{Bytes, StopWatch};
+use profile::StopWatch;
 use project_model::{CargoConfig, CfgOverrides, ProjectManifest, ProjectWorkspace, RustLibSource};
 use rayon::prelude::*;
 use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{AstNode, SyntaxNode};
+use syntax::AstNode;
 use vfs::{AbsPathBuf, Vfs, VfsPath};
 
 use crate::cli::{
@@ -46,14 +40,6 @@ use crate::cli::{
     report_metric, Verbosity,
 };
 
-/// Need to wrap Snapshot to provide `Clone` impl for `map_with`
-struct Snap<DB>(DB);
-impl<DB: ParallelDatabase> Clone for Snap<ra_salsa::Snapshot<DB>> {
-    fn clone(&self) -> Snap<ra_salsa::Snapshot<DB>> {
-        Snap(self.0.snapshot())
-    }
-}
-
 impl flags::AnalysisStats {
     pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> {
         let mut rng = {
@@ -129,10 +115,13 @@ impl flags::AnalysisStats {
 
         let mut item_tree_sw = self.stop_watch();
         let mut num_item_trees = 0;
-        let source_roots =
-            krates.iter().cloned().map(|krate| db.file_source_root(krate.root_file(db))).unique();
+        let source_roots = krates
+            .iter()
+            .cloned()
+            .map(|krate| db.file_source_root(krate.root_file(db)).source_root_id(db))
+            .unique();
         for source_root_id in source_roots {
-            let source_root = db.source_root(source_root_id);
+            let source_root = db.source_root(source_root_id).source_root(db);
             if !source_root.is_library || self.with_deps {
                 for file_id in source_root.iter() {
                     if let Some(p) = source_root.path_for_file(&file_id) {
@@ -157,8 +146,9 @@ impl flags::AnalysisStats {
             let module = krate.root_module();
             let file_id = module.definition_source_file_id(db);
             let file_id = file_id.original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+
+            let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             if !source_root.is_library || self.with_deps {
                 num_crates += 1;
                 visit_queue.push(module);
@@ -268,17 +258,21 @@ impl flags::AnalysisStats {
         report_metric("total memory", total_span.memory.allocated.megabytes() as u64, "MB");
 
         if self.source_stats {
-            let mut total_file_size = Bytes::default();
-            for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
-                total_file_size += syntax_len(db.parse(e.key).syntax_node())
-            }
-
-            let mut total_macro_file_size = Bytes::default();
-            for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
-                let val = db.parse_macro_expansion(e.key).value.0;
-                total_macro_file_size += syntax_len(val.syntax_node())
-            }
-            eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
+            // FIXME(salsa-transition): bring back stats for ParseQuery (file size)
+            // and ParseMacroExpansionQuery (macro expansion "file") size whenever we implement
+            // Salsa's memory usage tracking to work with tracked functions.
+
+            // let mut total_file_size = Bytes::default();
+            // for e in ide_db::base_db::ParseQuery.in_db(db).entries::<Vec<_>>() {
+            //     total_file_size += syntax_len(db.parse(e.key).syntax_node())
+            // }
+
+            // let mut total_macro_file_size = Bytes::default();
+            // for e in hir::db::ParseMacroExpansionQuery.in_db(db).entries::<Vec<_>>() {
+            //     let val = db.parse_macro_expansion(e.key).value.0;
+            //     total_macro_file_size += syntax_len(val.syntax_node())
+            // }
+            // eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
         }
 
         if verbosity.is_verbose() {
@@ -423,6 +417,7 @@ impl flags::AnalysisStats {
                 let range = sema.original_range(expected_tail.syntax()).range;
                 let original_text: String = db
                     .file_text(file_id.into())
+                    .text(db)
                     .chars()
                     .skip(usize::from(range.start()))
                     .take(usize::from(range.end()) - usize::from(range.start()))
@@ -475,7 +470,7 @@ impl flags::AnalysisStats {
                     syntax_hit_found |= trim(&original_text) == trim(&generated);
 
                     // Validate if type-checks
-                    let mut txt = file_txt.to_string();
+                    let mut txt = file_txt.text(db).to_string();
 
                     let edit = ide::TextEdit::replace(range, generated.clone());
                     edit.apply(&mut txt);
@@ -530,7 +525,7 @@ impl flags::AnalysisStats {
             }
             // Revert file back to original state
             if self.validate_term_search {
-                std::fs::write(path, file_txt.to_string()).unwrap();
+                std::fs::write(path, file_txt.text(db).to_string()).unwrap();
             }
 
             bar.inc(1);
@@ -572,6 +567,11 @@ impl flags::AnalysisStats {
     }
 
     fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) {
+        let mut bar = match verbosity {
+            Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
+            _ if self.parallel || self.output.is_some() => ProgressReport::hidden(),
+            _ => ProgressReport::new(bodies.len() as u64),
+        };
         let mut sw = self.stop_watch();
         let mut all = 0;
         let mut fail = 0;
@@ -593,11 +593,13 @@ impl flags::AnalysisStats {
                     .chain(Some(body.name(db).unwrap_or_else(Name::missing)))
                     .map(|it| it.display(db, Edition::LATEST).to_string())
                     .join("::");
-                println!("Mir body for {full_name} failed due {e:?}");
+                bar.println(format!("Mir body for {full_name} failed due {e:?}"));
             }
             fail += 1;
+            bar.tick();
         }
         let mir_lowering_time = sw.elapsed();
+        bar.finish_and_clear();
         eprintln!("{:<20} {}", "MIR lowering:", mir_lowering_time);
         eprintln!("Mir failed bodies: {fail} ({}%)", percentage(fail, all));
         report_metric("mir failed bodies", fail, "#");
@@ -619,12 +621,12 @@ impl flags::AnalysisStats {
 
         if self.parallel {
             let mut inference_sw = self.stop_watch();
-            let snap = Snap(db.snapshot());
+            let snap = db.snapshot();
             bodies
                 .par_iter()
                 .map_with(snap, |snap, &body| {
-                    snap.0.body(body.into());
-                    snap.0.infer(body.into());
+                    snap.body(body.into());
+                    snap.infer(body.into());
                 })
                 .count();
             eprintln!("{:<20} {}", "Parallel Inference:", inference_sw.elapsed());
@@ -1206,8 +1208,10 @@ fn percentage(n: u64, total: u64) -> u64 {
     (n * 100).checked_div(total).unwrap_or(100)
 }
 
-fn syntax_len(node: SyntaxNode) -> usize {
-    // Macro expanded code doesn't contain whitespace, so erase *all* whitespace
-    // to make macro and non-macro code comparable.
-    node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
-}
+// FIXME(salsa-transition): bring this back whenever we implement
+// Salsa's memory usage tracking to work with tracked functions.
+// fn syntax_len(node: SyntaxNode) -> usize {
+//     // Macro expanded code doesn't contain whitespace, so erase *all* whitespace
+//     // to make macro and non-macro code comparable.
+//     node.to_string().replace(|it: char| it.is_ascii_whitespace(), "").len()
+// }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index 6a3ceb640b9..ae1054a8049 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -6,7 +6,7 @@ use rustc_hash::FxHashSet;
 
 use hir::{db::HirDatabase, sym, Crate, HirFileIdExt, Module};
 use ide::{AnalysisHost, AssistResolveStrategy, Diagnostic, DiagnosticsConfig, Severity};
-use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
+use ide_db::{base_db::SourceDatabase, LineIndexDatabase};
 use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 
 use crate::cli::flags;
@@ -51,8 +51,8 @@ impl flags::Diagnostics {
 
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             !source_root.is_library
         });
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
index 7398b9a9ef0..20da40f384b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -2,7 +2,7 @@
 
 use hir::{Crate, Module};
 use hir_ty::db::HirDatabase;
-use ide_db::{base_db::SourceRootDatabase, LineIndexDatabase};
+use ide_db::{base_db::SourceDatabase, LineIndexDatabase};
 use profile::StopWatch;
 use project_model::{CargoConfig, RustLibSource};
 use syntax::TextRange;
@@ -40,10 +40,10 @@ impl flags::RunTests {
                 None => " (unknown line col)".to_owned(),
                 Some(x) => format!("#{}:{}", x.line + 1, x.col),
             };
-            let path = &db
-                .source_root(db.file_source_root(file_id))
-                .path_for_file(&file_id)
-                .map(|x| x.to_string());
+            let source_root = db.file_source_root(file_id).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
+
+            let path = source_root.path_for_file(&file_id).map(|x| x.to_string());
             let path = path.as_deref().unwrap_or("<unknown file>");
             format!("file://{path}{line_col}")
         };
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
index e9ca12deaf6..7c7ed911293 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -164,13 +164,13 @@ impl Tester {
                     let analysis = self.host.analysis();
                     let root_file = self.root_file;
                     move || {
-                        let res = std::panic::catch_unwind(move || {
+                        let res = std::panic::catch_unwind(AssertUnwindSafe(move || {
                             analysis.full_diagnostics(
                                 diagnostic_config,
                                 ide::AssistResolveStrategy::None,
                                 root_file,
                             )
-                        });
+                        }));
                         main.unpark();
                         res
                     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index c03688e8009..a3bb7042151 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -34,7 +34,7 @@ impl flags::Ssr {
         let edits = match_finder.edits();
         for (file_id, edit) in edits {
             if let Some(path) = vfs.file_path(file_id).as_path() {
-                let mut contents = db.file_text(file_id).to_string();
+                let mut contents = db.file_text(file_id).text(db).to_string();
                 edit.apply(&mut contents);
                 std::fs::write(path, contents)
                     .with_context(|| format!("failed to write {path}"))?;
@@ -49,7 +49,7 @@ impl flags::Search {
     /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful
     /// for much else.
     pub fn run(self) -> anyhow::Result<()> {
-        use ide_db::base_db::SourceRootDatabase;
+        use ide_db::base_db::SourceDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
         let cargo_config =
             CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() };
@@ -70,7 +70,7 @@ impl flags::Search {
         }
         if let Some(debug_snippet) = &self.debug {
             for &root in db.local_roots().iter() {
-                let sr = db.source_root(root);
+                let sr = db.source_root(root).source_root(db);
                 for file_id in sr.iter() {
                     for debug_info in match_finder.debug_where_text_equal(
                         EditionedFileId::current_edition(file_id),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
index 021b1bff393..9acf4b85334 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/unresolved_references.rs
@@ -2,7 +2,7 @@
 use hir::{db::HirDatabase, sym, AnyDiagnostic, Crate, HirFileIdExt as _, Module, Semantics};
 use ide::{AnalysisHost, RootDatabase, TextRange};
 use ide_db::{
-    base_db::{SourceDatabase, SourceRootDatabase},
+    base_db::{salsa::AsDynDatabase, SourceDatabase},
     defs::NameRefClass,
     EditionedFileId, FxHashSet, LineIndexDatabase as _,
 };
@@ -57,8 +57,8 @@ impl flags::UnresolvedReferences {
 
         let work = all_modules(db).into_iter().filter(|module| {
             let file_id = module.definition_source_file_id(db).original_file(db);
-            let source_root = db.file_source_root(file_id.into());
-            let source_root = db.source_root(source_root);
+            let source_root = db.file_source_root(file_id.into()).source_root_id(db);
+            let source_root = db.source_root(source_root).source_root(db);
             !source_root.is_library
         });
 
@@ -77,7 +77,7 @@ impl flags::UnresolvedReferences {
                     let line_col = line_index.line_col(range.start());
                     let line = line_col.line + 1;
                     let col = line_col.col + 1;
-                    let text = &file_text[range];
+                    let text = &file_text.text(db)[range];
                     println!("{file_path}:{line}:{col}: {text}");
                 }
 
@@ -141,7 +141,9 @@ fn all_unresolved_references(
     let file_id = sema
         .attach_first_edition(file_id)
         .unwrap_or_else(|| EditionedFileId::current_edition(file_id));
-    let file = sema.parse(file_id);
+    let editioned_file_id_wrapper =
+        ide_db::base_db::EditionedFileId::new(sema.db.as_dyn_database(), file_id);
+    let file = sema.parse(editioned_file_id_wrapper);
     let root = file.syntax();
 
     let mut unresolved_references = Vec::new();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index 54670b67598..f114a4454e0 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -8,7 +8,7 @@ use std::{ops::Not as _, time::Instant};
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use hir::ChangeWithProcMacros;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
-use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase, SourceRootDatabase};
+use ide_db::base_db::{CrateId, ProcMacroPaths, SourceDatabase};
 use itertools::Itertools;
 use load_cargo::SourceRootConfig;
 use lsp_types::{SemanticTokens, Url};
@@ -426,29 +426,28 @@ impl GlobalState {
                     tracing::info!(%vfs_path, ?change_kind, "Processing rust-analyzer.toml changes");
                     if vfs_path.as_path() == user_config_abs_path {
                         tracing::info!(%vfs_path, ?change_kind, "Use config rust-analyzer.toml changes");
-                        change.change_user_config(Some(db.file_text(file_id)));
-                        continue;
+                        change.change_user_config(Some(db.file_text(file_id).text(db)));
                     }
 
                     // If change has been made to a ratoml file that
                     // belongs to a non-local source root, we will ignore it.
-                    let sr_id = db.file_source_root(file_id);
-                    let sr = db.source_root(sr_id);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                    let source_root = db.source_root(source_root_id).source_root(db);
 
-                    if !sr.is_library {
+                    if !source_root.is_library {
                         let entry = if workspace_ratoml_paths.contains(&vfs_path) {
-                            tracing::info!(%vfs_path, ?sr_id, "workspace rust-analyzer.toml changes");
+                            tracing::info!(%vfs_path, ?source_root_id, "workspace rust-analyzer.toml changes");
                             change.change_workspace_ratoml(
-                                sr_id,
+                                source_root_id,
                                 vfs_path.clone(),
-                                Some(db.file_text(file_id)),
+                                Some(db.file_text(file_id).text(db)),
                             )
                         } else {
-                            tracing::info!(%vfs_path, ?sr_id, "crate rust-analyzer.toml changes");
+                            tracing::info!(%vfs_path, ?source_root_id, "crate rust-analyzer.toml changes");
                             change.change_ratoml(
-                                sr_id,
+                                source_root_id,
                                 vfs_path.clone(),
-                                Some(db.file_text(file_id)),
+                                Some(db.file_text(file_id).text(db)),
                             )
                         };
 
@@ -459,10 +458,14 @@ impl GlobalState {
                                 // Put the old one back in.
                                 match kind {
                                     RatomlFileKind::Crate => {
-                                        change.change_ratoml(sr_id, old_path, old_text);
+                                        change.change_ratoml(source_root_id, old_path, old_text);
                                     }
                                     RatomlFileKind::Workspace => {
-                                        change.change_workspace_ratoml(sr_id, old_path, old_text);
+                                        change.change_workspace_ratoml(
+                                            source_root_id,
+                                            old_path,
+                                            old_text,
+                                        );
                                     }
                                 }
                             }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
index 4683877db69..680ff006083 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -4,8 +4,7 @@ use std::{
     panic, thread,
 };
 
-use ide::Cancelled;
-use ide_db::base_db::ra_salsa::Cycle;
+use ide_db::base_db::salsa::{self, Cancelled, Cycle};
 use lsp_server::{ExtractError, Response, ResponseError};
 use serde::{de::DeserializeOwned, Serialize};
 use stdx::thread::ThreadIntent;
@@ -311,7 +310,7 @@ impl RequestDispatcher<'_> {
 #[derive(Debug)]
 enum HandlerCancelledError {
     PropagatedPanic,
-    Inner(ide::Cancelled),
+    Inner(salsa::Cancelled),
 }
 
 impl std::error::Error for HandlerCancelledError {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index 55344a4d6ac..a30e5d8ce26 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -3,6 +3,7 @@
 
 use std::ops::{Deref, Not as _};
 
+use ide_db::base_db::salsa::Cancelled;
 use itertools::Itertools;
 use lsp_types::{
     CancelParams, DidChangeConfigurationParams, DidChangeTextDocumentParams,
@@ -305,7 +306,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
         let invocation_strategy_once = state.config.flycheck(None).invocation_strategy_once();
         let may_flycheck_workspace = state.config.flycheck_workspace(None);
         let mut updated = false;
-        let task = move || -> std::result::Result<(), ide::Cancelled> {
+        let task = move || -> std::result::Result<(), Cancelled> {
             if invocation_strategy_once {
                 let saved_file = vfs_path.as_path().map(|p| p.to_owned());
                 world.flycheck[0].restart_workspace(saved_file.clone());
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index 68b2d6b6962..170481ea701 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -502,6 +502,7 @@ pub(crate) fn handle_document_diagnostics(
     if !snap.analysis.is_local_source_root(source_root)? {
         return Ok(empty_diagnostic_report());
     }
+    let source_root = snap.analysis.source_root_id(file_id)?;
     let config = snap.config.diagnostics(Some(source_root));
     if !config.enabled {
         return Ok(empty_diagnostic_report());
@@ -1068,7 +1069,11 @@ pub(crate) fn handle_related_tests(
 
 pub(crate) fn handle_completion(
     snap: GlobalStateSnapshot,
-    lsp_types::CompletionParams { text_document_position, context,.. }: lsp_types::CompletionParams,
+    lsp_types::CompletionParams {
+        text_document_position,
+        context,
+        ..
+    }: lsp_types::CompletionParams,
 ) -> anyhow::Result<Option<lsp_types::CompletionResponse>> {
     let _p = tracing::info_span!("handle_completion").entered();
     let mut position =
@@ -1117,7 +1122,9 @@ pub(crate) fn handle_completion_resolve(
         .into());
     }
 
-    let Some(data) = original_completion.data.take() else { return Ok(original_completion) };
+    let Some(data) = original_completion.data.take() else {
+        return Ok(original_completion);
+    };
 
     let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
 
@@ -1500,7 +1507,7 @@ pub(crate) fn handle_code_action_resolve(
                 "Failed to parse action id string '{}': {e}",
                 params.id
             ))
-            .into())
+            .into());
         }
     };
 
@@ -1608,7 +1615,9 @@ pub(crate) fn handle_code_lens_resolve(
     snap: GlobalStateSnapshot,
     mut code_lens: CodeLens,
 ) -> anyhow::Result<CodeLens> {
-    let Some(data) = code_lens.data.take() else { return Ok(code_lens) };
+    let Some(data) = code_lens.data.take() else {
+        return Ok(code_lens);
+    };
     let resolve = serde_json::from_value::<lsp_ext::CodeLensResolveData>(data)?;
     let Some(annotation) = from_proto::annotation(&snap, code_lens.range, resolve)? else {
         return Ok(code_lens);
@@ -1718,14 +1727,18 @@ pub(crate) fn handle_inlay_hints_resolve(
 ) -> anyhow::Result<InlayHint> {
     let _p = tracing::info_span!("handle_inlay_hints_resolve").entered();
 
-    let Some(data) = original_hint.data.take() else { return Ok(original_hint) };
+    let Some(data) = original_hint.data.take() else {
+        return Ok(original_hint);
+    };
     let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
     let file_id = FileId::from_raw(resolve_data.file_id);
     if resolve_data.version != snap.file_version(file_id) {
         tracing::warn!("Inlay hint resolve data is outdated");
         return Ok(original_hint);
     }
-    let Some(hash) = resolve_data.hash.parse().ok() else { return Ok(original_hint) };
+    let Some(hash) = resolve_data.hash.parse().ok() else {
+        return Ok(original_hint);
+    };
     anyhow::ensure!(snap.file_exists(file_id), "Invalid LSP resolve data");
 
     let line_index = snap.file_line_index(file_id)?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index f5d9469f262..b47a126424a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -10,7 +10,7 @@ use std::{
 
 use always_assert::always;
 use crossbeam_channel::{select, Receiver};
-use ide_db::base_db::{SourceDatabase, SourceRootDatabase, VfsPath};
+use ide_db::base_db::{RootQueryDb, SourceDatabase, VfsPath};
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::{notification::Notification as _, TextDocumentIdentifier};
 use stdx::thread::ThreadIntent;
@@ -529,7 +529,7 @@ impl GlobalState {
         let num_worker_threads = self.config.prime_caches_num_threads();
 
         self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, {
-            let analysis = self.snapshot().analysis;
+            let analysis = AssertUnwindSafe(self.snapshot().analysis);
             move |sender| {
                 sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
                 let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
@@ -555,13 +555,14 @@ impl GlobalState {
                     (excluded == vfs::FileExcluded::No).then_some(file_id)
                 })
                 .filter(|&file_id| {
-                    let source_root = db.file_source_root(file_id);
+                    let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                    let source_root = db.source_root(source_root_id).source_root(db);
                     // Only publish diagnostics for files in the workspace, not from crates.io deps
                     // or the sysroot.
                     // While theoretically these should never have errors, we have quite a few false
                     // positives particularly in the stdlib, and those diagnostics would stay around
                     // forever if we emitted them here.
-                    !db.source_root(source_root).is_library
+                    !source_root.is_library
                 })
                 .collect::<std::sync::Arc<_>>()
         };
@@ -642,8 +643,9 @@ impl GlobalState {
                 (excluded == vfs::FileExcluded::No).then_some(file_id)
             })
             .filter(|&file_id| {
-                let source_root = db.file_source_root(file_id);
-                !db.source_root(source_root).is_library
+                let source_root_id = db.file_source_root(file_id).source_root_id(db);
+                let source_root = db.source_root(source_root_id).source_root(db);
+                !source_root.is_library
             })
             .collect::<Vec<_>>();
         tracing::trace!("updating tests for {:?}", subscriptions);
@@ -908,12 +910,12 @@ impl GlobalState {
             }
             QueuedTask::CheckProcMacroSources(modified_rust_files) => {
                 let crate_graph = self.analysis_host.raw_database().crate_graph();
-                let snap = self.snapshot();
+                let analysis = AssertUnwindSafe(self.snapshot().analysis);
                 self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, {
                     move |sender| {
                         if modified_rust_files.into_iter().any(|file_id| {
                             // FIXME: Check whether these files could be build script related
-                            match snap.analysis.crates_for(file_id) {
+                            match analysis.crates_for(file_id) {
                                 Ok(crates) => {
                                     crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
                                 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 09b0adfeb8a..1a60fa06c9e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -18,7 +18,7 @@ use std::{iter, mem};
 use hir::{db::DefDatabase, ChangeWithProcMacros, ProcMacros, ProcMacrosBuilder};
 use ide::CrateId;
 use ide_db::{
-    base_db::{ra_salsa::Durability, CrateGraph, CrateWorkspaceData, ProcMacroPaths},
+    base_db::{salsa::Durability, CrateGraph, CrateWorkspaceData, ProcMacroPaths},
     FxHashMap,
 };
 use itertools::Itertools;
diff --git a/src/tools/rust-analyzer/crates/span/Cargo.toml b/src/tools/rust-analyzer/crates/span/Cargo.toml
index 097a056c99a..3381dac0b42 100644
--- a/src/tools/rust-analyzer/crates/span/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/span/Cargo.toml
@@ -12,7 +12,7 @@ authors.workspace = true
 
 [dependencies]
 la-arena.workspace = true
-ra-salsa = { workspace = true, optional = true }
+salsa.workspace = true
 rustc-hash.workspace = true
 hashbrown.workspace = true
 text-size.workspace = true
@@ -22,8 +22,5 @@ vfs.workspace = true
 syntax.workspace = true
 stdx.workspace = true
 
-[features]
-default = ["ra-salsa"]
-
 [lints]
 workspace = true
diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
index 6becc8e41ed..a6402e87015 100644
--- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
@@ -21,44 +21,287 @@
 //! `ExpnData::call_site` in rustc, [`MacroCallLoc::call_site`] in rust-analyzer.
 use std::fmt;
 
-#[cfg(not(feature = "ra-salsa"))]
-use crate::InternId;
-#[cfg(feature = "ra-salsa")]
-use ra_salsa::{InternId, InternValue};
-
 use crate::{Edition, MacroCallId};
 
-/// Interned [`SyntaxContextData`].
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct SyntaxContextId(InternId);
+// Recursive expansion of interned macro
+// ======================================
 
-impl fmt::Debug for SyntaxContextId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        if f.alternate() {
-            write!(f, "{}", self.0.as_u32())
-        } else {
-            f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+/// A syntax context describes a hierarchy tracking order of macro definitions.
+#[derive(Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+pub struct SyntaxContext(
+    salsa::Id,
+    std::marker::PhantomData<&'static salsa::plumbing::interned::Value<SyntaxContext>>,
+);
+
+/// The underlying data interned by Salsa.
+#[derive(Clone, Eq, Debug)]
+pub struct SyntaxContextUnderlyingData {
+    pub outer_expn: Option<MacroCallId>,
+    pub outer_transparency: Transparency,
+    pub edition: Edition,
+    pub parent: SyntaxContext,
+    pub opaque: SyntaxContext,
+    pub opaque_and_semitransparent: SyntaxContext,
+}
+
+const _: () = {
+    use salsa::plumbing as zalsa_;
+    use salsa::plumbing::interned as zalsa_struct_;
+
+    impl PartialEq for SyntaxContextUnderlyingData {
+        fn eq(&self, other: &Self) -> bool {
+            self.outer_expn == other.outer_expn
+                && self.outer_transparency == other.outer_transparency
+                && self.edition == other.edition
+                && self.parent == other.parent
         }
     }
-}
 
-#[cfg(feature = "ra-salsa")]
-impl ra_salsa::InternKey for SyntaxContextId {
-    fn from_intern_id(v: ra_salsa::InternId) -> Self {
-        SyntaxContextId(v)
+    impl std::hash::Hash for SyntaxContextUnderlyingData {
+        fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
+            self.outer_expn.hash(state);
+            self.outer_transparency.hash(state);
+            self.edition.hash(state);
+            self.parent.hash(state);
+        }
+    }
+    /// Key to use during hash lookups. Each field is some type that implements `Lookup<T>`
+    /// for the owned type. This permits interning with an `&str` when a `String` is required and so forth.
+    #[derive(Hash)]
+    struct StructKey<'db, T0, T1, T2, T3>(T0, T1, T2, T3, std::marker::PhantomData<&'db ()>);
+
+    impl<'db, T0, T1, T2, T3> zalsa_::interned::HashEqLike<StructKey<'db, T0, T1, T2, T3>>
+        for SyntaxContextUnderlyingData
+    where
+        Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+        Transparency: zalsa_::interned::HashEqLike<T1>,
+        Edition: zalsa_::interned::HashEqLike<T2>,
+        SyntaxContext: zalsa_::interned::HashEqLike<T3>,
+    {
+        fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
+            zalsa_::interned::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
+            zalsa_::interned::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
+            zalsa_::interned::HashEqLike::<T2>::hash(&self.edition, &mut *h);
+            zalsa_::interned::HashEqLike::<T3>::hash(&self.parent, &mut *h);
+        }
+        fn eq(&self, data: &StructKey<'db, T0, T1, T2, T3>) -> bool {
+            zalsa_::interned::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
+                && zalsa_::interned::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
+                && zalsa_::interned::HashEqLike::<T2>::eq(&self.edition, &data.2)
+                && zalsa_::interned::HashEqLike::<T3>::eq(&self.parent, &data.3)
+        }
+    }
+    impl zalsa_struct_::Configuration for SyntaxContext {
+        const DEBUG_NAME: &'static str = "SyntaxContextData";
+        type Fields<'a> = SyntaxContextUnderlyingData;
+        type Struct<'a> = SyntaxContext;
+        fn struct_from_id<'db>(id: salsa::Id) -> Self::Struct<'db> {
+            SyntaxContext(id, std::marker::PhantomData)
+        }
+        fn deref_struct(s: Self::Struct<'_>) -> salsa::Id {
+            s.0
+        }
     }
-    fn as_intern_id(&self) -> ra_salsa::InternId {
-        self.0
+    impl SyntaxContext {
+        pub fn ingredient<Db>(db: &Db) -> &zalsa_struct_::IngredientImpl<Self>
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            static CACHE: zalsa_::IngredientCache<zalsa_struct_::IngredientImpl<SyntaxContext>> =
+                zalsa_::IngredientCache::new();
+            CACHE.get_or_create(db.as_dyn_database(), || {
+                db.zalsa().add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>()
+            })
+        }
     }
-}
+    impl zalsa_::AsId for SyntaxContext {
+        fn as_id(&self) -> salsa::Id {
+            self.0
+        }
+    }
+    impl zalsa_::FromId for SyntaxContext {
+        fn from_id(id: salsa::Id) -> Self {
+            Self(id, std::marker::PhantomData)
+        }
+    }
+    unsafe impl Send for SyntaxContext {}
 
-impl fmt::Display for SyntaxContextId {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", self.0.as_u32())
+    unsafe impl Sync for SyntaxContext {}
+
+    impl zalsa_::SalsaStructInDb for SyntaxContext {
+        type MemoIngredientMap = salsa::plumbing::MemoIngredientSingletonIndex;
+
+        fn lookup_or_create_ingredient_index(
+            aux: &salsa::plumbing::Zalsa,
+        ) -> salsa::plumbing::IngredientIndices {
+            aux.add_or_lookup_jar_by_type::<zalsa_struct_::JarImpl<SyntaxContext>>().into()
+        }
+
+        #[inline]
+        fn cast(id: salsa::Id, type_id: std::any::TypeId) -> Option<Self> {
+            if type_id == std::any::TypeId::of::<SyntaxContext>() {
+                Some(<Self as salsa::plumbing::FromId>::from_id(id))
+            } else {
+                None
+            }
+        }
+    }
+
+    unsafe impl salsa::plumbing::Update for SyntaxContext {
+        unsafe fn maybe_update(old_pointer: *mut Self, new_value: Self) -> bool {
+            if unsafe { *old_pointer } != new_value {
+                unsafe { *old_pointer = new_value };
+                true
+            } else {
+                false
+            }
+        }
+    }
+    impl<'db> SyntaxContext {
+        pub fn new<
+            Db,
+            T0: zalsa_::interned::Lookup<Option<MacroCallId>> + std::hash::Hash,
+            T1: zalsa_::interned::Lookup<Transparency> + std::hash::Hash,
+            T2: zalsa_::interned::Lookup<Edition> + std::hash::Hash,
+            T3: zalsa_::interned::Lookup<SyntaxContext> + std::hash::Hash,
+        >(
+            db: &'db Db,
+            outer_expn: T0,
+            outer_transparency: T1,
+            edition: T2,
+            parent: T3,
+            opaque: impl FnOnce(SyntaxContext) -> SyntaxContext,
+            opaque_and_semitransparent: impl FnOnce(SyntaxContext) -> SyntaxContext,
+        ) -> Self
+        where
+            Db: ?Sized + salsa::Database,
+            Option<MacroCallId>: zalsa_::interned::HashEqLike<T0>,
+            Transparency: zalsa_::interned::HashEqLike<T1>,
+            Edition: zalsa_::interned::HashEqLike<T2>,
+            SyntaxContext: zalsa_::interned::HashEqLike<T3>,
+        {
+            SyntaxContext::ingredient(db).intern(
+                db.as_dyn_database(),
+                StructKey::<'db>(
+                    outer_expn,
+                    outer_transparency,
+                    edition,
+                    parent,
+                    std::marker::PhantomData,
+                ),
+                |id, data| SyntaxContextUnderlyingData {
+                    outer_expn: zalsa_::interned::Lookup::into_owned(data.0),
+                    outer_transparency: zalsa_::interned::Lookup::into_owned(data.1),
+                    edition: zalsa_::interned::Lookup::into_owned(data.2),
+                    parent: zalsa_::interned::Lookup::into_owned(data.3),
+                    opaque: opaque(zalsa_::FromId::from_id(id)),
+                    opaque_and_semitransparent: opaque_and_semitransparent(
+                        zalsa_::FromId::from_id(id),
+                    ),
+                },
+            )
+        }
+
+        /// Invariant: Only [`SyntaxContext::ROOT`] has a [`None`] outer expansion.
+        // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
+        // MacroCallId is reserved anyways so we can do bit tagging here just fine.
+        // The bigger issue is that this will cause interning to now create completely separate chains
+        // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
+        pub fn outer_expn<Db>(self, db: &'db Db) -> Option<MacroCallId>
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return None;
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.outer_expn)
+        }
+
+        pub fn outer_transparency<Db>(self, db: &'db Db) -> Transparency
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return Transparency::Opaque;
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.outer_transparency)
+        }
+
+        pub fn edition<Db>(self, db: &'db Db) -> Edition
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return Edition::from_u32(SyntaxContext::MAX_ID - self.0.as_u32());
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.edition)
+        }
+
+        pub fn parent<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return self;
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.parent)
+        }
+
+        /// This context, but with all transparent and semi-transparent expansions filtered away.
+        pub fn opaque<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return self;
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.opaque)
+        }
+
+        /// This context, but with all transparent expansions filtered away.
+        pub fn opaque_and_semitransparent<Db>(self, db: &'db Db) -> SyntaxContext
+        where
+            Db: ?Sized + zalsa_::Database,
+        {
+            if self.is_root() {
+                return self;
+            }
+            let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), self);
+            std::clone::Clone::clone(&fields.opaque_and_semitransparent)
+        }
+
+        pub fn default_debug_fmt(this: Self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+            salsa::with_attached_database(|db| {
+                let fields = SyntaxContext::ingredient(db).fields(db.as_dyn_database(), this);
+                let mut f = f.debug_struct("SyntaxContextData");
+                let f = f.field("outer_expn", &fields.outer_expn);
+                let f = f.field("outer_transparency", &fields.outer_transparency);
+                let f = f.field("edition", &fields.edition);
+                let f = f.field("parent", &fields.parent);
+                let f = f.field("opaque", &fields.opaque);
+                let f = f.field("opaque_and_semitransparent", &fields.opaque_and_semitransparent);
+                f.finish()
+            })
+            .unwrap_or_else(|| {
+                f.debug_tuple("SyntaxContextData").field(&zalsa_::AsId::as_id(&this)).finish()
+            })
+        }
+    }
+};
+
+impl SyntaxContext {
+    const MAX_ID: u32 = salsa::Id::MAX_U32 - 1;
+
+    pub fn is_root(self) -> bool {
+        (SyntaxContext::MAX_ID - Edition::LATEST as u32) <= self.into_u32()
+            && self.into_u32() <= (SyntaxContext::MAX_ID - Edition::Edition2015 as u32)
     }
-}
 
-impl SyntaxContextId {
     #[inline]
     pub fn remove_root_edition(&mut self) {
         if self.is_root() {
@@ -68,75 +311,19 @@ impl SyntaxContextId {
 
     /// The root context, which is the parent of all other contexts. All [`FileId`]s have this context.
     pub const fn root(edition: Edition) -> Self {
-        SyntaxContextId(unsafe { InternId::new_unchecked(edition as u32) })
+        let edition = edition as u32;
+        SyntaxContext(
+            salsa::Id::from_u32(SyntaxContext::MAX_ID - edition),
+            std::marker::PhantomData,
+        )
     }
 
-    pub fn is_root(self) -> bool {
-        self.into_u32() <= Edition::LATEST as u32
-    }
-
-    /// Deconstruct a `SyntaxContextId` into a raw `u32`.
-    /// This should only be used for deserialization purposes for the proc-macro server.
     pub fn into_u32(self) -> u32 {
         self.0.as_u32()
     }
 
-    /// Constructs a `SyntaxContextId` from a raw `u32`.
-    /// This should only be used for serialization purposes for the proc-macro server.
     pub fn from_u32(u32: u32) -> Self {
-        Self(InternId::from(u32))
-    }
-}
-
-/// A syntax context describes a hierarchy tracking order of macro definitions.
-#[derive(Copy, Clone, Hash, PartialEq, Eq)]
-pub struct SyntaxContextData {
-    /// Invariant: Only [`SyntaxContextId::ROOT`] has a [`None`] outer expansion.
-    // FIXME: The None case needs to encode the context crate id. We can encode that as the MSB of
-    // MacroCallId is reserved anyways so we can do bit tagging here just fine.
-    // The bigger issue is that this will cause interning to now create completely separate chains
-    // per crate. Though that is likely not a problem as `MacroCallId`s are already crate calling dependent.
-    pub outer_expn: Option<MacroCallId>,
-    pub outer_transparency: Transparency,
-    pub edition: Edition,
-    pub parent: SyntaxContextId,
-    /// This context, but with all transparent and semi-transparent expansions filtered away.
-    pub opaque: SyntaxContextId,
-    /// This context, but with all transparent expansions filtered away.
-    pub opaque_and_semitransparent: SyntaxContextId,
-}
-
-#[cfg(feature = "ra-salsa")]
-impl InternValue for SyntaxContextData {
-    type Key = (SyntaxContextId, Option<MacroCallId>, Transparency, Edition);
-
-    fn into_key(&self) -> Self::Key {
-        (self.parent, self.outer_expn, self.outer_transparency, self.edition)
-    }
-}
-
-impl std::fmt::Debug for SyntaxContextData {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("SyntaxContextData")
-            .field("outer_expn", &self.outer_expn)
-            .field("outer_transparency", &self.outer_transparency)
-            .field("parent", &self.parent)
-            .field("opaque", &self.opaque)
-            .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
-            .finish()
-    }
-}
-
-impl SyntaxContextData {
-    pub fn root(edition: Edition) -> Self {
-        SyntaxContextData {
-            outer_expn: None,
-            outer_transparency: Transparency::Opaque,
-            parent: SyntaxContextId::root(edition),
-            opaque: SyntaxContextId::root(edition),
-            opaque_and_semitransparent: SyntaxContextId::root(edition),
-            edition,
-        }
+        Self(salsa::Id::from_u32(u32), std::marker::PhantomData)
     }
 }
 
@@ -167,3 +354,19 @@ impl Transparency {
         matches!(self, Self::Opaque)
     }
 }
+
+impl fmt::Display for SyntaxContext {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}", self.0.as_u32())
+    }
+}
+
+impl std::fmt::Debug for SyntaxContext {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        if f.alternate() {
+            write!(f, "{}", self.0.as_u32())
+        } else {
+            f.debug_tuple("SyntaxContext").field(&self.0).finish()
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/span/src/lib.rs b/src/tools/rust-analyzer/crates/span/src/lib.rs
index 8dc95735038..7abdacee2b9 100644
--- a/src/tools/rust-analyzer/crates/span/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/span/src/lib.rs
@@ -1,19 +1,19 @@
 //! File and span related types.
 use std::fmt::{self, Write};
 
-#[cfg(feature = "ra-salsa")]
-use ra_salsa::InternId;
-
 mod ast_id;
 mod hygiene;
 mod map;
 
 pub use self::{
     ast_id::{AstIdMap, AstIdNode, ErasedFileAstId, FileAstId},
-    hygiene::{SyntaxContextData, SyntaxContextId, Transparency},
+    hygiene::{SyntaxContext, Transparency},
     map::{RealSpanMap, SpanMap},
 };
 
+// FIXME: remove this alias once downstream callers are migrated to `SyntaxContext`
+pub use self::hygiene::{SyntaxContext as SyntaxContextId, SyntaxContextUnderlyingData};
+
 pub use syntax::Edition;
 pub use text_size::{TextRange, TextSize};
 pub use vfs::FileId;
@@ -202,11 +202,23 @@ impl EditionedFileId {
 /// `MacroCallId`.
 // FIXME: Give this a better fitting name
 #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct HirFileId(u32);
+pub struct HirFileId(salsa::Id);
+
+impl salsa::plumbing::AsId for HirFileId {
+    fn as_id(&self) -> salsa::Id {
+        self.0
+    }
+}
+
+impl salsa::plumbing::FromId for HirFileId {
+    fn from_id(id: salsa::Id) -> Self {
+        HirFileId(id)
+    }
+}
 
 impl From<HirFileId> for u32 {
     fn from(value: HirFileId) -> Self {
-        value.0
+        value.0.as_u32()
     }
 }
 
@@ -262,18 +274,20 @@ pub struct MacroFileId {
 /// `MacroCallId` identifies a particular macro invocation, like
 /// `println!("Hello, {}", world)`.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct MacroCallId(InternId);
+pub struct MacroCallId(salsa::Id);
 
-#[cfg(feature = "ra-salsa")]
-impl ra_salsa::InternKey for MacroCallId {
-    fn from_intern_id(v: ra_salsa::InternId) -> Self {
-        MacroCallId(v)
-    }
-    fn as_intern_id(&self) -> ra_salsa::InternId {
+impl salsa::plumbing::AsId for MacroCallId {
+    fn as_id(&self) -> salsa::Id {
         self.0
     }
 }
 
+impl salsa::plumbing::FromId for MacroCallId {
+    fn from_id(id: salsa::Id) -> Self {
+        MacroCallId(id)
+    }
+}
+
 impl MacroCallId {
     pub const MAX_ID: u32 = 0x7fff_ffff;
 
@@ -307,16 +321,16 @@ impl From<EditionedFileId> for HirFileId {
     #[allow(clippy::let_unit_value)]
     fn from(id: EditionedFileId) -> Self {
         assert!(id.as_u32() <= Self::MAX_HIR_FILE_ID, "FileId index {} is too large", id.as_u32());
-        HirFileId(id.as_u32())
+        HirFileId(salsa::Id::from_u32(id.0))
     }
 }
 
 impl From<MacroFileId> for HirFileId {
     #[allow(clippy::let_unit_value)]
     fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
-        let id = id.as_u32();
+        let id: u32 = id.as_u32();
         assert!(id <= Self::MAX_HIR_FILE_ID, "MacroCallId index {id} is too large");
-        HirFileId(id | Self::MACRO_FILE_TAG_MASK)
+        HirFileId(salsa::Id::from_u32(id | Self::MACRO_FILE_TAG_MASK))
     }
 }
 
@@ -326,33 +340,37 @@ impl HirFileId {
 
     #[inline]
     pub fn is_macro(self) -> bool {
-        self.0 & Self::MACRO_FILE_TAG_MASK != 0
+        self.0.as_u32() & Self::MACRO_FILE_TAG_MASK != 0
     }
 
     #[inline]
     pub fn macro_file(self) -> Option<MacroFileId> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
+        match self.0.as_u32() & Self::MACRO_FILE_TAG_MASK {
             0 => None,
             _ => Some(MacroFileId {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+                macro_call_id: MacroCallId(salsa::Id::from_u32(
+                    self.0.as_u32() ^ Self::MACRO_FILE_TAG_MASK,
+                )),
             }),
         }
     }
 
     #[inline]
     pub fn file_id(self) -> Option<EditionedFileId> {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => Some(EditionedFileId(self.0)),
+        match self.0.as_u32() & Self::MACRO_FILE_TAG_MASK {
+            0 => Some(EditionedFileId(self.0.as_u32())),
             _ => None,
         }
     }
 
     #[inline]
     pub fn repr(self) -> HirFileIdRepr {
-        match self.0 & Self::MACRO_FILE_TAG_MASK {
-            0 => HirFileIdRepr::FileId(EditionedFileId(self.0)),
+        match self.0.as_u32() & Self::MACRO_FILE_TAG_MASK {
+            0 => HirFileIdRepr::FileId(EditionedFileId(self.0.as_u32())),
             _ => HirFileIdRepr::MacroFile(MacroFileId {
-                macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
+                macro_call_id: MacroCallId(salsa::Id::from_u32(
+                    self.0.as_u32() ^ Self::MACRO_FILE_TAG_MASK,
+                )),
             }),
         }
     }
@@ -369,72 +387,3 @@ impl std::fmt::Debug for TokenId {
         self.0.fmt(f)
     }
 }
-
-#[cfg(not(feature = "ra-salsa"))]
-mod intern_id_proxy {
-    use std::fmt;
-    use std::num::NonZeroU32;
-
-    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-    pub(super) struct InternId {
-        value: NonZeroU32,
-    }
-
-    impl InternId {
-        pub(super) const MAX: u32 = 0xFFFF_FF00;
-
-        pub(super) const unsafe fn new_unchecked(value: u32) -> Self {
-            debug_assert!(value < InternId::MAX);
-            let value = unsafe { NonZeroU32::new_unchecked(value + 1) };
-            InternId { value }
-        }
-
-        pub(super) fn as_u32(self) -> u32 {
-            self.value.get() - 1
-        }
-
-        pub(super) fn as_usize(self) -> usize {
-            self.as_u32() as usize
-        }
-    }
-
-    impl From<InternId> for u32 {
-        fn from(raw: InternId) -> u32 {
-            raw.as_u32()
-        }
-    }
-
-    impl From<InternId> for usize {
-        fn from(raw: InternId) -> usize {
-            raw.as_usize()
-        }
-    }
-
-    impl From<u32> for InternId {
-        fn from(id: u32) -> InternId {
-            assert!(id < InternId::MAX);
-            unsafe { InternId::new_unchecked(id) }
-        }
-    }
-
-    impl From<usize> for InternId {
-        fn from(id: usize) -> InternId {
-            assert!(id < (InternId::MAX as usize));
-            unsafe { InternId::new_unchecked(id as u32) }
-        }
-    }
-
-    impl fmt::Debug for InternId {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            self.as_usize().fmt(f)
-        }
-    }
-
-    impl fmt::Display for InternId {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            self.as_usize().fmt(f)
-        }
-    }
-}
-#[cfg(not(feature = "ra-salsa"))]
-use intern_id_proxy::InternId;
diff --git a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
index 37dfb87721c..4b5c2ff4cf7 100644
--- a/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/test-fixture/src/lib.rs
@@ -3,7 +3,8 @@ use std::{iter, mem, str::FromStr, sync};
 
 use base_db::{
     CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, CrateWorkspaceData, Dependency,
-    Env, FileChange, FileSet, LangCrateOrigin, SourceRoot, SourceRootDatabase, Version, VfsPath,
+    Env, FileChange, FileSet, LangCrateOrigin, RootQueryDb, SourceDatabase, SourceRoot, Version,
+    VfsPath,
 };
 use cfg::CfgOptions;
 use hir_expand::{
@@ -28,7 +29,7 @@ use test_utils::{
 
 pub const WORKSPACE: base_db::SourceRootId = base_db::SourceRootId(0);
 
-pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
+pub trait WithFixture: Default + ExpandDatabase + SourceDatabase + 'static {
     #[track_caller]
     fn with_single_file(
         #[rust_analyzer::rust_fixture] ra_fixture: &str,
@@ -101,7 +102,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     }
 
     fn test_crate(&self) -> CrateId {
-        let crate_graph = self.crate_graph();
+        let crate_graph = RootQueryDb::crate_graph(self);
         let mut it = crate_graph.iter();
         let mut res = it.next().unwrap();
         while crate_graph[res].origin.is_lang() {
@@ -111,7 +112,7 @@ pub trait WithFixture: Default + ExpandDatabase + SourceRootDatabase + 'static {
     }
 }
 
-impl<DB: ExpandDatabase + SourceRootDatabase + Default + 'static> WithFixture for DB {}
+impl<DB: ExpandDatabase + SourceDatabase + Default + 'static> WithFixture for DB {}
 
 pub struct ChangeFixture {
     pub file_position: Option<(EditionedFileId, RangeOrOffset)>,
diff --git a/src/tools/rust-analyzer/xtask/src/tidy.rs b/src/tools/rust-analyzer/xtask/src/tidy.rs
index 04952de7318..0e4208e0967 100644
--- a/src/tools/rust-analyzer/xtask/src/tidy.rs
+++ b/src/tools/rust-analyzer/xtask/src/tidy.rs
@@ -127,7 +127,7 @@ fn check_cargo_toml(path: &Path, text: String) {
 
 fn check_licenses(sh: &Shell) {
     let expected = "
-(MIT OR Apache-2.0) AND Unicode-DFS-2016
+(MIT OR Apache-2.0) AND Unicode-3.0
 0BSD OR MIT OR Apache-2.0
 Apache-2.0
 Apache-2.0 OR BSL-1.0
@@ -141,14 +141,13 @@ ISC
 MIT
 MIT / Apache-2.0
 MIT OR Apache-2.0
-MIT OR Apache-2.0 OR Zlib
 MIT OR Zlib OR Apache-2.0
 MIT/Apache-2.0
 MPL-2.0
+Unicode-3.0
 Unlicense OR MIT
 Unlicense/MIT
 Zlib
-Zlib OR Apache-2.0 OR MIT
 "
     .lines()
     .filter(|it| !it.is_empty())