about summary refs log tree commit diff
diff options
context:
space:
mode:
authorLaurențiu Nicola <lnicola@dend.ro>2023-02-13 13:55:14 +0200
committerLaurențiu Nicola <lnicola@dend.ro>2023-02-13 13:55:14 +0200
commitc4a2f065854f85fcee397d0b9e8e535e6770e1da (patch)
tree8c1bd3d231f9cc21a955d7a2a31551a279fd49fa
parent20081880ad2a98bbc8c8293f96c5b284d1584d86 (diff)
parentc97aae38f20f64daede9877212aff83c259a4faa (diff)
downloadrust-c4a2f065854f85fcee397d0b9e8e535e6770e1da.tar.gz
rust-c4a2f065854f85fcee397d0b9e8e535e6770e1da.zip
:arrow_up: rust-analyzer
-rw-r--r--src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/blank_issue.md10
-rw-r--r--src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md4
-rw-r--r--src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/feature_request.md8
-rw-r--r--src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/question.md8
-rw-r--r--src/tools/rust-analyzer/.github/workflows/autopublish.yaml (renamed from src/tools/rust-analyzer/.github/workflows/publish.yml)4
-rw-r--r--src/tools/rust-analyzer/.github/workflows/fuzz.yml43
-rw-r--r--src/tools/rust-analyzer/.github/workflows/publish-libs.yaml35
-rw-r--r--src/tools/rust-analyzer/.github/workflows/release.yaml7
-rw-r--r--src/tools/rust-analyzer/Cargo.lock424
-rw-r--r--src/tools/rust-analyzer/Cargo.toml42
-rw-r--r--src/tools/rust-analyzer/crates/base-db/Cargo.toml23
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/fixture.rs86
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/input.rs36
-rw-r--r--src/tools/rust-analyzer/crates/base-db/src/lib.rs1
-rw-r--r--src/tools/rust-analyzer/crates/cfg/Cargo.toml21
-rw-r--r--src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs4
-rw-r--r--src/tools/rust-analyzer/crates/flycheck/Cargo.toml17
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/Cargo.toml35
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/adt.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/attr.rs360
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/data.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/db.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/find_path.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/generics.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs20
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs24
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/keys.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs298
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/layout.rs1
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/lib.rs49
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs12
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs35
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs52
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs8
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/pretty.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/resolver.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs349
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs10
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs66
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/db.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/eager.rs6
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/lib.rs38
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/name.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs24
-rw-r--r--src/tools/rust-analyzer/crates/hir-expand/src/quote.rs112
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/Cargo.toml31
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs9
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/builder.rs17
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs83
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs36
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/db.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs41
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/display.rs230
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer.rs122
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs15
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs378
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs2
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs25
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/interner.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs11
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout.rs51
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs4
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs40
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs115
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lib.rs81
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/lower.rs141
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs112
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs83
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs337
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/traits.rs18
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/utils.rs185
-rw-r--r--src/tools/rust-analyzer/crates/hir-ty/src/walk.rs147
-rw-r--r--src/tools/rust-analyzer/crates/hir/Cargo.toml29
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/display.rs7
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/lib.rs186
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics.rs16
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs3
-rw-r--r--src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs48
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/Cargo.toml27
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs155
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs92
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs201
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs87
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs312
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs901
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs44
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs23
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs183
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs162
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/lib.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs40
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils.rs38
-rw-r--r--src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs36
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs3
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs151
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/config.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs15
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/render.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs51
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs114
-rw-r--r--src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/Cargo.toml35
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/defs.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/line_index.rs4
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs24
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/rename.rs76
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/search.rs115
-rw-r--r--src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs8
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml28
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs122
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs230
-rw-r--r--src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml25
-rw-r--r--src/tools/rust-analyzer/crates/ide/Cargo.toml39
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/file_structure.rs29
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs1
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/goto_definition.rs64
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/highlight_related.rs2
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover.rs166
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/render.rs163
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/hover/tests.rs213
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs191
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs135
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs137
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs26
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs323
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs14
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs108
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/fn_lifetime_fn.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs11
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/join_lines.rs6
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/lib.rs5
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs140
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/references.rs380
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/rename.rs199
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/signature_help.rs203
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/static_index.rs7
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs9
-rw-r--r--src/tools/rust-analyzer/crates/ide/src/typing.rs57
-rw-r--r--src/tools/rust-analyzer/crates/intern/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/intern/src/lib.rs (renamed from src/tools/rust-analyzer/crates/hir-def/src/intern.rs)19
-rw-r--r--src/tools/rust-analyzer/crates/limit/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/mbe/Cargo.toml19
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/benchmark.rs30
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander.rs7
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs41
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs74
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/lib.rs55
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/parser.rs22
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs166
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs4
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs36
-rw-r--r--src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs27
-rw-r--r--src/tools/rust-analyzer/crates/parser/Cargo.toml16
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/event.rs13
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs98
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs8
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lexed_str.rs5
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/lib.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/output.rs61
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/parser.rs29
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/shortcuts.rs53
-rw-r--r--src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs4
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast33
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast43
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast35
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs2
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast42
-rw-r--r--src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs1
-rw-r--r--src/tools/rust-analyzer/crates/paths/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml23
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs17
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs23
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs43
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs54
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml20
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs104
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs143
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs485
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs24
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs70
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs429
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs305
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs81
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs352
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs166
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs1056
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs140
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs822
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs1
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs24
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs119
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs65
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs28
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs11
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs3
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs26
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml15
-rw-r--r--src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/crates/profile/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/crates/profile/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/Cargo.toml21
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs1
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/lib.rs2
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/project_json.rs44
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/sysroot.rs34
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs13
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/tests.rs128
-rw-r--r--src/tools/rust-analyzer/crates/project-model/src/workspace.rs230
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml56
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs37
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs8
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs22
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs4
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs33
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs3
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs7
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs6
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs62
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs42
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs2
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs82
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs66
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs8
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs14
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs50
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs60
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs41
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs211
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs1
-rw-r--r--src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs27
-rw-r--r--src/tools/rust-analyzer/crates/sourcegen/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/stdx/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/lib.rs14
-rw-r--r--src/tools/rust-analyzer/crates/stdx/src/macros.rs9
-rw-r--r--src/tools/rust-analyzer/crates/syntax/Cargo.toml26
-rw-r--r--src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml7
-rw-r--r--src/tools/rust-analyzer/crates/syntax/rust.ungram2
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast.rs30
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/ast/make.rs21
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs1
-rw-r--r--src/tools/rust-analyzer/crates/syntax/src/utils.rs7
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/Cargo.toml12
-rw-r--r--src/tools/rust-analyzer/crates/test-utils/src/minicore.rs44
-rw-r--r--src/tools/rust-analyzer/crates/text-edit/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/toolchain/Cargo.toml8
-rw-r--r--src/tools/rust-analyzer/crates/tt/Cargo.toml10
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/buffer.rs105
-rw-r--r--src/tools/rust-analyzer/crates/tt/src/lib.rs228
-rw-r--r--src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml12
-rw-r--r--src/tools/rust-analyzer/crates/vfs/Cargo.toml12
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/lib.rs3
-rw-r--r--src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs17
-rw-r--r--src/tools/rust-analyzer/docs/dev/lsp-extensions.md2
-rw-r--r--src/tools/rust-analyzer/docs/user/generated_config.adoc18
-rw-r--r--src/tools/rust-analyzer/docs/user/manual.adoc119
-rw-r--r--src/tools/rust-analyzer/editors/code/language-configuration.json4
-rw-r--r--src/tools/rust-analyzer/editors/code/package-lock.json1246
-rw-r--r--src/tools/rust-analyzer/editors/code/package.json28
-rw-r--r--src/tools/rust-analyzer/editors/code/src/bootstrap.ts96
-rw-r--r--src/tools/rust-analyzer/editors/code/src/client.ts60
-rw-r--r--src/tools/rust-analyzer/editors/code/src/commands.ts4
-rw-r--r--src/tools/rust-analyzer/editors/code/src/config.ts184
-rw-r--r--src/tools/rust-analyzer/editors/code/src/ctx.ts17
-rw-r--r--src/tools/rust-analyzer/editors/code/src/debug.ts2
-rw-r--r--src/tools/rust-analyzer/editors/code/src/lsp_ext.ts240
-rw-r--r--src/tools/rust-analyzer/editors/code/src/util.ts48
-rw-r--r--src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs5
-rw-r--r--src/tools/rust-analyzer/xtask/Cargo.toml1
-rw-r--r--src/tools/rust-analyzer/xtask/src/dist.rs9
-rw-r--r--src/tools/rust-analyzer/xtask/src/publish.rs2
-rw-r--r--src/tools/rust-analyzer/xtask/src/release/changelog.rs4
321 files changed, 11188 insertions, 9698 deletions
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/blank_issue.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/blank_issue.md
deleted file mode 100644
index a08ad07cbf8..00000000000
--- a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/blank_issue.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-name: Blank Issue
-about: Create a blank issue.
-title: ''
-labels: ''
-assignees: ''
-
----
-
-
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
index c2e21933c9a..5faee21bdb6 100644
--- a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/bug_report.md
@@ -2,7 +2,7 @@
 name: Bug report
 about: Create a bug report for rust-analyzer.
 title: ''
-labels: ''
+labels: 'C-bug'
 assignees: ''
 
 ---
@@ -22,4 +22,4 @@ Otherwise please try to provide information which will help us to fix the issue
 
 **rustc version**: (eg. output of `rustc -V`)
 
-**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTUP_HOME` or `CARGO_HOME`)
+**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTC`, `RUSTUP_HOME` or `CARGO_HOME`)
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/feature_request.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000000..5207957c459
--- /dev/null
+++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,8 @@
+---
+name: Feature Request
+about: Create a feature request for rust-analyzer.
+title: ''
+labels: 'C-feature'
+assignees: ''
+
+---
diff --git a/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/question.md b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/question.md
new file mode 100644
index 00000000000..a90ade882bd
--- /dev/null
+++ b/src/tools/rust-analyzer/.github/ISSUE_TEMPLATE/question.md
@@ -0,0 +1,8 @@
+---
+name: Support Question
+about: A question regarding functionality of rust-analyzer.
+title: ''
+labels: 'C-support'
+assignees: ''
+
+---
diff --git a/src/tools/rust-analyzer/.github/workflows/publish.yml b/src/tools/rust-analyzer/.github/workflows/autopublish.yaml
index 73e62ab32c6..279f86b458d 100644
--- a/src/tools/rust-analyzer/.github/workflows/publish.yml
+++ b/src/tools/rust-analyzer/.github/workflows/autopublish.yaml
@@ -1,4 +1,4 @@
-name: publish
+name: autopublish
 on:
   workflow_dispatch: # We can add version input when 1.0 is released and scheduled releases are removed
 
@@ -25,7 +25,7 @@ jobs:
       - name: Install cargo-workspaces
         run: cargo install cargo-workspaces
 
-      - name: Release
+      - name: Publish Crates
         env:
           CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
           PATCH: ${{ github.run_number }}
diff --git a/src/tools/rust-analyzer/.github/workflows/fuzz.yml b/src/tools/rust-analyzer/.github/workflows/fuzz.yml
new file mode 100644
index 00000000000..5af8aa1f77a
--- /dev/null
+++ b/src/tools/rust-analyzer/.github/workflows/fuzz.yml
@@ -0,0 +1,43 @@
+name: Fuzz
+on:
+  schedule:
+    # Once a week
+    - cron: '0 0 * * 0'
+  push:
+    paths:
+    - '.github/workflows/fuzz.yml'
+  # Allow manual trigger
+  workflow_dispatch:
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  CI: 1
+  RUST_BACKTRACE: short
+  RUSTFLAGS: "-D warnings -W unreachable-pub -W bare-trait-objects"
+  RUSTUP_MAX_RETRIES: 10
+
+jobs:
+  rust:
+    if: ${{ github.repository == 'rust-lang/rust-analyzer' || github.event.action == 'workflow_dispatch' }}
+    name: Rust
+    runs-on: ubuntu-latest
+    env:
+      CC: deny_c
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+          fetch-depth: 1
+
+      - name: Install Rust toolchain
+        run: |
+          rustup install --profile minimal nightly
+
+      - name: Build fuzzers
+        run: |
+          cargo install cargo-fuzz
+          cd crates/syntax
+          cargo +nightly fuzz build
diff --git a/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml
new file mode 100644
index 00000000000..1b843fff1a4
--- /dev/null
+++ b/src/tools/rust-analyzer/.github/workflows/publish-libs.yaml
@@ -0,0 +1,35 @@
+name: publish-libs
+on:
+  workflow_dispatch:
+  push:
+    branches:
+      - main
+    paths:
+      - 'lib/**'
+
+jobs:
+  publish-libs:
+    name: publish
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+
+      - name: Install Rust toolchain
+        run: rustup update --no-self-update stable
+
+      - name: Install cargo-workspaces
+        run: cargo install cargo-workspaces
+
+      - name: Publish Crates
+        env:
+          CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+        shell: bash
+        run: |
+          git config --global user.email "runner@gha.local"
+          git config --global user.name "Github Action"
+          # Remove r-a crates from the workspaces so we don't auto-publish them as well
+          sed -i 's/ "crates\/\*"//' ./Cargo.toml
+          cargo workspaces publish --yes --exact --from-git --no-git-commit --allow-dirty
diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml
index b070dd3406f..48f4c6b55ed 100644
--- a/src/tools/rust-analyzer/.github/workflows/release.yaml
+++ b/src/tools/rust-analyzer/.github/workflows/release.yaml
@@ -29,6 +29,9 @@ jobs:
             target: x86_64-pc-windows-msvc
             code-target: win32-x64
           - os: windows-latest
+            target: i686-pc-windows-msvc
+            code-target: win32-ia32
+          - os: windows-latest
             target: aarch64-pc-windows-msvc
             code-target: win32-arm64
           - os: ubuntu-20.04
@@ -232,6 +235,10 @@ jobs:
           path: dist
       - uses: actions/download-artifact@v1
         with:
+          name: dist-i686-pc-windows-msvc
+          path: dist
+      - uses: actions/download-artifact@v1
+        with:
           name: dist-aarch64-pc-windows-msvc
           path: dist
       - run: ls -al ./dist
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index d27ae416f04..ef0316f30fb 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -4,9 +4,9 @@ version = 3
 
 [[package]]
 name = "addr2line"
-version = "0.17.0"
+version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97"
 dependencies = [
  "gimli",
 ]
@@ -27,19 +27,10 @@ dependencies = [
 ]
 
 [[package]]
-name = "ansi_term"
-version = "0.12.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
-dependencies = [
- "winapi",
-]
-
-[[package]]
 name = "anyhow"
-version = "1.0.65"
+version = "1.0.68"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
+checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61"
 
 [[package]]
 name = "anymap"
@@ -49,9 +40,9 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
 
 [[package]]
 name = "arbitrary"
-version = "1.1.7"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d86fd10d912cab78764cc44307d9cd5f164e09abbeb87fb19fb6d95937e8da5f"
+checksum = "b0224938f92e7aef515fac2ff2d18bd1115c1394ddf4a092e0c87e8be9499ee5"
 
 [[package]]
 name = "arrayvec"
@@ -65,7 +56,7 @@ version = "0.2.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
 dependencies = [
- "hermit-abi",
+ "hermit-abi 0.1.19",
  "libc",
  "winapi",
 ]
@@ -78,9 +69,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "backtrace"
-version = "0.3.66"
+version = "0.3.67"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
+checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca"
 dependencies = [
  "addr2line",
  "cc",
@@ -120,9 +111,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
 
 [[package]]
 name = "camino"
-version = "1.1.1"
+version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e"
+checksum = "c77df041dc383319cc661b428b6961a005db4d6808d5e12536931b1ca9556055"
 dependencies = [
  "serde",
 ]
@@ -138,22 +129,23 @@ dependencies = [
 
 [[package]]
 name = "cargo_metadata"
-version = "0.15.0"
+version = "0.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"
+checksum = "982a0cf6a99c350d7246035613882e376d58cebe571785abc5da4f648d53ac0a"
 dependencies = [
  "camino",
  "cargo-platform",
  "semver",
  "serde",
  "serde_json",
+ "thiserror",
 ]
 
 [[package]]
 name = "cc"
-version = "1.0.73"
+version = "1.0.78"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d"
 
 [[package]]
 name = "cfg"
@@ -229,9 +221,9 @@ dependencies = [
 
 [[package]]
 name = "command-group"
-version = "1.0.8"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f7a8a86f409b4a59df3a3e4bee2de0b83f1755fdd2a25e3a9684c396fc4bed2c"
+checksum = "026c3922235f9f7d78f21251a026f3acdeb7cce3deba107fe09a4bfa63d850a2"
 dependencies = [
  "nix",
  "winapi",
@@ -286,22 +278,22 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-epoch"
-version = "0.9.11"
+version = "0.9.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
+checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a"
 dependencies = [
  "autocfg",
  "cfg-if",
  "crossbeam-utils",
- "memoffset",
+ "memoffset 0.7.1",
  "scopeguard",
 ]
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.12"
+version = "0.8.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
+checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f"
 dependencies = [
  "cfg-if",
 ]
@@ -316,14 +308,14 @@ dependencies = [
  "hashbrown",
  "lock_api",
  "once_cell",
- "parking_lot_core 0.9.4",
+ "parking_lot_core 0.9.6",
 ]
 
 [[package]]
 name = "derive_arbitrary"
-version = "1.1.6"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "226ad66541d865d7a7173ad6a9e691c33fdb910ac723f4bc734b3e5294a1f931"
+checksum = "cf460bbff5f571bfc762da5102729f59f338be7db17a21fade44c5c4f5005350"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -332,9 +324,9 @@ dependencies = [
 
 [[package]]
 name = "dissimilar"
-version = "1.0.4"
+version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
+checksum = "210ec60ae7d710bed8683e333e9d2855a8a56a3e9892b38bad3bb0d4d29b0d5e"
 
 [[package]]
 name = "dot"
@@ -375,14 +367,14 @@ dependencies = [
 
 [[package]]
 name = "filetime"
-version = "0.2.18"
+version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3"
+checksum = "4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9"
 dependencies = [
  "cfg-if",
  "libc",
  "redox_syscall",
- "windows-sys 0.42.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -393,9 +385,9 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
 
 [[package]]
 name = "flate2"
-version = "1.0.24"
+version = "1.0.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841"
 dependencies = [
  "crc32fast",
  "miniz_oxide",
@@ -450,9 +442,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
 
 [[package]]
 name = "gimli"
-version = "0.26.2"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
+checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793"
 
 [[package]]
 name = "hashbrown"
@@ -479,6 +471,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "hermit-abi"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7"
+dependencies = [
+ "libc",
+]
+
+[[package]]
 name = "hir"
 version = "0.0.0"
 dependencies = [
@@ -519,6 +520,7 @@ dependencies = [
  "hkalbasi-rustc-ap-rustc_abi",
  "hkalbasi-rustc-ap-rustc_index",
  "indexmap",
+ "intern",
  "itertools",
  "la-arena",
  "limit",
@@ -544,6 +546,7 @@ dependencies = [
  "either",
  "expect-test",
  "hashbrown",
+ "intern",
  "itertools",
  "la-arena",
  "limit",
@@ -574,11 +577,13 @@ dependencies = [
  "hir-def",
  "hir-expand",
  "hkalbasi-rustc-ap-rustc_index",
+ "intern",
  "itertools",
  "la-arena",
  "limit",
  "once_cell",
  "profile",
+ "project-model",
  "rustc-hash",
  "scoped-tls",
  "smallvec",
@@ -642,6 +647,7 @@ dependencies = [
  "profile",
  "pulldown-cmark",
  "pulldown-cmark-to-cmark",
+ "smallvec",
  "stdx",
  "syntax",
  "test-utils",
@@ -766,9 +772,9 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.9.1"
+version = "1.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399"
 dependencies = [
  "autocfg",
  "hashbrown",
@@ -804,6 +810,16 @@ dependencies = [
 ]
 
 [[package]]
+name = "intern"
+version = "0.0.0"
+dependencies = [
+ "dashmap",
+ "hashbrown",
+ "once_cell",
+ "rustc-hash",
+]
+
+[[package]]
 name = "itertools"
 version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -814,9 +830,9 @@ dependencies = [
 
 [[package]]
 name = "itoa"
-version = "1.0.4"
+version = "1.0.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
 
 [[package]]
 name = "jod-thread"
@@ -826,9 +842,9 @@ checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
 
 [[package]]
 name = "kqueue"
-version = "1.0.6"
+version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e"
+checksum = "2c8fc60ba15bf51257aa9807a48a61013db043fcf3a78cb0d916e8e396dcad98"
 dependencies = [
  "kqueue-sys",
  "libc",
@@ -856,15 +872,15 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "libc"
-version = "0.2.135"
+version = "0.2.139"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79"
 
 [[package]]
 name = "libloading"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
 dependencies = [
  "cfg-if",
  "winapi",
@@ -872,11 +888,12 @@ dependencies = [
 
 [[package]]
 name = "libmimalloc-sys"
-version = "0.1.26"
+version = "0.1.30"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8fc093ab289b0bfda3aa1bdfab9c9542be29c7ef385cfcbe77f8c9813588eb48"
+checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174"
 dependencies = [
  "cc",
+ "libc",
 ]
 
 [[package]]
@@ -958,9 +975,9 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 
 [[package]]
 name = "memmap2"
-version = "0.5.7"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"
+checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc"
 dependencies = [
  "libc",
 ]
@@ -975,33 +992,42 @@ dependencies = [
 ]
 
 [[package]]
+name = "memoffset"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
 name = "mimalloc"
-version = "0.1.30"
+version = "0.1.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "76ce6a4b40d3bff9eb3ce9881ca0737a85072f9f975886082640cd46a75cdb35"
+checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1"
 dependencies = [
  "libmimalloc-sys",
 ]
 
 [[package]]
 name = "miniz_oxide"
-version = "0.5.4"
+version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
+checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa"
 dependencies = [
  "adler",
 ]
 
 [[package]]
 name = "mio"
-version = "0.8.4"
+version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
+checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de"
 dependencies = [
  "libc",
  "log",
  "wasi",
- "windows-sys 0.36.1",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1010,20 +1036,19 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
 dependencies = [
- "windows-sys 0.42.0",
+ "windows-sys",
 ]
 
 [[package]]
 name = "nix"
-version = "0.22.3"
+version = "0.26.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e4916f159ed8e5de0082076562152a76b7a1f64a01fd9d1e0fea002c37624faf"
+checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a"
 dependencies = [
  "bitflags",
- "cc",
  "cfg-if",
  "libc",
- "memoffset",
+ "static_assertions",
 ]
 
 [[package]]
@@ -1045,29 +1070,39 @@ dependencies = [
 ]
 
 [[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
+
+[[package]]
 name = "num_cpus"
-version = "1.13.1"
+version = "1.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b"
 dependencies = [
- "hermit-abi",
+ "hermit-abi 0.2.6",
  "libc",
 ]
 
 [[package]]
 name = "object"
-version = "0.29.0"
+version = "0.30.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+checksum = "2b8c786513eb403643f2a88c244c2aaa270ef2153f55094587d0c48a3cf22a83"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.15.0"
+version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66"
 
 [[package]]
 name = "oorandom"
@@ -1076,6 +1111,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
 
 [[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+
+[[package]]
 name = "parking_lot"
 version = "0.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1083,7 +1124,7 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
 dependencies = [
  "instant",
  "lock_api",
- "parking_lot_core 0.8.5",
+ "parking_lot_core 0.8.6",
 ]
 
 [[package]]
@@ -1093,14 +1134,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
 dependencies = [
  "lock_api",
- "parking_lot_core 0.9.4",
+ "parking_lot_core 0.9.6",
 ]
 
 [[package]]
 name = "parking_lot_core"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
+checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
 dependencies = [
  "cfg-if",
  "instant",
@@ -1112,15 +1153,15 @@ dependencies = [
 
 [[package]]
 name = "parking_lot_core"
-version = "0.9.4"
+version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"
+checksum = "ba1ef8814b5c993410bb3adfad7a5ed269563e4a2f90c41f5d85be7fb47133bf"
 dependencies = [
  "cfg-if",
  "libc",
  "redox_syscall",
  "smallvec",
- "windows-sys 0.42.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1136,9 +1177,9 @@ dependencies = [
 
 [[package]]
 name = "paste"
-version = "1.0.9"
+version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
+checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba"
 
 [[package]]
 name = "paths"
@@ -1238,9 +1279,9 @@ version = "0.0.0"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.47"
+version = "1.0.50"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
 dependencies = [
  "unicode-ident",
 ]
@@ -1322,30 +1363,28 @@ dependencies = [
 
 [[package]]
 name = "quote"
-version = "1.0.21"
+version = "1.0.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
 dependencies = [
  "proc-macro2",
 ]
 
 [[package]]
 name = "rayon"
-version = "1.5.3"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
 dependencies = [
- "autocfg",
- "crossbeam-deque",
  "either",
  "rayon-core",
 ]
 
 [[package]]
 name = "rayon-core"
-version = "1.9.3"
+version = "1.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3"
 dependencies = [
  "crossbeam-channel",
  "crossbeam-deque",
@@ -1364,9 +1403,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.6.0"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
 dependencies = [
  "regex-syntax",
 ]
@@ -1382,9 +1421,9 @@ dependencies = [
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.27"
+version = "0.6.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
 
 [[package]]
 name = "rowan"
@@ -1394,7 +1433,7 @@ checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332"
 dependencies = [
  "countme",
  "hashbrown",
- "memoffset",
+ "memoffset 0.6.5",
  "rustc-hash",
  "text-size",
 ]
@@ -1455,9 +1494,9 @@ dependencies = [
 
 [[package]]
 name = "rustc-ap-rustc_lexer"
-version = "725.0.0"
+version = "727.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"
+checksum = "8f40f26e7abdcd3b982f36c09a634cc6187988fbf6ec466c91f8d30a12ac0237"
 dependencies = [
  "unicode-xid",
 ]
@@ -1476,9 +1515,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
 [[package]]
 name = "ryu"
-version = "1.0.11"
+version = "1.0.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
 
 [[package]]
 name = "salsa"
@@ -1529,9 +1568,9 @@ dependencies = [
 
 [[package]]
 name = "scoped-tls"
-version = "1.0.0"
+version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
 
 [[package]]
 name = "scopeguard"
@@ -1541,27 +1580,27 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 
 [[package]]
 name = "semver"
-version = "1.0.14"
+version = "1.0.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
+checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "serde"
-version = "1.0.145"
+version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.145"
+version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1570,9 +1609,9 @@ dependencies = [
 
 [[package]]
 name = "serde_json"
-version = "1.0.86"
+version = "1.0.91"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
 dependencies = [
  "indexmap",
  "itoa",
@@ -1582,9 +1621,9 @@ dependencies = [
 
 [[package]]
 name = "serde_repr"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca"
+checksum = "9a5ec9fa74a20ebbe5d9ac23dac1fc96ba0ecfe9f50f2843b52e537b10fbcb4e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1617,9 +1656,9 @@ dependencies = [
 
 [[package]]
 name = "snap"
-version = "1.0.5"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
+checksum = "5e9f0ab6ef7eb7353d9119c170a436d1bf248eea575ac42d19d12f4e34130831"
 
 [[package]]
 name = "sourcegen"
@@ -1629,6 +1668,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "static_assertions"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
+[[package]]
 name = "stdx"
 version = "0.0.0"
 dependencies = [
@@ -1641,9 +1686,9 @@ dependencies = [
 
 [[package]]
 name = "syn"
-version = "1.0.102"
+version = "1.0.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1714,18 +1759,18 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
 
 [[package]]
 name = "thiserror"
-version = "1.0.37"
+version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
+checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.37"
+version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
+checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1788,10 +1833,8 @@ version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
 dependencies = [
- "itoa",
  "serde",
  "time-core",
- "time-macros",
 ]
 
 [[package]]
@@ -1801,15 +1844,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
 
 [[package]]
-name = "time-macros"
-version = "0.2.6"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"
-dependencies = [
- "time-core",
-]
-
-[[package]]
 name = "tinyvec"
 version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1893,12 +1927,12 @@ dependencies = [
 
 [[package]]
 name = "tracing-tree"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"
+checksum = "758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb"
 dependencies = [
- "ansi_term",
  "atty",
+ "nu-ansi-term",
  "tracing-core",
  "tracing-log",
  "tracing-subscriber",
@@ -1914,9 +1948,9 @@ dependencies = [
 
 [[package]]
 name = "typed-arena"
-version = "2.0.1"
+version = "2.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+checksum = "6af6ae20167a9ece4bcb41af5b80f8a1f1df981f6391189ce00fd257af04126a"
 
 [[package]]
 name = "ungrammar"
@@ -1935,15 +1969,15 @@ dependencies = [
 
 [[package]]
 name = "unicode-bidi"
-version = "0.3.8"
+version = "0.3.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+checksum = "d54675592c1dbefd78cbd98db9bacd89886e1ca50692a0692baefffdeb92dd58"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.5"
+version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
 
 [[package]]
 name = "unicode-normalization"
@@ -2064,103 +2098,60 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "windows-sys"
-version = "0.36.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
-dependencies = [
- "windows_aarch64_msvc 0.36.1",
- "windows_i686_gnu 0.36.1",
- "windows_i686_msvc 0.36.1",
- "windows_x86_64_gnu 0.36.1",
- "windows_x86_64_msvc 0.36.1",
-]
-
-[[package]]
-name = "windows-sys"
 version = "0.42.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
 dependencies = [
  "windows_aarch64_gnullvm",
- "windows_aarch64_msvc 0.42.0",
- "windows_i686_gnu 0.42.0",
- "windows_i686_msvc 0.42.0",
- "windows_x86_64_gnu 0.42.0",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
  "windows_x86_64_gnullvm",
- "windows_x86_64_msvc 0.42.0",
+ "windows_x86_64_msvc",
 ]
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.42.0"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+checksum = "8c9864e83243fdec7fc9c5444389dcbbfd258f745e7853198f365e3c4968a608"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.36.1"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
-
-[[package]]
-name = "windows_aarch64_msvc"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
-
-[[package]]
-name = "windows_i686_gnu"
-version = "0.36.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+checksum = "4c8b1b673ffc16c47a9ff48570a9d85e25d265735c503681332589af6253c6c7"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
-
-[[package]]
-name = "windows_i686_msvc"
-version = "0.36.1"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+checksum = "de3887528ad530ba7bdbb1faa8275ec7a1155a45ffa57c37993960277145d640"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.42.0"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
+checksum = "bf4d1122317eddd6ff351aa852118a2418ad4214e6613a50e0191f7004372605"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.36.1"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
-
-[[package]]
-name = "windows_x86_64_gnu"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
+checksum = "c1040f221285e17ebccbc2591ffdc2d44ee1f9186324dd3e84e99ac68d699c45"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.42.0"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
+checksum = "628bfdf232daa22b0d64fdb62b09fcc36bb01f05a3939e20ab73aaf9470d0463"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.36.1"
+version = "0.42.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
-
-[[package]]
-name = "windows_x86_64_msvc"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
+checksum = "447660ad36a13288b1db4d4248e857b510e8c3a225c822ba4fb748c0aafecffd"
 
 [[package]]
 name = "write-json"
@@ -2170,33 +2161,33 @@ checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
 
 [[package]]
 name = "xflags"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268"
+checksum = "c4554b580522d0ca238369c16b8f6ce34524d61dafe7244993754bbd05f2c2ea"
 dependencies = [
  "xflags-macros",
 ]
 
 [[package]]
 name = "xflags-macros"
-version = "0.3.0"
+version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809"
+checksum = "f58e7b3ca8977093aae6b87b6a7730216fc4c53a6530bab5c43a783cd810c1a8"
 
 [[package]]
 name = "xshell"
-version = "0.2.2"
+version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"
+checksum = "962c039b3a7b16cf4e9a4248397c6585c07547412e7d6a6e035389a802dcfe90"
 dependencies = [
  "xshell-macros",
 ]
 
 [[package]]
 name = "xshell-macros"
-version = "0.2.2"
+version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
+checksum = "1dbabb1cbd15a1d6d12d9ed6b35cc6777d4af87ab3ba155ea37215f20beab80c"
 
 [[package]]
 name = "xtask"
@@ -2204,6 +2195,7 @@ version = "0.1.0"
 dependencies = [
  "anyhow",
  "flate2",
+ "time",
  "write-json",
  "xflags",
  "xshell",
@@ -2212,9 +2204,9 @@ dependencies = [
 
 [[package]]
 name = "zip"
-version = "0.6.3"
+version = "0.6.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080"
+checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef"
 dependencies = [
  "byteorder",
  "crc32fast",
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index 286ef1e7dcb..ef81105505b 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -2,6 +2,12 @@
 members = ["xtask/", "lib/*", "crates/*"]
 exclude = ["crates/proc-macro-test/imp"]
 
+[workspace.package]
+rust-version = "1.66"
+edition = "2021"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer team"]
+
 [profile.dev]
 # Disabling debug info speeds up builds a bunch,
 # and we don't rely on it for debugging that much.
@@ -32,3 +38,39 @@ debug = 0
 # ungrammar = { path = "../ungrammar" }
 
 # salsa = { path = "../salsa" }
+
+[workspace.dependencies]
+# local crates
+base-db = { path = "./crates/base-db", version = "0.0.0" }
+cfg = { path = "./crates/cfg", version = "0.0.0" }
+flycheck = { path = "./crates/flycheck", version = "0.0.0" }
+hir = { path = "./crates/hir", version = "0.0.0" }
+hir-def = { path = "./crates/hir-def", version = "0.0.0" }
+hir-expand = { path = "./crates/hir-expand", version = "0.0.0" }
+hir-ty = { path = "./crates/hir-ty", version = "0.0.0" }
+ide = { path = "./crates/ide", version = "0.0.0" }
+ide-assists = { path = "./crates/ide-assists", version = "0.0.0" }
+ide-completion = { path = "./crates/ide-completion", version = "0.0.0" }
+ide-db = { path = "./crates/ide-db", version = "0.0.0" }
+ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" }
+ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" }
+intern = { path = "./crates/intern", version = "0.0.0" }
+limit = { path = "./crates/limit", version = "0.0.0" }
+mbe = { path = "./crates/mbe", version = "0.0.0" }
+parser = { path = "./crates/parser", version = "0.0.0" }
+paths = { path = "./crates/paths", version = "0.0.0" }
+proc-macro-api = { path = "./crates/proc-macro-api", version = "0.0.0" }
+proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" }
+proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" }
+proc-macro-test = { path = "./crates/proc-macro-test", version = "0.0.0" }
+profile = { path = "./crates/profile", version = "0.0.0" }
+project-model = { path = "./crates/project-model", version = "0.0.0" }
+sourcegen = { path = "./crates/sourcegen", version = "0.0.0" }
+stdx = { path = "./crates/stdx", version = "0.0.0" }
+syntax = { path = "./crates/syntax", version = "0.0.0" }
+test-utils = { path = "./crates/test-utils", version = "0.0.0" }
+text-edit = { path = "./crates/text-edit", version = "0.0.0" }
+toolchain = { path = "./crates/toolchain", version = "0.0.0" }
+tt = { path = "./crates/tt", version = "0.0.0" }
+vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
+vfs = { path = "./crates/vfs", version = "0.0.0" }
diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
index a484ecec682..f6a1075c190 100644
--- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml
@@ -2,9 +2,11 @@
 name = "base-db"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -13,10 +15,11 @@ doctest = false
 salsa = "0.17.0-pre.2"
 rustc-hash = "1.1.0"
 
-syntax = { path = "../syntax", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-test-utils = { path = "../test-utils", version = "0.0.0" }
-vfs = { path = "../vfs", version = "0.0.0" }
+# local deps
+cfg.workspace = true
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+test-utils.workspace = true
+tt.workspace = true
+vfs.workspace = true
diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
index 6f83ea40e76..8a7e9dfadfe 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs
@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;
 use test_utils::{
     extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER, ESCAPED_CURSOR_MARKER,
 };
-use tt::Subtree;
+use tt::token_id::{Leaf, Subtree, TokenTree};
 use vfs::{file_set::FileSet, VfsPath};
 
 use crate::{
@@ -110,6 +110,7 @@ impl ChangeFixture {
         let mut crates = FxHashMap::default();
         let mut crate_deps = Vec::new();
         let mut default_crate_root: Option<FileId> = None;
+        let mut default_target_data_layout: Option<String> = None;
         let mut default_cfg = CfgOptions::default();
 
         let mut file_set = FileSet::default();
@@ -162,7 +163,10 @@ impl ChangeFixture {
                     Ok(Vec::new()),
                     false,
                     origin,
-                    meta.target_data_layout.as_deref().map(Arc::from),
+                    meta.target_data_layout
+                        .as_deref()
+                        .map(Arc::from)
+                        .ok_or_else(|| "target_data_layout unset".into()),
                 );
                 let prev = crates.insert(crate_name.clone(), crate_id);
                 assert!(prev.is_none());
@@ -175,6 +179,7 @@ impl ChangeFixture {
                 assert!(default_crate_root.is_none());
                 default_crate_root = Some(file_id);
                 default_cfg = meta.cfg;
+                default_target_data_layout = meta.target_data_layout;
             }
 
             change.change_file(file_id, Some(Arc::new(text)));
@@ -198,7 +203,9 @@ impl ChangeFixture {
                 Ok(Vec::new()),
                 false,
                 CrateOrigin::CratesIo { repo: None, name: None },
-                None,
+                default_target_data_layout
+                    .map(|x| x.into())
+                    .ok_or_else(|| "target_data_layout unset".into()),
             );
         } else {
             for (from, to, prelude) in crate_deps {
@@ -212,8 +219,10 @@ impl ChangeFixture {
                     .unwrap();
             }
         }
-        let target_layout =
-            crate_graph.iter().next().and_then(|it| crate_graph[it].target_layout.clone());
+        let target_layout = crate_graph.iter().next().map_or_else(
+            || Err("target_data_layout unset".into()),
+            |it| crate_graph[it].target_layout.clone(),
+        );
 
         if let Some(mini_core) = mini_core {
             let core_file = file_id;
@@ -301,7 +310,7 @@ impl ChangeFixture {
     }
 }
 
-fn default_test_proc_macros() -> [(String, ProcMacro); 4] {
+fn default_test_proc_macros() -> [(String, ProcMacro); 5] {
     [
         (
             r#"
@@ -359,6 +368,20 @@ pub fn mirror(input: TokenStream) -> TokenStream {
                 expander: Arc::new(MirrorProcMacroExpander),
             },
         ),
+        (
+            r#"
+#[proc_macro]
+pub fn shorten(input: TokenStream) -> TokenStream {
+    loop {}
+}
+"#
+            .into(),
+            ProcMacro {
+                name: "shorten".into(),
+                kind: crate::ProcMacroKind::FuncLike,
+                expander: Arc::new(ShortenProcMacroExpander),
+            },
+        ),
     ]
 }
 
@@ -486,17 +509,60 @@ impl ProcMacroExpander for MirrorProcMacroExpander {
         _: &Env,
     ) -> Result<Subtree, ProcMacroExpansionError> {
         fn traverse(input: &Subtree) -> Subtree {
-            let mut res = Subtree::default();
-            res.delimiter = input.delimiter;
+            let mut token_trees = vec![];
             for tt in input.token_trees.iter().rev() {
                 let tt = match tt {
                     tt::TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(leaf.clone()),
                     tt::TokenTree::Subtree(sub) => tt::TokenTree::Subtree(traverse(sub)),
                 };
-                res.token_trees.push(tt);
+                token_trees.push(tt);
             }
-            res
+            Subtree { delimiter: input.delimiter, token_trees }
         }
         Ok(traverse(input))
     }
 }
+
+// Replaces every literal with an empty string literal and every identifier with its first letter,
+// but retains all tokens' span. Useful for testing we don't assume token hasn't been modified by
+// macros even if it retains its span.
+#[derive(Debug)]
+struct ShortenProcMacroExpander;
+impl ProcMacroExpander for ShortenProcMacroExpander {
+    fn expand(
+        &self,
+        input: &Subtree,
+        _: Option<&Subtree>,
+        _: &Env,
+    ) -> Result<Subtree, ProcMacroExpansionError> {
+        return Ok(traverse(input));
+
+        fn traverse(input: &Subtree) -> Subtree {
+            let token_trees = input
+                .token_trees
+                .iter()
+                .map(|it| match it {
+                    TokenTree::Leaf(leaf) => tt::TokenTree::Leaf(modify_leaf(leaf)),
+                    TokenTree::Subtree(subtree) => tt::TokenTree::Subtree(traverse(subtree)),
+                })
+                .collect();
+            Subtree { delimiter: input.delimiter, token_trees }
+        }
+
+        fn modify_leaf(leaf: &Leaf) -> Leaf {
+            let mut leaf = leaf.clone();
+            match &mut leaf {
+                Leaf::Literal(it) => {
+                    // XXX Currently replaces any literals with an empty string, but supporting
+                    // "shortening" other literals would be nice.
+                    it.text = "\"\"".into();
+                }
+                Leaf::Punct(_) => {}
+                Leaf::Ident(it) => {
+                    it.text = it.text.chars().take(1).collect();
+                }
+            }
+            leaf
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs
index 5fa4a802495..43388e915b5 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/input.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs
@@ -12,7 +12,7 @@ use cfg::CfgOptions;
 use rustc_hash::FxHashMap;
 use stdx::hash::{NoHashHashMap, NoHashHashSet};
 use syntax::SmolStr;
-use tt::Subtree;
+use tt::token_id::Subtree;
 use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};
 
 /// Files are grouped into source roots. A source root is a directory on the
@@ -84,15 +84,10 @@ pub struct CrateGraph {
     arena: NoHashHashMap<CrateId, CrateData>,
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct CrateId(pub u32);
 
 impl stdx::hash::NoHashHashable for CrateId {}
-impl std::hash::Hash for CrateId {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        self.0.hash(state);
-    }
-}
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CrateName(SmolStr);
@@ -248,6 +243,7 @@ pub enum ProcMacroExpansionError {
 }
 
 pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;
+pub type TargetLayoutLoadResult = Result<Arc<str>, Arc<str>>;
 
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
@@ -270,7 +266,7 @@ pub struct CrateData {
     pub display_name: Option<CrateDisplayName>,
     pub cfg_options: CfgOptions,
     pub potential_cfg_options: CfgOptions,
-    pub target_layout: Option<Arc<str>>,
+    pub target_layout: TargetLayoutLoadResult,
     pub env: Env,
     pub dependencies: Vec<Dependency>,
     pub proc_macro: ProcMacroLoadResult,
@@ -286,7 +282,7 @@ pub enum Edition {
 }
 
 impl Edition {
-    pub const CURRENT: Edition = Edition::Edition2018;
+    pub const CURRENT: Edition = Edition::Edition2021;
 }
 
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
@@ -329,7 +325,7 @@ impl CrateGraph {
         proc_macro: ProcMacroLoadResult,
         is_proc_macro: bool,
         origin: CrateOrigin,
-        target_layout: Option<Arc<str>>,
+        target_layout: Result<Arc<str>, Arc<str>>,
     ) -> CrateId {
         let data = CrateData {
             root_file_id,
@@ -652,7 +648,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate2 = graph.add_crate_root(
             FileId(2u32),
@@ -665,7 +661,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate3 = graph.add_crate_root(
             FileId(3u32),
@@ -678,7 +674,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         assert!(graph
             .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@@ -705,7 +701,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate2 = graph.add_crate_root(
             FileId(2u32),
@@ -718,7 +714,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         assert!(graph
             .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@@ -742,7 +738,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate2 = graph.add_crate_root(
             FileId(2u32),
@@ -755,7 +751,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate3 = graph.add_crate_root(
             FileId(3u32),
@@ -768,7 +764,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         assert!(graph
             .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
@@ -792,7 +788,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         let crate2 = graph.add_crate_root(
             FileId(2u32),
@@ -805,7 +801,7 @@ mod tests {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("".into()),
         );
         assert!(graph
             .add_dep(
diff --git a/src/tools/rust-analyzer/crates/base-db/src/lib.rs b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
index 55a51d3bbb2..9720db9d8ac 100644
--- a/src/tools/rust-analyzer/crates/base-db/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/base-db/src/lib.rs
@@ -17,6 +17,7 @@ pub use crate::{
         CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
         Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
         ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId,
+        TargetLayoutLoadResult,
     },
 };
 pub use salsa::{self, Cancelled};
diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
index 2857420c285..0880bc239d8 100644
--- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml
@@ -2,9 +2,11 @@
 name = "cfg"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -12,15 +14,18 @@ doctest = false
 [dependencies]
 rustc-hash = "1.1.0"
 
-tt = { path = "../tt", version = "0.0.0" }
+# locals deps
+tt.workspace = true
 
 [dev-dependencies]
-mbe = { path = "../mbe" }
-syntax = { path = "../syntax" }
 expect-test = "1.4.0"
 oorandom = "11.1.3"
 # We depend on both individually instead of using `features = ["derive"]` to microoptimize the
 # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr`
 # supports `arbitrary`. This way, we avoid feature unification.
-arbitrary = "1.1.7"
-derive_arbitrary = "1.1.6"
+arbitrary = "1.2.2"
+derive_arbitrary = "1.2.2"
+
+# local deps
+mbe.workspace = true
+syntax.workspace = true
diff --git a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
index 5f4eefa8366..fb7505ba2dd 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/cfg_expr.rs
@@ -66,7 +66,7 @@ impl From<CfgAtom> for CfgExpr {
 }
 
 impl CfgExpr {
-    pub fn parse(tt: &tt::Subtree) -> CfgExpr {
+    pub fn parse<S>(tt: &tt::Subtree<S>) -> CfgExpr {
         next_cfg_expr(&mut tt.token_trees.iter()).unwrap_or(CfgExpr::Invalid)
     }
     /// Fold the cfg by querying all basic `Atom` and `KeyValue` predicates.
@@ -85,7 +85,7 @@ impl CfgExpr {
     }
 }
 
-fn next_cfg_expr(it: &mut SliceIter<'_, tt::TokenTree>) -> Option<CfgExpr> {
+fn next_cfg_expr<S>(it: &mut SliceIter<'_, tt::TokenTree<S>>) -> Option<CfgExpr> {
     let name = match it.next() {
         None => return None,
         Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) => ident.text.clone(),
diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
index 514d567fcce..609d18c4eea 100644
--- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml
@@ -2,9 +2,11 @@
 name = "flycheck"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -17,8 +19,9 @@ rustc-hash = "1.1.0"
 serde = { version = "1.0.137", features = ["derive"] }
 serde_json = "1.0.86"
 jod-thread = "0.1.2"
-command-group = "1.0.8"
+command-group = "2.0.1"
 
-toolchain = { path = "../toolchain", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-paths = { path = "../paths", version = "0.0.0" }
+# local deps
+paths.workspace = true
+stdx.workspace = true
+toolchain.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
index 698be76656c..1daf0428c24 100644
--- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml
@@ -2,9 +2,11 @@
 name = "hir-def"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -23,23 +25,28 @@ hashbrown = { version = "0.12.1", default-features = false }
 indexmap = "1.9.1"
 itertools = "0.10.5"
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 rustc-hash = "1.1.0"
 smallvec = "1.10.0"
 tracing = "0.1.35"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-hir-expand = { path = "../hir-expand", version = "0.0.0" }
 rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false }
 rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }
-mbe = { path = "../mbe", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-limit = { path = "../limit", version = "0.0.0" }
+
+# local deps
+stdx.workspace = true
+intern.workspace = true
+base-db.workspace = true
+syntax.workspace = true
+profile.workspace = true
+hir-expand.workspace = true
+mbe.workspace = true
+cfg.workspace = true
+tt.workspace = true
+limit.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
 expect-test = "1.4.0"
+
+# local deps
+test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
index db3b4194881..dcea679567a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/adt.rs
@@ -2,22 +2,22 @@
 
 use std::sync::Arc;
 
+use crate::tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
 use base_db::CrateId;
 use either::Either;
 use hir_expand::{
     name::{AsName, Name},
     HirFileId, InFile,
 };
+use intern::Interned;
 use la_arena::{Arena, ArenaMap};
 use rustc_abi::{Integer, IntegerType};
 use syntax::ast::{self, HasName, HasVisibility};
-use tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree};
 
 use crate::{
     body::{CfgExpander, LowerCtx},
     builtin_type::{BuiltinInt, BuiltinUint},
     db::DefDatabase,
-    intern::Interned,
     item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId},
     layout::{Align, ReprFlags, ReprOptions},
     nameres::diagnostics::DefDiagnostic,
@@ -82,7 +82,7 @@ fn repr_from_value(
 
 fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> {
     match tt.delimiter {
-        Some(Delimiter { kind: DelimiterKind::Parenthesis, .. }) => {}
+        Delimiter { kind: DelimiterKind::Parenthesis, .. } => {}
         _ => return None,
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
index ab5d180e1bb..fcd92ad3385 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs
@@ -1,27 +1,26 @@
 //! A higher level attributes based on TokenTree, with also some shortcuts.
 
-use std::{fmt, hash::Hash, ops, sync::Arc};
+use std::{hash::Hash, ops, sync::Arc};
 
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
 use either::Either;
-use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile};
+use hir_expand::{
+    attrs::{collect_attrs, Attr, AttrId, RawAttrs},
+    HirFileId, InFile,
+};
 use itertools::Itertools;
 use la_arena::{ArenaMap, Idx, RawIdx};
-use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
-use smallvec::{smallvec, SmallVec};
+use mbe::DelimiterKind;
 use syntax::{
-    ast::{self, AstNode, HasAttrs, IsString},
-    match_ast, AstPtr, AstToken, SmolStr, SyntaxNode, TextRange, TextSize,
+    ast::{self, HasAttrs, IsString},
+    AstPtr, AstToken, SmolStr, TextRange, TextSize,
 };
-use tt::Subtree;
 
 use crate::{
     db::DefDatabase,
-    intern::Interned,
     item_tree::{AttrOwner, Fields, ItemTreeId, ItemTreeNode},
     nameres::{ModuleOrigin, ModuleSource},
-    path::{ModPath, PathKind},
     src::{HasChildSource, HasSource},
     AdtId, AttrDefId, EnumId, GenericParamId, LocalEnumVariantId, LocalFieldId, Lookup, MacroId,
     VariantId,
@@ -47,12 +46,6 @@ impl From<Documentation> for String {
     }
 }
 
-/// Syntactical attributes, without filtering of `cfg_attr`s.
-#[derive(Default, Debug, Clone, PartialEq, Eq)]
-pub(crate) struct RawAttrs {
-    entries: Option<Arc<[Attr]>>,
-}
-
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct Attrs(RawAttrs);
 
@@ -62,30 +55,21 @@ pub struct AttrsWithOwner {
     owner: AttrDefId,
 }
 
-impl ops::Deref for RawAttrs {
-    type Target = [Attr];
-
-    fn deref(&self) -> &[Attr] {
-        match &self.entries {
-            Some(it) => &*it,
-            None => &[],
-        }
-    }
-}
 impl Attrs {
     pub fn get(&self, id: AttrId) -> Option<&Attr> {
         (**self).iter().find(|attr| attr.id == id)
     }
+
+    pub(crate) fn filter(db: &dyn DefDatabase, krate: CrateId, raw_attrs: RawAttrs) -> Attrs {
+        Attrs(raw_attrs.filter(db.upcast(), krate))
+    }
 }
 
 impl ops::Deref for Attrs {
     type Target = [Attr];
 
     fn deref(&self) -> &[Attr] {
-        match &self.0.entries {
-            Some(it) => &*it,
-            None => &[],
-        }
+        &self.0
     }
 }
 
@@ -97,114 +81,6 @@ impl ops::Deref for AttrsWithOwner {
     }
 }
 
-impl RawAttrs {
-    pub(crate) const EMPTY: Self = Self { entries: None };
-
-    pub(crate) fn new(db: &dyn DefDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
-        let entries = collect_attrs(owner)
-            .filter_map(|(id, attr)| match attr {
-                Either::Left(attr) => {
-                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
-                }
-                Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
-                    id,
-                    input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
-                    path: Interned::new(ModPath::from(hir_expand::name!(doc))),
-                }),
-            })
-            .collect::<Arc<_>>();
-
-        Self { entries: if entries.is_empty() { None } else { Some(entries) } }
-    }
-
-    fn from_attrs_owner(db: &dyn DefDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
-        let hygiene = Hygiene::new(db.upcast(), owner.file_id);
-        Self::new(db, owner.value, &hygiene)
-    }
-
-    pub(crate) fn merge(&self, other: Self) -> Self {
-        // FIXME: This needs to fixup `AttrId`s
-        match (&self.entries, other.entries) {
-            (None, None) => Self::EMPTY,
-            (None, entries @ Some(_)) => Self { entries },
-            (Some(entries), None) => Self { entries: Some(entries.clone()) },
-            (Some(a), Some(b)) => {
-                let last_ast_index = a.last().map_or(0, |it| it.id.ast_index + 1);
-                Self {
-                    entries: Some(
-                        a.iter()
-                            .cloned()
-                            .chain(b.iter().map(|it| {
-                                let mut it = it.clone();
-                                it.id.ast_index += last_ast_index;
-                                it
-                            }))
-                            .collect(),
-                    ),
-                }
-            }
-        }
-    }
-
-    /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
-    pub(crate) fn filter(self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
-        let has_cfg_attrs = self.iter().any(|attr| {
-            attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr])
-        });
-        if !has_cfg_attrs {
-            return Attrs(self);
-        }
-
-        let crate_graph = db.crate_graph();
-        let new_attrs = self
-            .iter()
-            .flat_map(|attr| -> SmallVec<[_; 1]> {
-                let is_cfg_attr =
-                    attr.path.as_ident().map_or(false, |name| *name == hir_expand::name![cfg_attr]);
-                if !is_cfg_attr {
-                    return smallvec![attr.clone()];
-                }
-
-                let subtree = match attr.token_tree_value() {
-                    Some(it) => it,
-                    _ => return smallvec![attr.clone()],
-                };
-
-                // Input subtree is: `(cfg, $(attr),+)`
-                // Split it up into a `cfg` subtree and the `attr` subtrees.
-                // FIXME: There should be a common API for this.
-                let mut parts = subtree.token_trees.split(|tt| {
-                    matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. })))
-                });
-                let cfg = match parts.next() {
-                    Some(it) => it,
-                    None => return smallvec![],
-                };
-                let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
-                let cfg = CfgExpr::parse(&cfg);
-                let index = attr.id;
-                let attrs = parts.filter(|a| !a.is_empty()).filter_map(|attr| {
-                    let tree = Subtree { delimiter: None, token_trees: attr.to_vec() };
-                    // FIXME hygiene
-                    let hygiene = Hygiene::new_unhygienic();
-                    Attr::from_tt(db, &tree, &hygiene, index)
-                });
-
-                let cfg_options = &crate_graph[krate].cfg_options;
-                if cfg_options.check(&cfg) == Some(false) {
-                    smallvec![]
-                } else {
-                    cov_mark::hit!(cfg_attr_active);
-
-                    attrs.collect()
-                }
-            })
-            .collect();
-
-        Attrs(RawAttrs { entries: Some(new_attrs) })
-    }
-}
-
 impl Attrs {
     pub const EMPTY: Self = Self(RawAttrs::EMPTY);
 
@@ -251,19 +127,18 @@ impl Attrs {
                 let enum_ = &item_tree[loc.id.value];
 
                 let cfg_options = &crate_graph[krate].cfg_options;
-                let variant = 'tri: loop {
-                    let mut idx = 0;
-                    for variant in enum_.variants.clone() {
-                        let attrs = item_tree.attrs(db, krate, variant.into());
-                        if attrs.is_cfg_enabled(cfg_options) {
-                            if it.local_id == Idx::from_raw(RawIdx::from(idx)) {
-                                break 'tri variant;
-                            }
-                            idx += 1;
-                        }
-                    }
+
+                let Some(variant) = enum_.variants.clone().filter(|variant| {
+                    let attrs = item_tree.attrs(db, krate, (*variant).into());
+                    attrs.is_cfg_enabled(cfg_options)
+                })
+                .zip(0u32..)
+                .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx)))
+                .map(|(variant, _idx)| variant)
+                else {
                     return Arc::new(res);
                 };
+
                 (item_tree[variant].fields.clone(), item_tree, krate)
             }
             VariantId::StructId(it) => {
@@ -358,7 +233,7 @@ impl Attrs {
 
     pub fn has_doc_hidden(&self) -> bool {
         self.by_key("doc").tt_values().any(|tt| {
-            tt.delimiter_kind() == Some(DelimiterKind::Parenthesis) &&
+            tt.delimiter.kind == DelimiterKind::Parenthesis &&
                 matches!(&*tt.token_trees, [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "hidden")
         })
     }
@@ -403,7 +278,7 @@ impl AttrsWithOwner {
                         .raw_attrs(AttrOwner::ModItem(definition_tree_id.value.into()))
                         .clone(),
                     ModuleOrigin::BlockExpr { block } => RawAttrs::from_attrs_owner(
-                        db,
+                        db.upcast(),
                         InFile::new(block.file_id, block.to_node(db.upcast()))
                             .as_ref()
                             .map(|it| it as &dyn ast::HasAttrs),
@@ -439,7 +314,7 @@ impl AttrsWithOwner {
                 GenericParamId::ConstParamId(it) => {
                     let src = it.parent().child_source(db);
                     RawAttrs::from_attrs_owner(
-                        db,
+                        db.upcast(),
                         src.with_value(src.value[it.local_id()].as_ref().either(
                             |it| match it {
                                 ast::TypeOrConstParam::Type(it) => it as _,
@@ -452,7 +327,7 @@ impl AttrsWithOwner {
                 GenericParamId::TypeParamId(it) => {
                     let src = it.parent().child_source(db);
                     RawAttrs::from_attrs_owner(
-                        db,
+                        db.upcast(),
                         src.with_value(src.value[it.local_id()].as_ref().either(
                             |it| match it {
                                 ast::TypeOrConstParam::Type(it) => it as _,
@@ -464,14 +339,14 @@ impl AttrsWithOwner {
                 }
                 GenericParamId::LifetimeParamId(it) => {
                     let src = it.parent.child_source(db);
-                    RawAttrs::from_attrs_owner(db, src.with_value(&src.value[it.local_id]))
+                    RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
                 }
             },
             AttrDefId::ExternBlockId(it) => attrs_from_item_tree(it.lookup(db).id, db),
         };
 
-        let attrs = raw_attrs.filter(db, def.krate(db));
-        Self { attrs, owner: def }
+        let attrs = raw_attrs.filter(db.upcast(), def.krate(db));
+        Self { attrs: Attrs(attrs), owner: def }
     }
 
     pub fn source_map(&self, db: &dyn DefDatabase) -> AttrSourceMap {
@@ -627,40 +502,6 @@ fn doc_indent(attrs: &Attrs) -> usize {
         .unwrap_or(0)
 }
 
-fn inner_attributes(
-    syntax: &SyntaxNode,
-) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
-    let node = match_ast! {
-        match syntax {
-            ast::SourceFile(_) => syntax.clone(),
-            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
-            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
-            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
-            ast::Module(it) => it.item_list()?.syntax().clone(),
-            ast::BlockExpr(it) => {
-                use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT};
-                // Block expressions accept outer and inner attributes, but only when they are the outer
-                // expression of an expression statement or the final expression of another block expression.
-                let may_carry_attributes = matches!(
-                    it.syntax().parent().map(|it| it.kind()),
-                     Some(BLOCK_EXPR | EXPR_STMT)
-                );
-                if !may_carry_attributes {
-                    return None
-                }
-                syntax.clone()
-            },
-            _ => return None,
-        }
-    };
-
-    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
-        Either::Left(attr) => attr.kind().is_inner(),
-        Either::Right(comment) => comment.is_inner(),
-    });
-    Some(attrs)
-}
-
 #[derive(Debug)]
 pub struct AttrSourceMap {
     source: Vec<Either<ast::Attr, ast::Comment>>,
@@ -703,7 +544,7 @@ impl AttrSourceMap {
     }
 
     fn source_of_id(&self, id: AttrId) -> InFile<&Either<ast::Attr, ast::Comment>> {
-        let ast_idx = id.ast_index as usize;
+        let ast_idx = id.ast_index();
         let file_id = match self.mod_def_site_file_id {
             Some((file_id, def_site_cut)) if def_site_cut <= ast_idx => file_id,
             _ => self.file_id,
@@ -779,128 +620,6 @@ fn get_doc_string_in_attr(it: &ast::Attr) -> Option<ast::String> {
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct AttrId {
-    pub(crate) ast_index: u32,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Attr {
-    pub(crate) id: AttrId,
-    pub(crate) path: Interned<ModPath>,
-    pub(crate) input: Option<Interned<AttrInput>>,
-}
-
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum AttrInput {
-    /// `#[attr = "string"]`
-    Literal(SmolStr),
-    /// `#[attr(subtree)]`
-    TokenTree(tt::Subtree, mbe::TokenMap),
-}
-
-impl fmt::Display for AttrInput {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
-            AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
-        }
-    }
-}
-
-impl Attr {
-    fn from_src(
-        db: &dyn DefDatabase,
-        ast: ast::Meta,
-        hygiene: &Hygiene,
-        id: AttrId,
-    ) -> Option<Attr> {
-        let path = Interned::new(ModPath::from_src(db.upcast(), ast.path()?, hygiene)?);
-        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
-            let value = match lit.kind() {
-                ast::LiteralKind::String(string) => string.value()?.into(),
-                _ => lit.syntax().first_token()?.text().trim_matches('"').into(),
-            };
-            Some(Interned::new(AttrInput::Literal(value)))
-        } else if let Some(tt) = ast.token_tree() {
-            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
-            Some(Interned::new(AttrInput::TokenTree(tree, map)))
-        } else {
-            None
-        };
-        Some(Attr { id, path, input })
-    }
-
-    fn from_tt(
-        db: &dyn DefDatabase,
-        tt: &tt::Subtree,
-        hygiene: &Hygiene,
-        id: AttrId,
-    ) -> Option<Attr> {
-        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
-        let ast = ast::Meta::cast(parse.syntax_node())?;
-
-        Self::from_src(db, ast, hygiene, id)
-    }
-
-    pub fn path(&self) -> &ModPath {
-        &self.path
-    }
-}
-
-impl Attr {
-    /// #[path = "string"]
-    pub fn string_value(&self) -> Option<&SmolStr> {
-        match self.input.as_deref()? {
-            AttrInput::Literal(it) => Some(it),
-            _ => None,
-        }
-    }
-
-    /// #[path(ident)]
-    pub fn single_ident_value(&self) -> Option<&tt::Ident> {
-        match self.input.as_deref()? {
-            AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
-                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
-                _ => None,
-            },
-            _ => None,
-        }
-    }
-
-    /// #[path TokenTree]
-    pub fn token_tree_value(&self) -> Option<&Subtree> {
-        match self.input.as_deref()? {
-            AttrInput::TokenTree(subtree, _) => Some(subtree),
-            _ => None,
-        }
-    }
-
-    /// Parses this attribute as a token tree consisting of comma separated paths.
-    pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
-        let args = self.token_tree_value()?;
-
-        if args.delimiter_kind() != Some(DelimiterKind::Parenthesis) {
-            return None;
-        }
-        let paths = args
-            .token_trees
-            .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
-            .filter_map(|tts| {
-                if tts.is_empty() {
-                    return None;
-                }
-                let segments = tts.iter().filter_map(|tt| match tt {
-                    tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
-                    _ => None,
-                });
-                Some(ModPath::from_segments(PathKind::Plain, segments))
-            });
-
-        Some(paths)
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 pub struct AttrQuery<'attr> {
     attrs: &'attr Attrs,
@@ -908,7 +627,7 @@ pub struct AttrQuery<'attr> {
 }
 
 impl<'attr> AttrQuery<'attr> {
-    pub fn tt_values(self) -> impl Iterator<Item = &'attr Subtree> {
+    pub fn tt_values(self) -> impl Iterator<Item = &'attr crate::tt::Subtree> {
         self.attrs().filter_map(|attr| attr.token_tree_value())
     }
 
@@ -953,21 +672,6 @@ fn attrs_from_item_tree<N: ItemTreeNode>(id: ItemTreeId<N>, db: &dyn DefDatabase
     tree.raw_attrs(mod_item.into()).clone()
 }
 
-fn collect_attrs(
-    owner: &dyn ast::HasAttrs,
-) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
-    let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
-    let outer_attrs =
-        ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
-            Either::Left(attr) => attr.kind().is_outer(),
-            Either::Right(comment) => comment.is_outer(),
-        });
-    outer_attrs
-        .chain(inner_attrs)
-        .enumerate()
-        .map(|(id, attr)| (AttrId { ast_index: id as u32 }, attr))
-}
-
 pub(crate) fn variants_attrs_source_map(
     db: &dyn DefDatabase,
     def: EnumId,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
index 78fbaa9d7d3..9713256813e 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs
@@ -12,7 +12,9 @@ use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
 use drop_bomb::DropBomb;
 use either::Either;
-use hir_expand::{hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId};
+use hir_expand::{
+    attrs::RawAttrs, hygiene::Hygiene, ExpandError, ExpandResult, HirFileId, InFile, MacroCallId,
+};
 use la_arena::{Arena, ArenaMap};
 use limit::Limit;
 use profile::Count;
@@ -20,7 +22,7 @@ use rustc_hash::FxHashMap;
 use syntax::{ast, AstPtr, SyntaxNodePtr};
 
 use crate::{
-    attr::{Attrs, RawAttrs},
+    attr::Attrs,
     db::DefDatabase,
     expr::{dummy_expr_id, Expr, ExprId, Label, LabelId, Pat, PatId},
     item_scope::BuiltinShadowMode,
@@ -64,7 +66,7 @@ impl CfgExpander {
     }
 
     pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        RawAttrs::new(db, owner, &self.hygiene).filter(db, self.krate)
+        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
     }
 
     pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
index e8da24e3add..a78fa91f53b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs
@@ -10,6 +10,7 @@ use hir_expand::{
     name::{name, AsName, Name},
     AstId, ExpandError, HirFileId, InFile,
 };
+use intern::Interned;
 use la_arena::Arena;
 use once_cell::unsync::OnceCell;
 use profile::Count;
@@ -33,7 +34,6 @@ use crate::{
         Label, LabelId, Literal, MatchArm, Movability, Pat, PatId, RecordFieldPat, RecordLitField,
         Statement,
     },
-    intern::Interned,
     item_scope::BuiltinShadowMode,
     path::{GenericArgs, Path},
     type_ref::{Mutability, Rawness, TypeRef},
@@ -67,9 +67,9 @@ impl<'a> LowerCtx<'a> {
         Path::from_src(ast, self)
     }
 
-    pub(crate) fn ast_id<N: AstNode>(&self, db: &dyn DefDatabase, item: &N) -> Option<AstId<N>> {
+    pub(crate) fn ast_id<N: AstNode>(&self, item: &N) -> Option<AstId<N>> {
         let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?;
-        let ast_id_map = ast_id_map.get_or_init(|| db.ast_id_map(file_id));
+        let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id));
         Some(InFile::new(file_id, ast_id_map.ast_id(item)))
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
index 10b9b26bbea..4b4664a1cf4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs
@@ -80,7 +80,7 @@ impl<'a> Write for Printer<'a> {
     fn write_str(&mut self, s: &str) -> fmt::Result {
         for line in s.split_inclusive('\n') {
             if self.needs_indent {
-                match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
+                match self.buf.chars().rev().find(|ch| *ch != ' ') {
                     Some('\n') | None => {}
                     _ => self.buf.push('\n'),
                 }
@@ -113,7 +113,7 @@ impl<'a> Printer<'a> {
     }
 
     fn newline(&mut self) {
-        match self.buf.chars().rev().skip_while(|ch| *ch == ' ').next() {
+        match self.buf.chars().rev().find(|ch| *ch != ' ') {
             Some('\n') | None => {}
             _ => writeln!(self).unwrap(),
         }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
index bb13165257b..19d2fe956f0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/child_by_source.rs
@@ -117,7 +117,7 @@ impl ChildBySource for ItemScope {
                 let adt = ast_id.to_node(db.upcast());
                 calls.for_each(|(attr_id, call_id, calls)| {
                     if let Some(Either::Left(attr)) =
-                        adt.doc_comments_and_attrs().nth(attr_id.ast_index as usize)
+                        adt.doc_comments_and_attrs().nth(attr_id.ast_index())
                     {
                         res[keys::DERIVE_MACRO_CALL].insert(attr, (attr_id, call_id, calls.into()));
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
index e6b05f27a54..c3c1dfd39ac 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs
@@ -3,6 +3,7 @@
 use std::sync::Arc;
 
 use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind};
+use intern::Interned;
 use smallvec::SmallVec;
 use syntax::ast;
 
@@ -10,7 +11,6 @@ use crate::{
     attr::Attrs,
     body::{Expander, Mark},
     db::DefDatabase,
-    intern::Interned,
     item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
     nameres::{
         attr_resolution::ResolvedAttr,
@@ -142,7 +142,7 @@ impl FunctionData {
     }
 }
 
-fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> {
+fn parse_rustc_legacy_const_generics(tt: &crate::tt::Subtree) -> Box<[u32]> {
     let mut indices = Vec::new();
     for args in tt.token_trees.chunks(2) {
         match &args[0] {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
index 431c8255497..b23427a73b3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs
@@ -4,8 +4,9 @@ use std::sync::Arc;
 use base_db::{salsa, CrateId, SourceDatabase, Upcast};
 use either::Either;
 use hir_expand::{db::AstDatabase, HirFileId};
+use intern::Interned;
 use la_arena::ArenaMap;
-use syntax::{ast, AstPtr, SmolStr};
+use syntax::{ast, AstPtr};
 
 use crate::{
     adt::{EnumData, StructData},
@@ -17,9 +18,8 @@ use crate::{
     },
     generics::GenericParams,
     import_map::ImportMap,
-    intern::Interned,
     item_tree::{AttrOwner, ItemTree},
-    lang_item::{LangItemTarget, LangItems},
+    lang_item::{LangItem, LangItemTarget, LangItems},
     nameres::{diagnostics::DefDiagnostic, DefMap},
     visibility::{self, Visibility},
     AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
@@ -183,7 +183,7 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
     fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
 
     #[salsa::invoke(LangItems::lang_item_query)]
-    fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
+    fn lang_item(&self, start_crate: CrateId, item: LangItem) -> Option<LangItemTarget>;
 
     #[salsa::invoke(ImportMap::import_map_query)]
     fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
index 7b656942119..48028b7c6a8 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/expr.rs
@@ -15,11 +15,11 @@
 use std::fmt;
 
 use hir_expand::name::Name;
+use intern::Interned;
 use la_arena::{Idx, RawIdx};
 
 use crate::{
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinUint},
-    intern::Interned,
     path::{GenericArgs, Path},
     type_ref::{Mutability, Rawness, TypeRef},
     BlockId,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
index ddd7ad99e9a..3f439232083 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs
@@ -811,7 +811,7 @@ pub struct S;
     fn prelude() {
         check_found_path(
             r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 $0
 //- /std.rs crate:std
 pub mod prelude {
@@ -852,7 +852,7 @@ pub mod prelude {
     fn imported_prelude() {
         check_found_path(
             r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 use S;
 $0
 //- /std.rs crate:std
@@ -872,7 +872,7 @@ pub mod prelude {
     #[test]
     fn enum_variant_from_prelude() {
         let code = r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 $0
 //- /std.rs crate:std
 pub mod prelude {
@@ -1273,7 +1273,7 @@ fn f() {
     fn prelude_with_inner_items() {
         check_found_path(
             r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 fn f() {
     fn inner() {}
     $0
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
index f74559f5d66..b2ab0c30e03 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs
@@ -9,6 +9,7 @@ use hir_expand::{
     name::{AsName, Name},
     ExpandResult, HirFileId, InFile,
 };
+use intern::Interned;
 use la_arena::{Arena, ArenaMap, Idx};
 use once_cell::unsync::Lazy;
 use std::ops::DerefMut;
@@ -20,7 +21,6 @@ use crate::{
     child_by_source::ChildBySource,
     db::DefDatabase,
     dyn_map::DynMap,
-    intern::Interned,
     keys,
     src::{HasChildSource, HasSource},
     type_ref::{LifetimeRef, TypeBound, TypeRef},
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
index c7b213b7e98..53a4173ff42 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs
@@ -4,7 +4,7 @@
 use std::collections::hash_map::Entry;
 
 use base_db::CrateId;
-use hir_expand::{name::Name, AstId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::Name, AstId, MacroCallId};
 use itertools::Itertools;
 use once_cell::sync::Lazy;
 use profile::Count;
@@ -14,8 +14,8 @@ use stdx::format_to;
 use syntax::ast;
 
 use crate::{
-    attr::AttrId, db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType,
-    ConstId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
+    db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule,
+    ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId,
 };
 
 #[derive(Copy, Clone, Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
index 80297f8adf1..19d01630ef0 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs
@@ -48,10 +48,12 @@ use base_db::CrateId;
 use either::Either;
 use hir_expand::{
     ast_id_map::FileAstId,
+    attrs::RawAttrs,
     hygiene::Hygiene,
     name::{name, AsName, Name},
     ExpandTo, HirFileId, InFile,
 };
+use intern::Interned;
 use la_arena::{Arena, Idx, IdxRange, RawIdx};
 use profile::Count;
 use rustc_hash::FxHashMap;
@@ -60,10 +62,9 @@ use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 
 use crate::{
-    attr::{Attrs, RawAttrs},
+    attr::Attrs,
     db::DefDatabase,
     generics::GenericParams,
-    intern::Interned,
     path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
     type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
     visibility::RawVisibility,
@@ -110,7 +111,8 @@ impl ItemTree {
             Some(node) => node,
             None => return Default::default(),
         };
-        if never!(syntax.kind() == SyntaxKind::ERROR) {
+        if never!(syntax.kind() == SyntaxKind::ERROR, "{:?} from {:?} {}", file_id, syntax, syntax)
+        {
             // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
             return Default::default();
         }
@@ -120,7 +122,7 @@ impl ItemTree {
         let mut item_tree = match_ast! {
             match syntax {
                 ast::SourceFile(file) => {
-                    top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene()));
+                    top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
                     ctx.lower_module_items(&file)
                 },
                 ast::MacroItems(items) => {
@@ -132,7 +134,7 @@ impl ItemTree {
                     ctx.lower_macro_stmts(stmts)
                 },
                 _ => {
-                    panic!("cannot create item tree from {syntax:?} {syntax}");
+                    panic!("cannot create item tree for file {file_id:?} from {syntax:?} {syntax}");
                 },
             }
         };
@@ -152,7 +154,11 @@ impl ItemTree {
 
     /// Returns the inner attributes of the source file.
     pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
-        self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate)
+        Attrs::filter(
+            db,
+            krate,
+            self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone(),
+        )
     }
 
     pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
@@ -160,7 +166,7 @@ impl ItemTree {
     }
 
     pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
-        self.raw_attrs(of).clone().filter(db, krate)
+        Attrs::filter(db, krate, self.raw_attrs(of).clone())
     }
 
     pub fn pretty_print(&self) -> String {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
index b25274bccc9..27705cbbbdc 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs
@@ -99,7 +99,7 @@ impl<'a> Ctx<'a> {
     }
 
     fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
-        let attrs = RawAttrs::new(self.db, item, self.hygiene());
+        let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
         let item: ModItem = match item {
             ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
             ast::Item::Union(ast) => self.lower_union(ast)?.into(),
@@ -173,7 +173,7 @@ impl<'a> Ctx<'a> {
         for field in fields.fields() {
             if let Some(data) = self.lower_record_field(&field) {
                 let idx = self.data().fields.alloc(data);
-                self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+                self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
             }
         }
         let end = self.next_field_idx();
@@ -194,7 +194,7 @@ impl<'a> Ctx<'a> {
         for (i, field) in fields.fields().enumerate() {
             let data = self.lower_tuple_field(i, &field);
             let idx = self.data().fields.alloc(data);
-            self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+            self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
         }
         let end = self.next_field_idx();
         IdxRange::new(start..end)
@@ -239,7 +239,10 @@ impl<'a> Ctx<'a> {
         for variant in variants.variants() {
             if let Some(data) = self.lower_variant(&variant) {
                 let idx = self.data().variants.alloc(data);
-                self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene()));
+                self.add_attrs(
+                    idx.into(),
+                    RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
+                );
             }
         }
         let end = self.next_variant_idx();
@@ -283,7 +286,10 @@ impl<'a> Ctx<'a> {
                 };
                 let ty = Interned::new(self_type);
                 let idx = self.data().params.alloc(Param::Normal(None, ty));
-                self.add_attrs(idx.into(), RawAttrs::new(self.db, &self_param, self.hygiene()));
+                self.add_attrs(
+                    idx.into(),
+                    RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
+                );
                 has_self_param = true;
             }
             for param in param_list.params() {
@@ -307,7 +313,7 @@ impl<'a> Ctx<'a> {
                         self.data().params.alloc(Param::Normal(name, ty))
                     }
                 };
-                self.add_attrs(idx.into(), RawAttrs::new(self.db, &param, self.hygiene()));
+                self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
             }
         }
         let end_param = self.next_param_idx();
@@ -442,7 +448,7 @@ impl<'a> Ctx<'a> {
         let items = trait_def.assoc_item_list().map(|list| {
             list.assoc_items()
                 .filter_map(|item| {
-                    let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+                    let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
                     self.lower_assoc_item(&item).map(|item| {
                         self.add_attrs(ModItem::from(item).into(), attrs);
                         item
@@ -471,7 +477,7 @@ impl<'a> Ctx<'a> {
             .flat_map(|it| it.assoc_items())
             .filter_map(|item| {
                 let assoc = self.lower_assoc_item(&item)?;
-                let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+                let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
                 self.add_attrs(ModItem::from(assoc).into(), attrs);
                 Some(assoc)
             })
@@ -541,7 +547,7 @@ impl<'a> Ctx<'a> {
                     // (in other words, the knowledge that they're in an extern block must not be used).
                     // This is because an extern block can contain macros whose ItemTree's top-level items
                     // should be considered to be in an extern block too.
-                    let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+                    let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
                     let id: ModItem = match item {
                         ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
                         ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
index 48c40df22ff..8f230b87d01 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs
@@ -3,7 +3,6 @@
 use std::fmt::{self, Write};
 
 use crate::{
-    attr::RawAttrs,
     generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
     pretty::{print_path, print_type_bounds, print_type_ref},
     visibility::RawVisibility,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
index c5cb9a2af53..72beec8186c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/keys.rs
@@ -2,12 +2,11 @@
 
 use std::marker::PhantomData;
 
-use hir_expand::MacroCallId;
+use hir_expand::{attrs::AttrId, MacroCallId};
 use rustc_hash::FxHashMap;
 use syntax::{ast, AstNode, AstPtr};
 
 use crate::{
-    attr::AttrId,
     dyn_map::{DynMap, Policy},
     ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id,
     MacroRulesId, ProcMacroId, StaticId, StructId, TraitId, TypeAliasId, TypeOrConstParamId,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
index 87785018458..ab9bc615daf 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs
@@ -8,19 +8,21 @@ use rustc_hash::FxHashMap;
 use syntax::SmolStr;
 
 use crate::{
-    db::DefDatabase, AdtId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId, ImplId,
-    ModuleDefId, StaticId, StructId, TraitId,
+    db::DefDatabase, AdtId, AssocItemId, AttrDefId, CrateId, EnumId, EnumVariantId, FunctionId,
+    ImplId, ModuleDefId, StaticId, StructId, TraitId, TypeAliasId, UnionId,
 };
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub enum LangItemTarget {
     EnumId(EnumId),
-    FunctionId(FunctionId),
-    ImplDefId(ImplId),
-    StaticId(StaticId),
-    StructId(StructId),
-    TraitId(TraitId),
-    EnumVariantId(EnumVariantId),
+    Function(FunctionId),
+    ImplDef(ImplId),
+    Static(StaticId),
+    Struct(StructId),
+    Union(UnionId),
+    TypeAlias(TypeAliasId),
+    Trait(TraitId),
+    EnumVariant(EnumVariantId),
 }
 
 impl LangItemTarget {
@@ -33,42 +35,42 @@ impl LangItemTarget {
 
     pub fn as_function(self) -> Option<FunctionId> {
         match self {
-            LangItemTarget::FunctionId(id) => Some(id),
+            LangItemTarget::Function(id) => Some(id),
             _ => None,
         }
     }
 
     pub fn as_impl_def(self) -> Option<ImplId> {
         match self {
-            LangItemTarget::ImplDefId(id) => Some(id),
+            LangItemTarget::ImplDef(id) => Some(id),
             _ => None,
         }
     }
 
     pub fn as_static(self) -> Option<StaticId> {
         match self {
-            LangItemTarget::StaticId(id) => Some(id),
+            LangItemTarget::Static(id) => Some(id),
             _ => None,
         }
     }
 
     pub fn as_struct(self) -> Option<StructId> {
         match self {
-            LangItemTarget::StructId(id) => Some(id),
+            LangItemTarget::Struct(id) => Some(id),
             _ => None,
         }
     }
 
     pub fn as_trait(self) -> Option<TraitId> {
         match self {
-            LangItemTarget::TraitId(id) => Some(id),
+            LangItemTarget::Trait(id) => Some(id),
             _ => None,
         }
     }
 
     pub fn as_enum_variant(self) -> Option<EnumVariantId> {
         match self {
-            LangItemTarget::EnumVariantId(id) => Some(id),
+            LangItemTarget::EnumVariant(id) => Some(id),
             _ => None,
         }
     }
@@ -76,12 +78,12 @@ impl LangItemTarget {
 
 #[derive(Default, Debug, Clone, PartialEq, Eq)]
 pub struct LangItems {
-    items: FxHashMap<SmolStr, LangItemTarget>,
+    items: FxHashMap<LangItem, LangItemTarget>,
 }
 
 impl LangItems {
-    pub fn target(&self, item: &str) -> Option<LangItemTarget> {
-        self.items.get(item).copied()
+    pub fn target(&self, item: LangItem) -> Option<LangItemTarget> {
+        self.items.get(&item).copied()
     }
 
     /// Salsa query. This will look for lang items in a specific crate.
@@ -94,16 +96,27 @@ impl LangItems {
 
         for (_, module_data) in crate_def_map.modules() {
             for impl_def in module_data.scope.impls() {
-                lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDefId)
+                lang_items.collect_lang_item(db, impl_def, LangItemTarget::ImplDef);
+                for assoc in db.impl_data(impl_def).items.iter().copied() {
+                    match assoc {
+                        AssocItemId::FunctionId(f) => {
+                            lang_items.collect_lang_item(db, f, LangItemTarget::Function)
+                        }
+                        AssocItemId::TypeAliasId(t) => {
+                            lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias)
+                        }
+                        AssocItemId::ConstId(_) => (),
+                    }
+                }
             }
 
             for def in module_data.scope.declarations() {
                 match def {
                     ModuleDefId::TraitId(trait_) => {
-                        lang_items.collect_lang_item(db, trait_, LangItemTarget::TraitId);
+                        lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
                         db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
-                            if let crate::AssocItemId::FunctionId(f) = assoc_id {
-                                lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+                            if let AssocItemId::FunctionId(f) = assoc_id {
+                                lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                             }
                         });
                     }
@@ -113,18 +126,24 @@ impl LangItems {
                             lang_items.collect_lang_item(
                                 db,
                                 EnumVariantId { parent: e, local_id },
-                                LangItemTarget::EnumVariantId,
+                                LangItemTarget::EnumVariant,
                             );
                         });
                     }
                     ModuleDefId::AdtId(AdtId::StructId(s)) => {
-                        lang_items.collect_lang_item(db, s, LangItemTarget::StructId);
+                        lang_items.collect_lang_item(db, s, LangItemTarget::Struct);
+                    }
+                    ModuleDefId::AdtId(AdtId::UnionId(u)) => {
+                        lang_items.collect_lang_item(db, u, LangItemTarget::Union);
                     }
                     ModuleDefId::FunctionId(f) => {
-                        lang_items.collect_lang_item(db, f, LangItemTarget::FunctionId);
+                        lang_items.collect_lang_item(db, f, LangItemTarget::Function);
                     }
                     ModuleDefId::StaticId(s) => {
-                        lang_items.collect_lang_item(db, s, LangItemTarget::StaticId);
+                        lang_items.collect_lang_item(db, s, LangItemTarget::Static);
+                    }
+                    ModuleDefId::TypeAliasId(t) => {
+                        lang_items.collect_lang_item(db, t, LangItemTarget::TypeAlias);
                     }
                     _ => {}
                 }
@@ -139,7 +158,7 @@ impl LangItems {
     pub(crate) fn lang_item_query(
         db: &dyn DefDatabase,
         start_crate: CrateId,
-        item: SmolStr,
+        item: LangItem,
     ) -> Option<LangItemTarget> {
         let _p = profile::span("lang_item_query");
         let lang_items = db.crate_lang_items(start_crate);
@@ -150,7 +169,7 @@ impl LangItems {
         db.crate_graph()[start_crate]
             .dependencies
             .iter()
-            .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
+            .find_map(|dep| db.lang_item(dep.crate_id, item))
     }
 
     fn collect_lang_item<T>(
@@ -162,8 +181,8 @@ impl LangItems {
         T: Into<AttrDefId> + Copy,
     {
         let _p = profile::span("collect_lang_item");
-        if let Some(lang_item_name) = lang_attr(db, item) {
-            self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
+        if let Some(lang_item) = lang_attr(db, item).and_then(|it| LangItem::from_str(&it)) {
+            self.items.entry(lang_item).or_insert_with(|| constructor(item));
         }
     }
 }
@@ -172,3 +191,224 @@ pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Opt
     let attrs = db.attrs(item.into());
     attrs.by_key("lang").string_value().cloned()
 }
+
+pub enum GenericRequirement {
+    None,
+    Minimum(usize),
+    Exact(usize),
+}
+
+macro_rules! language_item_table {
+    (
+        $( $(#[$attr:meta])* $variant:ident, $name:ident, $method:ident, $target:expr, $generics:expr; )*
+    ) => {
+
+        /// A representation of all the valid language items in Rust.
+        #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+        pub enum LangItem {
+            $(
+                #[doc = concat!("The `", stringify!($name), "` lang item.")]
+                $(#[$attr])*
+                $variant,
+            )*
+        }
+
+        impl LangItem {
+            pub fn name(self) -> SmolStr {
+                match self {
+                    $( LangItem::$variant => SmolStr::new(stringify!($name)), )*
+                }
+            }
+
+            /// Opposite of [`LangItem::name`]
+            pub fn from_name(name: &hir_expand::name::Name) -> Option<Self> {
+                Self::from_str(name.as_str()?)
+            }
+
+            /// Opposite of [`LangItem::name`]
+            pub fn from_str(name: &str) -> Option<Self> {
+                match name {
+                    $( stringify!($name) => Some(LangItem::$variant), )*
+                    _ => None,
+                }
+            }
+        }
+    }
+}
+
+language_item_table! {
+//  Variant name,            Name,                     Getter method name,         Target                  Generic requirements;
+    Sized,                   sized,               sized_trait,                Target::Trait,          GenericRequirement::Exact(0);
+    Unsize,                  unsize,              unsize_trait,               Target::Trait,          GenericRequirement::Minimum(1);
+    /// Trait injected by `#[derive(PartialEq)]`, (i.e. "Partial EQ").
+    StructuralPeq,           structural_peq,      structural_peq_trait,       Target::Trait,          GenericRequirement::None;
+    /// Trait injected by `#[derive(Eq)]`, (i.e. "Total EQ"; no, I will not apologize).
+    StructuralTeq,           structural_teq,      structural_teq_trait,       Target::Trait,          GenericRequirement::None;
+    Copy,                    copy,                copy_trait,                 Target::Trait,          GenericRequirement::Exact(0);
+    Clone,                   clone,               clone_trait,                Target::Trait,          GenericRequirement::None;
+    Sync,                    sync,                sync_trait,                 Target::Trait,          GenericRequirement::Exact(0);
+    DiscriminantKind,        discriminant_kind,   discriminant_kind_trait,    Target::Trait,          GenericRequirement::None;
+    /// The associated item of the [`DiscriminantKind`] trait.
+    Discriminant,            discriminant_type,   discriminant_type,          Target::AssocTy,        GenericRequirement::None;
+
+    PointeeTrait,            pointee_trait,       pointee_trait,              Target::Trait,          GenericRequirement::None;
+    Metadata,                metadata_type,       metadata_type,              Target::AssocTy,        GenericRequirement::None;
+    DynMetadata,             dyn_metadata,        dyn_metadata,               Target::Struct,         GenericRequirement::None;
+
+    Freeze,                  freeze,              freeze_trait,               Target::Trait,          GenericRequirement::Exact(0);
+
+    Drop,                    drop,                drop_trait,                 Target::Trait,          GenericRequirement::None;
+    Destruct,                destruct,            destruct_trait,             Target::Trait,          GenericRequirement::None;
+
+    CoerceUnsized,           coerce_unsized,      coerce_unsized_trait,       Target::Trait,          GenericRequirement::Minimum(1);
+    DispatchFromDyn,         dispatch_from_dyn,   dispatch_from_dyn_trait,    Target::Trait,          GenericRequirement::Minimum(1);
+
+    // language items relating to transmutability
+    TransmuteOpts,           transmute_opts,      transmute_opts,             Target::Struct,         GenericRequirement::Exact(0);
+    TransmuteTrait,          transmute_trait,     transmute_trait,            Target::Trait,          GenericRequirement::Exact(3);
+
+    Add,                     add,                 add_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Sub,                     sub,                 sub_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Mul,                     mul,                 mul_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Div,                     div,                 div_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Rem,                     rem,                 rem_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Neg,                     neg,                 neg_trait,                  Target::Trait,          GenericRequirement::Exact(0);
+    Not,                     not,                 not_trait,                  Target::Trait,          GenericRequirement::Exact(0);
+    BitXor,                  bitxor,              bitxor_trait,               Target::Trait,          GenericRequirement::Exact(1);
+    BitAnd,                  bitand,              bitand_trait,               Target::Trait,          GenericRequirement::Exact(1);
+    BitOr,                   bitor,               bitor_trait,                Target::Trait,          GenericRequirement::Exact(1);
+    Shl,                     shl,                 shl_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    Shr,                     shr,                 shr_trait,                  Target::Trait,          GenericRequirement::Exact(1);
+    AddAssign,               add_assign,          add_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    SubAssign,               sub_assign,          sub_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    MulAssign,               mul_assign,          mul_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    DivAssign,               div_assign,          div_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    RemAssign,               rem_assign,          rem_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    BitXorAssign,            bitxor_assign,       bitxor_assign_trait,        Target::Trait,          GenericRequirement::Exact(1);
+    BitAndAssign,            bitand_assign,       bitand_assign_trait,        Target::Trait,          GenericRequirement::Exact(1);
+    BitOrAssign,             bitor_assign,        bitor_assign_trait,         Target::Trait,          GenericRequirement::Exact(1);
+    ShlAssign,               shl_assign,          shl_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    ShrAssign,               shr_assign,          shr_assign_trait,           Target::Trait,          GenericRequirement::Exact(1);
+    Index,                   index,               index_trait,                Target::Trait,          GenericRequirement::Exact(1);
+    IndexMut,                index_mut,           index_mut_trait,            Target::Trait,          GenericRequirement::Exact(1);
+
+    UnsafeCell,              unsafe_cell,         unsafe_cell_type,           Target::Struct,         GenericRequirement::None;
+    VaList,                  va_list,             va_list,                    Target::Struct,         GenericRequirement::None;
+
+    Deref,                   deref,               deref_trait,                Target::Trait,          GenericRequirement::Exact(0);
+    DerefMut,                deref_mut,           deref_mut_trait,            Target::Trait,          GenericRequirement::Exact(0);
+    DerefTarget,             deref_target,        deref_target,               Target::AssocTy,        GenericRequirement::None;
+    Receiver,                receiver,            receiver_trait,             Target::Trait,          GenericRequirement::None;
+
+    Fn,                      fn,                  fn_trait,                   Target::Trait,          GenericRequirement::Exact(1);
+    FnMut,                   fn_mut,              fn_mut_trait,               Target::Trait,          GenericRequirement::Exact(1);
+    FnOnce,                  fn_once,             fn_once_trait,              Target::Trait,          GenericRequirement::Exact(1);
+
+    FnOnceOutput,            fn_once_output,      fn_once_output,             Target::AssocTy,        GenericRequirement::None;
+
+    Future,                  future_trait,        future_trait,               Target::Trait,          GenericRequirement::Exact(0);
+    GeneratorState,          generator_state,     gen_state,                  Target::Enum,           GenericRequirement::None;
+    Generator,               generator,           gen_trait,                  Target::Trait,          GenericRequirement::Minimum(1);
+    Unpin,                   unpin,               unpin_trait,                Target::Trait,          GenericRequirement::None;
+    Pin,                     pin,                 pin_type,                   Target::Struct,         GenericRequirement::None;
+
+    PartialEq,               eq,                  eq_trait,                   Target::Trait,          GenericRequirement::Exact(1);
+    PartialOrd,              partial_ord,         partial_ord_trait,          Target::Trait,          GenericRequirement::Exact(1);
+
+    // A number of panic-related lang items. The `panic` item corresponds to divide-by-zero and
+    // various panic cases with `match`. The `panic_bounds_check` item is for indexing arrays.
+    //
+    // The `begin_unwind` lang item has a predefined symbol name and is sort of a "weak lang item"
+    // in the sense that a crate is not required to have it defined to use it, but a final product
+    // is required to define it somewhere. Additionally, there are restrictions on crates that use
+    // a weak lang item, but do not have it defined.
+    Panic,                   panic,               panic_fn,                   Target::Fn,             GenericRequirement::Exact(0);
+    PanicNounwind,           panic_nounwind,      panic_nounwind,             Target::Fn,             GenericRequirement::Exact(0);
+    PanicFmt,                panic_fmt,           panic_fmt,                  Target::Fn,             GenericRequirement::None;
+    PanicDisplay,            panic_display,       panic_display,              Target::Fn,             GenericRequirement::None;
+    ConstPanicFmt,           const_panic_fmt,     const_panic_fmt,            Target::Fn,             GenericRequirement::None;
+    PanicBoundsCheck,        panic_bounds_check,  panic_bounds_check_fn,      Target::Fn,             GenericRequirement::Exact(0);
+    PanicInfo,               panic_info,          panic_info,                 Target::Struct,         GenericRequirement::None;
+    PanicLocation,           panic_location,      panic_location,             Target::Struct,         GenericRequirement::None;
+    PanicImpl,               panic_impl,          panic_impl,                 Target::Fn,             GenericRequirement::None;
+    PanicCannotUnwind,       panic_cannot_unwind, panic_cannot_unwind,        Target::Fn,             GenericRequirement::Exact(0);
+    /// libstd panic entry point. Necessary for const eval to be able to catch it
+    BeginPanic,              begin_panic,         begin_panic_fn,             Target::Fn,             GenericRequirement::None;
+
+    ExchangeMalloc,          exchange_malloc,     exchange_malloc_fn,         Target::Fn,             GenericRequirement::None;
+    BoxFree,                 box_free,            box_free_fn,                Target::Fn,             GenericRequirement::Minimum(1);
+    DropInPlace,             drop_in_place,       drop_in_place_fn,           Target::Fn,             GenericRequirement::Minimum(1);
+    AllocLayout,             alloc_layout,        alloc_layout,               Target::Struct,         GenericRequirement::None;
+
+    Start,                   start,               start_fn,                   Target::Fn,             GenericRequirement::Exact(1);
+
+    EhPersonality,           eh_personality,      eh_personality,             Target::Fn,             GenericRequirement::None;
+    EhCatchTypeinfo,         eh_catch_typeinfo,   eh_catch_typeinfo,          Target::Static,         GenericRequirement::None;
+
+    OwnedBox,                owned_box,           owned_box,                  Target::Struct,         GenericRequirement::Minimum(1);
+
+    PhantomData,             phantom_data,        phantom_data,               Target::Struct,         GenericRequirement::Exact(1);
+
+    ManuallyDrop,            manually_drop,       manually_drop,              Target::Struct,         GenericRequirement::None;
+
+    MaybeUninit,             maybe_uninit,        maybe_uninit,               Target::Union,          GenericRequirement::None;
+
+    /// Align offset for stride != 1; must not panic.
+    AlignOffset,             align_offset,        align_offset_fn,            Target::Fn,             GenericRequirement::None;
+
+    Termination,             termination,         termination,                Target::Trait,          GenericRequirement::None;
+
+    Try,                     Try,                 try_trait,                  Target::Trait,          GenericRequirement::None;
+
+    Tuple,                   tuple_trait,         tuple_trait,                Target::Trait,          GenericRequirement::Exact(0);
+
+    SliceLen,                slice_len_fn,        slice_len_fn,               Target::Method(MethodKind::Inherent), GenericRequirement::None;
+
+    // Language items from AST lowering
+    TryTraitFromResidual,    from_residual,       from_residual_fn,           Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    TryTraitFromOutput,      from_output,         from_output_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    TryTraitBranch,          branch,              branch_fn,                  Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    TryTraitFromYeet,        from_yeet,           from_yeet_fn,               Target::Fn,             GenericRequirement::None;
+
+    PointerSized,            pointer_sized,       pointer_sized,              Target::Trait,          GenericRequirement::Exact(0);
+
+    Poll,                    Poll,                poll,                       Target::Enum,           GenericRequirement::None;
+    PollReady,               Ready,               poll_ready_variant,         Target::Variant,        GenericRequirement::None;
+    PollPending,             Pending,             poll_pending_variant,       Target::Variant,        GenericRequirement::None;
+
+    // FIXME(swatinem): the following lang items are used for async lowering and
+    // should become obsolete eventually.
+    ResumeTy,                ResumeTy,            resume_ty,                  Target::Struct,         GenericRequirement::None;
+    IdentityFuture,          identity_future,     identity_future_fn,         Target::Fn,             GenericRequirement::None;
+    GetContext,              get_context,         get_context_fn,             Target::Fn,             GenericRequirement::None;
+
+    Context,                 Context,             context,                    Target::Struct,         GenericRequirement::None;
+    FuturePoll,              poll,                future_poll_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+
+    FromFrom,                from,                from_fn,                    Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+
+    OptionSome,              Some,                option_some_variant,        Target::Variant,        GenericRequirement::None;
+    OptionNone,              None,                option_none_variant,        Target::Variant,        GenericRequirement::None;
+
+    ResultOk,                Ok,                  result_ok_variant,          Target::Variant,        GenericRequirement::None;
+    ResultErr,               Err,                 result_err_variant,         Target::Variant,        GenericRequirement::None;
+
+    ControlFlowContinue,     Continue,            cf_continue_variant,        Target::Variant,        GenericRequirement::None;
+    ControlFlowBreak,        Break,               cf_break_variant,           Target::Variant,        GenericRequirement::None;
+
+    IntoFutureIntoFuture,    into_future,         into_future_fn,             Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    IntoIterIntoIter,        into_iter,           into_iter_fn,               Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+    IteratorNext,            next,                next_fn,                    Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
+
+    PinNewUnchecked,         new_unchecked,       new_unchecked_fn,           Target::Method(MethodKind::Inherent), GenericRequirement::None;
+
+    RangeFrom,               RangeFrom,           range_from_struct,          Target::Struct,         GenericRequirement::None;
+    RangeFull,               RangeFull,           range_full_struct,          Target::Struct,         GenericRequirement::None;
+    RangeInclusiveStruct,    RangeInclusive,      range_inclusive_struct,     Target::Struct,         GenericRequirement::None;
+    RangeInclusiveNew,       range_inclusive_new, range_inclusive_new_method, Target::Method(MethodKind::Inherent), GenericRequirement::None;
+    Range,                   Range,               range_struct,               Target::Struct,         GenericRequirement::None;
+    RangeToInclusive,        RangeToInclusive,    range_to_inclusive_struct,  Target::Struct,         GenericRequirement::None;
+    RangeTo,                 RangeTo,             range_to_struct,            Target::Struct,         GenericRequirement::None;
+
+    String,                  String,              string,                     Target::Struct,         GenericRequirement::None;
+}
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/layout.rs b/src/tools/rust-analyzer/crates/hir-def/src/layout.rs
index 6bb4cd94f8a..49b1190ad46 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/layout.rs
@@ -90,6 +90,7 @@ impl IntegerExt for Integer {
 pub enum LayoutError {
     UserError(String),
     SizeOverflow,
+    TargetLayoutNotAvailable,
     HasPlaceholder,
     NotImplemented,
     Unknown,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
index 8267ef09cb0..d07c5fb67c6 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs
@@ -28,7 +28,6 @@ pub mod dyn_map;
 pub mod keys;
 
 pub mod item_tree;
-pub mod intern;
 
 pub mod adt;
 pub mod data;
@@ -61,10 +60,10 @@ use std::{
     sync::Arc,
 };
 
-use attr::Attr;
 use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
 use hir_expand::{
     ast_id_map::FileAstId,
+    attrs::{Attr, AttrId, AttrInput},
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
@@ -80,9 +79,10 @@ use nameres::DefMap;
 use stdx::impl_from;
 use syntax::ast;
 
+use ::tt::token_id as tt;
+
 use crate::{
     adt::VariantData,
-    attr::AttrId,
     builtin_type::BuiltinType,
     item_tree::{
         Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, ModItem,
@@ -292,6 +292,7 @@ pub struct Macro2Loc {
     pub container: ModuleId,
     pub id: ItemTreeId<MacroDef>,
     pub expander: MacroExpander,
+    pub allow_internal_unsafe: bool,
 }
 impl_intern!(Macro2Id, Macro2Loc, intern_macro2, lookup_intern_macro2);
 
@@ -301,8 +302,9 @@ pub struct MacroRulesId(salsa::InternId);
 pub struct MacroRulesLoc {
     pub container: ModuleId,
     pub id: ItemTreeId<MacroRules>,
-    pub local_inner: bool,
     pub expander: MacroExpander,
+    pub allow_internal_unsafe: bool,
+    pub local_inner: bool,
 }
 impl_intern!(MacroRulesId, MacroRulesLoc, intern_macro_rules, lookup_intern_macro_rules);
 
@@ -896,6 +898,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
                     }
                 },
                 local_inner: false,
+                allow_internal_unsafe: loc.allow_internal_unsafe,
             }
         }
         MacroId::MacroRulesId(it) => {
@@ -920,6 +923,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
                     }
                 },
                 local_inner: loc.local_inner,
+                allow_internal_unsafe: loc.allow_internal_unsafe,
             }
         }
         MacroId::ProcMacroId(it) => {
@@ -935,6 +939,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
                     InFile::new(loc.id.file_id(), makro.ast_id),
                 ),
                 local_inner: false,
+                allow_internal_unsafe: false,
             }
         }
     }
@@ -943,7 +948,7 @@ pub fn macro_id_to_def_id(db: &dyn db::DefDatabase, id: MacroId) -> MacroDefId {
 fn derive_macro_as_call_id(
     db: &dyn db::DefDatabase,
     item_attr: &AstIdWithPath<ast::Adt>,
-    derive_attr: AttrId,
+    derive_attr_index: AttrId,
     derive_pos: u32,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
@@ -956,7 +961,7 @@ fn derive_macro_as_call_id(
         MacroCallKind::Derive {
             ast_id: item_attr.ast_id,
             derive_index: derive_pos,
-            derive_attr_index: derive_attr.ast_index,
+            derive_attr_index,
         },
     );
     Ok((macro_id, def_id, call_id))
@@ -970,23 +975,33 @@ fn attr_macro_as_call_id(
     def: MacroDefId,
     is_derive: bool,
 ) -> MacroCallId {
-    let mut arg = match macro_attr.input.as_deref() {
-        Some(attr::AttrInput::TokenTree(tt, map)) => (tt.clone(), map.clone()),
-        _ => Default::default(),
+    let arg = match macro_attr.input.as_deref() {
+        Some(AttrInput::TokenTree(tt, map)) => (
+            {
+                let mut tt = tt.clone();
+                tt.delimiter = tt::Delimiter::UNSPECIFIED;
+                tt
+            },
+            map.clone(),
+        ),
+        _ => (tt::Subtree::empty(), Default::default()),
     };
 
-    // The parentheses are always disposed here.
-    arg.0.delimiter = None;
-
-    let res = def.as_lazy_macro(
+    def.as_lazy_macro(
         db.upcast(),
         krate,
         MacroCallKind::Attr {
             ast_id: item_attr.ast_id,
             attr_args: Arc::new(arg),
-            invoc_attr_index: macro_attr.id.ast_index,
+            invoc_attr_index: macro_attr.id,
             is_derive,
         },
-    );
-    res
-}
+    )
+}
+intern::impl_internable!(
+    crate::type_ref::TypeRef,
+    crate::type_ref::TraitRef,
+    crate::type_ref::TypeBound,
+    crate::path::GenericArgs,
+    generics::GenericParams,
+);
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
index 79c85d11831..5ab90d92d9b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests.rs
@@ -30,7 +30,7 @@ use syntax::{
     SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
     SyntaxNode, TextRange, T,
 };
-use tt::{Subtree, TokenId};
+use tt::token_id::{Subtree, TokenId};
 
 use crate::{
     db::DefDatabase, macro_id_to_def_id, nameres::ModuleSource, resolver::HasResolver,
@@ -97,7 +97,9 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
         let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
         let kind = MacroDefKind::Declarative(ast_id);
 
-        let macro_def = db.macro_def(MacroDefId { krate, kind, local_inner: false }).unwrap();
+        let macro_def = db
+            .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false })
+            .unwrap();
         if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def {
             let tt = match &macro_ {
                 ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
@@ -251,9 +253,9 @@ fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tre
     tree.token_trees.iter().for_each(|tree| match tree {
         tt::TokenTree::Leaf(leaf) => {
             let id = match leaf {
-                tt::Leaf::Literal(it) => it.id,
-                tt::Leaf::Punct(it) => it.id,
-                tt::Leaf::Ident(it) => it.id,
+                tt::Leaf::Literal(it) => it.span,
+                tt::Leaf::Punct(it) => it.span,
+                tt::Leaf::Ident(it) => it.span,
             };
             ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
         }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 2d5f2a692e5..49bbc64bff1 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -97,6 +97,41 @@ fn#19 main#20(#21)#21 {#22
 "##]],
     );
 }
+#[test]
+fn float_field_access_macro_input() {
+    check(
+        r#"
+macro_rules! foo {
+    ($expr:expr) => {
+        fn foo() {
+            $expr;
+        }
+    };
+}
+foo!(x .0.1);
+foo!(x .2. 3);
+foo!(x .4 .5);
+"#,
+        expect![[r#"
+macro_rules! foo {
+    ($expr:expr) => {
+        fn foo() {
+            $expr;
+        }
+    };
+}
+fn foo() {
+    (x.0.1);
+}
+fn foo() {
+    (x.2.3);
+}
+fn foo() {
+    (x.4.5);
+}
+"#]],
+    );
+}
 
 #[test]
 fn mbe_smoke_test() {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
index 118c14ed843..822bdcc122d 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs
@@ -104,7 +104,7 @@ macro_rules! id {
         $($t)*
     };
 }
-id /*+errors*/! {
+id! {
     #[proc_macros::identity]
     impl Foo for WrapBj {
         async fn foo(&self) {
@@ -113,18 +113,17 @@ id /*+errors*/! {
     }
 }
 "#,
-        expect![[r##"
+        expect![[r#"
 macro_rules! id {
     ($($t:tt)*) => {
         $($t)*
     };
 }
-/* parse error: expected SEMICOLON */
 #[proc_macros::identity] impl Foo for WrapBj {
     async fn foo(&self ) {
         self .0.id().await ;
     }
 }
-"##]],
+"#]],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
index 3650204ee9d..79cabeb0fb7 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/attr_resolution.rs
@@ -1,10 +1,9 @@
 //! Post-nameres attribute resolution.
 
-use hir_expand::MacroCallId;
+use hir_expand::{attrs::Attr, MacroCallId};
 use syntax::{ast, SmolStr};
 
 use crate::{
-    attr::Attr,
     attr_macro_as_call_id, builtin_attr,
     db::DefDatabase,
     item_scope::BuiltinShadowMode,
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index 160203b7783..4b39a20d86c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -10,6 +10,7 @@ use cfg::{CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
     ast_id_map::FileAstId,
+    attrs::{Attr, AttrId},
     builtin_attr_macro::find_builtin_attr,
     builtin_derive_macro::find_builtin_derive,
     builtin_fn_macro::find_builtin_macro,
@@ -26,7 +27,7 @@ use stdx::always;
 use syntax::{ast, SmolStr};
 
 use crate::{
-    attr::{Attr, AttrId, Attrs},
+    attr::Attrs,
     attr_macro_as_call_id,
     db::DefDatabase,
     derive_macro_as_call_id,
@@ -45,6 +46,7 @@ use crate::{
     },
     path::{ImportAlias, ModPath, PathKind},
     per_ns::PerNs,
+    tt,
     visibility::{RawVisibility, Visibility},
     AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
     FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
@@ -82,7 +84,8 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
                 .enumerate()
                 .map(|(idx, it)| {
                     // FIXME: a hacky way to create a Name from string.
-                    let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() };
+                    let name =
+                        tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
                     (
                         name.as_name(),
                         ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
@@ -450,8 +453,11 @@ impl DefCollector<'_> {
                         directive.module_id,
                         MacroCallKind::Attr {
                             ast_id: ast_id.ast_id,
-                            attr_args: Default::default(),
-                            invoc_attr_index: attr.id.ast_index,
+                            attr_args: std::sync::Arc::new((
+                                tt::Subtree::empty(),
+                                Default::default(),
+                            )),
+                            invoc_attr_index: attr.id,
                             is_derive: false,
                         },
                         attr.path().clone(),
@@ -1406,7 +1412,7 @@ impl DefCollector<'_> {
                         directive.module_id,
                         MacroCallKind::Derive {
                             ast_id: ast_id.ast_id,
-                            derive_attr_index: derive_attr.ast_index,
+                            derive_attr_index: *derive_attr,
                             derive_index: *derive_pos as u32,
                         },
                         ast_id.path.clone(),
@@ -1599,17 +1605,15 @@ impl ModCollector<'_, '_> {
                         FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
 
                     let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
-                    if self.def_collector.is_proc_macro {
-                        if self.module_id == def_map.root {
-                            if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
-                                let crate_root = def_map.module_id(def_map.root);
-                                self.def_collector.export_proc_macro(
-                                    proc_macro,
-                                    ItemTreeId::new(self.tree_id, id),
-                                    fn_id,
-                                    crate_root,
-                                );
-                            }
+                    if self.def_collector.is_proc_macro && self.module_id == def_map.root {
+                        if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
+                            let crate_root = def_map.module_id(def_map.root);
+                            self.def_collector.export_proc_macro(
+                                proc_macro,
+                                ItemTreeId::new(self.tree_id, id),
+                                fn_id,
+                                crate_root,
+                            );
                         }
                     }
 
@@ -1948,7 +1952,8 @@ impl ModCollector<'_, '_> {
             let name = match attrs.by_key("rustc_builtin_macro").string_value() {
                 Some(it) => {
                     // FIXME: a hacky way to create a Name from string.
-                    name = tt::Ident { text: it.clone(), id: tt::TokenId::unspecified() }.as_name();
+                    name =
+                        tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
                     &name
                 }
                 None => {
@@ -1983,11 +1988,13 @@ impl ModCollector<'_, '_> {
             // Case 2: normal `macro_rules!` macro
             MacroExpander::Declarative
         };
+        let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists();
 
         let macro_id = MacroRulesLoc {
             container: module,
             id: ItemTreeId::new(self.tree_id, id),
             local_inner,
+            allow_internal_unsafe,
             expander,
         }
         .intern(self.def_collector.db);
@@ -2047,10 +2054,15 @@ impl ModCollector<'_, '_> {
             // Case 2: normal `macro`
             MacroExpander::Declarative
         };
+        let allow_internal_unsafe = attrs.by_key("allow_internal_unsafe").exists();
 
-        let macro_id =
-            Macro2Loc { container: module, id: ItemTreeId::new(self.tree_id, id), expander }
-                .intern(self.def_collector.db);
+        let macro_id = Macro2Loc {
+            container: module,
+            id: ItemTreeId::new(self.tree_id, id),
+            expander,
+            allow_internal_unsafe,
+        }
+        .intern(self.def_collector.db);
         self.def_collector.define_macro_def(
             self.module_id,
             mac.name.clone(),
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
index 06614229198..b024d7c6777 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
@@ -2,12 +2,11 @@
 
 use base_db::CrateId;
 use cfg::{CfgExpr, CfgOptions};
-use hir_expand::MacroCallKind;
+use hir_expand::{attrs::AttrId, MacroCallKind};
 use la_arena::Idx;
 use syntax::ast::{self, AnyHasAttrs};
 
 use crate::{
-    attr::AttrId,
     item_tree::{self, ItemTreeId},
     nameres::LocalModuleId,
     path::ModPath,
@@ -32,9 +31,9 @@ pub enum DefDiagnosticKind {
 
     UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
 
-    InvalidDeriveTarget { ast: AstId<ast::Item>, id: u32 },
+    InvalidDeriveTarget { ast: AstId<ast::Item>, id: usize },
 
-    MalformedDerive { ast: AstId<ast::Adt>, id: u32 },
+    MalformedDerive { ast: AstId<ast::Adt>, id: usize },
 }
 
 #[derive(Debug, PartialEq, Eq)]
@@ -120,7 +119,7 @@ impl DefDiagnostic {
     ) -> Self {
         Self {
             in_module: container,
-            kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index },
+            kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index() },
         }
     }
 
@@ -131,7 +130,7 @@ impl DefDiagnostic {
     ) -> Self {
         Self {
             in_module: container,
-            kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index },
+            kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index() },
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
index 4c263846d27..51c565fe123 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/mod_resolution.rs
@@ -7,7 +7,7 @@ use syntax::SmolStr;
 
 use crate::{db::DefDatabase, HirFileId};
 
-const MOD_DEPTH_LIMIT: Limit = Limit::new(32);
+static MOD_DEPTH_LIMIT: Limit = Limit::new(32);
 
 #[derive(Clone, Debug)]
 pub(super) struct ModDir {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
index 06b23392cfe..caad4a1f381 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/proc_macro.rs
@@ -1,9 +1,9 @@
 //! Nameres-specific procedural macro data and helpers.
 
 use hir_expand::name::{AsName, Name};
-use tt::{Leaf, TokenTree};
 
 use crate::attr::Attrs;
+use crate::tt::{Leaf, TokenTree};
 
 #[derive(Debug, PartialEq, Eq)]
 pub struct ProcMacroDef {
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
index 0d90047c28f..8a27c60df5c 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests.rs
@@ -476,7 +476,7 @@ pub struct Bar;
 fn no_std_prelude() {
     check(
         r#"
-        //- /main.rs crate:main deps:core,std
+        //- /main.rs edition:2018 crate:main deps:core,std
         #![cfg_attr(not(never), no_std)]
         use Rust;
 
@@ -544,7 +544,7 @@ fn edition_specific_preludes() {
 fn std_prelude_takes_precedence_above_core_prelude() {
     check(
         r#"
-//- /main.rs crate:main deps:core,std
+//- /main.rs edition:2018 crate:main deps:core,std
 use {Foo, Bar};
 
 //- /std.rs crate:std deps:core
@@ -574,7 +574,7 @@ pub mod prelude {
 fn cfg_not_test() {
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 use {Foo, Bar, Baz};
 
 //- /lib.rs crate:std
@@ -602,7 +602,7 @@ pub mod prelude {
 fn cfg_test() {
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 use {Foo, Bar, Baz};
 
 //- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
index fe0ad4f3863..a4ccd14cbb4 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs
@@ -264,7 +264,7 @@ fn prelude_is_macro_use() {
     cov_mark::check!(prelude_is_macro_use);
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 structs!(Foo);
 structs_priv!(Bar);
 structs_outside!(Out);
@@ -634,7 +634,7 @@ fn macro_dollar_crate_is_correct_in_indirect_deps() {
     // From std
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 foo!();
 
 //- /std.rs crate:std deps:core
@@ -1034,7 +1034,7 @@ structs!(Foo);
 fn macro_in_prelude() {
     check(
         r#"
-//- /lib.rs crate:lib deps:std
+//- /lib.rs edition:2018 crate:lib deps:std
 global_asm!();
 
 //- /std.rs crate:std
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
index 592223f7d85..25a23fcd61a 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs
@@ -8,10 +8,10 @@ use std::{
 
 use crate::{
     body::LowerCtx,
-    intern::Interned,
     type_ref::{ConstScalarOrPath, LifetimeRef},
 };
 use hir_expand::name::Name;
+use intern::Interned;
 use syntax::ast;
 
 use crate::type_ref::{TypeBound, TypeRef};
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
index cfa3a6baaf8..d570191595b 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs
@@ -1,9 +1,10 @@
 //! Transforms syntax into `Path` objects, ideally with accounting for hygiene
 
-use crate::{intern::Interned, type_ref::ConstScalarOrPath};
+use crate::type_ref::ConstScalarOrPath;
 
 use either::Either;
 use hir_expand::name::{name, AsName};
+use intern::Interned;
 use syntax::ast::{self, AstNode, HasTypeBounds};
 
 use super::AssociatedTypeBinding;
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
index befd0c5ffa0..1c0bd204d30 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs
@@ -3,10 +3,10 @@
 use std::fmt::{self, Write};
 
 use hir_expand::mod_path::PathKind;
+use intern::Interned;
 use itertools::Itertools;
 
 use crate::{
-    intern::Interned,
     path::{GenericArg, GenericArgs, Path},
     type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef},
 };
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
index 1ef7f9577fe..86958e3daea 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs
@@ -4,6 +4,7 @@ use std::{hash::BuildHasherDefault, sync::Arc};
 use base_db::CrateId;
 use hir_expand::name::{name, Name};
 use indexmap::IndexMap;
+use intern::Interned;
 use rustc_hash::FxHashSet;
 use smallvec::{smallvec, SmallVec};
 
@@ -13,7 +14,6 @@ use crate::{
     db::DefDatabase,
     expr::{ExprId, LabelId, PatId},
     generics::{GenericParams, TypeOrConstParamData},
-    intern::Interned,
     item_scope::{BuiltinShadowMode, BUILTIN_SCOPE},
     nameres::DefMap,
     path::{ModPath, PathKind},
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
index f8bb78ddcfe..8fa12c7aafd 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/type_ref.rs
@@ -7,13 +7,13 @@ use hir_expand::{
     name::{AsName, Name},
     AstId,
 };
+use intern::Interned;
 use syntax::ast::{self, HasName};
 
 use crate::{
     body::LowerCtx,
     builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
     expr::Literal,
-    intern::Interned,
     path::Path,
 };
 
@@ -240,7 +240,7 @@ impl TypeRef {
                 TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list()))
             }
             ast::Type::MacroType(mt) => match mt.macro_call() {
-                Some(mc) => ctx.ast_id(ctx.db, &mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error),
+                Some(mc) => ctx.ast_id(&mc).map(TypeRef::Macro).unwrap_or(TypeRef::Error),
                 None => TypeRef::Error,
             },
         }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
index 77eb1fd4504..525cdc32b87 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml
@@ -2,9 +2,11 @@
 name = "hir-expand"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -21,14 +23,16 @@ hashbrown = { version = "0.12.1", features = [
 ], default-features = false }
 smallvec = { version = "1.10.0", features = ["const_new"] }
 
-stdx = { path = "../stdx", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-mbe = { path = "../mbe", version = "0.0.0" }
-limit = { path = "../limit", version = "0.0.0" }
+# local deps
+stdx.workspace = true
+intern.workspace = true
+base-db.workspace = true
+cfg.workspace = true
+syntax.workspace = true
+profile.workspace = true
+tt.workspace = true
+mbe.workspace = true
+limit.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
new file mode 100644
index 00000000000..5c04f8e8b8f
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/attrs.rs
@@ -0,0 +1,349 @@
+//! A higher level attributes based on TokenTree, with also some shortcuts.
+use std::{fmt, ops, sync::Arc};
+
+use base_db::CrateId;
+use cfg::CfgExpr;
+use either::Either;
+use intern::Interned;
+use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
+use smallvec::{smallvec, SmallVec};
+use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
+
+use crate::{
+    db::AstDatabase,
+    hygiene::Hygiene,
+    mod_path::{ModPath, PathKind},
+    name::AsName,
+    tt::{self, Subtree},
+    InFile,
+};
+
+/// Syntactical attributes, without filtering of `cfg_attr`s.
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct RawAttrs {
+    entries: Option<Arc<[Attr]>>,
+}
+
+impl ops::Deref for RawAttrs {
+    type Target = [Attr];
+
+    fn deref(&self) -> &[Attr] {
+        match &self.entries {
+            Some(it) => &*it,
+            None => &[],
+        }
+    }
+}
+
+impl RawAttrs {
+    pub const EMPTY: Self = Self { entries: None };
+
+    pub fn new(db: &dyn AstDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
+        let entries = collect_attrs(owner)
+            .filter_map(|(id, attr)| match attr {
+                Either::Left(attr) => {
+                    attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
+                }
+                Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
+                    id,
+                    input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
+                    path: Interned::new(ModPath::from(crate::name!(doc))),
+                }),
+            })
+            .collect::<Arc<_>>();
+
+        Self { entries: if entries.is_empty() { None } else { Some(entries) } }
+    }
+
+    pub fn from_attrs_owner(db: &dyn AstDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
+        let hygiene = Hygiene::new(db, owner.file_id);
+        Self::new(db, owner.value, &hygiene)
+    }
+
+    pub fn merge(&self, other: Self) -> Self {
+        match (&self.entries, other.entries) {
+            (None, None) => Self::EMPTY,
+            (None, entries @ Some(_)) => Self { entries },
+            (Some(entries), None) => Self { entries: Some(entries.clone()) },
+            (Some(a), Some(b)) => {
+                let last_ast_index = a.last().map_or(0, |it| it.id.ast_index() + 1) as u32;
+                Self {
+                    entries: Some(
+                        a.iter()
+                            .cloned()
+                            .chain(b.iter().map(|it| {
+                                let mut it = it.clone();
+                                it.id.id = it.id.ast_index() as u32 + last_ast_index
+                                    | (it.id.cfg_attr_index().unwrap_or(0) as u32)
+                                        << AttrId::AST_INDEX_BITS;
+                                it
+                            }))
+                            .collect(),
+                    ),
+                }
+            }
+        }
+    }
+
+    /// Processes `cfg_attr`s, returning the resulting semantic `Attrs`.
+    // FIXME: This should return a different type
+    pub fn filter(self, db: &dyn AstDatabase, krate: CrateId) -> RawAttrs {
+        let has_cfg_attrs = self
+            .iter()
+            .any(|attr| attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]));
+        if !has_cfg_attrs {
+            return self;
+        }
+
+        let crate_graph = db.crate_graph();
+        let new_attrs = self
+            .iter()
+            .flat_map(|attr| -> SmallVec<[_; 1]> {
+                let is_cfg_attr =
+                    attr.path.as_ident().map_or(false, |name| *name == crate::name![cfg_attr]);
+                if !is_cfg_attr {
+                    return smallvec![attr.clone()];
+                }
+
+                let subtree = match attr.token_tree_value() {
+                    Some(it) => it,
+                    _ => return smallvec![attr.clone()],
+                };
+
+                let (cfg, parts) = match parse_cfg_attr_input(subtree) {
+                    Some(it) => it,
+                    None => return smallvec![attr.clone()],
+                };
+                let index = attr.id;
+                let attrs =
+                    parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
+                        let tree = Subtree {
+                            delimiter: tt::Delimiter::unspecified(),
+                            token_trees: attr.to_vec(),
+                        };
+                        // FIXME hygiene
+                        let hygiene = Hygiene::new_unhygienic();
+                        Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
+                    });
+
+                let cfg_options = &crate_graph[krate].cfg_options;
+                let cfg = Subtree { delimiter: subtree.delimiter, token_trees: cfg.to_vec() };
+                let cfg = CfgExpr::parse(&cfg);
+                if cfg_options.check(&cfg) == Some(false) {
+                    smallvec![]
+                } else {
+                    cov_mark::hit!(cfg_attr_active);
+
+                    attrs.collect()
+                }
+            })
+            .collect();
+
+        RawAttrs { entries: Some(new_attrs) }
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct AttrId {
+    id: u32,
+}
+
+// FIXME: This only handles a single level of cfg_attr nesting
+// that is `#[cfg_attr(all(), cfg_attr(all(), cfg(any())))]` breaks again
+impl AttrId {
+    const CFG_ATTR_BITS: usize = 7;
+    const AST_INDEX_MASK: usize = 0x00FF_FFFF;
+    const AST_INDEX_BITS: usize = Self::AST_INDEX_MASK.count_ones() as usize;
+    const CFG_ATTR_SET_BITS: u32 = 1 << 31;
+
+    pub fn ast_index(&self) -> usize {
+        self.id as usize & Self::AST_INDEX_MASK
+    }
+
+    pub fn cfg_attr_index(&self) -> Option<usize> {
+        if self.id & Self::CFG_ATTR_SET_BITS == 0 {
+            None
+        } else {
+            Some(self.id as usize >> Self::AST_INDEX_BITS)
+        }
+    }
+
+    pub fn with_cfg_attr(self, idx: usize) -> AttrId {
+        AttrId { id: self.id | (idx as u32) << Self::AST_INDEX_BITS | Self::CFG_ATTR_SET_BITS }
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Attr {
+    pub id: AttrId,
+    pub path: Interned<ModPath>,
+    pub input: Option<Interned<AttrInput>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AttrInput {
+    /// `#[attr = "string"]`
+    Literal(SmolStr),
+    /// `#[attr(subtree)]`
+    TokenTree(tt::Subtree, mbe::TokenMap),
+}
+
+impl fmt::Display for AttrInput {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
+            AttrInput::TokenTree(subtree, _) => subtree.fmt(f),
+        }
+    }
+}
+
+impl Attr {
+    fn from_src(
+        db: &dyn AstDatabase,
+        ast: ast::Meta,
+        hygiene: &Hygiene,
+        id: AttrId,
+    ) -> Option<Attr> {
+        let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
+        let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
+            let value = match lit.kind() {
+                ast::LiteralKind::String(string) => string.value()?.into(),
+                _ => lit.syntax().first_token()?.text().trim_matches('"').into(),
+            };
+            Some(Interned::new(AttrInput::Literal(value)))
+        } else if let Some(tt) = ast.token_tree() {
+            let (tree, map) = syntax_node_to_token_tree(tt.syntax());
+            Some(Interned::new(AttrInput::TokenTree(tree, map)))
+        } else {
+            None
+        };
+        Some(Attr { id, path, input })
+    }
+
+    fn from_tt(
+        db: &dyn AstDatabase,
+        tt: &tt::Subtree,
+        hygiene: &Hygiene,
+        id: AttrId,
+    ) -> Option<Attr> {
+        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
+        let ast = ast::Meta::cast(parse.syntax_node())?;
+
+        Self::from_src(db, ast, hygiene, id)
+    }
+
+    pub fn path(&self) -> &ModPath {
+        &self.path
+    }
+}
+
+impl Attr {
+    /// #[path = "string"]
+    pub fn string_value(&self) -> Option<&SmolStr> {
+        match self.input.as_deref()? {
+            AttrInput::Literal(it) => Some(it),
+            _ => None,
+        }
+    }
+
+    /// #[path(ident)]
+    pub fn single_ident_value(&self) -> Option<&tt::Ident> {
+        match self.input.as_deref()? {
+            AttrInput::TokenTree(subtree, _) => match &*subtree.token_trees {
+                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
+    /// #[path TokenTree]
+    pub fn token_tree_value(&self) -> Option<&Subtree> {
+        match self.input.as_deref()? {
+            AttrInput::TokenTree(subtree, _) => Some(subtree),
+            _ => None,
+        }
+    }
+
+    /// Parses this attribute as a token tree consisting of comma separated paths.
+    pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
+        let args = self.token_tree_value()?;
+
+        if args.delimiter.kind != DelimiterKind::Parenthesis {
+            return None;
+        }
+        let paths = args
+            .token_trees
+            .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))))
+            .filter_map(|tts| {
+                if tts.is_empty() {
+                    return None;
+                }
+                let segments = tts.iter().filter_map(|tt| match tt {
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(id)) => Some(id.as_name()),
+                    _ => None,
+                });
+                Some(ModPath::from_segments(PathKind::Plain, segments))
+            });
+
+        Some(paths)
+    }
+}
+
+pub fn collect_attrs(
+    owner: &dyn ast::HasAttrs,
+) -> impl Iterator<Item = (AttrId, Either<ast::Attr, ast::Comment>)> {
+    let inner_attrs = inner_attributes(owner.syntax()).into_iter().flatten();
+    let outer_attrs =
+        ast::AttrDocCommentIter::from_syntax_node(owner.syntax()).filter(|el| match el {
+            Either::Left(attr) => attr.kind().is_outer(),
+            Either::Right(comment) => comment.is_outer(),
+        });
+    outer_attrs.chain(inner_attrs).enumerate().map(|(id, attr)| (AttrId { id: id as u32 }, attr))
+}
+
+fn inner_attributes(
+    syntax: &SyntaxNode,
+) -> Option<impl Iterator<Item = Either<ast::Attr, ast::Comment>>> {
+    let node = match_ast! {
+        match syntax {
+            ast::SourceFile(_) => syntax.clone(),
+            ast::ExternBlock(it) => it.extern_item_list()?.syntax().clone(),
+            ast::Fn(it) => it.body()?.stmt_list()?.syntax().clone(),
+            ast::Impl(it) => it.assoc_item_list()?.syntax().clone(),
+            ast::Module(it) => it.item_list()?.syntax().clone(),
+            ast::BlockExpr(it) => {
+                use syntax::SyntaxKind::{BLOCK_EXPR , EXPR_STMT};
+                // Block expressions accept outer and inner attributes, but only when they are the outer
+                // expression of an expression statement or the final expression of another block expression.
+                let may_carry_attributes = matches!(
+                    it.syntax().parent().map(|it| it.kind()),
+                     Some(BLOCK_EXPR | EXPR_STMT)
+                );
+                if !may_carry_attributes {
+                    return None
+                }
+                syntax.clone()
+            },
+            _ => return None,
+        }
+    };
+
+    let attrs = ast::AttrDocCommentIter::from_syntax_node(&node).filter(|el| match el {
+        Either::Left(attr) => attr.kind().is_inner(),
+        Either::Right(comment) => comment.is_inner(),
+    });
+    Some(attrs)
+}
+
+// Input subtree is: `(cfg, $(attr),+)`
+// Split it up into a `cfg` subtree and the `attr` subtrees.
+pub fn parse_cfg_attr_input(
+    subtree: &Subtree,
+) -> Option<(&[tt::TokenTree], impl Iterator<Item = &[tt::TokenTree]>)> {
+    let mut parts = subtree
+        .token_trees
+        .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(Punct { char: ',', .. }))));
+    let cfg = parts.next()?;
+    Some((cfg, parts.filter(|it| !it.is_empty())))
+}
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
index 58d192f9fe0..906ca991d73 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs
@@ -1,6 +1,6 @@
 //! Builtin attributes.
 
-use crate::{db::AstDatabase, name, ExpandResult, MacroCallId, MacroCallKind};
+use crate::{db::AstDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
 
 macro_rules! register_builtin {
     ( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
@@ -97,7 +97,7 @@ fn derive_attr_expand(
     let loc = db.lookup_intern_macro_call(id);
     let derives = match &loc.kind {
         MacroCallKind::Attr { attr_args, is_derive: true, .. } => &attr_args.0,
-        _ => return ExpandResult::ok(Default::default()),
+        _ => return ExpandResult::ok(tt::Subtree::empty()),
     };
     pseudo_derive_attr_expansion(tt, derives)
 }
@@ -110,7 +110,7 @@ pub fn pseudo_derive_attr_expansion(
         tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
             char,
             spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }))
     };
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
index 8966047c9b2..060a680542f 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs
@@ -3,11 +3,11 @@
 use base_db::{CrateOrigin, LangCrateOrigin};
 use tracing::debug;
 
+use crate::tt::{self, TokenId};
 use syntax::{
     ast::{self, AstNode, HasGenericParams, HasModuleItem, HasName},
     match_ast,
 };
-use tt::TokenId;
 
 use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId};
 
@@ -92,7 +92,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
     })?;
     let name_token_id =
         token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
-    let name_token = tt::Ident { id: name_token_id, text: name.text().into() };
+    let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
     let param_types = params
         .into_iter()
         .flat_map(|param_list| param_list.type_or_const_params())
@@ -101,7 +101,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
                 let ty = param
                     .ty()
                     .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
-                    .unwrap_or_default();
+                    .unwrap_or_else(tt::Subtree::empty);
                 Some(ty)
             } else {
                 None
@@ -114,7 +114,7 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> {
 fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResult<tt::Subtree> {
     let info = match parse_adt(tt) {
         Ok(info) => info,
-        Err(e) => return ExpandResult::only_err(e),
+        Err(e) => return ExpandResult::with_err(tt::Subtree::empty(), e),
     };
     let (params, args): (Vec<_>, Vec<_>) = info
         .param_types
@@ -122,7 +122,7 @@ fn expand_simple_derive(tt: &tt::Subtree, trait_path: tt::Subtree) -> ExpandResu
         .enumerate()
         .map(|(idx, param_ty)| {
             let ident = tt::Leaf::Ident(tt::Ident {
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
                 text: format!("T{idx}").into(),
             });
             let ident_ = ident.clone();
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
index 5522bdf3b3f..9f3fa73d4e6 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs
@@ -9,7 +9,9 @@ use syntax::{
     SmolStr,
 };
 
-use crate::{db::AstDatabase, name, quote, ExpandError, ExpandResult, MacroCallId, MacroCallLoc};
+use crate::{
+    db::AstDatabase, name, quote, tt, ExpandError, ExpandResult, MacroCallId, MacroCallLoc,
+};
 
 macro_rules! register_builtin {
     ( LAZY: $(($name:ident, $kind: ident) => $expand:ident),* , EAGER: $(($e_name:ident, $e_kind: ident) => $e_expand:ident),*  ) => {
@@ -61,7 +63,7 @@ macro_rules! register_builtin {
     };
 }
 
-#[derive(Debug, Default)]
+#[derive(Debug)]
 pub struct ExpandedEager {
     pub(crate) subtree: tt::Subtree,
     /// The included file ID of the include macro.
@@ -116,7 +118,7 @@ register_builtin! {
 }
 
 const DOLLAR_CRATE: tt::Ident =
-    tt::Ident { text: SmolStr::new_inline("$crate"), id: tt::TokenId::unspecified() };
+    tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
 
 fn module_path_expand(
     _db: &dyn AstDatabase,
@@ -162,7 +164,7 @@ fn stringify_expand(
     _id: MacroCallId,
     tt: &tt::Subtree,
 ) -> ExpandResult<tt::Subtree> {
-    let pretty = tt::pretty(&tt.token_trees);
+    let pretty = ::tt::pretty(&tt.token_trees);
 
     let expanded = quote! {
         #pretty
@@ -194,11 +196,11 @@ fn assert_expand(
     let expanded = match &*args {
         [cond, panic_args @ ..] => {
             let comma = tt::Subtree {
-                delimiter: None,
+                delimiter: tt::Delimiter::unspecified(),
                 token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
                     char: ',',
                     spacing: tt::Spacing::Alone,
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                 }))],
             };
             let cond = cond.clone();
@@ -247,7 +249,10 @@ fn format_args_expand(
     let mut args = parse_exprs_with_sep(tt, ',');
 
     if args.is_empty() {
-        return ExpandResult::only_err(mbe::ExpandError::NoMatchingRule.into());
+        return ExpandResult::with_err(
+            tt::Subtree::empty(),
+            mbe::ExpandError::NoMatchingRule.into(),
+        );
     }
     for arg in &mut args {
         // Remove `key =`.
@@ -282,7 +287,7 @@ fn asm_expand(
     for tt in tt.token_trees.chunks(2) {
         match tt {
             [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
-            | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', id: _, spacing: _ }))] =>
+            | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
             {
                 let krate = DOLLAR_CRATE.clone();
                 literals.push(quote!(#krate::format_args!(#lit);));
@@ -400,7 +405,7 @@ fn concat_expand(
         // FIXME: hack on top of a hack: `$e:expr` captures get surrounded in parentheses
         // to ensure the right parsing order, so skip the parentheses here. Ideally we'd
         // implement rustc's model. cc https://github.com/rust-lang/rust-analyzer/pull/10623
-        if let tt::TokenTree::Subtree(tt::Subtree { delimiter: Some(delim), token_trees }) = t {
+        if let tt::TokenTree::Subtree(tt::Subtree { delimiter: delim, token_trees }) = t {
             if let [tt] = &**token_trees {
                 if delim.kind == tt::DelimiterKind::Parenthesis {
                     t = tt;
@@ -459,9 +464,7 @@ fn concat_bytes_expand(
                 }
             }
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if i % 2 == 1 && punct.char == ',' => (),
-            tt::TokenTree::Subtree(tree)
-                if tree.delimiter_kind() == Some(tt::DelimiterKind::Bracket) =>
-            {
+            tt::TokenTree::Subtree(tree) if tree.delimiter.kind == tt::DelimiterKind::Bracket => {
                 if let Err(e) = concat_bytes_expand_subtree(tree, &mut bytes) {
                     err.get_or_insert(e);
                     break;
@@ -473,7 +476,7 @@ fn concat_bytes_expand(
             }
         }
     }
-    let ident = tt::Ident { text: bytes.join(", ").into(), id: tt::TokenId::unspecified() };
+    let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
     ExpandResult { value: ExpandedEager::new(quote!([#ident])), err }
 }
 
@@ -521,7 +524,7 @@ fn concat_idents_expand(
             }
         }
     }
-    let ident = tt::Ident { text: ident.into(), id: tt::TokenId::unspecified() };
+    let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
     ExpandResult { value: ExpandedEager::new(quote!(#ident)), err }
 }
 
@@ -572,7 +575,10 @@ fn include_expand(
         Ok((subtree, file_id)) => {
             ExpandResult::ok(ExpandedEager { subtree, included_file: Some(file_id) })
         }
-        Err(e) => ExpandResult::only_err(e),
+        Err(e) => ExpandResult::with_err(
+            ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
+            e,
+        ),
     }
 }
 
@@ -582,15 +588,18 @@ fn include_bytes_expand(
     tt: &tt::Subtree,
 ) -> ExpandResult<ExpandedEager> {
     if let Err(e) = parse_string(tt) {
-        return ExpandResult::only_err(e);
+        return ExpandResult::with_err(
+            ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
+            e,
+        );
     }
 
     // FIXME: actually read the file here if the user asked for macro expansion
     let res = tt::Subtree {
-        delimiter: None,
+        delimiter: tt::Delimiter::unspecified(),
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             text: r#"b"""#.into(),
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }))],
     };
     ExpandResult::ok(ExpandedEager::new(res))
@@ -603,7 +612,12 @@ fn include_str_expand(
 ) -> ExpandResult<ExpandedEager> {
     let path = match parse_string(tt) {
         Ok(it) => it,
-        Err(e) => return ExpandResult::only_err(e),
+        Err(e) => {
+            return ExpandResult::with_err(
+                ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
+                e,
+            )
+        }
     };
 
     // FIXME: we're not able to read excluded files (which is most of them because
@@ -635,7 +649,12 @@ fn env_expand(
 ) -> ExpandResult<ExpandedEager> {
     let key = match parse_string(tt) {
         Ok(it) => it,
-        Err(e) => return ExpandResult::only_err(e),
+        Err(e) => {
+            return ExpandResult::with_err(
+                ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
+                e,
+            )
+        }
     };
 
     let mut err = None;
@@ -666,7 +685,12 @@ fn option_env_expand(
 ) -> ExpandResult<ExpandedEager> {
     let key = match parse_string(tt) {
         Ok(it) => it,
-        Err(e) => return ExpandResult::only_err(e),
+        Err(e) => {
+            return ExpandResult::with_err(
+                ExpandedEager { subtree: tt::Subtree::empty(), included_file: None },
+                e,
+            )
+        }
     };
 
     let expanded = match get_env_inner(db, arg_id, &key) {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index b28e60187de..76016274f0e 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -14,7 +14,7 @@ use syntax::{
 
 use crate::{
     ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, fixup,
-    hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
+    hygiene::HygieneFrame, tt, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
     ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind,
     MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
 };
@@ -25,7 +25,7 @@ use crate::{
 /// an error will be emitted.
 ///
 /// Actual max for `analysis-stats .` at some point: 30672.
-static TOKEN_LIMIT: Limit = Limit::new(524_288);
+static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum TokenExpander {
@@ -168,12 +168,14 @@ pub fn expand_speculative(
                 // Attributes may have an input token tree, build the subtree and map for this as well
                 // then try finding a token id for our token if it is inside this input subtree.
                 let item = ast::Item::cast(speculative_args.clone())?;
-                item.doc_comments_and_attrs().nth(invoc_attr_index as usize).and_then(Either::left)
+                item.doc_comments_and_attrs()
+                    .nth(invoc_attr_index.ast_index())
+                    .and_then(Either::left)
             }?;
             match attr.token_tree() {
                 Some(token_tree) => {
                     let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
-                    tree.delimiter = None;
+                    tree.delimiter = tt::Delimiter::unspecified();
 
                     let shift = mbe::Shift::new(&tt);
                     shift.shift_all(&mut tree);
@@ -208,7 +210,7 @@ pub fn expand_speculative(
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
     let mut speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
-            tt.delimiter = None;
+            tt.delimiter = tt::Delimiter::unspecified();
             expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
         }
         MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
@@ -314,13 +316,13 @@ fn macro_arg(
 
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
-        tt.delimiter = None;
+        tt.delimiter = tt::Delimiter::unspecified();
     }
-
     Some(Arc::new((tt, tmap, fixups.undo_info)))
 }
 
 fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {
+    // FIXME: handle `cfg_attr`
     (|| {
         let censor = match loc.kind {
             MacroCallKind::FnLike { .. } => return None,
@@ -328,7 +330,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 cov_mark::hit!(derive_censoring);
                 ast::Item::cast(node.clone())?
                     .attrs()
-                    .take(derive_attr_index as usize + 1)
+                    .take(derive_attr_index.ast_index() + 1)
                     // FIXME, this resolution should not be done syntactically
                     // derive is a proper macro now, no longer builtin
                     // But we do not have resolution at this stage, this means
@@ -343,7 +345,7 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
                 cov_mark::hit!(attribute_macro_attr_censoring);
                 ast::Item::cast(node.clone())?
                     .doc_comments_and_attrs()
-                    .nth(invoc_attr_index as usize)
+                    .nth(invoc_attr_index.ast_index())
                     .and_then(Either::left)
                     .map(|attr| attr.syntax().clone())
                     .into_iter()
@@ -476,7 +478,10 @@ fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::
     let macro_arg = match db.macro_arg(id) {
         Some(it) => it,
         None => {
-            return ExpandResult::only_err(ExpandError::Other("No arguments for proc-macro".into()))
+            return ExpandResult::with_err(
+                tt::Subtree::empty(),
+                ExpandError::Other("No arguments for proc-macro".into()),
+            )
         }
     };
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
index a1474c44e6c..dfab7ec92c7 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs
@@ -108,7 +108,7 @@ pub fn expand_eager_macro(
         .value
         .token_tree()
         .map(|tt| mbe::syntax_node_to_token_tree(tt.syntax()).0)
-        .unwrap_or_default();
+        .unwrap_or_else(tt::Subtree::empty);
 
     let ast_map = db.ast_id_map(macro_call.file_id);
     let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(&macro_call.value));
@@ -165,9 +165,9 @@ pub fn expand_eager_macro(
     }
 }
 
-fn to_subtree(node: &SyntaxNode) -> tt::Subtree {
+fn to_subtree(node: &SyntaxNode) -> crate::tt::Subtree {
     let mut subtree = mbe::syntax_node_to_token_tree(node).0;
-    subtree.delimiter = None;
+    subtree.delimiter = crate::tt::Delimiter::unspecified();
     subtree
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
index 75d364d5f84..c811d1c66a8 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs
@@ -9,7 +9,7 @@ use syntax::{
     ast::{self, AstNode, HasLoopBody},
     match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
 };
-use tt::Subtree;
+use tt::token_id::Subtree;
 
 /// The result of calculating fixes for a syntax node -- a bunch of changes
 /// (appending to and replacing nodes), the information that is needed to
@@ -297,9 +297,11 @@ pub(crate) fn reverse_fixups(
     tt.token_trees = tts
         .into_iter()
         .filter(|tt| match tt {
-            tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID),
+            tt::TokenTree::Leaf(leaf) => {
+                token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
+            }
             tt::TokenTree::Subtree(st) => {
-                st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID))
+                token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
             }
         })
         .flat_map(|tt| match tt {
@@ -308,9 +310,9 @@ pub(crate) fn reverse_fixups(
                 SmallVec::from_const([tt.into()])
             }
             tt::TokenTree::Leaf(leaf) => {
-                if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+                if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
                     let original = undo_info.original[id.0 as usize].clone();
-                    if original.delimiter.is_none() {
+                    if original.delimiter.kind == tt::DelimiterKind::Invisible {
                         original.token_trees.into()
                     } else {
                         SmallVec::from_const([original.into()])
@@ -327,6 +329,8 @@ pub(crate) fn reverse_fixups(
 mod tests {
     use expect_test::{expect, Expect};
 
+    use crate::tt;
+
     use super::reverse_fixups;
 
     // The following three functions are only meant to check partial structural equivalence of
@@ -341,7 +345,7 @@ mod tests {
     }
 
     fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool {
-        a.delimiter.map(|it| it.kind) == b.delimiter.map(|it| it.kind)
+        a.delimiter.kind == b.delimiter.kind
             && a.token_trees.len() == b.token_trees.len()
             && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b))
     }
@@ -386,7 +390,7 @@ mod tests {
         let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
         assert!(
             check_subtree_eq(&tt, &original_as_tt),
-            "different token tree: {tt:?}, {original_as_tt:?}"
+            "different token tree: {tt:?},\n{original_as_tt:?}"
         );
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index df1e20256ca..2300ee9d089 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -128,7 +128,7 @@ struct HygieneInfo {
     attr_input_or_mac_def_start: Option<InFile<TextSize>>,
 
     macro_def: Arc<TokenExpander>,
-    macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+    macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
     macro_arg_shift: mbe::Shift,
     exp_map: Arc<mbe::TokenMap>,
 }
@@ -191,7 +191,7 @@ fn make_hygiene_info(
             let tt = ast_id
                 .to_node(db)
                 .doc_comments_and_attrs()
-                .nth(invoc_attr_index as usize)
+                .nth(invoc_attr_index.ast_index())
                 .and_then(Either::left)?
                 .token_tree()?;
             Some(InFile::new(ast_id.file_id, tt))
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index bc5f9f3b8af..bc941b54172 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -17,10 +17,13 @@ pub mod proc_macro;
 pub mod quote;
 pub mod eager;
 pub mod mod_path;
+pub mod attrs;
 mod fixup;
 
 pub use mbe::{Origin, ValueResult};
 
+use ::tt::token_id as tt;
+
 use std::{fmt, hash::Hash, iter, sync::Arc};
 
 use base_db::{
@@ -37,6 +40,7 @@ use syntax::{
 
 use crate::{
     ast_id_map::FileAstId,
+    attrs::AttrId,
     builtin_attr_macro::BuiltinAttrExpander,
     builtin_derive_macro::BuiltinDeriveExpander,
     builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
@@ -114,6 +118,7 @@ pub struct MacroDefId {
     pub krate: CrateId,
     pub kind: MacroDefKind,
     pub local_inner: bool,
+    pub allow_internal_unsafe: bool,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -145,7 +150,7 @@ pub enum MacroCallKind {
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
         /// out-of-line modules, which may have attributes spread across 2 files!
-        derive_attr_index: u32,
+        derive_attr_index: AttrId,
         /// Index of the derive macro in the derive attribute
         derive_index: u32,
     },
@@ -156,7 +161,7 @@ pub enum MacroCallKind {
         ///
         /// Outer attributes are counted first, then inner attributes. This does not support
         /// out-of-line modules, which may have attributes spread across 2 files!
-        invoc_attr_index: u32,
+        invoc_attr_index: AttrId,
         /// Whether this attribute is the `#[derive]` attribute.
         is_derive: bool,
     },
@@ -261,10 +266,11 @@ impl HirFileId {
         });
         let attr_input_or_mac_def = def.or_else(|| match loc.kind {
             MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+                // FIXME: handle `cfg_attr`
                 let tt = ast_id
                     .to_node(db)
                     .doc_comments_and_attrs()
-                    .nth(invoc_attr_index as usize)
+                    .nth(invoc_attr_index.ast_index())
                     .and_then(Either::left)?
                     .token_tree()?;
                 Some(InFile::new(ast_id.file_id, tt))
@@ -353,6 +359,14 @@ impl HirFileId {
         }
     }
 
+    #[inline]
+    pub fn file_id(self) -> Option<FileId> {
+        match self.0 & Self::MACRO_FILE_TAG_MASK {
+            0 => Some(FileId(self.0)),
+            _ => None,
+        }
+    }
+
     fn repr(self) -> HirFileIdRepr {
         match self.0 & Self::MACRO_FILE_TAG_MASK {
             0 => HirFileIdRepr::FileId(FileId(self.0)),
@@ -397,8 +411,7 @@ impl MacroDefId {
     }
 }
 
-// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole
-// `cfg_attr` instead of just one of the attributes it expands to
+// FIXME: attribute indices do not account for nested `cfg_attr`
 
 impl MacroCallKind {
     /// Returns the file containing the macro invocation.
@@ -419,7 +432,7 @@ impl MacroCallKind {
                 // FIXME: handle `cfg_attr`
                 ast_id.with_value(ast_id.to_node(db)).map(|it| {
                     it.doc_comments_and_attrs()
-                        .nth(*derive_attr_index as usize)
+                        .nth(derive_attr_index.ast_index())
                         .and_then(|it| match it {
                             Either::Left(attr) => Some(attr.syntax().clone()),
                             Either::Right(_) => None,
@@ -431,7 +444,7 @@ impl MacroCallKind {
                 // FIXME: handle `cfg_attr`
                 ast_id.with_value(ast_id.to_node(db)).map(|it| {
                     it.doc_comments_and_attrs()
-                        .nth(*invoc_attr_index as usize)
+                        .nth(invoc_attr_index.ast_index())
                         .and_then(|it| match it {
                             Either::Left(attr) => Some(attr.syntax().clone()),
                             Either::Right(_) => None,
@@ -488,19 +501,21 @@ impl MacroCallKind {
             MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
             MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
                 // FIXME: should be the range of the macro name, not the whole derive
+                // FIXME: handle `cfg_attr`
                 ast_id
                     .to_node(db)
                     .doc_comments_and_attrs()
-                    .nth(derive_attr_index as usize)
+                    .nth(derive_attr_index.ast_index())
                     .expect("missing derive")
                     .expect_left("derive is a doc comment?")
                     .syntax()
                     .text_range()
             }
+            // FIXME: handle `cfg_attr`
             MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
                 .to_node(db)
                 .doc_comments_and_attrs()
-                .nth(invoc_attr_index as usize)
+                .nth(invoc_attr_index.ast_index())
                 .expect("missing attribute")
                 .expect_left("attribute macro is a doc comment?")
                 .syntax()
@@ -592,9 +607,10 @@ impl ExpansionInfo {
             let token_range = token.value.text_range();
             match &loc.kind {
                 MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
+                    // FIXME: handle `cfg_attr`
                     let attr = item
                         .doc_comments_and_attrs()
-                        .nth(*invoc_attr_index as usize)
+                        .nth(invoc_attr_index.ast_index())
                         .and_then(Either::left)?;
                     match attr.token_tree() {
                         Some(token_tree)
@@ -1031,3 +1047,5 @@ impl ExpandTo {
 pub struct UnresolvedMacro {
     pub path: ModPath,
 }
+
+intern::impl_internable!(ModPath, attrs::AttrInput);
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
index e8b3e312aab..c3462beac73 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs
@@ -2,7 +2,7 @@
 
 use std::fmt;
 
-use syntax::{ast, SmolStr, SyntaxKind};
+use syntax::{ast, utils::is_raw_identifier, SmolStr};
 
 /// `Name` is a wrapper around string, which is used in hir for both references
 /// and declarations. In theory, names should also carry hygiene info, but we are
@@ -33,11 +33,6 @@ impl fmt::Display for Name {
     }
 }
 
-fn is_raw_identifier(name: &str) -> bool {
-    let is_keyword = SyntaxKind::from_keyword(name).is_some();
-    is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
-}
-
 impl<'a> fmt::Display for UnescapedName<'a> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match &self.0 .0 {
@@ -133,6 +128,14 @@ impl Name {
         }
     }
 
+    /// Returns the text this name represents if it isn't a tuple field.
+    pub fn as_str(&self) -> Option<&str> {
+        match &self.0 {
+            Repr::Text(it) => Some(it),
+            _ => None,
+        }
+    }
+
     /// Returns the textual representation of this name as a [`SmolStr`].
     /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
     /// the general case.
@@ -183,7 +186,7 @@ impl AsName for ast::NameOrNameRef {
     }
 }
 
-impl AsName for tt::Ident {
+impl<Span> AsName for tt::Ident<Span> {
     fn as_name(&self) -> Name {
         Name::resolve(&self.text)
     }
@@ -339,6 +342,7 @@ pub mod known {
         recursion_limit,
         feature,
         // known methods of lang items
+        call_once,
         eq,
         ne,
         ge,
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
index 5afdcc0e66d..3f4d2540c09 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/proc_macro.rs
@@ -3,7 +3,7 @@
 use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
 use stdx::never;
 
-use crate::{db::AstDatabase, ExpandError, ExpandResult};
+use crate::{db::AstDatabase, tt, ExpandError, ExpandResult};
 
 #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 pub struct ProcMacroExpander {
@@ -39,7 +39,10 @@ impl ProcMacroExpander {
                     Ok(proc_macros) => proc_macros,
                     Err(_) => {
                         never!("Non-dummy expander even though there are no proc macros");
-                        return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+                        return ExpandResult::with_err(
+                            tt::Subtree::empty(),
+                            ExpandError::Other("Internal error".into()),
+                        );
                     }
                 };
                 let proc_macro = match proc_macros.get(id.0 as usize) {
@@ -50,7 +53,10 @@ impl ProcMacroExpander {
                             proc_macros.len(),
                             id.0
                         );
-                        return ExpandResult::only_err(ExpandError::Other("Internal error".into()));
+                        return ExpandResult::with_err(
+                            tt::Subtree::empty(),
+                            ExpandError::Other("Internal error".into()),
+                        );
                     }
                 };
 
@@ -69,13 +75,17 @@ impl ProcMacroExpander {
                             }
                         }
                         ProcMacroExpansionError::System(text)
-                        | ProcMacroExpansionError::Panic(text) => {
-                            ExpandResult::only_err(ExpandError::Other(text.into()))
-                        }
+                        | ProcMacroExpansionError::Panic(text) => ExpandResult::with_err(
+                            tt::Subtree::empty(),
+                            ExpandError::Other(text.into()),
+                        ),
                     },
                 }
             }
-            None => ExpandResult::only_err(ExpandError::UnresolvedProcMacro(self.krate)),
+            None => ExpandResult::with_err(
+                tt::Subtree::empty(),
+                ExpandError::UnresolvedProcMacro(self.krate),
+            ),
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
index c0a7bc7ca88..63586f9daf0 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/quote.rs
@@ -9,17 +9,18 @@
 #[macro_export]
 macro_rules! __quote {
     () => {
-        Vec::<tt::TokenTree>::new()
+        Vec::<crate::tt::TokenTree>::new()
     };
 
     ( @SUBTREE $delim:ident $($tt:tt)* ) => {
         {
             let children = $crate::__quote!($($tt)*);
-            tt::Subtree {
-                delimiter: Some(tt::Delimiter {
-                    kind: tt::DelimiterKind::$delim,
-                    id: tt::TokenId::unspecified(),
-                }),
+            crate::tt::Subtree {
+                delimiter: crate::tt::Delimiter {
+                    kind: crate::tt::DelimiterKind::$delim,
+                    open: crate::tt::TokenId::unspecified(),
+                    close: crate::tt::TokenId::unspecified(),
+                },
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
             }
         }
@@ -28,10 +29,10 @@ macro_rules! __quote {
     ( @PUNCT $first:literal ) => {
         {
             vec![
-                tt::Leaf::Punct(tt::Punct {
+                crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $first,
-                    spacing: tt::Spacing::Alone,
-                    id: tt::TokenId::unspecified(),
+                    spacing: crate::tt::Spacing::Alone,
+                    span: crate::tt::TokenId::unspecified(),
                 }).into()
             ]
         }
@@ -40,15 +41,15 @@ macro_rules! __quote {
     ( @PUNCT $first:literal, $sec:literal ) => {
         {
             vec![
-                tt::Leaf::Punct(tt::Punct {
+                crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $first,
-                    spacing: tt::Spacing::Joint,
-                    id: tt::TokenId::unspecified(),
+                    spacing: crate::tt::Spacing::Joint,
+                    span: crate::tt::TokenId::unspecified(),
                 }).into(),
-                tt::Leaf::Punct(tt::Punct {
+                crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $sec,
-                    spacing: tt::Spacing::Alone,
-                    id: tt::TokenId::unspecified(),
+                    spacing: crate::tt::Spacing::Alone,
+                    span: crate::tt::TokenId::unspecified(),
                 }).into()
             ]
         }
@@ -67,7 +68,7 @@ macro_rules! __quote {
 
     ( ## $first:ident $($tail:tt)* ) => {
         {
-            let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<tt::TokenTree>>();
+            let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
             let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
             tokens.append(&mut tail_tokens);
             tokens
@@ -86,9 +87,9 @@ macro_rules! __quote {
     // Ident
     ( $tt:ident ) => {
         vec![ {
-            tt::Leaf::Ident(tt::Ident {
+            crate::tt::Leaf::Ident(crate::tt::Ident {
                 text: stringify!($tt).into(),
-                id: tt::TokenId::unspecified(),
+                span: crate::tt::TokenId::unspecified(),
             }).into()
         }]
     };
@@ -127,42 +128,42 @@ macro_rules! quote {
 }
 
 pub(crate) trait IntoTt {
-    fn to_subtree(self) -> tt::Subtree;
-    fn to_tokens(self) -> Vec<tt::TokenTree>;
+    fn to_subtree(self) -> crate::tt::Subtree;
+    fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
 }
 
-impl IntoTt for Vec<tt::TokenTree> {
-    fn to_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self }
+impl IntoTt for Vec<crate::tt::TokenTree> {
+    fn to_subtree(self) -> crate::tt::Subtree {
+        crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
     }
 
-    fn to_tokens(self) -> Vec<tt::TokenTree> {
+    fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
         self
     }
 }
 
-impl IntoTt for tt::Subtree {
-    fn to_subtree(self) -> tt::Subtree {
+impl IntoTt for crate::tt::Subtree {
+    fn to_subtree(self) -> crate::tt::Subtree {
         self
     }
 
-    fn to_tokens(self) -> Vec<tt::TokenTree> {
-        vec![tt::TokenTree::Subtree(self)]
+    fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
+        vec![crate::tt::TokenTree::Subtree(self)]
     }
 }
 
 pub(crate) trait ToTokenTree {
-    fn to_token(self) -> tt::TokenTree;
+    fn to_token(self) -> crate::tt::TokenTree;
 }
 
-impl ToTokenTree for tt::TokenTree {
-    fn to_token(self) -> tt::TokenTree {
+impl ToTokenTree for crate::tt::TokenTree {
+    fn to_token(self) -> crate::tt::TokenTree {
         self
     }
 }
 
-impl ToTokenTree for tt::Subtree {
-    fn to_token(self) -> tt::TokenTree {
+impl ToTokenTree for crate::tt::Subtree {
+    fn to_token(self) -> crate::tt::TokenTree {
         self.into()
     }
 }
@@ -171,15 +172,15 @@ macro_rules! impl_to_to_tokentrees {
     ($($ty:ty => $this:ident $im:block);*) => {
         $(
             impl ToTokenTree for $ty {
-                fn to_token($this) -> tt::TokenTree {
-                    let leaf: tt::Leaf = $im.into();
+                fn to_token($this) -> crate::tt::TokenTree {
+                    let leaf: crate::tt::Leaf = $im.into();
                     leaf.into()
                 }
             }
 
             impl ToTokenTree for &$ty {
-                fn to_token($this) -> tt::TokenTree {
-                    let leaf: tt::Leaf = $im.clone().into();
+                fn to_token($this) -> crate::tt::TokenTree {
+                    let leaf: crate::tt::Leaf = $im.clone().into();
                     leaf.into()
                 }
             }
@@ -188,16 +189,16 @@ macro_rules! impl_to_to_tokentrees {
 }
 
 impl_to_to_tokentrees! {
-    u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
-    usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
-    i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
-    bool => self { tt::Ident{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
-    tt::Leaf => self { self };
-    tt::Literal => self { self };
-    tt::Ident => self { self };
-    tt::Punct => self { self };
-    &str => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}};
-    String => self { tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), id: tt::TokenId::unspecified()}}
+    u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
+    usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
+    i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
+    bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
+    crate::tt::Leaf => self { self };
+    crate::tt::Literal => self { self };
+    crate::tt::Ident => self { self };
+    crate::tt::Punct => self { self };
+    &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
+    String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
 }
 
 #[cfg(test)]
@@ -223,8 +224,8 @@ mod tests {
         assert_eq!(quote!(#s).to_string(), "\"hello\"");
     }
 
-    fn mk_ident(name: &str) -> tt::Ident {
-        tt::Ident { text: name.into(), id: tt::TokenId::unspecified() }
+    fn mk_ident(name: &str) -> crate::tt::Ident {
+        crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
     }
 
     #[test]
@@ -234,7 +235,7 @@ mod tests {
         let quoted = quote!(#a);
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:?}");
-        assert_eq!(t, "SUBTREE $\n  IDENT   hello 4294967295");
+        assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n  IDENT   hello 4294967295");
     }
 
     #[test]
@@ -263,11 +264,12 @@ mod tests {
         let fields = [mk_ident("name"), mk_ident("id")];
         let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
 
-        let list = tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                kind: tt::DelimiterKind::Brace,
-                id: tt::TokenId::unspecified(),
-            }),
+        let list = crate::tt::Subtree {
+            delimiter: crate::tt::Delimiter {
+                kind: crate::tt::DelimiterKind::Brace,
+                open: crate::tt::TokenId::unspecified(),
+                close: crate::tt::TokenId::unspecified(),
+            },
             token_trees: fields.collect(),
         };
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
index ae837ac6dce..490bbe1e724 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml
@@ -2,9 +2,11 @@
 name = "hir-ty"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -24,20 +26,21 @@ chalk-ir = "0.88.0"
 chalk-recursive = { version = "0.88.0", default-features = false }
 chalk-derive = "0.88.0"
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 typed-arena = "2.0.1"
 rustc_index = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_index", default-features = false }
 
-stdx = { path = "../stdx", version = "0.0.0" }
-hir-def = { path = "../hir-def", version = "0.0.0" }
-hir-expand = { path = "../hir-expand", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-limit = { path = "../limit", version = "0.0.0" }
+# local deps
+stdx.workspace = true
+intern.workspace = true
+hir-def.workspace = true
+hir-expand.workspace = true
+base-db.workspace = true
+profile.workspace = true
+syntax.workspace = true
+limit.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
 expect-test = "1.4.0"
 tracing = "0.1.35"
 tracing-subscriber = { version = "0.3.16", default-features = false, features = [
@@ -45,3 +48,7 @@ tracing-subscriber = { version = "0.3.16", default-features = false, features =
     "registry",
 ] }
 tracing-tree = "0.2.1"
+project-model = { path = "../project-model" }
+
+# local deps
+test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
index cbcf8f74c55..58744dd0c0f 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs
@@ -6,9 +6,9 @@
 use std::sync::Arc;
 
 use chalk_ir::cast::Cast;
+use hir_def::lang_item::LangItem;
 use hir_expand::name::name;
 use limit::Limit;
-use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase, infer::unify::InferenceTable, Canonical, Goal, Interner, ProjectionTyExt,
@@ -17,11 +17,13 @@ use crate::{
 
 static AUTODEREF_RECURSION_LIMIT: Limit = Limit::new(10);
 
+#[derive(Debug)]
 pub(crate) enum AutoderefKind {
     Builtin,
     Overloaded,
 }
 
+#[derive(Debug)]
 pub(crate) struct Autoderef<'a, 'db> {
     pub(crate) table: &'a mut InferenceTable<'db>,
     ty: Ty,
@@ -117,9 +119,8 @@ fn deref_by_trait(table: &mut InferenceTable<'_>, ty: Ty) -> Option<Ty> {
     }
 
     let db = table.db;
-    let deref_trait = db
-        .lang_item(table.trait_env.krate, SmolStr::new_inline("deref"))
-        .and_then(|l| l.as_trait())?;
+    let deref_trait =
+        db.lang_item(table.trait_env.krate, LangItem::Deref).and_then(|l| l.as_trait())?;
     let target = db.trait_data(deref_trait).associated_type_by_name(&name![Target])?;
 
     let projection = {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
index d5ef0c22dec..8faef7bf71e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/builder.rs
@@ -63,7 +63,7 @@ impl<D> TyBuilder<D> {
     }
 
     fn build_internal(self) -> (D, Substitution) {
-        assert_eq!(self.vec.len(), self.param_kinds.len());
+        assert_eq!(self.vec.len(), self.param_kinds.len(), "{:?}", &self.param_kinds);
         for (a, e) in self.vec.iter().zip(self.param_kinds.iter()) {
             self.assert_match_kind(a, e);
         }
@@ -282,6 +282,21 @@ impl TyBuilder<Tuple> {
         let (Tuple(size), subst) = self.build_internal();
         TyKind::Tuple(size, subst).intern(Interner)
     }
+
+    pub fn tuple_with<I>(elements: I) -> Ty
+    where
+        I: IntoIterator<Item = Ty>,
+        <I as IntoIterator>::IntoIter: ExactSizeIterator,
+    {
+        let elements = elements.into_iter();
+        let len = elements.len();
+        let mut b =
+            TyBuilder::new(Tuple(len), iter::repeat(ParamKind::Type).take(len).collect(), None);
+        for e in elements {
+            b = b.push(e);
+        }
+        b.build()
+    }
 }
 
 impl TyBuilder<TraitId> {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index 1c2b8de7f78..6989e9fb9be 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -3,7 +3,6 @@
 use std::sync::Arc;
 
 use cov_mark::hit;
-use syntax::SmolStr;
 use tracing::debug;
 
 use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds};
@@ -12,7 +11,7 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait};
 use base_db::CrateId;
 use hir_def::{
     expr::Movability,
-    lang_item::{lang_attr, LangItemTarget},
+    lang_item::{lang_attr, LangItem, LangItemTarget},
     AssocItemId, GenericDefId, HasModule, ItemContainerId, Lookup, ModuleId, TypeAliasId,
 };
 use hir_expand::name::name;
@@ -182,9 +181,9 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
         &self,
         well_known_trait: rust_ir::WellKnownTrait,
     ) -> Option<chalk_ir::TraitId<Interner>> {
-        let lang_attr = lang_attr_from_well_known_trait(well_known_trait);
+        let lang_attr = lang_item_from_well_known_trait(well_known_trait);
         let trait_ = match self.db.lang_item(self.krate, lang_attr.into()) {
-            Some(LangItemTarget::TraitId(trait_)) => trait_,
+            Some(LangItemTarget::Trait(trait_)) => trait_,
             _ => return None,
         };
         Some(to_chalk_trait_id(trait_))
@@ -206,7 +205,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
                     .return_type_impl_traits(func)
                     .expect("impl trait id without impl traits");
                 let (datas, binders) = (*datas).as_ref().into_value_and_skipped_binders();
-                let data = &datas.impl_traits[idx as usize];
+                let data = &datas.impl_traits[idx];
                 let bound = OpaqueTyDatumBound {
                     bounds: make_single_type_binders(data.bounds.skip_binders().to_vec()),
                     where_clauses: chalk_ir::Binders::empty(Interner, vec![]),
@@ -216,7 +215,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
             crate::ImplTraitId::AsyncBlockTypeImplTrait(..) => {
                 if let Some((future_trait, future_output)) = self
                     .db
-                    .lang_item(self.krate, SmolStr::new_inline("future_trait"))
+                    .lang_item(self.krate, LangItem::Future)
                     .and_then(|item| item.as_trait())
                     .and_then(|trait_| {
                         let alias =
@@ -246,7 +245,7 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
                     binder.push(crate::wrap_empty_binders(impl_bound));
                     let sized_trait = self
                         .db
-                        .lang_item(self.krate, SmolStr::new_inline("sized"))
+                        .lang_item(self.krate, LangItem::Sized)
                         .and_then(|item| item.as_trait());
                     if let Some(sized_trait_) = sized_trait {
                         let sized_bound = WhereClause::Implemented(TraitRef {
@@ -493,7 +492,7 @@ pub(crate) fn associated_ty_data_query(
 
     if !ctx.unsized_types.borrow().contains(&self_ty) {
         let sized_trait = db
-            .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+            .lang_item(resolver.krate(), LangItem::Sized)
             .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
         let sized_bound = sized_trait.into_iter().map(|sized_trait| {
             let trait_bound =
@@ -541,8 +540,8 @@ pub(crate) fn trait_datum_query(
     let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars);
     let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect();
     let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses };
-    let well_known =
-        lang_attr(db.upcast(), trait_).and_then(|name| well_known_trait_from_lang_attr(&name));
+    let well_known = lang_attr(db.upcast(), trait_)
+        .and_then(|name| well_known_trait_from_lang_item(LangItem::from_str(&name)?));
     let trait_datum = TraitDatum {
         id: trait_id,
         binders: make_binders(db, &generic_params, trait_datum_bound),
@@ -553,42 +552,42 @@ pub(crate) fn trait_datum_query(
     Arc::new(trait_datum)
 }
 
-fn well_known_trait_from_lang_attr(name: &str) -> Option<WellKnownTrait> {
-    Some(match name {
-        "clone" => WellKnownTrait::Clone,
-        "coerce_unsized" => WellKnownTrait::CoerceUnsized,
-        "copy" => WellKnownTrait::Copy,
-        "discriminant_kind" => WellKnownTrait::DiscriminantKind,
-        "dispatch_from_dyn" => WellKnownTrait::DispatchFromDyn,
-        "drop" => WellKnownTrait::Drop,
-        "fn" => WellKnownTrait::Fn,
-        "fn_mut" => WellKnownTrait::FnMut,
-        "fn_once" => WellKnownTrait::FnOnce,
-        "generator" => WellKnownTrait::Generator,
-        "sized" => WellKnownTrait::Sized,
-        "unpin" => WellKnownTrait::Unpin,
-        "unsize" => WellKnownTrait::Unsize,
-        "tuple_trait" => WellKnownTrait::Tuple,
+fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> {
+    Some(match item {
+        LangItem::Clone => WellKnownTrait::Clone,
+        LangItem::CoerceUnsized => WellKnownTrait::CoerceUnsized,
+        LangItem::Copy => WellKnownTrait::Copy,
+        LangItem::DiscriminantKind => WellKnownTrait::DiscriminantKind,
+        LangItem::DispatchFromDyn => WellKnownTrait::DispatchFromDyn,
+        LangItem::Drop => WellKnownTrait::Drop,
+        LangItem::Fn => WellKnownTrait::Fn,
+        LangItem::FnMut => WellKnownTrait::FnMut,
+        LangItem::FnOnce => WellKnownTrait::FnOnce,
+        LangItem::Generator => WellKnownTrait::Generator,
+        LangItem::Sized => WellKnownTrait::Sized,
+        LangItem::Unpin => WellKnownTrait::Unpin,
+        LangItem::Unsize => WellKnownTrait::Unsize,
+        LangItem::Tuple => WellKnownTrait::Tuple,
         _ => return None,
     })
 }
 
-fn lang_attr_from_well_known_trait(attr: WellKnownTrait) -> &'static str {
-    match attr {
-        WellKnownTrait::Clone => "clone",
-        WellKnownTrait::CoerceUnsized => "coerce_unsized",
-        WellKnownTrait::Copy => "copy",
-        WellKnownTrait::DiscriminantKind => "discriminant_kind",
-        WellKnownTrait::DispatchFromDyn => "dispatch_from_dyn",
-        WellKnownTrait::Drop => "drop",
-        WellKnownTrait::Fn => "fn",
-        WellKnownTrait::FnMut => "fn_mut",
-        WellKnownTrait::FnOnce => "fn_once",
-        WellKnownTrait::Generator => "generator",
-        WellKnownTrait::Sized => "sized",
-        WellKnownTrait::Tuple => "tuple_trait",
-        WellKnownTrait::Unpin => "unpin",
-        WellKnownTrait::Unsize => "unsize",
+fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem {
+    match trait_ {
+        WellKnownTrait::Clone => LangItem::Clone,
+        WellKnownTrait::CoerceUnsized => LangItem::CoerceUnsized,
+        WellKnownTrait::Copy => LangItem::Copy,
+        WellKnownTrait::DiscriminantKind => LangItem::DiscriminantKind,
+        WellKnownTrait::DispatchFromDyn => LangItem::DispatchFromDyn,
+        WellKnownTrait::Drop => LangItem::Drop,
+        WellKnownTrait::Fn => LangItem::Fn,
+        WellKnownTrait::FnMut => LangItem::FnMut,
+        WellKnownTrait::FnOnce => LangItem::FnOnce,
+        WellKnownTrait::Generator => LangItem::Generator,
+        WellKnownTrait::Sized => LangItem::Sized,
+        WellKnownTrait::Tuple => LangItem::Tuple,
+        WellKnownTrait::Unpin => LangItem::Unpin,
+        WellKnownTrait::Unsize => LangItem::Unsize,
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
index 996b42f5bd8..45c975dfcdc 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs
@@ -1,13 +1,13 @@
 //! Various extensions traits for Chalk types.
 
-use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, UintTy};
+use chalk_ir::{FloatTy, IntTy, Mutability, Scalar, TyVariableKind, UintTy};
 use hir_def::{
     builtin_type::{BuiltinFloat, BuiltinInt, BuiltinType, BuiltinUint},
     generics::TypeOrConstParamData,
+    lang_item::LangItem,
     type_ref::Rawness,
     FunctionId, GenericDefId, HasModule, ItemContainerId, Lookup, TraitId,
 };
-use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
@@ -18,6 +18,8 @@ use crate::{
 
 pub trait TyExt {
     fn is_unit(&self) -> bool;
+    fn is_integral(&self) -> bool;
+    fn is_floating_point(&self) -> bool;
     fn is_never(&self) -> bool;
     fn is_unknown(&self) -> bool;
     fn is_ty_var(&self) -> bool;
@@ -51,6 +53,21 @@ impl TyExt for Ty {
         matches!(self.kind(Interner), TyKind::Tuple(0, _))
     }
 
+    fn is_integral(&self) -> bool {
+        matches!(
+            self.kind(Interner),
+            TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
+                | TyKind::InferenceVar(_, TyVariableKind::Integer)
+        )
+    }
+
+    fn is_floating_point(&self) -> bool {
+        matches!(
+            self.kind(Interner),
+            TyKind::Scalar(Scalar::Float(_)) | TyKind::InferenceVar(_, TyVariableKind::Float)
+        )
+    }
+
     fn is_never(&self) -> bool {
         matches!(self.kind(Interner), TyKind::Never)
     }
@@ -197,9 +214,8 @@ impl TyExt for Ty {
                 match db.lookup_intern_impl_trait_id((*opaque_ty_id).into()) {
                     ImplTraitId::AsyncBlockTypeImplTrait(def, _expr) => {
                         let krate = def.module(db.upcast()).krate();
-                        if let Some(future_trait) = db
-                            .lang_item(krate, SmolStr::new_inline("future_trait"))
-                            .and_then(|item| item.as_trait())
+                        if let Some(future_trait) =
+                            db.lang_item(krate, LangItem::Future).and_then(|item| item.as_trait())
                         {
                             // This is only used by type walking.
                             // Parameters will be walked outside, and projection predicate is not used.
@@ -218,9 +234,8 @@ impl TyExt for Ty {
                     }
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         db.return_type_impl_traits(func).map(|it| {
-                            let data = (*it)
-                                .as_ref()
-                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            let data =
+                                (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                             data.substitute(Interner, &subst).into_value_and_skipped_binders().0
                         })
                     }
@@ -231,9 +246,8 @@ impl TyExt for Ty {
                 {
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         db.return_type_impl_traits(func).map(|it| {
-                            let data = (*it)
-                                .as_ref()
-                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            let data =
+                                (*it).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                             data.substitute(Interner, &opaque_ty.substitution)
                         })
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index 54b244620fb..d45e2a943ad 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -65,7 +65,7 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     fn layout_of_adt(&self, def: AdtId, subst: Substitution) -> Result<Layout, LayoutError>;
 
     #[salsa::invoke(crate::layout::target_data_layout_query)]
-    fn target_data_layout(&self, krate: CrateId) -> Arc<TargetDataLayout>;
+    fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>;
 
     #[salsa::invoke(crate::lower::callable_item_sig)]
     fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig;
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
index 88d607194f7..2c136896209 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -162,6 +162,7 @@ mod tests {
         check(to_lower_snake_case, "a", expect![[""]]);
         check(to_lower_snake_case, "abc", expect![[""]]);
         check(to_lower_snake_case, "foo__bar", expect![["foo_bar"]]);
+        check(to_lower_snake_case, "Δ", expect!["δ"]);
     }
 
     #[test]
@@ -195,5 +196,6 @@ mod tests {
         check(to_upper_snake_case, "X86_64", expect![[""]]);
         check(to_upper_snake_case, "FOO_BAr", expect![["FOO_BAR"]]);
         check(to_upper_snake_case, "FOO__BAR", expect![["FOO_BAR"]]);
+        check(to_upper_snake_case, "ß", expect!["SS"]);
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
index c8df4c796ef..3286dcb5afd 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs
@@ -5,7 +5,9 @@
 use std::fmt;
 use std::sync::Arc;
 
-use hir_def::{path::path, resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
+use hir_def::lang_item::LangItem;
+use hir_def::{resolver::HasResolver, AdtId, AssocItemId, DefWithBodyId, HasModule};
+use hir_def::{ItemContainerId, Lookup};
 use hir_expand::name;
 use itertools::Either;
 use itertools::Itertools;
@@ -245,26 +247,25 @@ struct FilterMapNextChecker {
 impl FilterMapNextChecker {
     fn new(resolver: &hir_def::resolver::Resolver, db: &dyn HirDatabase) -> Self {
         // Find and store the FunctionIds for Iterator::filter_map and Iterator::next
-        let iterator_path = path![core::iter::Iterator];
-        let mut filter_map_function_id = None;
-        let mut next_function_id = None;
-
-        if let Some(iterator_trait_id) = resolver.resolve_known_trait(db.upcast(), &iterator_path) {
-            let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
-            for item in iterator_trait_items.iter() {
-                if let (name, AssocItemId::FunctionId(id)) = item {
-                    if *name == name![filter_map] {
-                        filter_map_function_id = Some(*id);
+        let (next_function_id, filter_map_function_id) = match db
+            .lang_item(resolver.krate(), LangItem::IteratorNext)
+            .and_then(|it| it.as_function())
+        {
+            Some(next_function_id) => (
+                Some(next_function_id),
+                match next_function_id.lookup(db.upcast()).container {
+                    ItemContainerId::TraitId(iterator_trait_id) => {
+                        let iterator_trait_items = &db.trait_data(iterator_trait_id).items;
+                        iterator_trait_items.iter().find_map(|(name, it)| match it {
+                            &AssocItemId::FunctionId(id) if *name == name![filter_map] => Some(id),
+                            _ => None,
+                        })
                     }
-                    if *name == name![next] {
-                        next_function_id = Some(*id);
-                    }
-                }
-                if filter_map_function_id.is_some() && next_function_id.is_some() {
-                    break;
-                }
-            }
-        }
+                    _ => None,
+                },
+            ),
+            None => (None, None),
+        };
         Self { filter_map_function_id, next_function_id, prev_filter_map_expr_id: None }
     }
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
index 66e813eed8b..5fcbdf34f3c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs
@@ -11,17 +11,17 @@ use hir_def::{
     db::DefDatabase,
     find_path,
     generics::{TypeOrConstParamData, TypeParamProvenance},
-    intern::{Internable, Interned},
     item_scope::ItemInNs,
+    lang_item::{LangItem, LangItemTarget},
     path::{Path, PathKind},
     type_ref::{ConstScalar, TraitBoundModifier, TypeBound, TypeRef},
     visibility::Visibility,
     HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId,
 };
 use hir_expand::{hygiene::Hygiene, name::Name};
+use intern::{Internable, Interned};
 use itertools::Itertools;
 use smallvec::SmallVec;
-use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase,
@@ -325,7 +325,7 @@ impl HirDisplay for ProjectionTy {
 
         let trait_ref = self.trait_ref(f.db);
         write!(f, "<")?;
-        fmt_trait_ref(&trait_ref, f, true)?;
+        fmt_trait_ref(f, &trait_ref, true)?;
         write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
         let proj_params_count =
             self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
@@ -383,7 +383,10 @@ impl HirDisplay for BoundVar {
 }
 
 impl HirDisplay for Ty {
-    fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
+    fn hir_fmt(
+        &self,
+        f @ &mut HirFormatter { db, .. }: &mut HirFormatter<'_>,
+    ) -> Result<(), HirDisplayError> {
         if f.should_truncate() {
             return write!(f, "{TYPE_HINT_TRUNCATION}");
         }
@@ -434,7 +437,7 @@ impl HirDisplay for Ty {
                     bounds.iter().any(|bound| {
                         if let WhereClause::Implemented(trait_ref) = bound.skip_binders() {
                             let trait_ = trait_ref.hir_trait_id();
-                            fn_traits(f.db.upcast(), trait_).any(|it| it == trait_)
+                            fn_traits(db.upcast(), trait_).any(|it| it == trait_)
                         } else {
                             false
                         }
@@ -450,22 +453,20 @@ impl HirDisplay for Ty {
                         substitution: parameters,
                     }))
                     | TyKind::OpaqueType(opaque_ty_id, parameters) => {
-                        let impl_trait_id =
-                            f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+                        let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
                         if let ImplTraitId::ReturnTypeImplTrait(func, idx) = impl_trait_id {
-                            let datas =
-                                f.db.return_type_impl_traits(func)
-                                    .expect("impl trait id without data");
-                            let data = (*datas)
-                                .as_ref()
-                                .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            let datas = db
+                                .return_type_impl_traits(func)
+                                .expect("impl trait id without data");
+                            let data =
+                                (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                             let bounds = data.substitute(Interner, parameters);
                             let mut len = bounds.skip_binders().len();
 
                             // Don't count Sized but count when it absent
                             // (i.e. when explicit ?Sized bound is set).
                             let default_sized = SizedByDefault::Sized {
-                                anchor: func.lookup(f.db.upcast()).module(f.db.upcast()).krate(),
+                                anchor: func.lookup(db.upcast()).module(db.upcast()).krate(),
                             };
                             let sized_bounds = bounds
                                 .skip_binders()
@@ -476,7 +477,7 @@ impl HirDisplay for Ty {
                                         WhereClause::Implemented(trait_ref)
                                             if default_sized.is_sized_trait(
                                                 trait_ref.hir_trait_id(),
-                                                f.db.upcast(),
+                                                db.upcast(),
                                             ),
                                     )
                                 })
@@ -524,19 +525,19 @@ impl HirDisplay for Ty {
                 sig.hir_fmt(f)?;
             }
             TyKind::FnDef(def, parameters) => {
-                let def = from_chalk(f.db, *def);
-                let sig = f.db.callable_item_signature(def).substitute(Interner, parameters);
+                let def = from_chalk(db, *def);
+                let sig = db.callable_item_signature(def).substitute(Interner, parameters);
+                f.start_location_link(def.into());
                 match def {
-                    CallableDefId::FunctionId(ff) => {
-                        write!(f, "fn {}", f.db.function_data(ff).name)?
-                    }
-                    CallableDefId::StructId(s) => write!(f, "{}", f.db.struct_data(s).name)?,
+                    CallableDefId::FunctionId(ff) => write!(f, "fn {}", db.function_data(ff).name)?,
+                    CallableDefId::StructId(s) => write!(f, "{}", db.struct_data(s).name)?,
                     CallableDefId::EnumVariantId(e) => {
-                        write!(f, "{}", f.db.enum_data(e.parent).variants[e.local_id].name)?
+                        write!(f, "{}", db.enum_data(e.parent).variants[e.local_id].name)?
                     }
                 };
+                f.end_location_link();
                 if parameters.len(Interner) > 0 {
-                    let generics = generics(f.db.upcast(), def.into());
+                    let generics = generics(db.upcast(), def.into());
                     let (parent_params, self_param, type_params, const_params, _impl_trait_params) =
                         generics.provenance_split();
                     let total_len = parent_params + self_param + type_params + const_params;
@@ -568,15 +569,15 @@ impl HirDisplay for Ty {
                 match f.display_target {
                     DisplayTarget::Diagnostics | DisplayTarget::Test => {
                         let name = match *def_id {
-                            hir_def::AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
-                            hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
-                            hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
+                            hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(),
+                            hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(),
+                            hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(),
                         };
                         write!(f, "{name}")?;
                     }
                     DisplayTarget::SourceCode { module_id } => {
                         if let Some(path) = find_path::find_path(
-                            f.db.upcast(),
+                            db.upcast(),
                             ItemInNs::Types((*def_id).into()),
                             module_id,
                             false,
@@ -596,8 +597,8 @@ impl HirDisplay for Ty {
                         || f.omit_verbose_types()
                     {
                         match self
-                            .as_generic_def(f.db)
-                            .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
+                            .as_generic_def(db)
+                            .map(|generic_def_id| db.generic_defaults(generic_def_id))
                             .filter(|defaults| !defaults.is_empty())
                         {
                             None => parameters.as_slice(Interner),
@@ -669,16 +670,23 @@ impl HirDisplay for Ty {
             }
             TyKind::AssociatedType(assoc_type_id, parameters) => {
                 let type_alias = from_assoc_type_id(*assoc_type_id);
-                let trait_ = match type_alias.lookup(f.db.upcast()).container {
+                let trait_ = match type_alias.lookup(db.upcast()).container {
                     ItemContainerId::TraitId(it) => it,
                     _ => panic!("not an associated type"),
                 };
-                let trait_ = f.db.trait_data(trait_);
-                let type_alias_data = f.db.type_alias_data(type_alias);
+                let trait_data = db.trait_data(trait_);
+                let type_alias_data = db.type_alias_data(type_alias);
 
                 // Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
                 if f.display_target.is_test() {
-                    write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
+                    f.start_location_link(trait_.into());
+                    write!(f, "{}", trait_data.name)?;
+                    f.end_location_link();
+                    write!(f, "::")?;
+
+                    f.start_location_link(type_alias.into());
+                    write!(f, "{}", type_alias_data.name)?;
+                    f.end_location_link();
                     // Note that the generic args for the associated type come before those for the
                     // trait (including the self type).
                     // FIXME: reconsider the generic args order upon formatting?
@@ -697,30 +705,54 @@ impl HirDisplay for Ty {
                 }
             }
             TyKind::Foreign(type_alias) => {
-                let type_alias = f.db.type_alias_data(from_foreign_def_id(*type_alias));
+                let alias = from_foreign_def_id(*type_alias);
+                let type_alias = db.type_alias_data(alias);
+                f.start_location_link(alias.into());
                 write!(f, "{}", type_alias.name)?;
+                f.end_location_link();
             }
             TyKind::OpaqueType(opaque_ty_id, parameters) => {
-                let impl_trait_id = f.db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+                let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
                 match impl_trait_id {
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         let datas =
-                            f.db.return_type_impl_traits(func).expect("impl trait id without data");
-                        let data = (*datas)
-                            .as_ref()
-                            .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            db.return_type_impl_traits(func).expect("impl trait id without data");
+                        let data =
+                            (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &parameters);
-                        let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+                        let krate = func.lookup(db.upcast()).module(db.upcast()).krate();
                         write_bounds_like_dyn_trait_with_prefix(
+                            f,
                             "impl",
                             bounds.skip_binders(),
                             SizedByDefault::Sized { anchor: krate },
-                            f,
                         )?;
                         // FIXME: it would maybe be good to distinguish this from the alias type (when debug printing), and to show the substitution
                     }
-                    ImplTraitId::AsyncBlockTypeImplTrait(..) => {
-                        write!(f, "impl Future<Output = ")?;
+                    ImplTraitId::AsyncBlockTypeImplTrait(body, ..) => {
+                        let future_trait = db
+                            .lang_item(body.module(db.upcast()).krate(), LangItem::Future)
+                            .and_then(LangItemTarget::as_trait);
+                        let output = future_trait.and_then(|t| {
+                            db.trait_data(t).associated_type_by_name(&hir_expand::name!(Output))
+                        });
+                        write!(f, "impl ")?;
+                        if let Some(t) = future_trait {
+                            f.start_location_link(t.into());
+                        }
+                        write!(f, "Future")?;
+                        if let Some(_) = future_trait {
+                            f.end_location_link();
+                        }
+                        write!(f, "<")?;
+                        if let Some(t) = output {
+                            f.start_location_link(t.into());
+                        }
+                        write!(f, "Output")?;
+                        if let Some(_) = output {
+                            f.end_location_link();
+                        }
+                        write!(f, " = ")?;
                         parameters.at(Interner, 0).hir_fmt(f)?;
                         write!(f, ">")?;
                     }
@@ -732,7 +764,7 @@ impl HirDisplay for Ty {
                         DisplaySourceCodeError::Closure,
                     ));
                 }
-                let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(f.db);
+                let sig = substs.at(Interner, 0).assert_ty_ref(Interner).callable_sig(db);
                 if let Some(sig) = sig {
                     if sig.params().is_empty() {
                         write!(f, "||")?;
@@ -751,8 +783,8 @@ impl HirDisplay for Ty {
                 }
             }
             TyKind::Placeholder(idx) => {
-                let id = from_placeholder_idx(f.db, *idx);
-                let generics = generics(f.db.upcast(), id.parent);
+                let id = from_placeholder_idx(db, *idx);
+                let generics = generics(db.upcast(), id.parent);
                 let param_data = &generics.params.type_or_consts[id.local_id];
                 match param_data {
                     TypeOrConstParamData::TypeParamData(p) => match p.provenance {
@@ -760,28 +792,28 @@ impl HirDisplay for Ty {
                             write!(f, "{}", p.name.clone().unwrap_or_else(Name::missing))?
                         }
                         TypeParamProvenance::ArgumentImplTrait => {
-                            let substs = generics.placeholder_subst(f.db);
-                            let bounds =
-                                f.db.generic_predicates(id.parent)
-                                    .iter()
-                                    .map(|pred| pred.clone().substitute(Interner, &substs))
-                                    .filter(|wc| match &wc.skip_binders() {
-                                        WhereClause::Implemented(tr) => {
-                                            &tr.self_type_parameter(Interner) == self
-                                        }
-                                        WhereClause::AliasEq(AliasEq {
-                                            alias: AliasTy::Projection(proj),
-                                            ty: _,
-                                        }) => &proj.self_type_parameter(f.db) == self,
-                                        _ => false,
-                                    })
-                                    .collect::<Vec<_>>();
-                            let krate = id.parent.module(f.db.upcast()).krate();
+                            let substs = generics.placeholder_subst(db);
+                            let bounds = db
+                                .generic_predicates(id.parent)
+                                .iter()
+                                .map(|pred| pred.clone().substitute(Interner, &substs))
+                                .filter(|wc| match &wc.skip_binders() {
+                                    WhereClause::Implemented(tr) => {
+                                        &tr.self_type_parameter(Interner) == self
+                                    }
+                                    WhereClause::AliasEq(AliasEq {
+                                        alias: AliasTy::Projection(proj),
+                                        ty: _,
+                                    }) => &proj.self_type_parameter(db) == self,
+                                    _ => false,
+                                })
+                                .collect::<Vec<_>>();
+                            let krate = id.parent.module(db.upcast()).krate();
                             write_bounds_like_dyn_trait_with_prefix(
+                                f,
                                 "impl",
                                 &bounds,
                                 SizedByDefault::Sized { anchor: krate },
-                                f,
                             )?;
                         }
                     },
@@ -803,29 +835,28 @@ impl HirDisplay for Ty {
                 bounds.extend(auto_traits);
 
                 write_bounds_like_dyn_trait_with_prefix(
+                    f,
                     "dyn",
                     &bounds,
                     SizedByDefault::NotSized,
-                    f,
                 )?;
             }
             TyKind::Alias(AliasTy::Projection(p_ty)) => p_ty.hir_fmt(f)?,
             TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
-                let impl_trait_id = f.db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
+                let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.opaque_ty_id.into());
                 match impl_trait_id {
                     ImplTraitId::ReturnTypeImplTrait(func, idx) => {
                         let datas =
-                            f.db.return_type_impl_traits(func).expect("impl trait id without data");
-                        let data = (*datas)
-                            .as_ref()
-                            .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone());
+                            db.return_type_impl_traits(func).expect("impl trait id without data");
+                        let data =
+                            (*datas).as_ref().map(|rpit| rpit.impl_traits[idx].bounds.clone());
                         let bounds = data.substitute(Interner, &opaque_ty.substitution);
-                        let krate = func.lookup(f.db.upcast()).module(f.db.upcast()).krate();
+                        let krate = func.lookup(db.upcast()).module(db.upcast()).krate();
                         write_bounds_like_dyn_trait_with_prefix(
+                            f,
                             "impl",
                             bounds.skip_binders(),
                             SizedByDefault::Sized { anchor: krate },
-                            f,
                         )?;
                     }
                     ImplTraitId::AsyncBlockTypeImplTrait(..) => {
@@ -848,7 +879,6 @@ impl HirDisplay for Ty {
                         DisplaySourceCodeError::Generator,
                     ));
                 }
-
                 let subst = subst.as_slice(Interner);
                 let a: Option<SmallVec<[&Ty; 3]>> = subst
                     .get(subst.len() - 3..)
@@ -897,7 +927,7 @@ impl HirDisplay for CallableSig {
     }
 }
 
-fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> {
+fn fn_traits(db: &dyn DefDatabase, trait_: TraitId) -> impl Iterator<Item = TraitId> + '_ {
     let krate = trait_.lookup(db).container.krate();
     utils::fn_traits(db, krate)
 }
@@ -914,7 +944,7 @@ impl SizedByDefault {
             Self::NotSized => false,
             Self::Sized { anchor } => {
                 let sized_trait = db
-                    .lang_item(anchor, SmolStr::new_inline("sized"))
+                    .lang_item(anchor, LangItem::Sized)
                     .and_then(|lang_item| lang_item.as_trait());
                 Some(trait_) == sized_trait
             }
@@ -923,26 +953,26 @@ impl SizedByDefault {
 }
 
 pub fn write_bounds_like_dyn_trait_with_prefix(
+    f: &mut HirFormatter<'_>,
     prefix: &str,
     predicates: &[QuantifiedWhereClause],
     default_sized: SizedByDefault,
-    f: &mut HirFormatter<'_>,
 ) -> Result<(), HirDisplayError> {
     write!(f, "{prefix}")?;
     if !predicates.is_empty()
         || predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
     {
         write!(f, " ")?;
-        write_bounds_like_dyn_trait(predicates, default_sized, f)
+        write_bounds_like_dyn_trait(f, predicates, default_sized)
     } else {
         Ok(())
     }
 }
 
 fn write_bounds_like_dyn_trait(
+    f: &mut HirFormatter<'_>,
     predicates: &[QuantifiedWhereClause],
     default_sized: SizedByDefault,
-    f: &mut HirFormatter<'_>,
 ) -> Result<(), HirDisplayError> {
     // Note: This code is written to produce nice results (i.e.
     // corresponding to surface Rust) for types that can occur in
@@ -978,7 +1008,9 @@ fn write_bounds_like_dyn_trait(
                 // We assume that the self type is ^0.0 (i.e. the
                 // existential) here, which is the only thing that's
                 // possible in actual Rust, and hence don't print it
+                f.start_location_link(trait_.into());
                 write!(f, "{}", f.db.trait_data(trait_).name)?;
+                f.end_location_link();
                 if let [_, params @ ..] = &*trait_ref.substitution.as_slice(Interner) {
                     if is_fn_trait {
                         if let Some(args) =
@@ -1015,7 +1047,9 @@ fn write_bounds_like_dyn_trait(
                 if let AliasTy::Projection(proj) = alias {
                     let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
                     let type_alias = f.db.type_alias_data(assoc_ty_id);
+                    f.start_location_link(assoc_ty_id.into());
                     write!(f, "{}", type_alias.name)?;
+                    f.end_location_link();
 
                     let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
                     if proj_arg_count > 0 {
@@ -1040,19 +1074,33 @@ fn write_bounds_like_dyn_trait(
     if angle_open {
         write!(f, ">")?;
     }
-    if matches!(default_sized, SizedByDefault::Sized { .. }) {
+    if let SizedByDefault::Sized { anchor } = default_sized {
+        let sized_trait =
+            f.db.lang_item(anchor, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
         if !is_sized {
-            write!(f, "{}?Sized", if first { "" } else { " + " })?;
+            if !first {
+                write!(f, " + ")?;
+            }
+            if let Some(sized_trait) = sized_trait {
+                f.start_location_link(sized_trait.into());
+            }
+            write!(f, "?Sized")?;
         } else if first {
+            if let Some(sized_trait) = sized_trait {
+                f.start_location_link(sized_trait.into());
+            }
             write!(f, "Sized")?;
         }
+        if let Some(_) = sized_trait {
+            f.end_location_link();
+        }
     }
     Ok(())
 }
 
 fn fmt_trait_ref(
-    tr: &TraitRef,
     f: &mut HirFormatter<'_>,
+    tr: &TraitRef,
     use_as: bool,
 ) -> Result<(), HirDisplayError> {
     if f.should_truncate() {
@@ -1065,7 +1113,10 @@ fn fmt_trait_ref(
     } else {
         write!(f, ": ")?;
     }
-    write!(f, "{}", f.db.trait_data(tr.hir_trait_id()).name)?;
+    let trait_ = tr.hir_trait_id();
+    f.start_location_link(trait_.into());
+    write!(f, "{}", f.db.trait_data(trait_).name)?;
+    f.end_location_link();
     if tr.substitution.len(Interner) > 1 {
         write!(f, "<")?;
         f.write_joined(&tr.substitution.as_slice(Interner)[1..], ", ")?;
@@ -1076,7 +1127,7 @@ fn fmt_trait_ref(
 
 impl HirDisplay for TraitRef {
     fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
-        fmt_trait_ref(self, f, false)
+        fmt_trait_ref(f, self, false)
     }
 }
 
@@ -1090,12 +1141,13 @@ impl HirDisplay for WhereClause {
             WhereClause::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
             WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => {
                 write!(f, "<")?;
-                fmt_trait_ref(&projection_ty.trait_ref(f.db), f, true)?;
-                write!(
-                    f,
-                    ">::{} = ",
-                    f.db.type_alias_data(from_assoc_type_id(projection_ty.associated_ty_id)).name,
-                )?;
+                fmt_trait_ref(f, &projection_ty.trait_ref(f.db), true)?;
+                write!(f, ">::",)?;
+                let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
+                f.start_location_link(type_alias.into());
+                write!(f, "{}", f.db.type_alias_data(type_alias).name,)?;
+                f.end_location_link();
+                write!(f, " = ")?;
                 ty.hir_fmt(f)?;
             }
             WhereClause::AliasEq(_) => write!(f, "{{error}}")?,
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
index 6b59f1c20da..767afdf9eb4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs
@@ -22,15 +22,15 @@ use hir_def::{
     builtin_type::{BuiltinInt, BuiltinType, BuiltinUint},
     data::{ConstData, StaticData},
     expr::{BindingAnnotation, ExprId, ExprOrPatId, PatId},
-    lang_item::LangItemTarget,
+    lang_item::{LangItem, LangItemTarget},
     layout::Integer,
-    path::{path, Path},
+    path::Path,
     resolver::{HasResolver, ResolveValueResult, Resolver, TypeNs, ValueNs},
     type_ref::TypeRef,
     AdtId, AssocItemId, DefWithBodyId, EnumVariantId, FieldId, FunctionId, HasModule,
     ItemContainerId, Lookup, TraitId, TypeAliasId, VariantId,
 };
-use hir_expand::name::{name, Name};
+use hir_expand::name::name;
 use itertools::Either;
 use la_arena::ArenaMap;
 use rustc_hash::FxHashMap;
@@ -39,7 +39,7 @@ use stdx::always;
 use crate::{
     db::HirDatabase, fold_tys, fold_tys_and_consts, infer::coerce::CoerceMany,
     lower::ImplTraitLoweringMode, to_assoc_type_id, AliasEq, AliasTy, Const, DomainGoal,
-    GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, Substitution,
+    GenericArg, Goal, ImplTraitId, InEnvironment, Interner, ProjectionTy, RpitId, Substitution,
     TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind,
 };
 
@@ -219,6 +219,7 @@ struct InternedStandardTypes {
     unknown: Ty,
     bool_: Ty,
     unit: Ty,
+    never: Ty,
 }
 
 impl Default for InternedStandardTypes {
@@ -227,6 +228,7 @@ impl Default for InternedStandardTypes {
             unknown: TyKind::Error.intern(Interner),
             bool_: TyKind::Scalar(Scalar::Bool).intern(Interner),
             unit: TyKind::Tuple(0, Substitution::empty(Interner)).intern(Interner),
+            never: TyKind::Never.intern(Interner),
         }
     }
 }
@@ -352,6 +354,7 @@ pub struct InferenceResult {
     /// **Note**: When a pattern type is resolved it may still contain
     /// unresolved or missing subpatterns or subpatterns of mismatched types.
     pub type_of_pat: ArenaMap<PatId, Ty>,
+    pub type_of_rpit: ArenaMap<RpitId, Ty>,
     type_mismatches: FxHashMap<ExprOrPatId, TypeMismatch>,
     /// Interned common types to return references to.
     standard_types: InternedStandardTypes,
@@ -525,6 +528,9 @@ impl<'a> InferenceContext<'a> {
         for ty in result.type_of_pat.values_mut() {
             *ty = table.resolve_completely(ty.clone());
         }
+        for ty in result.type_of_rpit.iter_mut().map(|x| x.1) {
+            *ty = table.resolve_completely(ty.clone());
+        }
         for mismatch in result.type_mismatches.values_mut() {
             mismatch.expected = table.resolve_completely(mismatch.expected.clone());
             mismatch.actual = table.resolve_completely(mismatch.actual.clone());
@@ -603,7 +609,7 @@ impl<'a> InferenceContext<'a> {
                         _ => unreachable!(),
                     };
                     let bounds = (*rpits).map_ref(|rpits| {
-                        rpits.impl_traits[idx as usize].bounds.map_ref(|it| it.into_iter())
+                        rpits.impl_traits[idx].bounds.map_ref(|it| it.into_iter())
                     });
                     let var = self.table.new_type_var();
                     let var_subst = Substitution::from1(Interner, var.clone());
@@ -616,6 +622,7 @@ impl<'a> InferenceContext<'a> {
                         always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
                         self.push_obligation(var_predicate.cast(Interner));
                     }
+                    self.result.type_of_rpit.insert(idx, var.clone());
                     var
                 },
                 DebruijnIndex::INNERMOST,
@@ -917,104 +924,98 @@ impl<'a> InferenceContext<'a> {
         }
     }
 
-    fn resolve_lang_item(&self, name: Name) -> Option<LangItemTarget> {
+    fn resolve_lang_item(&self, item: LangItem) -> Option<LangItemTarget> {
         let krate = self.resolver.krate();
-        self.db.lang_item(krate, name.to_smol_str())
+        self.db.lang_item(krate, item)
     }
 
     fn resolve_into_iter_item(&self) -> Option<TypeAliasId> {
-        let path = path![core::iter::IntoIterator];
-        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+        let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IntoIterIntoIter)?
+            .as_function()?
+            .lookup(self.db.upcast()).container
+        else { return None };
         self.db.trait_data(trait_).associated_type_by_name(&name![IntoIter])
     }
 
     fn resolve_iterator_item(&self) -> Option<TypeAliasId> {
-        let path = path![core::iter::Iterator];
-        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
+        let ItemContainerId::TraitId(trait_) = self.resolve_lang_item(LangItem::IteratorNext)?
+            .as_function()?
+            .lookup(self.db.upcast()).container
+        else { return None };
         self.db.trait_data(trait_).associated_type_by_name(&name![Item])
     }
 
-    fn resolve_ops_try_ok(&self) -> Option<TypeAliasId> {
-        // FIXME resolve via lang_item once try v2 is stable
-        let path = path![core::ops::Try];
-        let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?;
-        let trait_data = self.db.trait_data(trait_);
-        trait_data
-            // FIXME remove once try v2 is stable
-            .associated_type_by_name(&name![Ok])
-            .or_else(|| trait_data.associated_type_by_name(&name![Output]))
+    fn resolve_output_on(&self, trait_: TraitId) -> Option<TypeAliasId> {
+        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+    }
+
+    fn resolve_lang_trait(&self, lang: LangItem) -> Option<TraitId> {
+        self.resolve_lang_item(lang)?.as_trait()
+    }
+
+    fn resolve_ops_try_output(&self) -> Option<TypeAliasId> {
+        self.resolve_output_on(self.resolve_lang_trait(LangItem::Try)?)
     }
 
     fn resolve_ops_neg_output(&self) -> Option<TypeAliasId> {
-        let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?;
-        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+        self.resolve_output_on(self.resolve_lang_trait(LangItem::Neg)?)
     }
 
     fn resolve_ops_not_output(&self) -> Option<TypeAliasId> {
-        let trait_ = self.resolve_lang_item(name![not])?.as_trait()?;
-        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+        self.resolve_output_on(self.resolve_lang_trait(LangItem::Not)?)
     }
 
     fn resolve_future_future_output(&self) -> Option<TypeAliasId> {
-        let trait_ = self
-            .resolver
-            .resolve_known_trait(self.db.upcast(), &path![core::future::IntoFuture])
-            .or_else(|| self.resolve_lang_item(name![future_trait])?.as_trait())?;
-        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+        let ItemContainerId::TraitId(trait_) = self
+            .resolve_lang_item(LangItem::IntoFutureIntoFuture)?
+            .as_function()?
+            .lookup(self.db.upcast())
+            .container
+        else { return None };
+        self.resolve_output_on(trait_)
     }
 
     fn resolve_boxed_box(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(name![owned_box])?.as_struct()?;
+        let struct_ = self.resolve_lang_item(LangItem::OwnedBox)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range_full(&self) -> Option<AdtId> {
-        let path = path![core::ops::RangeFull];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::RangeFull)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range(&self) -> Option<AdtId> {
-        let path = path![core::ops::Range];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::Range)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range_inclusive(&self) -> Option<AdtId> {
-        let path = path![core::ops::RangeInclusive];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::RangeInclusiveStruct)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range_from(&self) -> Option<AdtId> {
-        let path = path![core::ops::RangeFrom];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::RangeFrom)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range_to(&self) -> Option<AdtId> {
-        let path = path![core::ops::RangeTo];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::RangeTo)?.as_struct()?;
         Some(struct_.into())
     }
 
     fn resolve_range_to_inclusive(&self) -> Option<AdtId> {
-        let path = path![core::ops::RangeToInclusive];
-        let struct_ = self.resolver.resolve_known_struct(self.db.upcast(), &path)?;
+        let struct_ = self.resolve_lang_item(LangItem::RangeToInclusive)?.as_struct()?;
         Some(struct_.into())
     }
 
-    fn resolve_ops_index(&self) -> Option<TraitId> {
-        self.resolve_lang_item(name![index])?.as_trait()
-    }
-
     fn resolve_ops_index_output(&self) -> Option<TypeAliasId> {
-        let trait_ = self.resolve_ops_index()?;
-        self.db.trait_data(trait_).associated_type_by_name(&name![Output])
+        self.resolve_output_on(self.resolve_lang_trait(LangItem::Index)?)
     }
 
     fn resolve_va_list(&self) -> Option<AdtId> {
-        let struct_ = self.resolve_lang_item(name![va_list])?.as_struct()?;
+        let struct_ = self.resolve_lang_item(LangItem::VaList)?.as_struct()?;
         Some(struct_.into())
     }
 }
@@ -1025,7 +1026,8 @@ impl<'a> InferenceContext<'a> {
 pub(crate) enum Expectation {
     None,
     HasType(Ty),
-    // Castable(Ty), // rustc has this, we currently just don't propagate an expectation for casts
+    #[allow(dead_code)]
+    Castable(Ty),
     RValueLikeUnsized(Ty),
 }
 
@@ -1041,10 +1043,6 @@ impl Expectation {
         }
     }
 
-    fn from_option(ty: Option<Ty>) -> Self {
-        ty.map_or(Expectation::None, Expectation::HasType)
-    }
-
     /// The following explanation is copied straight from rustc:
     /// Provides an expectation for an rvalue expression given an *optional*
     /// hint, which is not required for type safety (the resulting type might
@@ -1082,6 +1080,7 @@ impl Expectation {
         match self {
             Expectation::None => Expectation::None,
             Expectation::HasType(t) => Expectation::HasType(table.resolve_ty_shallow(t)),
+            Expectation::Castable(t) => Expectation::Castable(table.resolve_ty_shallow(t)),
             Expectation::RValueLikeUnsized(t) => {
                 Expectation::RValueLikeUnsized(table.resolve_ty_shallow(t))
             }
@@ -1091,20 +1090,25 @@ impl Expectation {
     fn to_option(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
         match self.resolve(table) {
             Expectation::None => None,
-            Expectation::HasType(t) |
-            // Expectation::Castable(t) |
-            Expectation::RValueLikeUnsized(t) => Some(t),
+            Expectation::HasType(t)
+            | Expectation::Castable(t)
+            | Expectation::RValueLikeUnsized(t) => Some(t),
         }
     }
 
     fn only_has_type(&self, table: &mut unify::InferenceTable<'_>) -> Option<Ty> {
         match self {
             Expectation::HasType(t) => Some(table.resolve_ty_shallow(t)),
-            // Expectation::Castable(_) |
-            Expectation::RValueLikeUnsized(_) | Expectation::None => None,
+            Expectation::Castable(_) | Expectation::RValueLikeUnsized(_) | Expectation::None => {
+                None
+            }
         }
     }
 
+    fn coercion_target_type(&self, table: &mut unify::InferenceTable<'_>) -> Ty {
+        self.only_has_type(table).unwrap_or_else(|| table.new_type_var())
+    }
+
     /// Comment copied from rustc:
     /// Disregard "castable to" expectations because they
     /// can lead us astray. Consider for example `if cond
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
index 094e460dbf7..a6449d019ff 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs
@@ -51,7 +51,7 @@ impl InferenceContext<'_> {
                 .map(to_chalk_trait_id)
                 .collect();
 
-        let self_ty = TyKind::Error.intern(Interner);
+        let self_ty = self.result.standard_types.unknown.clone();
         let bounds = dyn_ty.bounds.clone().substitute(Interner, &[self_ty.cast(Interner)]);
         for bound in bounds.iter(Interner) {
             // NOTE(skip_binders): the extracted types are rebound by the returned `FnPointer`
@@ -67,7 +67,7 @@ impl InferenceContext<'_> {
                 let arg = projection.substitution.as_slice(Interner).get(1)?;
                 if let Some(subst) = arg.ty(Interner)?.as_tuple() {
                     let generic_args = subst.as_slice(Interner);
-                    let mut sig_tys = Vec::new();
+                    let mut sig_tys = Vec::with_capacity(generic_args.len() + 1);
                     for arg in generic_args {
                         sig_tys.push(arg.ty(Interner)?.clone());
                     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
index 8df25c83c6e..3293534a068 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs
@@ -8,9 +8,11 @@
 use std::{iter, sync::Arc};
 
 use chalk_ir::{cast::Cast, BoundVar, Goal, Mutability, TyVariableKind};
-use hir_def::{expr::ExprId, lang_item::LangItemTarget};
+use hir_def::{
+    expr::ExprId,
+    lang_item::{LangItem, LangItemTarget},
+};
 use stdx::always;
-use syntax::SmolStr;
 
 use crate::{
     autoderef::{Autoderef, AutoderefKind},
@@ -570,11 +572,10 @@ impl<'a> InferenceTable<'a> {
             reborrow.as_ref().map_or_else(|| from_ty.clone(), |(_, adj)| adj.target.clone());
 
         let krate = self.trait_env.krate;
-        let coerce_unsized_trait =
-            match self.db.lang_item(krate, SmolStr::new_inline("coerce_unsized")) {
-                Some(LangItemTarget::TraitId(trait_)) => trait_,
-                _ => return Err(TypeError),
-            };
+        let coerce_unsized_trait = match self.db.lang_item(krate, LangItem::CoerceUnsized) {
+            Some(LangItemTarget::Trait(trait_)) => trait_,
+            _ => return Err(TypeError),
+        };
 
         let coerce_unsized_tref = {
             let b = TyBuilder::trait_ref(self.db, coerce_unsized_trait);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
index 8f9cdac3784..175fded8cca 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs
@@ -10,15 +10,15 @@ use chalk_ir::{
 };
 use hir_def::{
     expr::{
-        ArithOp, Array, BinaryOp, ClosureKind, CmpOp, Expr, ExprId, LabelId, Literal, Statement,
-        UnaryOp,
+        ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp,
     },
     generics::TypeOrConstParamData,
+    lang_item::LangItem,
     path::{GenericArg, GenericArgs},
     resolver::resolver_for_expr,
     ConstParamId, FieldId, ItemContainerId, Lookup,
 };
-use hir_expand::name::Name;
+use hir_expand::name::{name, Name};
 use stdx::always;
 use syntax::ast::RangeOp;
 
@@ -30,7 +30,7 @@ use crate::{
         const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
     },
     mapping::{from_chalk, ToChalk},
-    method_resolution::{self, lang_names_for_bin_op, VisibleFromModule},
+    method_resolution::{self, lang_items_for_bin_op, VisibleFromModule},
     primitive::{self, UintTy},
     static_lifetime, to_chalk_trait_id,
     utils::{generics, Generics},
@@ -87,16 +87,15 @@ impl<'a> InferenceContext<'a> {
                 let expected = &expected.adjust_for_branches(&mut self.table);
                 self.infer_expr(
                     condition,
-                    &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+                    &Expectation::HasType(self.result.standard_types.bool_.clone()),
                 );
 
                 let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
                 let mut both_arms_diverge = Diverges::Always;
 
-                let result_ty = self.table.new_type_var();
                 let then_ty = self.infer_expr_inner(then_branch, expected);
                 both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
-                let mut coerce = CoerceMany::new(result_ty);
+                let mut coerce = CoerceMany::new(expected.coercion_target_type(&mut self.table));
                 coerce.coerce(self, Some(then_branch), &then_ty);
                 let else_ty = match else_branch {
                     Some(else_branch) => self.infer_expr_inner(else_branch, expected),
@@ -113,7 +112,7 @@ impl<'a> InferenceContext<'a> {
             &Expr::Let { pat, expr } => {
                 let input_ty = self.infer_expr(expr, &Expectation::none());
                 self.infer_pat(pat, &input_ty, BindingMode::default());
-                TyKind::Scalar(Scalar::Bool).intern(Interner)
+                self.result.standard_types.bool_.clone()
             }
             Expr::Block { statements, tail, label, id: _ } => {
                 let old_resolver = mem::replace(
@@ -158,7 +157,8 @@ impl<'a> InferenceContext<'a> {
                 }
 
                 // The ok-ish type that is expected from the last expression
-                let ok_ty = self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_ok());
+                let ok_ty =
+                    self.resolve_associated_type(try_ty.clone(), self.resolve_ops_try_output());
 
                 self.with_breakable_ctx(BreakableKind::Block, ok_ty.clone(), None, |this| {
                     this.infer_expr(*body, &Expectation::has_type(ok_ty));
@@ -187,10 +187,12 @@ impl<'a> InferenceContext<'a> {
                     .intern(Interner)
             }
             &Expr::Loop { body, label } => {
+                // FIXME: should be:
+                // let ty = expected.coercion_target_type(&mut self.table);
                 let ty = self.table.new_type_var();
                 let (breaks, ()) =
                     self.with_breakable_ctx(BreakableKind::Loop, ty, label, |this| {
-                        this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
+                        this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
                     });
 
                 match breaks {
@@ -198,16 +200,16 @@ impl<'a> InferenceContext<'a> {
                         self.diverges = Diverges::Maybe;
                         breaks
                     }
-                    None => TyKind::Never.intern(Interner),
+                    None => self.result.standard_types.never.clone(),
                 }
             }
             &Expr::While { condition, body, label } => {
                 self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
                     this.infer_expr(
                         condition,
-                        &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+                        &Expectation::HasType(this.result.standard_types.bool_.clone()),
                     );
-                    this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
+                    this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
                 });
 
                 // the body may not run, so it diverging doesn't mean we diverge
@@ -223,7 +225,7 @@ impl<'a> InferenceContext<'a> {
 
                 self.infer_pat(pat, &pat_ty, BindingMode::default());
                 self.with_breakable_ctx(BreakableKind::Loop, self.err_ty(), label, |this| {
-                    this.infer_expr(body, &Expectation::has_type(TyBuilder::unit()));
+                    this.infer_expr(body, &Expectation::HasType(TyBuilder::unit()));
                 });
 
                 // the body may not run, so it diverging doesn't mean we diverge
@@ -233,7 +235,7 @@ impl<'a> InferenceContext<'a> {
             Expr::Closure { body, args, ret_type, arg_types, closure_kind } => {
                 assert_eq!(args.len(), arg_types.len());
 
-                let mut sig_tys = Vec::new();
+                let mut sig_tys = Vec::with_capacity(arg_types.len() + 1);
 
                 // collect explicitly written argument types
                 for arg_type in arg_types.iter() {
@@ -254,7 +256,8 @@ impl<'a> InferenceContext<'a> {
                     num_binders: 0,
                     sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
                     substitution: FnSubst(
-                        Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
+                        Substitution::from_iter(Interner, sig_tys.iter().cloned())
+                            .shifted_in(Interner),
                     ),
                 })
                 .intern(Interner);
@@ -316,27 +319,34 @@ impl<'a> InferenceContext<'a> {
             Expr::Call { callee, args, .. } => {
                 let callee_ty = self.infer_expr(*callee, &Expectation::none());
                 let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
-                let mut res = None;
-                let mut derefed_callee = callee_ty.clone();
-                // manual loop to be able to access `derefs.table`
-                while let Some((callee_deref_ty, _)) = derefs.next() {
-                    res = derefs.table.callable_sig(&callee_deref_ty, args.len());
-                    if res.is_some() {
-                        derefed_callee = callee_deref_ty;
-                        break;
+                let (res, derefed_callee) = 'b: {
+                    // manual loop to be able to access `derefs.table`
+                    while let Some((callee_deref_ty, _)) = derefs.next() {
+                        let res = derefs.table.callable_sig(&callee_deref_ty, args.len());
+                        if res.is_some() {
+                            break 'b (res, callee_deref_ty);
+                        }
                     }
-                }
+                    (None, callee_ty.clone())
+                };
                 // if the function is unresolved, we use is_varargs=true to
                 // suppress the arg count diagnostic here
                 let is_varargs =
                     derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
                         || res.is_none();
                 let (param_tys, ret_ty) = match res {
-                    Some(res) => {
+                    Some((func, params, ret_ty)) => {
                         let adjustments = auto_deref_adjust_steps(&derefs);
                         // FIXME: Handle call adjustments for Fn/FnMut
                         self.write_expr_adj(*callee, adjustments);
-                        res
+                        if let Some((trait_, func)) = func {
+                            let subst = TyBuilder::subst_for_def(self.db, trait_, None)
+                                .push(callee_ty.clone())
+                                .push(TyBuilder::tuple_with(params.iter().cloned()))
+                                .build();
+                            self.write_method_resolution(tgt_expr, func, subst.clone());
+                        }
+                        (params, ret_ty)
                     }
                     None => (Vec::new(), self.err_ty()), // FIXME diagnostic
                 };
@@ -374,12 +384,9 @@ impl<'a> InferenceContext<'a> {
                 let expected = expected.adjust_for_branches(&mut self.table);
 
                 let result_ty = if arms.is_empty() {
-                    TyKind::Never.intern(Interner)
+                    self.result.standard_types.never.clone()
                 } else {
-                    match &expected {
-                        Expectation::HasType(ty) => ty.clone(),
-                        _ => self.table.new_type_var(),
-                    }
+                    expected.coercion_target_type(&mut self.table)
                 };
                 let mut coerce = CoerceMany::new(result_ty);
 
@@ -392,7 +399,7 @@ impl<'a> InferenceContext<'a> {
                     if let Some(guard_expr) = arm.guard {
                         self.infer_expr(
                             guard_expr,
-                            &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+                            &Expectation::HasType(self.result.standard_types.bool_.clone()),
                         );
                     }
 
@@ -417,7 +424,7 @@ impl<'a> InferenceContext<'a> {
                         is_break: false,
                     });
                 };
-                TyKind::Never.intern(Interner)
+                self.result.standard_types.never.clone()
             }
             Expr::Break { expr, label } => {
                 let val_ty = if let Some(expr) = *expr {
@@ -431,7 +438,7 @@ impl<'a> InferenceContext<'a> {
                         // avoiding the borrowck
                         let mut coerce = mem::replace(
                             &mut ctxt.coerce,
-                            CoerceMany::new(self.result.standard_types.unknown.clone()),
+                            CoerceMany::new(expected.coercion_target_type(&mut self.table)),
                         );
 
                         // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
@@ -449,7 +456,7 @@ impl<'a> InferenceContext<'a> {
                         });
                     }
                 }
-                TyKind::Never.intern(Interner)
+                self.result.standard_types.never.clone()
             }
             Expr::Return { expr } => {
                 if let Some(expr) = expr {
@@ -458,7 +465,7 @@ impl<'a> InferenceContext<'a> {
                     let unit = TyBuilder::unit();
                     let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
                 }
-                TyKind::Never.intern(Interner)
+                self.result.standard_types.never.clone()
             }
             Expr::Yield { expr } => {
                 if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() {
@@ -471,14 +478,14 @@ impl<'a> InferenceContext<'a> {
                     resume_ty
                 } else {
                     // FIXME: report error (yield expr in non-generator)
-                    TyKind::Error.intern(Interner)
+                    self.result.standard_types.unknown.clone()
                 }
             }
             Expr::Yeet { expr } => {
                 if let &Some(expr) = expr {
                     self.infer_expr_inner(expr, &Expectation::None);
                 }
-                TyKind::Never.intern(Interner)
+                self.result.standard_types.never.clone()
             }
             Expr::RecordLit { path, fields, spread, .. } => {
                 let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
@@ -588,12 +595,23 @@ impl<'a> InferenceContext<'a> {
             }
             Expr::Try { expr } => {
                 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
-                self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+                if let Some(trait_) = self.resolve_lang_trait(LangItem::Try) {
+                    if let Some(func) = self.db.trait_data(trait_).method_by_name(&name!(branch)) {
+                        let subst = TyBuilder::subst_for_def(self.db, trait_, None)
+                            .push(inner_ty.clone())
+                            .build();
+                        self.write_method_resolution(tgt_expr, func, subst.clone());
+                    }
+                    let try_output = self.resolve_output_on(trait_);
+                    self.resolve_associated_type(inner_ty, try_output)
+                } else {
+                    self.err_ty()
+                }
             }
             Expr::Cast { expr, type_ref } => {
-                // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
-                let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                 let cast_ty = self.make_ty(type_ref);
+                // FIXME: propagate the "castable to" expectation
+                let _inner_ty = self.infer_expr_inner(*expr, &Expectation::None);
                 // FIXME check the cast...
                 cast_ty
             }
@@ -627,6 +645,7 @@ impl<'a> InferenceContext<'a> {
             Expr::UnaryOp { expr, op } => {
                 let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
                 let inner_ty = self.resolve_ty_shallow(&inner_ty);
+                // FIXME: Note down method resolution here
                 match op {
                     UnaryOp::Deref => {
                         autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
@@ -736,7 +755,7 @@ impl<'a> InferenceContext<'a> {
                 let base_ty = self.infer_expr_inner(*base, &Expectation::none());
                 let index_ty = self.infer_expr(*index, &Expectation::none());
 
-                if let Some(index_trait) = self.resolve_ops_index() {
+                if let Some(index_trait) = self.resolve_lang_trait(LangItem::Index) {
                     let canonicalized = self.canonicalize(base_ty.clone());
                     let receiver_adjustments = method_resolution::resolve_indexing_op(
                         self.db,
@@ -749,6 +768,15 @@ impl<'a> InferenceContext<'a> {
                             adj.apply(&mut self.table, base_ty)
                         });
                     self.write_expr_adj(*base, adj);
+                    if let Some(func) =
+                        self.db.trait_data(index_trait).method_by_name(&name!(index))
+                    {
+                        let substs = TyBuilder::subst_for_def(self.db, index_trait, None)
+                            .push(self_ty.clone())
+                            .push(index_ty.clone())
+                            .build();
+                        self.write_method_resolution(tgt_expr, func, substs.clone());
+                    }
                     self.resolve_associated_type_with_params(
                         self_ty,
                         self.resolve_ops_index_output(),
@@ -800,7 +828,7 @@ impl<'a> InferenceContext<'a> {
                         self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
                         self.infer_expr(
                             repeat,
-                            &Expectation::has_type(
+                            &Expectation::HasType(
                                 TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
                             ),
                         );
@@ -823,7 +851,7 @@ impl<'a> InferenceContext<'a> {
                 TyKind::Array(coerce.complete(), len).intern(Interner)
             }
             Expr::Literal(lit) => match lit {
-                Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+                Literal::Bool(..) => self.result.standard_types.bool_.clone(),
                 Literal::String(..) => {
                     TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
                         .intern(Interner)
@@ -1009,7 +1037,7 @@ impl<'a> InferenceContext<'a> {
         let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
         let rhs_ty = self.table.new_type_var();
 
-        let trait_func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
+        let trait_func = lang_items_for_bin_op(op).and_then(|(name, lang_item)| {
             let trait_id = self.resolve_lang_item(lang_item)?.as_trait()?;
             let func = self.db.trait_data(trait_id).method_by_name(&name)?;
             Some((trait_id, func))
@@ -1017,11 +1045,21 @@ impl<'a> InferenceContext<'a> {
         let (trait_, func) = match trait_func {
             Some(it) => it,
             None => {
-                let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
-                let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
-                return self
-                    .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
-                    .unwrap_or_else(|| self.err_ty());
+                // HACK: `rhs_ty` is a general inference variable with no clue at all at this
+                // point. Passing `lhs_ty` as both operands just to check if `lhs_ty` is a builtin
+                // type applicable to `op`.
+                let ret_ty = if self.is_builtin_binop(&lhs_ty, &lhs_ty, op) {
+                    // Assume both operands are builtin so we can continue inference. No guarantee
+                    // on the correctness, rustc would complain as necessary lang items don't seem
+                    // to exist anyway.
+                    self.enforce_builtin_binop_types(&lhs_ty, &rhs_ty, op)
+                } else {
+                    self.err_ty()
+                };
+
+                self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty));
+
+                return ret_ty;
             }
         };
 
@@ -1071,11 +1109,9 @@ impl<'a> InferenceContext<'a> {
 
         let ret_ty = self.normalize_associated_types_in(ret_ty);
 
-        // use knowledge of built-in binary ops, which can sometimes help inference
-        if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
-            self.unify(&builtin_rhs, &rhs_ty);
-        }
-        if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
+        if self.is_builtin_binop(&lhs_ty, &rhs_ty, op) {
+            // use knowledge of built-in binary ops, which can sometimes help inference
+            let builtin_ret = self.enforce_builtin_binop_types(&lhs_ty, &rhs_ty, op);
             self.unify(&builtin_ret, &ret_ty);
         }
 
@@ -1111,7 +1147,7 @@ impl<'a> InferenceContext<'a> {
                     if let Some(expr) = else_branch {
                         self.infer_expr_coerce(
                             *expr,
-                            &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+                            &Expectation::HasType(self.result.standard_types.never.clone()),
                         );
                     }
 
@@ -1136,18 +1172,16 @@ impl<'a> InferenceContext<'a> {
             if self.diverges.is_always() {
                 // we don't even make an attempt at coercion
                 self.table.new_maybe_never_var()
-            } else {
-                if let Some(t) = expected.only_has_type(&mut self.table) {
-                    if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
-                        self.result.type_mismatches.insert(
-                            expr.into(),
-                            TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
-                        );
-                    }
-                    t
-                } else {
-                    TyBuilder::unit()
+            } else if let Some(t) = expected.only_has_type(&mut self.table) {
+                if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+                    self.result.type_mismatches.insert(
+                        expr.into(),
+                        TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+                    );
                 }
+                t
+            } else {
+                TyBuilder::unit()
             }
         }
     }
@@ -1271,7 +1305,7 @@ impl<'a> InferenceContext<'a> {
         // that are not closures, then we type-check the closures. This is so
         // that we have more information about the types of arguments when we
         // type-check the functions. This isn't really the right way to do this.
-        for &check_closures in &[false, true] {
+        for check_closures in [false, true] {
             let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
             let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
             let expected_iter = expected_inputs
@@ -1314,13 +1348,13 @@ impl<'a> InferenceContext<'a> {
                 } else {
                     param_ty
                 };
-                if !coercion_target.is_unknown() {
-                    if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
-                        self.result.type_mismatches.insert(
-                            arg.into(),
-                            TypeMismatch { expected: coercion_target, actual: ty.clone() },
-                        );
-                    }
+                if !coercion_target.is_unknown()
+                    && self.coerce(Some(arg), &ty, &coercion_target).is_err()
+                {
+                    self.result.type_mismatches.insert(
+                        arg.into(),
+                        TypeMismatch { expected: coercion_target, actual: ty.clone() },
+                    );
                 }
             }
         }
@@ -1479,92 +1513,124 @@ impl<'a> InferenceContext<'a> {
         indices
     }
 
-    fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
-        let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
-        let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
-        match op {
-            BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
-                Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
+    /// Dereferences a single level of immutable referencing.
+    fn deref_ty_if_possible(&mut self, ty: &Ty) -> Ty {
+        let ty = self.resolve_ty_shallow(ty);
+        match ty.kind(Interner) {
+            TyKind::Ref(Mutability::Not, _, inner) => self.resolve_ty_shallow(inner),
+            _ => ty,
+        }
+    }
+
+    /// Enforces expectations on lhs type and rhs type depending on the operator and returns the
+    /// output type of the binary op.
+    fn enforce_builtin_binop_types(&mut self, lhs: &Ty, rhs: &Ty, op: BinaryOp) -> Ty {
+        // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447).
+        let lhs = self.deref_ty_if_possible(lhs);
+        let rhs = self.deref_ty_if_possible(rhs);
+
+        let (op, is_assign) = match op {
+            BinaryOp::Assignment { op: Some(inner) } => (BinaryOp::ArithOp(inner), true),
+            _ => (op, false),
+        };
+
+        let output_ty = match op {
+            BinaryOp::LogicOp(_) => {
+                let bool_ = self.result.standard_types.bool_.clone();
+                self.unify(&lhs, &bool_);
+                self.unify(&rhs, &bool_);
+                bool_
             }
-            BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
+
             BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
-                // all integer combinations are valid here
-                if matches!(
-                    lhs_ty.kind(Interner),
-                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
-                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
-                ) && matches!(
-                    rhs_ty.kind(Interner),
-                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
-                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
-                ) {
-                    Some(lhs_ty)
-                } else {
-                    None
-                }
+                // result type is same as LHS always
+                lhs
             }
-            BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
-                // (int, int) | (uint, uint) | (float, float)
-                (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
-                | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
-                | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
-                    Some(rhs_ty)
-                }
-                // ({int}, int) | ({int}, uint)
-                (
-                    TyKind::InferenceVar(_, TyVariableKind::Integer),
-                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
-                ) => Some(rhs_ty),
-                // (int, {int}) | (uint, {int})
-                (
-                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
-                    TyKind::InferenceVar(_, TyVariableKind::Integer),
-                ) => Some(lhs_ty),
-                // ({float} | float)
-                (
-                    TyKind::InferenceVar(_, TyVariableKind::Float),
-                    TyKind::Scalar(Scalar::Float(_)),
-                ) => Some(rhs_ty),
-                // (float, {float})
-                (
-                    TyKind::Scalar(Scalar::Float(_)),
-                    TyKind::InferenceVar(_, TyVariableKind::Float),
-                ) => Some(lhs_ty),
-                // ({int}, {int}) | ({float}, {float})
-                (
-                    TyKind::InferenceVar(_, TyVariableKind::Integer),
-                    TyKind::InferenceVar(_, TyVariableKind::Integer),
-                )
-                | (
-                    TyKind::InferenceVar(_, TyVariableKind::Float),
-                    TyKind::InferenceVar(_, TyVariableKind::Float),
-                ) => Some(rhs_ty),
-                _ => None,
-            },
+
+            BinaryOp::ArithOp(_) => {
+                // LHS, RHS, and result will have the same type
+                self.unify(&lhs, &rhs);
+                lhs
+            }
+
+            BinaryOp::CmpOp(_) => {
+                // LHS and RHS will have the same type
+                self.unify(&lhs, &rhs);
+                self.result.standard_types.bool_.clone()
+            }
+
+            BinaryOp::Assignment { op: None } => {
+                stdx::never!("Simple assignment operator is not binary op.");
+                lhs
+            }
+
+            BinaryOp::Assignment { .. } => unreachable!("handled above"),
+        };
+
+        if is_assign {
+            self.result.standard_types.unit.clone()
+        } else {
+            output_ty
         }
     }
 
-    fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
-        Some(match op {
-            BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
-            BinaryOp::Assignment { op: None } => lhs_ty,
-            BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
-                .resolve_ty_shallow(&lhs_ty)
-                .kind(Interner)
-            {
-                TyKind::Scalar(_) | TyKind::Str => lhs_ty,
-                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
-                _ => return None,
-            },
-            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
-            BinaryOp::CmpOp(CmpOp::Ord { .. })
-            | BinaryOp::Assignment { op: Some(_) }
-            | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
-                TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
-                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
-                _ => return None,
-            },
-        })
+    fn is_builtin_binop(&mut self, lhs: &Ty, rhs: &Ty, op: BinaryOp) -> bool {
+        // Special-case a single layer of referencing, so that things like `5.0 + &6.0f32` work (See rust-lang/rust#57447).
+        let lhs = self.deref_ty_if_possible(lhs);
+        let rhs = self.deref_ty_if_possible(rhs);
+
+        let op = match op {
+            BinaryOp::Assignment { op: Some(inner) } => BinaryOp::ArithOp(inner),
+            _ => op,
+        };
+
+        match op {
+            BinaryOp::LogicOp(_) => true,
+
+            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
+                lhs.is_integral() && rhs.is_integral()
+            }
+
+            BinaryOp::ArithOp(
+                ArithOp::Add | ArithOp::Sub | ArithOp::Mul | ArithOp::Div | ArithOp::Rem,
+            ) => {
+                lhs.is_integral() && rhs.is_integral()
+                    || lhs.is_floating_point() && rhs.is_floating_point()
+            }
+
+            BinaryOp::ArithOp(ArithOp::BitAnd | ArithOp::BitOr | ArithOp::BitXor) => {
+                lhs.is_integral() && rhs.is_integral()
+                    || lhs.is_floating_point() && rhs.is_floating_point()
+                    || matches!(
+                        (lhs.kind(Interner), rhs.kind(Interner)),
+                        (TyKind::Scalar(Scalar::Bool), TyKind::Scalar(Scalar::Bool))
+                    )
+            }
+
+            BinaryOp::CmpOp(_) => {
+                let is_scalar = |kind| {
+                    matches!(
+                        kind,
+                        &TyKind::Scalar(_)
+                            | TyKind::FnDef(..)
+                            | TyKind::Function(_)
+                            | TyKind::Raw(..)
+                            | TyKind::InferenceVar(
+                                _,
+                                TyVariableKind::Integer | TyVariableKind::Float
+                            )
+                    )
+                };
+                is_scalar(lhs.kind(Interner)) && is_scalar(rhs.kind(Interner))
+            }
+
+            BinaryOp::Assignment { op: None } => {
+                stdx::never!("Simple assignment operator is not binary op.");
+                false
+            }
+
+            BinaryOp::Assignment { .. } => unreachable!("handled above"),
+        }
     }
 
     fn with_breakable_ctx<T>(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
index 8bd17c0f39f..0a8527afbd0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs
@@ -112,7 +112,7 @@ impl<'a> InferenceContext<'a> {
         let ty = TyBuilder::value_ty(self.db, typable, parent_substs)
             .fill(|x| {
                 it.next().unwrap_or_else(|| match x {
-                    ParamKind::Type => TyKind::Error.intern(Interner).cast(Interner),
+                    ParamKind::Type => self.result.standard_types.unknown.clone().cast(Interner),
                     ParamKind::Const(ty) => consteval::unknown_const_as_generic(ty.clone()),
                 })
             })
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
index e7ddd1591fe..46ed3533c8c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs
@@ -8,6 +8,7 @@ use chalk_ir::{
 };
 use chalk_solve::infer::ParameterEnaVariableExt;
 use ena::unify::UnifyKey;
+use hir_def::{FunctionId, TraitId};
 use hir_expand::name;
 use stdx::never;
 
@@ -626,18 +627,26 @@ impl<'a> InferenceTable<'a> {
         }
     }
 
-    pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+    pub(crate) fn callable_sig(
+        &mut self,
+        ty: &Ty,
+        num_args: usize,
+    ) -> Option<(Option<(TraitId, FunctionId)>, Vec<Ty>, Ty)> {
         match ty.callable_sig(self.db) {
-            Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())),
+            Some(sig) => Some((None, sig.params().to_vec(), sig.ret().clone())),
             None => self.callable_sig_from_fn_trait(ty, num_args),
         }
     }
 
-    fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> {
+    fn callable_sig_from_fn_trait(
+        &mut self,
+        ty: &Ty,
+        num_args: usize,
+    ) -> Option<(Option<(TraitId, FunctionId)>, Vec<Ty>, Ty)> {
         let krate = self.trait_env.krate;
         let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?;
-        let output_assoc_type =
-            self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+        let trait_data = self.db.trait_data(fn_once_trait);
+        let output_assoc_type = trait_data.associated_type_by_name(&name![Output])?;
 
         let mut arg_tys = vec![];
         let arg_ty = TyBuilder::tuple(num_args)
@@ -675,7 +684,11 @@ impl<'a> InferenceTable<'a> {
         if self.db.trait_solve(krate, canonical.value.cast(Interner)).is_some() {
             self.register_obligation(obligation.goal);
             let return_ty = self.normalize_projection_ty(projection);
-            Some((arg_tys, return_ty))
+            Some((
+                Some(fn_once_trait).zip(trait_data.method_by_name(&name!(call_once))),
+                arg_tys,
+                return_ty,
+            ))
         } else {
             None
         }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
index 441503a300e..7bf73560cbe 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/interner.rs
@@ -4,11 +4,8 @@
 use crate::{chalk_db, tls, GenericArg};
 use base_db::salsa::InternId;
 use chalk_ir::{Goal, GoalData};
-use hir_def::{
-    intern::{impl_internable, InternStorage, Internable, Interned},
-    type_ref::ConstScalar,
-    TypeAliasId,
-};
+use hir_def::{type_ref::ConstScalar, TypeAliasId};
+use intern::{impl_internable, Interned};
 use smallvec::SmallVec;
 use std::{fmt, sync::Arc};
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
index afc54e729f9..5308c72161b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lang_items.rs
@@ -1,20 +1,19 @@
 //! Functions to detect special lang items
 
-use hir_def::{AdtId, HasModule};
-use hir_expand::name;
+use hir_def::{lang_item::LangItem, AdtId, HasModule};
 
 use crate::db::HirDatabase;
 
 pub fn is_box(adt: AdtId, db: &dyn HirDatabase) -> bool {
-    let owned_box = name![owned_box].to_smol_str();
     let krate = adt.module(db.upcast()).krate();
-    let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from);
+    let box_adt =
+        db.lang_item(krate, LangItem::OwnedBox).and_then(|it| it.as_struct()).map(AdtId::from);
     Some(adt) == box_adt
 }
 
 pub fn is_unsafe_cell(adt: AdtId, db: &dyn HirDatabase) -> bool {
-    let owned_box = name![unsafe_cell].to_smol_str();
     let krate = adt.module(db.upcast()).krate();
-    let box_adt = db.lang_item(krate, owned_box).and_then(|it| it.as_struct()).map(AdtId::from);
+    let box_adt =
+        db.lang_item(krate, LangItem::UnsafeCell).and_then(|it| it.as_struct()).map(AdtId::from);
     Some(adt) == box_adt
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index 7a1cca3143e..c82c274524a 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -1,7 +1,5 @@
 //! Compute the binary representation of a type
 
-use std::sync::Arc;
-
 use base_db::CrateId;
 use chalk_ir::{AdtId, TyKind};
 use hir_def::{
@@ -31,19 +29,19 @@ mod adt;
 mod target;
 
 struct LayoutCx<'a> {
-    db: &'a dyn HirDatabase,
     krate: CrateId,
+    target: &'a TargetDataLayout,
 }
 
-impl LayoutCalculator for LayoutCx<'_> {
-    type TargetDataLayoutRef = Arc<TargetDataLayout>;
+impl<'a> LayoutCalculator for LayoutCx<'a> {
+    type TargetDataLayoutRef = &'a TargetDataLayout;
 
     fn delay_bug(&self, txt: &str) {
         never!("{}", txt);
     }
 
-    fn current_data_layout(&self) -> Arc<TargetDataLayout> {
-        self.db.target_data_layout(self.krate)
+    fn current_data_layout(&self) -> &'a TargetDataLayout {
+        self.target
     }
 }
 
@@ -56,7 +54,8 @@ fn scalar(dl: &TargetDataLayout, value: Primitive) -> Layout {
 }
 
 pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Layout, LayoutError> {
-    let cx = LayoutCx { db, krate };
+    let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
+    let cx = LayoutCx { krate, target: &target };
     let dl = &*cx.current_data_layout();
     Ok(match ty.kind(Interner) {
         TyKind::Adt(AdtId(def), subst) => db.layout_of_adt(*def, subst.clone())?,
@@ -226,10 +225,21 @@ pub fn layout_of_ty(db: &dyn HirDatabase, ty: &Ty, krate: CrateId) -> Result<Lay
             ptr.valid_range_mut().start = 1;
             Layout::scalar(dl, ptr)
         }
-        TyKind::Closure(_, _)
-        | TyKind::OpaqueType(_, _)
-        | TyKind::Generator(_, _)
-        | TyKind::GeneratorWitness(_, _) => return Err(LayoutError::NotImplemented),
+        TyKind::OpaqueType(opaque_ty_id, _) => {
+            let impl_trait_id = db.lookup_intern_impl_trait_id((*opaque_ty_id).into());
+            match impl_trait_id {
+                crate::ImplTraitId::ReturnTypeImplTrait(func, idx) => {
+                    let infer = db.infer(func.into());
+                    layout_of_ty(db, &infer.type_of_rpit[idx], krate)?
+                }
+                crate::ImplTraitId::AsyncBlockTypeImplTrait(_, _) => {
+                    return Err(LayoutError::NotImplemented)
+                }
+            }
+        }
+        TyKind::Closure(_, _) | TyKind::Generator(_, _) | TyKind::GeneratorWitness(_, _) => {
+            return Err(LayoutError::NotImplemented)
+        }
         TyKind::AssociatedType(_, _)
         | TyKind::Error
         | TyKind::Alias(_)
@@ -251,17 +261,14 @@ fn layout_of_unit(cx: &LayoutCx<'_>, dl: &TargetDataLayout) -> Result<Layout, La
 
 fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty {
     match pointee.kind(Interner) {
-        TyKind::Adt(AdtId(adt), subst) => match adt {
-            &hir_def::AdtId::StructId(i) => {
-                let data = db.struct_data(i);
-                let mut it = data.variant_data.fields().iter().rev();
-                match it.next() {
-                    Some((f, _)) => field_ty(db, i.into(), f, subst),
-                    None => pointee,
-                }
+        TyKind::Adt(AdtId(hir_def::AdtId::StructId(i)), subst) => {
+            let data = db.struct_data(*i);
+            let mut it = data.variant_data.fields().iter().rev();
+            match it.next() {
+                Some((f, _)) => field_ty(db, (*i).into(), f, subst),
+                None => pointee,
             }
-            _ => pointee,
-        },
+        }
         _ => pointee,
     }
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
index 23166a5a522..cb7968c1446 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs
@@ -23,7 +23,9 @@ pub fn layout_of_adt_query(
     def: AdtId,
     subst: Substitution,
 ) -> Result<Layout, LayoutError> {
-    let cx = LayoutCx { db, krate: def.module(db.upcast()).krate() };
+    let krate = def.module(db.upcast()).krate();
+    let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) };
+    let cx = LayoutCx { krate, target: &target };
     let dl = cx.current_data_layout();
     let handle_variant = |def: VariantId, var: &VariantData| {
         var.fields()
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
index 37b831652f5..adfae0a1abb 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs
@@ -3,34 +3,22 @@
 use std::sync::Arc;
 
 use base_db::CrateId;
-use hir_def::layout::{Endian, Size, TargetDataLayout};
+use hir_def::layout::TargetDataLayout;
 
 use crate::db::HirDatabase;
 
-pub fn target_data_layout_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<TargetDataLayout> {
+pub fn target_data_layout_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+) -> Option<Arc<TargetDataLayout>> {
     let crate_graph = db.crate_graph();
-    let target_layout = &crate_graph[krate].target_layout;
-    let cfg_options = &crate_graph[krate].cfg_options;
-    Arc::new(
-        target_layout
-            .as_ref()
-            .and_then(|it| TargetDataLayout::parse_from_llvm_datalayout_string(it).ok())
-            .unwrap_or_else(|| {
-                let endian = match cfg_options.get_cfg_values("target_endian").next() {
-                    Some(x) if x.as_str() == "big" => Endian::Big,
-                    _ => Endian::Little,
-                };
-                let pointer_size = Size::from_bytes(
-                    match cfg_options.get_cfg_values("target_pointer_width").next() {
-                        Some(x) => match x.as_str() {
-                            "16" => 2,
-                            "32" => 4,
-                            _ => 8,
-                        },
-                        _ => 8,
-                    },
-                );
-                TargetDataLayout { endian, pointer_size, ..TargetDataLayout::default() }
-            }),
-    )
+    let target_layout = crate_graph[krate].target_layout.as_ref().ok()?;
+    let res = TargetDataLayout::parse_from_llvm_datalayout_string(&target_layout);
+    if let Err(_e) = &res {
+        // FIXME: Print the error here once it implements debug/display
+        // also logging here is somewhat wrong, but unfortunately this is the earliest place we can
+        // parse that doesn't impose a dependency to the rust-abi crate for project-model
+        tracing::error!("Failed to parse target data layout for {krate:?}");
+    }
+    res.ok().map(Arc::new)
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
index 53838cf41d2..067bdc960da 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use base_db::fixture::WithFixture;
 use chalk_ir::{AdtId, TyKind};
 use hir_def::{
@@ -5,20 +7,16 @@ use hir_def::{
     layout::{Layout, LayoutError},
 };
 
-use crate::{test_db::TestDB, Interner, Substitution};
+use crate::{db::HirDatabase, test_db::TestDB, Interner, Substitution};
 
 use super::layout_of_ty;
 
-fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
-    // using unstable cargo features failed, fall back to using plain rustc
-    let mut cmd = std::process::Command::new("rustc");
-    cmd.args(["-Z", "unstable-options", "--print", "target-spec-json"]).env("RUSTC_BOOTSTRAP", "1");
-    let output = cmd.output().unwrap();
-    assert!(output.status.success(), "{}", output.status);
-    let stdout = String::from_utf8(output.stdout).unwrap();
-    let target_data_layout =
-        stdout.split_once(r#""data-layout": ""#).unwrap().1.split_once('"').unwrap().0.to_owned();
+fn current_machine_data_layout() -> String {
+    project_model::target_data_layout::get(None, None, &HashMap::default()).unwrap()
+}
 
+fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
+    let target_data_layout = current_machine_data_layout();
     let ra_fixture = format!(
         "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}",
     );
@@ -45,6 +43,42 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
     layout_of_ty(&db, &goal_ty, module_id.krate())
 }
 
+/// A version of `eval_goal` for types that can not be expressed in ADTs, like closures and `impl Trait`
+fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Layout, LayoutError> {
+    let target_data_layout = current_machine_data_layout();
+    let ra_fixture = format!(
+        "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}",
+    );
+
+    let (db, file_id) = TestDB::with_single_file(&ra_fixture);
+    let module_id = db.module_for_file(file_id);
+    let def_map = module_id.def_map(&db);
+    let scope = &def_map[module_id.local_id].scope;
+    let adt_id = scope
+        .declarations()
+        .find_map(|x| match x {
+            hir_def::ModuleDefId::FunctionId(x) => {
+                let name = db.function_data(x).name.to_smol_str();
+                (name == "main").then_some(x)
+            }
+            _ => None,
+        })
+        .unwrap();
+    let hir_body = db.body(adt_id.into());
+    let pat = hir_body
+        .pats
+        .iter()
+        .find(|x| match x.1 {
+            hir_def::expr::Pat::Bind { name, .. } => name.to_smol_str() == "goal",
+            _ => false,
+        })
+        .unwrap()
+        .0;
+    let infer = db.infer(adt_id.into());
+    let goal_ty = infer.type_of_pat[pat].clone();
+    layout_of_ty(&db, &goal_ty, module_id.krate())
+}
+
 #[track_caller]
 fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
     let l = eval_goal(ra_fixture, minicore).unwrap();
@@ -53,6 +87,13 @@ fn check_size_and_align(ra_fixture: &str, minicore: &str, size: u64, align: u64)
 }
 
 #[track_caller]
+fn check_size_and_align_expr(ra_fixture: &str, minicore: &str, size: u64, align: u64) {
+    let l = eval_expr(ra_fixture, minicore).unwrap();
+    assert_eq!(l.size.bytes(), size);
+    assert_eq!(l.align.abi.bytes(), align);
+}
+
+#[track_caller]
 fn check_fail(ra_fixture: &str, e: LayoutError) {
     let r = eval_goal(ra_fixture, "");
     assert_eq!(r, Err(e));
@@ -85,11 +126,31 @@ macro_rules! size_and_align {
     };
 }
 
+macro_rules! size_and_align_expr {
+    ($($t:tt)*) => {
+        {
+            #[allow(dead_code)]
+            {
+                let val = { $($t)* };
+                check_size_and_align_expr(
+                    stringify!($($t)*),
+                    "",
+                    ::std::mem::size_of_val(&val) as u64,
+                    ::std::mem::align_of_val(&val) as u64,
+                );
+            }
+        }
+    };
+}
+
 #[test]
 fn hello_world() {
     size_and_align! {
         struct Goal(i32);
     }
+    size_and_align_expr! {
+        2i32
+    }
 }
 
 #[test]
@@ -144,6 +205,40 @@ fn generic() {
 }
 
 #[test]
+fn return_position_impl_trait() {
+    size_and_align_expr! {
+        trait T {}
+        impl T for i32 {}
+        impl T for i64 {}
+        fn foo() -> impl T { 2i64 }
+        foo()
+    }
+    size_and_align_expr! {
+        trait T {}
+        impl T for i32 {}
+        impl T for i64 {}
+        fn foo() -> (impl T, impl T, impl T) { (2i64, 5i32, 7i32) }
+        foo()
+    }
+    size_and_align_expr! {
+        struct Foo<T>(T, T, (T, T));
+        trait T {}
+        impl T for Foo<i32> {}
+        impl T for Foo<i64> {}
+
+        fn foo() -> Foo<impl T> { Foo(
+            Foo(1i64, 2, (3, 4)),
+            Foo(5, 6, (7, 8)),
+            (
+                Foo(1i64, 2, (3, 4)),
+                Foo(5, 6, (7, 8)),
+            ),
+        ) }
+        foo()
+    }
+}
+
+#[test]
 fn enums() {
     size_and_align! {
         enum Goal {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index cbe6873c7d5..59a5ef8c14d 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -20,7 +20,6 @@ mod lower;
 mod mapping;
 mod tls;
 mod utils;
-mod walk;
 pub mod db;
 pub mod diagnostics;
 pub mod display;
@@ -40,11 +39,14 @@ use std::sync::Arc;
 use chalk_ir::{
     fold::{Shift, TypeFoldable},
     interner::HasInterner,
-    NoSolution,
+    visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
+    NoSolution, TyData,
 };
 use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
 use hir_expand::name;
 use itertools::Either;
+use la_arena::{Arena, Idx};
+use rustc_hash::FxHashSet;
 use traits::FnTrait;
 use utils::Generics;
 
@@ -71,7 +73,6 @@ pub use mapping::{
 };
 pub use traits::TraitEnvironment;
 pub use utils::{all_super_traits, is_fn_unsafe_to_call};
-pub use walk::TypeWalk;
 
 pub use chalk_ir::{
     cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
@@ -107,6 +108,7 @@ pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
 
 pub type Ty = chalk_ir::Ty<Interner>;
 pub type TyKind = chalk_ir::TyKind<Interner>;
+pub type TypeFlags = chalk_ir::TypeFlags;
 pub type DynTy = chalk_ir::DynTy<Interner>;
 pub type FnPointer = chalk_ir::FnPointer<Interner>;
 // pub type FnSubst = chalk_ir::FnSubst<Interner>;
@@ -289,22 +291,24 @@ impl TypeFoldable<Interner> for CallableSig {
 
 #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
 pub enum ImplTraitId {
-    ReturnTypeImplTrait(hir_def::FunctionId, u16),
+    ReturnTypeImplTrait(hir_def::FunctionId, RpitId),
     AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
 }
 
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
 pub struct ReturnTypeImplTraits {
-    pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+    pub(crate) impl_traits: Arena<ReturnTypeImplTrait>,
 }
 
 has_interner!(ReturnTypeImplTraits);
 
 #[derive(Clone, PartialEq, Eq, Debug, Hash)]
-pub(crate) struct ReturnTypeImplTrait {
+pub struct ReturnTypeImplTrait {
     pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
 }
 
+pub type RpitId = Idx<ReturnTypeImplTrait>;
+
 pub fn static_lifetime() -> Lifetime {
     LifetimeData::Static.intern(Interner)
 }
@@ -563,3 +567,68 @@ pub fn callable_sig_from_fnonce(
 
     Some(CallableSig::from_params_and_return(params, ret_ty, false, Safety::Safe))
 }
+
+struct PlaceholderCollector<'db> {
+    db: &'db dyn HirDatabase,
+    placeholders: FxHashSet<TypeOrConstParamId>,
+}
+
+impl PlaceholderCollector<'_> {
+    fn collect(&mut self, idx: PlaceholderIndex) {
+        let id = from_placeholder_idx(self.db, idx);
+        self.placeholders.insert(id);
+    }
+}
+
+impl TypeVisitor<Interner> for PlaceholderCollector<'_> {
+    type BreakTy = ();
+
+    fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
+        self
+    }
+
+    fn interner(&self) -> Interner {
+        Interner
+    }
+
+    fn visit_ty(
+        &mut self,
+        ty: &Ty,
+        outer_binder: DebruijnIndex,
+    ) -> std::ops::ControlFlow<Self::BreakTy> {
+        let has_placeholder_bits = TypeFlags::HAS_TY_PLACEHOLDER | TypeFlags::HAS_CT_PLACEHOLDER;
+        let TyData { kind, flags } = ty.data(Interner);
+
+        if let TyKind::Placeholder(idx) = kind {
+            self.collect(*idx);
+        } else if flags.intersects(has_placeholder_bits) {
+            return ty.super_visit_with(self, outer_binder);
+        } else {
+            // Fast path: don't visit inner types (e.g. generic arguments) when `flags` indicate
+            // that there are no placeholders.
+        }
+
+        std::ops::ControlFlow::Continue(())
+    }
+
+    fn visit_const(
+        &mut self,
+        constant: &chalk_ir::Const<Interner>,
+        _outer_binder: DebruijnIndex,
+    ) -> std::ops::ControlFlow<Self::BreakTy> {
+        if let chalk_ir::ConstValue::Placeholder(idx) = constant.data(Interner).value {
+            self.collect(idx);
+        }
+        std::ops::ControlFlow::Continue(())
+    }
+}
+
+/// Returns unique placeholders for types and consts contained in `value`.
+pub fn collect_placeholders<T>(value: &T, db: &dyn HirDatabase) -> Vec<TypeOrConstParamId>
+where
+    T: ?Sized + TypeVisitable<Interner>,
+{
+    let mut collector = PlaceholderCollector { db, placeholders: FxHashSet::default() };
+    value.visit_with(&mut collector, DebruijnIndex::INNERMOST);
+    collector.placeholders.into_iter().collect()
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
index 592410008a6..7cce13a793e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
@@ -23,24 +23,24 @@ use hir_def::{
     generics::{
         TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
     },
-    intern::Interned,
-    lang_item::lang_attr,
+    lang_item::{lang_attr, LangItem},
     path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
     resolver::{HasResolver, Resolver, TypeNs},
     type_ref::{
         ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
     },
     AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
-    HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
-    TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+    HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, StaticId, StructId,
+    TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
 };
 use hir_expand::{name::Name, ExpandResult};
+use intern::Interned;
 use itertools::Either;
-use la_arena::ArenaMap;
+use la_arena::{Arena, ArenaMap};
 use rustc_hash::FxHashSet;
 use smallvec::SmallVec;
 use stdx::{impl_from, never};
-use syntax::{ast, SmolStr};
+use syntax::ast;
 
 use crate::{
     all_super_traits,
@@ -58,6 +58,51 @@ use crate::{
 };
 
 #[derive(Debug)]
+enum ImplTraitLoweringState {
+    /// When turning `impl Trait` into opaque types, we have to collect the
+    /// bounds at the same time to get the IDs correct (without becoming too
+    /// complicated). I don't like using interior mutability (as for the
+    /// counter), but I've tried and failed to make the lifetimes work for
+    /// passing around a `&mut TyLoweringContext`. The core problem is that
+    /// we're grouping the mutable data (the counter and this field) together
+    /// with the immutable context (the references to the DB and resolver).
+    /// Splitting this up would be a possible fix.
+    Opaque(RefCell<Arena<ReturnTypeImplTrait>>),
+    Param(Cell<u16>),
+    Variable(Cell<u16>),
+    Disallowed,
+}
+impl ImplTraitLoweringState {
+    fn new(impl_trait_mode: ImplTraitLoweringMode) -> ImplTraitLoweringState {
+        match impl_trait_mode {
+            ImplTraitLoweringMode::Opaque => Self::Opaque(RefCell::new(Arena::new())),
+            ImplTraitLoweringMode::Param => Self::Param(Cell::new(0)),
+            ImplTraitLoweringMode::Variable => Self::Variable(Cell::new(0)),
+            ImplTraitLoweringMode::Disallowed => Self::Disallowed,
+        }
+    }
+
+    fn take(&self) -> Self {
+        match self {
+            Self::Opaque(x) => Self::Opaque(RefCell::new(x.take())),
+            Self::Param(x) => Self::Param(Cell::new(x.get())),
+            Self::Variable(x) => Self::Variable(Cell::new(x.get())),
+            Self::Disallowed => Self::Disallowed,
+        }
+    }
+
+    fn swap(&self, impl_trait_mode: &Self) {
+        match (self, impl_trait_mode) {
+            (Self::Opaque(x), Self::Opaque(y)) => x.swap(y),
+            (Self::Param(x), Self::Param(y)) => x.swap(y),
+            (Self::Variable(x), Self::Variable(y)) => x.swap(y),
+            (Self::Disallowed, Self::Disallowed) => (),
+            _ => panic!("mismatched lowering mode"),
+        }
+    }
+}
+
+#[derive(Debug)]
 pub struct TyLoweringContext<'a> {
     pub db: &'a dyn HirDatabase,
     pub resolver: &'a Resolver,
@@ -67,17 +112,7 @@ pub struct TyLoweringContext<'a> {
     /// should be converted to variables. I think in practice, this isn't
     /// possible currently, so this should be fine for now.
     pub type_param_mode: ParamLoweringMode,
-    pub impl_trait_mode: ImplTraitLoweringMode,
-    impl_trait_counter: Cell<u16>,
-    /// When turning `impl Trait` into opaque types, we have to collect the
-    /// bounds at the same time to get the IDs correct (without becoming too
-    /// complicated). I don't like using interior mutability (as for the
-    /// counter), but I've tried and failed to make the lifetimes work for
-    /// passing around a `&mut TyLoweringContext`. The core problem is that
-    /// we're grouping the mutable data (the counter and this field) together
-    /// with the immutable context (the references to the DB and resolver).
-    /// Splitting this up would be a possible fix.
-    opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
+    impl_trait_mode: ImplTraitLoweringState,
     expander: RefCell<Option<Expander>>,
     /// Tracks types with explicit `?Sized` bounds.
     pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
@@ -85,19 +120,15 @@ pub struct TyLoweringContext<'a> {
 
 impl<'a> TyLoweringContext<'a> {
     pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
-        let impl_trait_counter = Cell::new(0);
-        let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+        let impl_trait_mode = ImplTraitLoweringState::Disallowed;
         let type_param_mode = ParamLoweringMode::Placeholder;
         let in_binders = DebruijnIndex::INNERMOST;
-        let opaque_type_data = RefCell::new(Vec::new());
         Self {
             db,
             resolver,
             in_binders,
             impl_trait_mode,
-            impl_trait_counter,
             type_param_mode,
-            opaque_type_data,
             expander: RefCell::new(None),
             unsized_types: RefCell::default(),
         }
@@ -108,20 +139,18 @@ impl<'a> TyLoweringContext<'a> {
         debruijn: DebruijnIndex,
         f: impl FnOnce(&TyLoweringContext<'_>) -> T,
     ) -> T {
-        let opaque_ty_data_vec = self.opaque_type_data.take();
+        let impl_trait_mode = self.impl_trait_mode.take();
         let expander = self.expander.take();
         let unsized_types = self.unsized_types.take();
         let new_ctx = Self {
             in_binders: debruijn,
-            impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
-            opaque_type_data: RefCell::new(opaque_ty_data_vec),
+            impl_trait_mode,
             expander: RefCell::new(expander),
             unsized_types: RefCell::new(unsized_types),
             ..*self
         };
         let result = f(&new_ctx);
-        self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
-        self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+        self.impl_trait_mode.swap(&new_ctx.impl_trait_mode);
         self.expander.replace(new_ctx.expander.into_inner());
         self.unsized_types.replace(new_ctx.unsized_types.into_inner());
         result
@@ -136,7 +165,7 @@ impl<'a> TyLoweringContext<'a> {
     }
 
     pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
-        Self { impl_trait_mode, ..self }
+        Self { impl_trait_mode: ImplTraitLoweringState::new(impl_trait_mode), ..self }
     }
 
     pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
@@ -244,20 +273,17 @@ impl<'a> TyLoweringContext<'a> {
             }
             TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
             TypeRef::ImplTrait(bounds) => {
-                match self.impl_trait_mode {
-                    ImplTraitLoweringMode::Opaque => {
-                        let idx = self.impl_trait_counter.get();
-                        self.impl_trait_counter.set(idx + 1);
+                match &self.impl_trait_mode {
+                    ImplTraitLoweringState::Opaque(opaque_type_data) => {
                         let func = match self.resolver.generic_def() {
                             Some(GenericDefId::FunctionId(f)) => f,
                             _ => panic!("opaque impl trait lowering in non-function"),
                         };
 
-                        assert!(idx as usize == self.opaque_type_data.borrow().len());
                         // this dance is to make sure the data is in the right
                         // place even if we encounter more opaque types while
                         // lowering the bounds
-                        self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
+                        let idx = opaque_type_data.borrow_mut().alloc(ReturnTypeImplTrait {
                             bounds: crate::make_single_type_binders(Vec::new()),
                         });
                         // We don't want to lower the bounds inside the binders
@@ -273,7 +299,7 @@ impl<'a> TyLoweringContext<'a> {
                             .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
                                 ctx.lower_impl_trait(bounds, func)
                             });
-                        self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
+                        opaque_type_data.borrow_mut()[idx] = actual_opaque_type_data;
 
                         let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
                         let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
@@ -281,10 +307,10 @@ impl<'a> TyLoweringContext<'a> {
                         let parameters = generics.bound_vars_subst(self.db, self.in_binders);
                         TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
                     }
-                    ImplTraitLoweringMode::Param => {
-                        let idx = self.impl_trait_counter.get();
+                    ImplTraitLoweringState::Param(counter) => {
+                        let idx = counter.get();
                         // FIXME we're probably doing something wrong here
-                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+                        counter.set(idx + count_impl_traits(type_ref) as u16);
                         if let Some(def) = self.resolver.generic_def() {
                             let generics = generics(self.db.upcast(), def);
                             let param = generics
@@ -305,10 +331,10 @@ impl<'a> TyLoweringContext<'a> {
                             TyKind::Error.intern(Interner)
                         }
                     }
-                    ImplTraitLoweringMode::Variable => {
-                        let idx = self.impl_trait_counter.get();
+                    ImplTraitLoweringState::Variable(counter) => {
+                        let idx = counter.get();
                         // FIXME we're probably doing something wrong here
-                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
+                        counter.set(idx + count_impl_traits(type_ref) as u16);
                         let (
                             _parent_params,
                             self_params,
@@ -327,7 +353,7 @@ impl<'a> TyLoweringContext<'a> {
                         ))
                         .intern(Interner)
                     }
-                    ImplTraitLoweringMode::Disallowed => {
+                    ImplTraitLoweringState::Disallowed => {
                         // FIXME: report error
                         TyKind::Error.intern(Interner)
                     }
@@ -954,7 +980,7 @@ impl<'a> TyLoweringContext<'a> {
             TypeBound::Path(path, TraitBoundModifier::Maybe) => {
                 let sized_trait = self
                     .db
-                    .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
+                    .lang_item(self.resolver.krate(), LangItem::Sized)
                     .and_then(|lang_item| lang_item.as_trait());
                 // Don't lower associated type bindings as the only possible relaxed trait bound
                 // `?Sized` has no of them.
@@ -1150,7 +1176,7 @@ impl<'a> TyLoweringContext<'a> {
                 let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
                 let sized_trait = ctx
                     .db
-                    .lang_item(krate, SmolStr::new_inline("sized"))
+                    .lang_item(krate, LangItem::Sized)
                     .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
                 let sized_clause = sized_trait.map(|trait_id| {
                     let clause = WhereClause::Implemented(TraitRef {
@@ -1209,7 +1235,7 @@ fn named_associated_type_shorthand_candidates<R>(
     mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
 ) -> Option<R> {
     let mut search = |t| {
-        for t in all_super_trait_refs(db, t) {
+        all_super_trait_refs(db, t, |t| {
             let data = db.trait_data(t.hir_trait_id());
 
             for (name, assoc_id) in &data.items {
@@ -1219,8 +1245,8 @@ fn named_associated_type_shorthand_candidates<R>(
                     }
                 }
             }
-        }
-        None
+            None
+        })
     };
 
     match res {
@@ -1489,7 +1515,7 @@ fn implicitly_sized_clauses<'a>(
     let is_trait_def = matches!(def, GenericDefId::TraitId(..));
     let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
     let sized_trait = db
-        .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+        .lang_item(resolver.krate(), LangItem::Sized)
         .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
 
     sized_trait.into_iter().flat_map(move |sized_trait| {
@@ -1704,6 +1730,15 @@ pub enum CallableDefId {
     EnumVariantId(EnumVariantId),
 }
 impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+impl From<CallableDefId> for ModuleDefId {
+    fn from(def: CallableDefId) -> ModuleDefId {
+        match def {
+            CallableDefId::FunctionId(f) => ModuleDefId::FunctionId(f),
+            CallableDefId::StructId(s) => ModuleDefId::AdtId(AdtId::StructId(s)),
+            CallableDefId::EnumVariantId(e) => ModuleDefId::EnumVariantId(e),
+        }
+    }
+}
 
 impl CallableDefId {
     pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
@@ -1854,8 +1889,12 @@ pub(crate) fn return_type_impl_traits(
         .with_type_param_mode(ParamLoweringMode::Variable);
     let _ret = ctx_ret.lower_ty(&data.ret_type);
     let generics = generics(db.upcast(), def.into());
-    let return_type_impl_traits =
-        ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+    let return_type_impl_traits = ReturnTypeImplTraits {
+        impl_traits: match ctx_ret.impl_trait_mode {
+            ImplTraitLoweringState::Opaque(x) => x.into_inner(),
+            _ => unreachable!(),
+        },
+    };
     if return_type_impl_traits.impl_traits.is_empty() {
         None
     } else {
@@ -1931,7 +1970,7 @@ pub(crate) fn const_or_path_to_chalk(
     debruijn: DebruijnIndex,
 ) -> Const {
     match value {
-        ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
+        ConstScalarOrPath::Scalar(s) => intern_const_scalar(*s, expected_ty),
         ConstScalarOrPath::Path(n) => {
             let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
             path_to_const(db, resolver, &path, mode, args, debruijn)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
index 2328dceb839..8c7714b9a69 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs
@@ -5,10 +5,11 @@
 use std::{ops::ControlFlow, sync::Arc};
 
 use base_db::{CrateId, Edition};
-use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use chalk_ir::{cast::Cast, Mutability, TyKind, UniverseIndex};
 use hir_def::{
-    data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
-    FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId, TraitId,
+    data::ImplData, item_scope::ItemScope, lang_item::LangItem, nameres::DefMap, AssocItemId,
+    BlockId, ConstId, FunctionId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId,
+    ModuleId, TraitId,
 };
 use hir_expand::name::Name;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -24,7 +25,7 @@ use crate::{
     static_lifetime, to_chalk_trait_id,
     utils::all_super_traits,
     AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
-    Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+    Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt,
 };
 
 /// This is used as a key for indexing impls.
@@ -437,49 +438,49 @@ pub fn def_crates(
     }
 }
 
-pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> {
+pub fn lang_items_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, LangItem)> {
     use hir_expand::name;
     use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
     Some(match op {
         BinaryOp::LogicOp(_) => return None,
         BinaryOp::ArithOp(aop) => match aop {
-            ArithOp::Add => (name!(add), name!(add)),
-            ArithOp::Mul => (name!(mul), name!(mul)),
-            ArithOp::Sub => (name!(sub), name!(sub)),
-            ArithOp::Div => (name!(div), name!(div)),
-            ArithOp::Rem => (name!(rem), name!(rem)),
-            ArithOp::Shl => (name!(shl), name!(shl)),
-            ArithOp::Shr => (name!(shr), name!(shr)),
-            ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
-            ArithOp::BitOr => (name!(bitor), name!(bitor)),
-            ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+            ArithOp::Add => (name![add], LangItem::Add),
+            ArithOp::Mul => (name![mul], LangItem::Mul),
+            ArithOp::Sub => (name![sub], LangItem::Sub),
+            ArithOp::Div => (name![div], LangItem::Div),
+            ArithOp::Rem => (name![rem], LangItem::Rem),
+            ArithOp::Shl => (name![shl], LangItem::Shl),
+            ArithOp::Shr => (name![shr], LangItem::Shr),
+            ArithOp::BitXor => (name![bitxor], LangItem::BitXor),
+            ArithOp::BitOr => (name![bitor], LangItem::BitOr),
+            ArithOp::BitAnd => (name![bitand], LangItem::BitAnd),
         },
         BinaryOp::Assignment { op: Some(aop) } => match aop {
-            ArithOp::Add => (name!(add_assign), name!(add_assign)),
-            ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
-            ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
-            ArithOp::Div => (name!(div_assign), name!(div_assign)),
-            ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
-            ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
-            ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
-            ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
-            ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
-            ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+            ArithOp::Add => (name![add_assign], LangItem::AddAssign),
+            ArithOp::Mul => (name![mul_assign], LangItem::MulAssign),
+            ArithOp::Sub => (name![sub_assign], LangItem::SubAssign),
+            ArithOp::Div => (name![div_assign], LangItem::DivAssign),
+            ArithOp::Rem => (name![rem_assign], LangItem::RemAssign),
+            ArithOp::Shl => (name![shl_assign], LangItem::ShlAssign),
+            ArithOp::Shr => (name![shr_assign], LangItem::ShrAssign),
+            ArithOp::BitXor => (name![bitxor_assign], LangItem::BitXorAssign),
+            ArithOp::BitOr => (name![bitor_assign], LangItem::BitOrAssign),
+            ArithOp::BitAnd => (name![bitand_assign], LangItem::BitAndAssign),
         },
         BinaryOp::CmpOp(cop) => match cop {
-            CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
-            CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+            CmpOp::Eq { negated: false } => (name![eq], LangItem::PartialEq),
+            CmpOp::Eq { negated: true } => (name![ne], LangItem::PartialEq),
             CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
-                (name!(le), name!(partial_ord))
+                (name![le], LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
-                (name!(lt), name!(partial_ord))
+                (name![lt], LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
-                (name!(ge), name!(partial_ord))
+                (name![ge], LangItem::PartialOrd)
             }
             CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
-                (name!(gt), name!(partial_ord))
+                (name![gt], LangItem::PartialOrd)
             }
         },
         BinaryOp::Assignment { op: None } => return None,
@@ -587,25 +588,31 @@ impl ReceiverAdjustments {
                 }
             }
         }
+        if let Some(m) = self.autoref {
+            ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
+            adjust
+                .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
+        }
         if self.unsize_array {
-            ty = match ty.kind(Interner) {
-                TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
-                _ => {
-                    never!("unsize_array with non-array {:?}", ty);
-                    ty
+            ty = 'x: {
+                if let TyKind::Ref(m, l, inner) = ty.kind(Interner) {
+                    if let TyKind::Array(inner, _) = inner.kind(Interner) {
+                        break 'x TyKind::Ref(
+                            m.clone(),
+                            l.clone(),
+                            TyKind::Slice(inner.clone()).intern(Interner),
+                        )
+                        .intern(Interner);
+                    }
                 }
+                never!("unsize_array with non-reference-to-array {:?}", ty);
+                ty
             };
-            // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
             adjust.push(Adjustment {
                 kind: Adjust::Pointer(PointerCast::Unsize),
                 target: ty.clone(),
             });
         }
-        if let Some(m) = self.autoref {
-            ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
-            adjust
-                .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
-        }
         (ty, adjust)
     }
 
@@ -712,17 +719,17 @@ fn lookup_impl_assoc_item_for_trait_ref(
     let table = InferenceTable::new(db, env);
 
     let impl_data = find_matching_impl(impls, table, trait_ref)?;
-    impl_data.items.iter().find_map(|it| match it {
+    impl_data.items.iter().find_map(|&it| match it {
         AssocItemId::FunctionId(f) => {
-            (db.function_data(*f).name == *name).then_some(AssocItemId::FunctionId(*f))
+            (db.function_data(f).name == *name).then_some(AssocItemId::FunctionId(f))
         }
         AssocItemId::ConstId(c) => db
-            .const_data(*c)
+            .const_data(c)
             .name
             .as_ref()
-            .map(|n| *n == *name)
-            .and_then(|result| if result { Some(AssocItemId::ConstId(*c)) } else { None }),
-        _ => None,
+            .map(|n| n == name)
+            .and_then(|result| if result { Some(AssocItemId::ConstId(c)) } else { None }),
+        AssocItemId::TypeAliasId(_) => None,
     })
 }
 
@@ -1094,13 +1101,13 @@ fn iterate_inherent_methods(
         None => return ControlFlow::Continue(()),
     };
 
-    let (module, block) = match visible_from_module {
+    let (module, mut block) = match visible_from_module {
         VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
         VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
         VisibleFromModule::None => (None, None),
     };
 
-    if let Some(block_id) = block {
+    while let Some(block_id) = block {
         if let Some(impls) = db.inherent_impls_in_block(block_id) {
             impls_for_self_ty(
                 &impls,
@@ -1113,6 +1120,11 @@ fn iterate_inherent_methods(
                 callback,
             )?;
         }
+
+        block = db
+            .block_def_map(block_id)
+            .and_then(|map| map.parent())
+            .and_then(|module| module.containing_block());
     }
 
     for krate in def_crates {
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
index 6c7a5329970..41c53701df6 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs
@@ -813,7 +813,7 @@ fn test() {
 fn method_resolution_trait_from_prelude() {
     check_types(
         r#"
-//- /main.rs crate:main deps:core
+//- /main.rs edition:2018 crate:main deps:core
 struct S;
 impl Clone for S {}
 
@@ -986,14 +986,13 @@ fn main() {
 }
 
 #[test]
-fn method_resolution_encountering_fn_type() {
+fn explicit_fn_once_call_fn_item() {
     check_types(
         r#"
-//- /main.rs
+//- minicore: fn
 fn foo() {}
-trait FnOnce { fn call(self); }
-fn test() { foo.call(); }
-          //^^^^^^^^^^ {unknown}
+fn test() { foo.call_once(); }
+          //^^^^^^^^^^^^^^^ ()
 "#,
     );
 }
@@ -1527,7 +1526,7 @@ fn f(x: U2) {
 fn skip_array_during_method_dispatch() {
     check_types(
         r#"
-//- /main2018.rs crate:main2018 deps:core
+//- /main2018.rs crate:main2018 deps:core edition:2018
 use core::IntoIterator;
 
 fn f() {
@@ -1725,14 +1724,13 @@ fn test() {
 
 #[test]
 fn receiver_adjustment_unsize_array() {
-    // FIXME not quite correct
     check(
         r#"
 //- minicore: slice
 fn test() {
     let a = [1, 2, 3];
     a.len();
-} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not))
+} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize)
 "#,
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
index 146145523b2..2e5787b701c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs
@@ -3200,3 +3200,86 @@ fn func() {
     "#,
     );
 }
+
+// FIXME
+#[test]
+fn castable_to() {
+    check_infer(
+        r#"
+//- minicore: sized
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+    inner: *mut T,
+}
+impl<T> Box<T> {
+    fn new(t: T) -> Self { loop {} }
+}
+
+fn func() {
+    let x = Box::new([]) as Box<[i32; 0]>;
+}
+"#,
+        expect![[r#"
+            99..100 't': T
+            113..124 '{ loop {} }': Box<T>
+            115..122 'loop {}': !
+            120..122 '{}': ()
+            138..184 '{     ...0]>; }': ()
+            148..149 'x': Box<[i32; 0]>
+            152..160 'Box::new': fn new<[{unknown}; 0]>([{unknown}; 0]) -> Box<[{unknown}; 0]>
+            152..164 'Box::new([])': Box<[{unknown}; 0]>
+            152..181 'Box::n...2; 0]>': Box<[i32; 0]>
+            161..163 '[]': [{unknown}; 0]
+        "#]],
+    );
+}
+
+#[test]
+fn castable_to1() {
+    check_infer(
+        r#"
+struct Ark<T>(T);
+impl<T> Ark<T> {
+    fn foo(&self) -> *const T {
+        &self.0
+    }
+}
+fn f<T>(t: Ark<T>) {
+    Ark::foo(&t) as *const ();
+}
+"#,
+        expect![[r#"
+            47..51 'self': &Ark<T>
+            65..88 '{     ...     }': *const T
+            75..82 '&self.0': &T
+            76..80 'self': &Ark<T>
+            76..82 'self.0': T
+            99..100 't': Ark<T>
+            110..144 '{     ... (); }': ()
+            116..124 'Ark::foo': fn foo<T>(&Ark<T>) -> *const T
+            116..128 'Ark::foo(&t)': *const T
+            116..141 'Ark::f...nst ()': *const ()
+            125..127 '&t': &Ark<T>
+            126..127 't': Ark<T>
+        "#]],
+    );
+}
+
+// FIXME
+#[test]
+fn castable_to2() {
+    check_infer(
+        r#"
+fn func() {
+    let x = &0u32 as *const _;
+}
+"#,
+        expect![[r#"
+            10..44 '{     ...t _; }': ()
+            20..21 'x': *const {unknown}
+            24..29 '&0u32': &u32
+            24..41 '&0u32 ...onst _': *const {unknown}
+            25..29 '0u32': u32
+        "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
index d01fe063285..015085bde45 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs
@@ -163,98 +163,22 @@ fn test() {
 }
 
 #[test]
-fn infer_try() {
+fn infer_try_trait() {
     check_types(
         r#"
-//- /main.rs crate:main deps:core
+//- minicore: try, result
 fn test() {
     let r: Result<i32, u64> = Result::Ok(1);
     let v = r?;
     v;
 } //^ i32
 
-//- /core.rs crate:core
-pub mod ops {
-    pub trait Try {
-        type Ok;
-        type Error;
-    }
-}
-
-pub mod result {
-    pub enum Result<O, E> {
-        Ok(O),
-        Err(E)
-    }
-
-    impl<O, E> crate::ops::Try for Result<O, E> {
-        type Ok = O;
-        type Error = E;
-    }
-}
-
-pub mod prelude {
-    pub mod rust_2018 {
-        pub use crate::{result::*, ops::*};
-    }
-}
-"#,
-    );
-}
-
-#[test]
-fn infer_try_trait_v2() {
-    check_types(
-        r#"
-//- /main.rs crate:main deps:core
-fn test() {
-    let r: Result<i32, u64> = Result::Ok(1);
-    let v = r?;
-    v;
-} //^ i32
-
-//- /core.rs crate:core
-mod ops {
-    mod try_trait {
-        pub trait Try: FromResidual {
-            type Output;
-            type Residual;
-        }
-        pub trait FromResidual<R = <Self as Try>::Residual> {}
-    }
-
-    pub use self::try_trait::FromResidual;
-    pub use self::try_trait::Try;
+impl<O, E> core::ops::Try for Result<O, E> {
+    type Output = O;
+    type Error = Result<core::convert::Infallible, E>;
 }
 
-mod convert {
-    pub trait From<T> {}
-    impl<T> From<T> for T {}
-}
-
-pub mod result {
-    use crate::convert::From;
-    use crate::ops::{Try, FromResidual};
-
-    pub enum Infallible {}
-    pub enum Result<O, E> {
-        Ok(O),
-        Err(E)
-    }
-
-    impl<O, E> Try for Result<O, E> {
-        type Output = O;
-        type Error = Result<Infallible, E>;
-    }
-
-    impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
-}
-
-pub mod prelude {
-    pub mod rust_2018 {
-        pub use crate::result::*;
-    }
-}
+impl<T, E, F: From<E>> core::ops::FromResidual<Result<core::convert::Infallible, E>> for Result<T, F> {}
 "#,
     );
 }
@@ -263,7 +187,8 @@ pub mod prelude {
 fn infer_for_loop() {
     check_types(
         r#"
-//- /main.rs crate:main deps:core,alloc
+//- minicore: iterator
+//- /main.rs crate:main deps:alloc
 #![no_std]
 use alloc::collections::Vec;
 
@@ -275,23 +200,7 @@ fn test() {
     } //^ &str
 }
 
-//- /core.rs crate:core
-pub mod iter {
-    pub trait IntoIterator {
-        type Item;
-        type IntoIter: Iterator<Item = Self::Item>;
-    }
-    pub trait Iterator {
-        type Item;
-    }
-}
-pub mod prelude {
-    pub mod rust_2018 {
-        pub use crate::iter::*;
-    }
-}
-
-//- /alloc.rs crate:alloc deps:core
+//- /alloc.rs crate:alloc
 #![no_std]
 pub mod collections {
     pub struct Vec<T> {}
@@ -1848,25 +1757,19 @@ fn test() {
 fn fn_trait() {
     check_infer_with_mismatches(
         r#"
-trait FnOnce<Args> {
-    type Output;
-
-    fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
-}
+//- minicore: fn
 
 fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
     f.call_once((1, 2));
 }"#,
         expect![[r#"
-            56..60 'self': Self
-            62..66 'args': Args
-            149..150 'f': F
-            155..183 '{     ...2)); }': ()
-            161..162 'f': F
-            161..180 'f.call...1, 2))': u128
-            173..179 '(1, 2)': (u32, u64)
-            174..175 '1': u32
-            177..178 '2': u64
+            38..39 'f': F
+            44..72 '{     ...2)); }': ()
+            50..51 'f': F
+            50..69 'f.call...1, 2))': u128
+            62..68 '(1, 2)': (u32, u64)
+            63..64 '1': u32
+            66..67 '2': u64
         "#]],
     );
 }
@@ -1875,12 +1778,7 @@ fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
 fn fn_ptr_and_item() {
     check_infer_with_mismatches(
         r#"
-#[lang="fn_once"]
-trait FnOnce<Args> {
-    type Output;
-
-    fn call_once(self, args: Args) -> Self::Output;
-}
+//- minicore: fn
 
 trait Foo<T> {
     fn foo(&self) -> T;
@@ -1906,27 +1804,25 @@ fn test() {
     opt.map(f);
 }"#,
         expect![[r#"
-            74..78 'self': Self
-            80..84 'args': Args
-            139..143 'self': &Self
-            243..247 'self': &Bar<F>
-            260..271 '{ loop {} }': (A1, R)
-            262..269 'loop {}': !
-            267..269 '{}': ()
-            355..359 'self': Opt<T>
-            361..362 'f': F
-            377..388 '{ loop {} }': Opt<U>
-            379..386 'loop {}': !
-            384..386 '{}': ()
-            402..518 '{     ...(f); }': ()
-            412..415 'bar': Bar<fn(u8) -> u32>
-            441..444 'bar': Bar<fn(u8) -> u32>
-            441..450 'bar.foo()': (u8, u32)
-            461..464 'opt': Opt<u8>
-            483..484 'f': fn(u8) -> u32
-            505..508 'opt': Opt<u8>
-            505..515 'opt.map(f)': Opt<u32>
-            513..514 'f': fn(u8) -> u32
+            28..32 'self': &Self
+            132..136 'self': &Bar<F>
+            149..160 '{ loop {} }': (A1, R)
+            151..158 'loop {}': !
+            156..158 '{}': ()
+            244..248 'self': Opt<T>
+            250..251 'f': F
+            266..277 '{ loop {} }': Opt<U>
+            268..275 'loop {}': !
+            273..275 '{}': ()
+            291..407 '{     ...(f); }': ()
+            301..304 'bar': Bar<fn(u8) -> u32>
+            330..333 'bar': Bar<fn(u8) -> u32>
+            330..339 'bar.foo()': (u8, u32)
+            350..353 'opt': Opt<u8>
+            372..373 'f': fn(u8) -> u32
+            394..397 'opt': Opt<u8>
+            394..404 'opt.map(f)': Opt<u32>
+            402..403 'f': fn(u8) -> u32
         "#]],
     );
 }
@@ -2399,10 +2295,8 @@ fn unselected_projection_in_trait_env_no_cycle() {
     // this is not a cycle
     check_types(
         r#"
-//- /main.rs
-trait Index {
-    type Output;
-}
+//- minicore: index
+use core::ops::Index;
 
 type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
 
@@ -2999,40 +2893,17 @@ fn test() {
 fn integer_range_iterate() {
     check_types(
         r#"
-//- /main.rs crate:main deps:core
+//- minicore: range, iterator
+//- /main.rs crate:main
 fn test() {
     for x in 0..100 { x; }
 }                   //^ i32
 
-//- /core.rs crate:core
-pub mod ops {
-    pub struct Range<Idx> {
-        pub start: Idx,
-        pub end: Idx,
-    }
-}
-
-pub mod iter {
-    pub trait Iterator {
-        type Item;
-    }
-
-    pub trait IntoIterator {
-        type Item;
-        type IntoIter: Iterator<Item = Self::Item>;
-    }
-
-    impl<T> IntoIterator for T where T: Iterator {
-        type Item = <T as Iterator>::Item;
-        type IntoIter = Self;
-    }
-}
-
 trait Step {}
 impl Step for i32 {}
 impl Step for i64 {}
 
-impl<A: Step> iter::Iterator for ops::Range<A> {
+impl<A: Step> core::iter::Iterator for core::ops::Range<A> {
     type Item = A;
 }
 "#,
@@ -3507,14 +3378,9 @@ trait Request {
 fn bin_op_adt_with_rhs_primitive() {
     check_infer_with_mismatches(
         r#"
-#[lang = "add"]
-pub trait Add<Rhs = Self> {
-    type Output;
-    fn add(self, rhs: Rhs) -> Self::Output;
-}
-
+//- minicore: add
 struct Wrapper(u32);
-impl Add<u32> for Wrapper {
+impl core::ops::Add<u32> for Wrapper {
     type Output = Self;
     fn add(self, rhs: u32) -> Wrapper {
         Wrapper(rhs)
@@ -3527,30 +3393,107 @@ fn main(){
 
 }"#,
         expect![[r#"
-            72..76 'self': Self
-            78..81 'rhs': Rhs
-            192..196 'self': Wrapper
-            198..201 'rhs': u32
-            219..247 '{     ...     }': Wrapper
-            229..236 'Wrapper': Wrapper(u32) -> Wrapper
-            229..241 'Wrapper(rhs)': Wrapper
-            237..240 'rhs': u32
-            259..345 '{     ...um;  }': ()
-            269..276 'wrapped': Wrapper
-            279..286 'Wrapper': Wrapper(u32) -> Wrapper
-            279..290 'Wrapper(10)': Wrapper
-            287..289 '10': u32
-            300..303 'num': u32
-            311..312 '2': u32
-            322..325 'res': Wrapper
-            328..335 'wrapped': Wrapper
-            328..341 'wrapped + num': Wrapper
-            338..341 'num': u32
+            95..99 'self': Wrapper
+            101..104 'rhs': u32
+            122..150 '{     ...     }': Wrapper
+            132..139 'Wrapper': Wrapper(u32) -> Wrapper
+            132..144 'Wrapper(rhs)': Wrapper
+            140..143 'rhs': u32
+            162..248 '{     ...um;  }': ()
+            172..179 'wrapped': Wrapper
+            182..189 'Wrapper': Wrapper(u32) -> Wrapper
+            182..193 'Wrapper(10)': Wrapper
+            190..192 '10': u32
+            203..206 'num': u32
+            214..215 '2': u32
+            225..228 'res': Wrapper
+            231..238 'wrapped': Wrapper
+            231..244 'wrapped + num': Wrapper
+            241..244 'num': u32
         "#]],
     )
 }
 
 #[test]
+fn builtin_binop_expectation_works_on_single_reference() {
+    check_types(
+        r#"
+//- minicore: add
+use core::ops::Add;
+impl Add<i32> for i32 { type Output = i32 }
+impl Add<&i32> for i32 { type Output = i32 }
+impl Add<u32> for u32 { type Output = u32 }
+impl Add<&u32> for u32 { type Output = u32 }
+
+struct V<T>;
+impl<T> V<T> {
+    fn default() -> Self { loop {} }
+    fn get(&self, _: &T) -> &T { loop {} }
+}
+
+fn take_u32(_: u32) {}
+fn minimized() {
+    let v = V::default();
+    let p = v.get(&0);
+      //^ &u32
+    take_u32(42 + p);
+}
+"#,
+    );
+}
+
+#[test]
+fn no_builtin_binop_expectation_for_general_ty_var() {
+    // FIXME: Ideally type mismatch should be reported on `take_u32(42 - p)`.
+    check_types(
+        r#"
+//- minicore: add
+use core::ops::Add;
+impl Add<i32> for i32 { type Output = i32; }
+impl Add<&i32> for i32 { type Output = i32; }
+// This is needed to prevent chalk from giving unique solution to `i32: Add<&?0>` after applying
+// fallback to integer type variable for `42`.
+impl Add<&()> for i32 { type Output = (); }
+
+struct V<T>;
+impl<T> V<T> {
+    fn default() -> Self { loop {} }
+    fn get(&self) -> &T { loop {} }
+}
+
+fn take_u32(_: u32) {}
+fn minimized() {
+    let v = V::default();
+    let p = v.get();
+      //^ &{unknown}
+    take_u32(42 + p);
+}
+"#,
+    );
+}
+
+#[test]
+fn no_builtin_binop_expectation_for_non_builtin_types() {
+    check_no_mismatches(
+        r#"
+//- minicore: default, eq
+struct S;
+impl Default for S { fn default() -> Self { S } }
+impl Default for i32 { fn default() -> Self { 0 } }
+impl PartialEq<S> for i32 { fn eq(&self, _: &S) -> bool { true } }
+impl PartialEq<i32> for i32 { fn eq(&self, _: &S) -> bool { true } }
+
+fn take_s(_: S) {}
+fn test() {
+    let s = Default::default();
+    let _eq = 0 == s;
+    take_s(s);
+}
+"#,
+    )
+}
+
+#[test]
 fn array_length() {
     check_infer(
         r#"
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
index 778a6b82047..3ab85c68f5b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs
@@ -7,9 +7,11 @@ use chalk_recursive::Cache;
 use chalk_solve::{logging_db::LoggingRustIrDatabase, Solver};
 
 use base_db::CrateId;
-use hir_def::{lang_item::LangItemTarget, TraitId};
+use hir_def::{
+    lang_item::{LangItem, LangItemTarget},
+    TraitId,
+};
 use stdx::panic_context;
-use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase, infer::unify::InferenceTable, AliasEq, AliasTy, Canonical, DomainGoal, Goal,
@@ -177,18 +179,18 @@ pub enum FnTrait {
 }
 
 impl FnTrait {
-    const fn lang_item_name(self) -> &'static str {
+    const fn lang_item(self) -> LangItem {
         match self {
-            FnTrait::FnOnce => "fn_once",
-            FnTrait::FnMut => "fn_mut",
-            FnTrait::Fn => "fn",
+            FnTrait::FnOnce => LangItem::FnOnce,
+            FnTrait::FnMut => LangItem::FnMut,
+            FnTrait::Fn => LangItem::Fn,
         }
     }
 
     pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> {
-        let target = db.lang_item(krate, SmolStr::new_inline(self.lang_item_name()))?;
+        let target = db.lang_item(krate, self.lang_item())?;
         match target {
-            LangItemTarget::TraitId(t) => Some(t),
+            LangItemTarget::Trait(t) => Some(t),
             _ => None,
         }
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 9893566bd54..70d2d5efa6c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -11,39 +11,100 @@ use hir_def::{
         GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
         WherePredicateTypeTarget,
     },
-    intern::Interned,
+    lang_item::LangItem,
     resolver::{HasResolver, TypeNs},
     type_ref::{TraitBoundModifier, TypeRef},
     ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
     TypeOrConstParamId, TypeParamId,
 };
 use hir_expand::name::Name;
+use intern::Interned;
 use itertools::Either;
 use rustc_hash::FxHashSet;
 use smallvec::{smallvec, SmallVec};
-use syntax::SmolStr;
 
 use crate::{
     db::HirDatabase, ChalkTraitId, Interner, Substitution, TraitRef, TraitRefExt, WhereClause,
 };
 
-pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
-    [
-        db.lang_item(krate, SmolStr::new_inline("fn")),
-        db.lang_item(krate, SmolStr::new_inline("fn_mut")),
-        db.lang_item(krate, SmolStr::new_inline("fn_once")),
-    ]
-    .into_iter()
-    .flatten()
-    .flat_map(|it| it.as_trait())
+pub(crate) fn fn_traits(
+    db: &dyn DefDatabase,
+    krate: CrateId,
+) -> impl Iterator<Item = TraitId> + '_ {
+    [LangItem::Fn, LangItem::FnMut, LangItem::FnOnce]
+        .into_iter()
+        .filter_map(move |lang| db.lang_item(krate, lang))
+        .flat_map(|it| it.as_trait())
 }
 
-fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+/// Returns an iterator over the whole super trait hierarchy (including the
+/// trait itself).
+pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
+    // we need to take care a bit here to avoid infinite loops in case of cycles
+    // (i.e. if we have `trait A: B; trait B: A;`)
+
+    let mut result = smallvec![trait_];
+    let mut i = 0;
+    while let Some(&t) = result.get(i) {
+        // yeah this is quadratic, but trait hierarchies should be flat
+        // enough that this doesn't matter
+        direct_super_traits(db, t, |tt| {
+            if !result.contains(&tt) {
+                result.push(tt);
+            }
+        });
+        i += 1;
+    }
+    result
+}
+
+/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
+/// super traits. The original trait ref will be included. So the difference to
+/// `all_super_traits` is that we keep track of type parameters; for example if
+/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
+/// `Self: OtherTrait<i32>`.
+pub(super) fn all_super_trait_refs<T>(
+    db: &dyn HirDatabase,
+    trait_ref: TraitRef,
+    cb: impl FnMut(TraitRef) -> Option<T>,
+) -> Option<T> {
+    let seen = iter::once(trait_ref.trait_id).collect();
+    let mut stack = Vec::new();
+    stack.push(trait_ref);
+    SuperTraits { db, seen, stack }.find_map(cb)
+}
+
+struct SuperTraits<'a> {
+    db: &'a dyn HirDatabase,
+    stack: Vec<TraitRef>,
+    seen: FxHashSet<ChalkTraitId>,
+}
+
+impl<'a> SuperTraits<'a> {
+    fn elaborate(&mut self, trait_ref: &TraitRef) {
+        direct_super_trait_refs(self.db, trait_ref, |trait_ref| {
+            if !self.seen.contains(&trait_ref.trait_id) {
+                self.stack.push(trait_ref);
+            }
+        });
+    }
+}
+
+impl<'a> Iterator for SuperTraits<'a> {
+    type Item = TraitRef;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if let Some(next) = self.stack.pop() {
+            self.elaborate(&next);
+            Some(next)
+        } else {
+            None
+        }
+    }
+}
+
+fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId, cb: impl FnMut(TraitId)) {
     let resolver = trait_.resolver(db);
-    // returning the iterator directly doesn't easily work because of
-    // lifetime problems, but since there usually shouldn't be more than a
-    // few direct traits this should be fine (we could even use some kind of
-    // SmallVec if performance is a concern)
     let generic_params = db.generic_params(trait_.into());
     let trait_self = generic_params.find_trait_self_param();
     generic_params
@@ -73,18 +134,14 @@ fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[Trait
             Some(TypeNs::TraitId(t)) => Some(t),
             _ => None,
         })
-        .collect()
+        .for_each(cb);
 }
 
-fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
-    // returning the iterator directly doesn't easily work because of
-    // lifetime problems, but since there usually shouldn't be more than a
-    // few direct traits this should be fine (we could even use some kind of
-    // SmallVec if performance is a concern)
+fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef, cb: impl FnMut(TraitRef)) {
     let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
     let trait_self = match generic_params.find_trait_self_param() {
         Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
-        None => return Vec::new(),
+        None => return,
     };
     db.generic_predicates_for_param(trait_self.parent, trait_self, None)
         .iter()
@@ -100,64 +157,7 @@ fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<Tr
             })
         })
         .map(|pred| pred.substitute(Interner, &trait_ref.substitution))
-        .collect()
-}
-
-/// Returns an iterator over the whole super trait hierarchy (including the
-/// trait itself).
-pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
-    // we need to take care a bit here to avoid infinite loops in case of cycles
-    // (i.e. if we have `trait A: B; trait B: A;`)
-
-    let mut result = smallvec![trait_];
-    let mut i = 0;
-    while let Some(&t) = result.get(i) {
-        // yeah this is quadratic, but trait hierarchies should be flat
-        // enough that this doesn't matter
-        for tt in direct_super_traits(db, t) {
-            if !result.contains(&tt) {
-                result.push(tt);
-            }
-        }
-        i += 1;
-    }
-    result
-}
-
-/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
-/// super traits. The original trait ref will be included. So the difference to
-/// `all_super_traits` is that we keep track of type parameters; for example if
-/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
-/// `Self: OtherTrait<i32>`.
-pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> {
-    SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
-}
-
-pub(super) struct SuperTraits<'a> {
-    db: &'a dyn HirDatabase,
-    stack: Vec<TraitRef>,
-    seen: FxHashSet<ChalkTraitId>,
-}
-
-impl<'a> SuperTraits<'a> {
-    fn elaborate(&mut self, trait_ref: &TraitRef) {
-        let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
-        trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
-        self.stack.extend(trait_refs);
-    }
-}
-
-impl<'a> Iterator for SuperTraits<'a> {
-    type Item = TraitRef;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        if let Some(next) = self.stack.pop() {
-            self.elaborate(&next);
-            Some(next)
-        } else {
-            None
-        }
-    }
+        .for_each(cb);
 }
 
 pub(super) fn associated_type_by_name_including_super_traits(
@@ -165,7 +165,7 @@ pub(super) fn associated_type_by_name_including_super_traits(
     trait_ref: TraitRef,
     name: &Name,
 ) -> Option<(TraitRef, TypeAliasId)> {
-    all_super_trait_refs(db, trait_ref).find_map(|t| {
+    all_super_trait_refs(db, trait_ref, |t| {
         let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
         Some((t, assoc_type))
     })
@@ -238,15 +238,18 @@ impl Generics {
 
     /// (parent total, self param, type param list, const param list, impl trait)
     pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
-        let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
-
-        let self_params =
-            ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count();
-        let type_params =
-            ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count();
-        let impl_trait_params =
-            ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count();
-        let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count();
+        let mut self_params = 0;
+        let mut type_params = 0;
+        let mut impl_trait_params = 0;
+        let mut const_params = 0;
+        self.params.iter().for_each(|(_, data)| match data {
+            TypeOrConstParamData::TypeParamData(p) => match p.provenance {
+                TypeParamProvenance::TypeParamList => type_params += 1,
+                TypeParamProvenance::TraitSelf => self_params += 1,
+                TypeParamProvenance::ArgumentImplTrait => impl_trait_params += 1,
+            },
+            TypeOrConstParamData::ConstParamData(_) => const_params += 1,
+        });
 
         let parent_len = self.parent_generics().map_or(0, Generics::len);
         (parent_len, self_params, type_params, const_params, impl_trait_params)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs b/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
deleted file mode 100644
index c476894552e..00000000000
--- a/src/tools/rust-analyzer/crates/hir-ty/src/walk.rs
+++ /dev/null
@@ -1,147 +0,0 @@
-//! The `TypeWalk` trait (probably to be replaced by Chalk's `Fold` and
-//! `Visit`).
-
-use chalk_ir::interner::HasInterner;
-
-use crate::{
-    AliasEq, AliasTy, Binders, CallableSig, FnSubst, GenericArg, GenericArgData, Interner,
-    OpaqueTy, ProjectionTy, Substitution, TraitRef, Ty, TyKind, WhereClause,
-};
-
-/// This allows walking structures that contain types to do something with those
-/// types, similar to Chalk's `Fold` trait.
-pub trait TypeWalk {
-    fn walk(&self, f: &mut impl FnMut(&Ty));
-}
-
-impl TypeWalk for Ty {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        match self.kind(Interner) {
-            TyKind::Alias(AliasTy::Projection(p_ty)) => {
-                for t in p_ty.substitution.iter(Interner) {
-                    t.walk(f);
-                }
-            }
-            TyKind::Alias(AliasTy::Opaque(o_ty)) => {
-                for t in o_ty.substitution.iter(Interner) {
-                    t.walk(f);
-                }
-            }
-            TyKind::Dyn(dyn_ty) => {
-                for p in dyn_ty.bounds.skip_binders().interned().iter() {
-                    p.walk(f);
-                }
-            }
-            TyKind::Slice(ty)
-            | TyKind::Array(ty, _)
-            | TyKind::Ref(_, _, ty)
-            | TyKind::Raw(_, ty) => {
-                ty.walk(f);
-            }
-            TyKind::Function(fn_pointer) => {
-                fn_pointer.substitution.0.walk(f);
-            }
-            TyKind::Adt(_, substs)
-            | TyKind::FnDef(_, substs)
-            | TyKind::Tuple(_, substs)
-            | TyKind::OpaqueType(_, substs)
-            | TyKind::AssociatedType(_, substs)
-            | TyKind::Closure(.., substs) => {
-                substs.walk(f);
-            }
-            _ => {}
-        }
-        f(self);
-    }
-}
-
-impl<T: TypeWalk> TypeWalk for Vec<T> {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        for t in self {
-            t.walk(f);
-        }
-    }
-}
-
-impl TypeWalk for OpaqueTy {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.substitution.walk(f);
-    }
-}
-
-impl TypeWalk for ProjectionTy {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.substitution.walk(f);
-    }
-}
-
-impl TypeWalk for AliasTy {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        match self {
-            AliasTy::Projection(it) => it.walk(f),
-            AliasTy::Opaque(it) => it.walk(f),
-        }
-    }
-}
-
-impl TypeWalk for GenericArg {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        if let GenericArgData::Ty(ty) = &self.interned() {
-            ty.walk(f);
-        }
-    }
-}
-
-impl TypeWalk for Substitution {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        for t in self.iter(Interner) {
-            t.walk(f);
-        }
-    }
-}
-
-impl<T: TypeWalk + HasInterner<Interner = Interner>> TypeWalk for Binders<T> {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.skip_binders().walk(f);
-    }
-}
-
-impl TypeWalk for TraitRef {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.substitution.walk(f);
-    }
-}
-
-impl TypeWalk for WhereClause {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        match self {
-            WhereClause::Implemented(trait_ref) => trait_ref.walk(f),
-            WhereClause::AliasEq(alias_eq) => alias_eq.walk(f),
-            _ => {}
-        }
-    }
-}
-
-impl TypeWalk for CallableSig {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        for t in self.params_and_return.iter() {
-            t.walk(f);
-        }
-    }
-}
-
-impl TypeWalk for AliasEq {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.ty.walk(f);
-        match &self.alias {
-            AliasTy::Projection(projection_ty) => projection_ty.walk(f),
-            AliasTy::Opaque(opaque) => opaque.walk(f),
-        }
-    }
-}
-
-impl TypeWalk for FnSubst<Interner> {
-    fn walk(&self, f: &mut impl FnMut(&Ty)) {
-        self.0.walk(f)
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml
index f780e3f53c8..32cde8a7732 100644
--- a/src/tools/rust-analyzer/crates/hir/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml
@@ -2,9 +2,11 @@
 name = "hir"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -15,14 +17,15 @@ either = "1.7.0"
 arrayvec = "0.7.2"
 itertools = "0.10.5"
 smallvec = "1.10.0"
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-hir-expand = { path = "../hir-expand", version = "0.0.0" }
-hir-def = { path = "../hir-def", version = "0.0.0" }
-hir-ty = { path = "../hir-ty", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
+# local deps
+base-db.workspace = true
+cfg.workspace = true
+hir-def.workspace = true
+hir-expand.workspace = true
+hir-ty.workspace = true
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+tt.workspace = true
diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs
index 5a4b2f33449..0d19420127f 100644
--- a/src/tools/rust-analyzer/crates/hir/src/display.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/display.rs
@@ -4,6 +4,7 @@ use hir_def::{
     generics::{
         TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
     },
+    lang_item::LangItem,
     type_ref::{TypeBound, TypeRef},
     AdtId, GenericDefId,
 };
@@ -14,7 +15,6 @@ use hir_ty::{
     },
     Interner, TraitRefExt, WhereClause,
 };
-use syntax::SmolStr;
 
 use crate::{
     Adt, Const, ConstParam, Enum, Field, Function, GenericParam, HasCrate, HasVisibility,
@@ -261,8 +261,7 @@ impl HirDisplay for TypeParam {
             bounds.iter().cloned().map(|b| b.substitute(Interner, &substs)).collect();
         let krate = self.id.parent().krate(f.db).id;
         let sized_trait =
-            f.db.lang_item(krate, SmolStr::new_inline("sized"))
-                .and_then(|lang_item| lang_item.as_trait());
+            f.db.lang_item(krate, LangItem::Sized).and_then(|lang_item| lang_item.as_trait());
         let has_only_sized_bound = predicates.iter().all(move |pred| match pred.skip_binders() {
             WhereClause::Implemented(it) => Some(it.hir_trait_id()) == sized_trait,
             _ => false,
@@ -270,7 +269,7 @@ impl HirDisplay for TypeParam {
         let has_only_not_sized_bound = predicates.is_empty();
         if !has_only_sized_bound || has_only_not_sized_bound {
             let default_sized = SizedByDefault::Sized { anchor: krate };
-            write_bounds_like_dyn_trait_with_prefix(":", &predicates, default_sized, f)?;
+            write_bounds_like_dyn_trait_with_prefix(f, ":", &predicates, default_sized)?;
         }
         Ok(())
     }
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 08fd4453dfc..2cb4ed2c335 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -44,12 +44,13 @@ use hir_def::{
     expr::{BindingAnnotation, ExprOrPatId, LabelId, Pat, PatId},
     generics::{TypeOrConstParamData, TypeParamProvenance},
     item_tree::ItemTreeNode,
-    lang_item::LangItemTarget,
+    lang_item::{LangItem, LangItemTarget},
     layout::{Layout, LayoutError, ReprOptions},
     nameres::{self, diagnostics::DefDiagnostic},
     per_ns::PerNs,
     resolver::{HasResolver, Resolver},
     src::HasSource as _,
+    type_ref::ConstScalar,
     AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
     EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
     LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
@@ -65,8 +66,9 @@ use hir_ty::{
     primitive::UintTy,
     traits::FnTrait,
     AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
-    GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
-    TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, WhereClause,
+    ConcreteConst, ConstValue, GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar,
+    Substitution, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind,
+    WhereClause,
 };
 use itertools::Itertools;
 use nameres::diagnostics::DefDiagnosticKind;
@@ -107,7 +109,7 @@ pub use {
     cfg::{CfgAtom, CfgExpr, CfgOptions},
     hir_def::{
         adt::StructKind,
-        attr::{Attr, Attrs, AttrsWithOwner, Documentation},
+        attr::{Attrs, AttrsWithOwner, Documentation},
         builtin_attr::AttributeTemplate,
         find_path::PrefixKind,
         import_map,
@@ -122,11 +124,12 @@ pub use {
         ModuleDefId,
     },
     hir_expand::{
+        attrs::Attr,
         name::{known, Name},
         ExpandResult, HirFileId, InFile, MacroFile, Origin,
     },
     hir_ty::{
-        display::{HirDisplay, HirWrite},
+        display::{HirDisplay, HirDisplayError, HirWrite},
         PointerCast, Safety,
     },
 };
@@ -471,8 +474,8 @@ impl Module {
         let def_map = self.id.def_map(db.upcast());
         let children = def_map[self.id.local_id]
             .children
-            .iter()
-            .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+            .values()
+            .map(|module_id| Module { id: def_map.module_id(*module_id) })
             .collect::<Vec<_>>();
         children.into_iter()
     }
@@ -784,7 +787,7 @@ fn precise_macro_call_location(
             let token = (|| {
                 let derive_attr = node
                     .doc_comments_and_attrs()
-                    .nth(*derive_attr_index as usize)
+                    .nth(derive_attr_index.ast_index())
                     .and_then(Either::left)?;
                 let token_tree = derive_attr.meta()?.token_tree()?;
                 let group_by = token_tree
@@ -812,9 +815,11 @@ fn precise_macro_call_location(
             let node = ast_id.to_node(db.upcast());
             let attr = node
                 .doc_comments_and_attrs()
-                .nth((*invoc_attr_index) as usize)
+                .nth(invoc_attr_index.ast_index())
                 .and_then(Either::left)
-                .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}"));
+                .unwrap_or_else(|| {
+                    panic!("cannot find attribute #{}", invoc_attr_index.ast_index())
+                });
 
             (
                 ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
@@ -920,7 +925,7 @@ impl Struct {
     }
 
     pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprOptions> {
-        db.struct_data(self.id).repr.clone()
+        db.struct_data(self.id).repr
     }
 
     pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
@@ -1831,7 +1836,7 @@ pub struct Trait {
 
 impl Trait {
     pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
-        db.lang_item(krate.into(), name.to_smol_str())
+        db.lang_item(krate.into(), LangItem::from_name(name)?)
             .and_then(LangItemTarget::as_trait)
             .map(Into::into)
     }
@@ -2126,7 +2131,7 @@ pub enum AssocItem {
     Const(Const),
     TypeAlias(TypeAlias),
 }
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub enum AssocItemContainer {
     Trait(Trait),
     Impl(Impl),
@@ -2160,6 +2165,16 @@ impl AsAssocItem for ModuleDef {
         }
     }
 }
+impl AsAssocItem for DefWithBody {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        match self {
+            DefWithBody::Function(it) => it.as_assoc_item(db),
+            DefWithBody::Const(it) => it.as_assoc_item(db),
+            DefWithBody::Static(_) | DefWithBody::Variant(_) => None,
+        }
+    }
+}
+
 fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
 where
     ID: Lookup<Data = AssocItemLoc<AST>>,
@@ -2406,7 +2421,7 @@ impl Local {
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct DeriveHelper {
     pub(crate) derive: MacroId,
-    pub(crate) idx: usize,
+    pub(crate) idx: u32,
 }
 
 impl DeriveHelper {
@@ -2416,15 +2431,18 @@ impl DeriveHelper {
 
     pub fn name(&self, db: &dyn HirDatabase) -> Name {
         match self.derive {
-            MacroId::Macro2Id(it) => {
-                db.macro2_data(it).helpers.as_deref().and_then(|it| it.get(self.idx)).cloned()
-            }
+            MacroId::Macro2Id(it) => db
+                .macro2_data(it)
+                .helpers
+                .as_deref()
+                .and_then(|it| it.get(self.idx as usize))
+                .cloned(),
             MacroId::MacroRulesId(_) => None,
             MacroId::ProcMacroId(proc_macro) => db
                 .proc_macro_data(proc_macro)
                 .helpers
                 .as_deref()
-                .and_then(|it| it.get(self.idx))
+                .and_then(|it| it.get(self.idx as usize))
                 .cloned(),
         }
         .unwrap_or_else(|| Name::missing())
@@ -2435,7 +2453,7 @@ impl DeriveHelper {
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct BuiltinAttr {
     krate: Option<CrateId>,
-    idx: usize,
+    idx: u32,
 }
 
 impl BuiltinAttr {
@@ -2444,7 +2462,8 @@ impl BuiltinAttr {
         if let builtin @ Some(_) = Self::builtin(name) {
             return builtin;
         }
-        let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
+        let idx =
+            db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)? as u32;
         Some(BuiltinAttr { krate: Some(krate.id), idx })
     }
 
@@ -2452,21 +2471,21 @@ impl BuiltinAttr {
         hir_def::builtin_attr::INERT_ATTRIBUTES
             .iter()
             .position(|tool| tool.name == name)
-            .map(|idx| BuiltinAttr { krate: None, idx })
+            .map(|idx| BuiltinAttr { krate: None, idx: idx as u32 })
     }
 
     pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
         // FIXME: Return a `Name` here
         match self.krate {
-            Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
-            None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
+            Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx as usize].clone(),
+            None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx as usize].name),
         }
     }
 
     pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
         match self.krate {
             Some(_) => None,
-            None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
+            None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx as usize].template),
         }
     }
 }
@@ -2474,7 +2493,7 @@ impl BuiltinAttr {
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct ToolModule {
     krate: Option<CrateId>,
-    idx: usize,
+    idx: u32,
 }
 
 impl ToolModule {
@@ -2483,7 +2502,8 @@ impl ToolModule {
         if let builtin @ Some(_) = Self::builtin(name) {
             return builtin;
         }
-        let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?;
+        let idx =
+            db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)? as u32;
         Some(ToolModule { krate: Some(krate.id), idx })
     }
 
@@ -2491,14 +2511,14 @@ impl ToolModule {
         hir_def::builtin_attr::TOOL_MODULES
             .iter()
             .position(|&tool| tool == name)
-            .map(|idx| ToolModule { krate: None, idx })
+            .map(|idx| ToolModule { krate: None, idx: idx as u32 })
     }
 
     pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
         // FIXME: Return a `Name` here
         match self.krate {
-            Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(),
-            None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]),
+            Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx as usize].clone(),
+            None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx as usize]),
         }
     }
 }
@@ -2555,6 +2575,14 @@ impl GenericParam {
             GenericParam::LifetimeParam(it) => it.name(db),
         }
     }
+
+    pub fn parent(self) -> GenericDef {
+        match self {
+            GenericParam::TypeParam(it) => it.id.parent().into(),
+            GenericParam::ConstParam(it) => it.id.parent().into(),
+            GenericParam::LifetimeParam(it) => it.id.parent.into(),
+        }
+    }
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -2788,14 +2816,19 @@ impl Impl {
         all
     }
 
-    // FIXME: the return type is wrong. This should be a hir version of
-    // `TraitRef` (to account for parameters and qualifiers)
     pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
-        let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
-        let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
+        let trait_ref = db.impl_trait(self.id)?;
+        let id = trait_ref.skip_binders().hir_trait_id();
         Some(Trait { id })
     }
 
+    pub fn trait_ref(self, db: &dyn HirDatabase) -> Option<TraitRef> {
+        let substs = TyBuilder::placeholder_subst(db, self.id);
+        let trait_ref = db.impl_trait(self.id)?.substitute(Interner, &substs);
+        let resolver = self.id.resolver(db.upcast());
+        Some(TraitRef::new_with_resolver(db, &resolver, trait_ref))
+    }
+
     pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
         let resolver = self.id.resolver(db.upcast());
         let substs = TyBuilder::placeholder_subst(db, self.id);
@@ -2821,6 +2854,48 @@ impl Impl {
     }
 }
 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TraitRef {
+    env: Arc<TraitEnvironment>,
+    trait_ref: hir_ty::TraitRef,
+}
+
+impl TraitRef {
+    pub(crate) fn new_with_resolver(
+        db: &dyn HirDatabase,
+        resolver: &Resolver,
+        trait_ref: hir_ty::TraitRef,
+    ) -> TraitRef {
+        let env = resolver.generic_def().map_or_else(
+            || Arc::new(TraitEnvironment::empty(resolver.krate())),
+            |d| db.trait_environment(d),
+        );
+        TraitRef { env, trait_ref }
+    }
+
+    pub fn trait_(&self) -> Trait {
+        let id = self.trait_ref.hir_trait_id();
+        Trait { id }
+    }
+
+    pub fn self_ty(&self) -> Type {
+        let ty = self.trait_ref.self_type_parameter(Interner);
+        Type { env: self.env.clone(), ty }
+    }
+
+    /// Returns `idx`-th argument of this trait reference if it is a type argument. Note that the
+    /// first argument is the `Self` type.
+    pub fn get_type_argument(&self, idx: usize) -> Option<Type> {
+        self.trait_ref
+            .substitution
+            .as_slice(Interner)
+            .get(idx)
+            .and_then(|arg| arg.ty(Interner))
+            .cloned()
+            .map(|ty| Type { env: self.env.clone(), ty })
+    }
+}
+
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Type {
     env: Arc<TraitEnvironment>,
@@ -2957,7 +3032,7 @@ impl Type {
     /// This function is used in `.await` syntax completion.
     pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool {
         let trait_ = db
-            .lang_item(self.env.krate, SmolStr::new_inline("into_future"))
+            .lang_item(self.env.krate, LangItem::IntoFutureIntoFuture)
             .and_then(|it| {
                 let into_future_fn = it.as_function()?;
                 let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
@@ -2965,8 +3040,7 @@ impl Type {
                 Some(into_future_trait.id)
             })
             .or_else(|| {
-                let future_trait =
-                    db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?;
+                let future_trait = db.lang_item(self.env.krate, LangItem::Future)?;
                 future_trait.as_trait()
             });
 
@@ -3059,9 +3133,9 @@ impl Type {
     }
 
     pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
-        let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
+        let lang_item = db.lang_item(self.env.krate, LangItem::Copy);
         let copy_trait = match lang_item {
-            Some(LangItemTarget::TraitId(it)) => it,
+            Some(LangItemTarget::Trait(it)) => it,
             _ => return false,
         };
         self.impls_trait(db, copy_trait.into(), &[])
@@ -3088,15 +3162,15 @@ impl Type {
     }
 
     pub fn is_closure(&self) -> bool {
-        matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
+        matches!(self.ty.kind(Interner), TyKind::Closure { .. })
     }
 
     pub fn is_fn(&self) -> bool {
-        matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
+        matches!(self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
     }
 
     pub fn is_array(&self) -> bool {
-        matches!(&self.ty.kind(Interner), TyKind::Array(..))
+        matches!(self.ty.kind(Interner), TyKind::Array(..))
     }
 
     pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
@@ -3113,10 +3187,12 @@ impl Type {
     }
 
     pub fn is_raw_ptr(&self) -> bool {
-        matches!(&self.ty.kind(Interner), TyKind::Raw(..))
+        matches!(self.ty.kind(Interner), TyKind::Raw(..))
     }
 
     pub fn contains_unknown(&self) -> bool {
+        // FIXME: When we get rid of `ConstScalar::Unknown`, we can just look at precomputed
+        // `TypeFlags` in `TyData`.
         return go(&self.ty);
 
         fn go(ty: &Ty) -> bool {
@@ -3182,6 +3258,19 @@ impl Type {
         }
     }
 
+    pub fn as_array(&self, _db: &dyn HirDatabase) -> Option<(Type, usize)> {
+        if let TyKind::Array(ty, len) = &self.ty.kind(Interner) {
+            match len.data(Interner).value {
+                ConstValue::Concrete(ConcreteConst { interned: ConstScalar::UInt(len) }) => {
+                    Some((self.derived(ty.clone()), len as usize))
+                }
+                _ => None,
+            }
+        } else {
+            None
+        }
+    }
+
     pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
         self.autoderef_(db).map(move |ty| self.derived(ty))
     }
@@ -3418,10 +3507,9 @@ impl Type {
         Type { env: self.env.clone(), ty }
     }
 
+    /// Visits every type, including generic arguments, in this type. `cb` is called with type
+    /// itself first, and then with its generic arguments.
     pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
-        // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself.
-        // We need a different order here.
-
         fn walk_substs(
             db: &dyn HirDatabase,
             type_: &Type,
@@ -3534,6 +3622,14 @@ impl Type {
             _ => None,
         }
     }
+
+    /// Returns unique `GenericParam`s contained in this type.
+    pub fn generic_params(&self, db: &dyn HirDatabase) -> FxHashSet<GenericParam> {
+        hir_ty::collect_placeholders(&self.ty, db)
+            .into_iter()
+            .map(|id| TypeOrConstParam { id }.split(db).either_into())
+            .collect()
+    }
 }
 
 #[derive(Debug)]
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index e0d26103915..486b7ee62ed 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -1319,10 +1319,7 @@ impl<'db> SemanticsImpl<'db> {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node);
 
-        let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
-            Some(it) => it,
-            None => return None,
-        };
+        let container = self.with_ctx(|ctx| ctx.find_container(node))?;
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
@@ -1472,14 +1469,7 @@ impl<'db> SemanticsImpl<'db> {
     }
 
     fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool {
-        let item_or_variant = |ancestor: SyntaxNode| {
-            if ast::Item::can_cast(ancestor.kind()) {
-                ast::Item::cast(ancestor).map(Either::Left)
-            } else {
-                ast::Variant::cast(ancestor).map(Either::Right)
-            }
-        };
-        let Some(enclosing_item) = expr.syntax().ancestors().find_map(item_or_variant) else { return false };
+        let Some(enclosing_item) = expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) else { return false };
 
         let def = match &enclosing_item {
             Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true,
@@ -1589,7 +1579,7 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode {
     node.ancestors().last().unwrap()
 }
 
-/// `SemanticScope` encapsulates the notion of a scope (the set of visible
+/// `SemanticsScope` encapsulates the notion of a scope (the set of visible
 /// names) at a particular program point.
 ///
 /// It is a bit tricky, as scopes do not really exist inside the compiler.
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index fa45e3c12eb..2b5bfda1d43 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -87,7 +87,6 @@
 
 use base_db::FileId;
 use hir_def::{
-    attr::AttrId,
     child_by_source::ChildBySource,
     dyn_map::DynMap,
     expr::{LabelId, PatId},
@@ -96,7 +95,7 @@ use hir_def::{
     GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId, StructId,
     TraitId, TypeAliasId, TypeParamId, UnionId, VariantId,
 };
-use hir_expand::{name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use stdx::impl_from;
diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
index 059b80bcf13..3b39e9fa919 100644
--- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs
@@ -17,6 +17,7 @@ use hir_def::{
         Body, BodySourceMap,
     },
     expr::{ExprId, Pat, PatId},
+    lang_item::LangItem,
     macro_id_to_def_id,
     path::{ModPath, Path, PathKind},
     resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
@@ -37,7 +38,7 @@ use hir_ty::{
         record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
         UnsafeExpr,
     },
-    method_resolution::{self, lang_names_for_bin_op},
+    method_resolution::{self, lang_items_for_bin_op},
     Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext,
 };
 use itertools::Itertools;
@@ -294,12 +295,8 @@ impl SourceAnalyzer {
             }
         }
 
-        let future_trait = db
-            .lang_item(self.resolver.krate(), hir_expand::name![future_trait].to_smol_str())?
-            .as_trait()?;
-        let poll_fn = db
-            .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
-            .as_function()?;
+        let future_trait = db.lang_item(self.resolver.krate(), LangItem::Future)?.as_trait()?;
+        let poll_fn = db.lang_item(self.resolver.krate(), LangItem::FuturePoll)?.as_function()?;
         // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
         // doesn't have any generic parameters, so we skip building another subst for `poll()`.
         let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
@@ -311,14 +308,14 @@ impl SourceAnalyzer {
         db: &dyn HirDatabase,
         prefix_expr: &ast::PrefixExpr,
     ) -> Option<FunctionId> {
-        let lang_item_name = match prefix_expr.op_kind()? {
-            ast::UnaryOp::Deref => name![deref],
-            ast::UnaryOp::Not => name![not],
-            ast::UnaryOp::Neg => name![neg],
+        let (lang_item, fn_name) = match prefix_expr.op_kind()? {
+            ast::UnaryOp::Deref => (LangItem::Deref, name![deref]),
+            ast::UnaryOp::Not => (LangItem::Not, name![not]),
+            ast::UnaryOp::Neg => (LangItem::Neg, name![neg]),
         };
         let ty = self.ty_of_expr(db, &prefix_expr.expr()?)?;
 
-        let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
+        let (op_trait, op_fn) = self.lang_trait_fn(db, lang_item, &fn_name)?;
         // HACK: subst for all methods coincides with that for their trait because the methods
         // don't have any generic parameters, so we skip building another subst for the methods.
         let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
@@ -334,9 +331,7 @@ impl SourceAnalyzer {
         let base_ty = self.ty_of_expr(db, &index_expr.base()?)?;
         let index_ty = self.ty_of_expr(db, &index_expr.index()?)?;
 
-        let lang_item_name = name![index];
-
-        let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
+        let (op_trait, op_fn) = self.lang_trait_fn(db, LangItem::Index, &name![index])?;
         // HACK: subst for all methods coincides with that for their trait because the methods
         // don't have any generic parameters, so we skip building another subst for the methods.
         let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
@@ -355,8 +350,8 @@ impl SourceAnalyzer {
         let lhs = self.ty_of_expr(db, &binop_expr.lhs()?)?;
         let rhs = self.ty_of_expr(db, &binop_expr.rhs()?)?;
 
-        let (op_trait, op_fn) = lang_names_for_bin_op(op)
-            .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
+        let (op_trait, op_fn) = lang_items_for_bin_op(op)
+            .and_then(|(name, lang_item)| self.lang_trait_fn(db, lang_item, &name))?;
         // HACK: subst for `index()` coincides with that for `Index` because `index()` itself
         // doesn't have any generic parameters, so we skip building another subst for `index()`.
         let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
@@ -374,8 +369,7 @@ impl SourceAnalyzer {
     ) -> Option<FunctionId> {
         let ty = self.ty_of_expr(db, &try_expr.expr()?)?;
 
-        let op_fn =
-            db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
+        let op_fn = db.lang_item(self.resolver.krate(), LangItem::TryTraitBranch)?.as_function()?;
         let op_trait = match op_fn.lookup(db.upcast()).container {
             ItemContainerId::TraitId(id) => id,
             _ => return None,
@@ -504,7 +498,7 @@ impl SourceAnalyzer {
                         AssocItemId::ConstId(const_id) => {
                             self.resolve_impl_const_or_trait_def(db, const_id, subs).into()
                         }
-                        _ => assoc,
+                        assoc => assoc,
                     };
 
                     return Some(PathResolution::Def(AssocItem::from(assoc).into()));
@@ -517,7 +511,13 @@ impl SourceAnalyzer {
                 prefer_value_ns = true;
             } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
                 let pat_id = self.pat_id(&path_pat.into())?;
-                if let Some((assoc, _)) = infer.assoc_resolutions_for_pat(pat_id) {
+                if let Some((assoc, subs)) = infer.assoc_resolutions_for_pat(pat_id) {
+                    let assoc = match assoc {
+                        AssocItemId::ConstId(const_id) => {
+                            self.resolve_impl_const_or_trait_def(db, const_id, subs).into()
+                        }
+                        assoc => assoc,
+                    };
                     return Some(PathResolution::Def(AssocItem::from(assoc).into()));
                 }
                 if let Some(VariantId::EnumVariantId(variant)) =
@@ -628,7 +628,7 @@ impl SourceAnalyzer {
                                 {
                                     return Some(PathResolution::DeriveHelper(DeriveHelper {
                                         derive: *macro_id,
-                                        idx,
+                                        idx: idx as u32,
                                     }));
                                 }
                             }
@@ -815,10 +815,10 @@ impl SourceAnalyzer {
     fn lang_trait_fn(
         &self,
         db: &dyn HirDatabase,
-        lang_trait: &Name,
+        lang_trait: LangItem,
         method_name: &Name,
     ) -> Option<(TraitId, FunctionId)> {
-        let trait_id = db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?;
+        let trait_id = db.lang_item(self.resolver.krate(), lang_trait)?.as_trait()?;
         let fn_id = db.trait_data(trait_id).method_by_name(method_name)?;
         Some((trait_id, fn_id))
     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
index b9260473b12..3954abfdb7c 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-assists/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide-assists"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -16,17 +18,20 @@ itertools = "0.10.5"
 either = "1.7.0"
 smallvec = "1.10.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
-hir = { path = "../hir", version = "0.0.0" }
+# local deps
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
+profile.workspace = true
+ide-db.workspace = true
+hir.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
 expect-test = "1.4.0"
 
+# local deps
+test-utils.workspace = true
+sourcegen.workspace = true
+
 [features]
 in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
new file mode 100644
index 00000000000..2f4a263ee07
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_braces.rs
@@ -0,0 +1,155 @@
+use syntax::{
+    ast::{self, edit::AstNodeEdit, make},
+    AstNode,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: add_braces
+//
+// Adds braces to lambda and match arm expressions.
+//
+// ```
+// fn foo(n: i32) -> i32 {
+//     match n {
+//         1 =>$0 n + 1,
+//         _ => 0
+//     }
+// }
+// ```
+// ->
+// ```
+// fn foo(n: i32) -> i32 {
+//     match n {
+//         1 => {
+//             n + 1
+//         },
+//         _ => 0
+//     }
+// }
+// ```
+pub(crate) fn add_braces(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let (expr_type, expr) = get_replacement_node(ctx)?;
+
+    acc.add(
+        AssistId("add_braces", AssistKind::RefactorRewrite),
+        match expr_type {
+            ParentType::ClosureExpr => "Add braces to closure body",
+            ParentType::MatchArmExpr => "Add braces to arm expression",
+        },
+        expr.syntax().text_range(),
+        |builder| {
+            let block_expr = AstNodeEdit::indent(
+                &make::block_expr(None, Some(expr.clone())),
+                AstNodeEdit::indent_level(&expr),
+            );
+
+            builder.replace(expr.syntax().text_range(), block_expr.syntax().text());
+        },
+    )
+}
+
+enum ParentType {
+    MatchArmExpr,
+    ClosureExpr,
+}
+
+fn get_replacement_node(ctx: &AssistContext<'_>) -> Option<(ParentType, ast::Expr)> {
+    if let Some(match_arm) = ctx.find_node_at_offset::<ast::MatchArm>() {
+        let match_arm_expr = match_arm.expr()?;
+
+        if matches!(match_arm_expr, ast::Expr::BlockExpr(_)) {
+            return None;
+        }
+
+        return Some((ParentType::MatchArmExpr, match_arm_expr));
+    } else if let Some(closure_expr) = ctx.find_node_at_offset::<ast::ClosureExpr>() {
+        let body = closure_expr.body()?;
+
+        if matches!(body, ast::Expr::BlockExpr(_)) {
+            return None;
+        }
+
+        return Some((ParentType::ClosureExpr, body));
+    }
+
+    None
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn suggest_add_braces_for_closure() {
+        check_assist(
+            add_braces,
+            r#"
+fn foo() {
+    t(|n|$0 n + 100);
+}
+"#,
+            r#"
+fn foo() {
+    t(|n| {
+        n + 100
+    });
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn no_assist_for_closures_with_braces() {
+        check_assist_not_applicable(
+            add_braces,
+            r#"
+fn foo() {
+    t(|n|$0 { n + 100 });
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn suggest_add_braces_for_match() {
+        check_assist(
+            add_braces,
+            r#"
+fn foo() {
+    match n {
+        Some(n) $0=> 29,
+        _ => ()
+    };
+}
+"#,
+            r#"
+fn foo() {
+    match n {
+        Some(n) => {
+            29
+        },
+        _ => ()
+    };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn no_assist_for_match_with_braces() {
+        check_assist_not_applicable(
+            add_braces,
+            r#"
+fn foo() {
+    match n {
+        Some(n) $0=> { return 29; },
+        _ => ()
+    };
+}
+"#,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
index 161bcc5c8da..4e11b31deb0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_impl_members.rs
@@ -1,7 +1,5 @@
 use hir::HasSource;
-use ide_db::{
-    syntax_helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait,
-};
+use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into;
 use syntax::ast::{self, make, AstNode};
 
 use crate::{
@@ -107,16 +105,19 @@ fn add_missing_impl_members_inner(
 ) -> Option<()> {
     let _p = profile::span("add_missing_impl_members_inner");
     let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+    let impl_ = ctx.sema.to_def(&impl_def)?;
 
     if ctx.token_at_offset().all(|t| {
         t.parent_ancestors()
+            .take_while(|node| node != impl_def.syntax())
             .any(|s| ast::BlockExpr::can_cast(s.kind()) || ast::ParamList::can_cast(s.kind()))
     }) {
         return None;
     }
 
     let target_scope = ctx.sema.scope(impl_def.syntax())?;
-    let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+    let trait_ref = impl_.trait_ref(ctx.db())?;
+    let trait_ = trait_ref.trait_();
 
     let missing_items = filter_assoc_items(
         &ctx.sema,
@@ -155,7 +156,7 @@ fn add_missing_impl_members_inner(
                 let placeholder;
                 if let DefaultMethods::No = mode {
                     if let ast::AssocItem::Fn(func) = &first_new_item {
-                        if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
+                        if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() {
                             if let Some(m) =
                                 func.syntax().descendants().find_map(ast::MacroCall::cast)
                             {
@@ -180,13 +181,13 @@ fn add_missing_impl_members_inner(
 fn try_gen_trait_body(
     ctx: &AssistContext<'_>,
     func: &ast::Fn,
-    trait_: &hir::Trait,
+    trait_ref: hir::TraitRef,
     impl_def: &ast::Impl,
 ) -> Option<()> {
-    let trait_path = make::ext::ident_path(&trait_.name(ctx.db()).to_string());
+    let trait_path = make::ext::ident_path(&trait_ref.trait_().name(ctx.db()).to_string());
     let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
     let adt = hir_ty.as_adt()?.source(ctx.db())?;
-    gen_trait_fn_body(func, &trait_path, &adt.value)
+    gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref))
 }
 
 #[cfg(test)]
@@ -1353,6 +1354,50 @@ impl PartialEq for SomeStruct {
     }
 
     #[test]
+    fn test_partial_eq_body_when_types_semantically_match() {
+        check_assist(
+            add_missing_impl_members,
+            r#"
+//- minicore: eq
+struct S<T, U>(T, U);
+type Alias<T> = S<T, T>;
+impl<T> PartialEq<Alias<T>> for S<T, T> {$0}
+"#,
+            r#"
+struct S<T, U>(T, U);
+type Alias<T> = S<T, T>;
+impl<T> PartialEq<Alias<T>> for S<T, T> {
+    $0fn eq(&self, other: &Alias<T>) -> bool {
+        self.0 == other.0 && self.1 == other.1
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_partial_eq_body_when_types_dont_match() {
+        check_assist(
+            add_missing_impl_members,
+            r#"
+//- minicore: eq
+struct S<T, U>(T, U);
+type Alias<T> = S<T, T>;
+impl<T> PartialEq<Alias<T>> for S<T, i32> {$0}
+"#,
+            r#"
+struct S<T, U>(T, U);
+type Alias<T> = S<T, T>;
+impl<T> PartialEq<Alias<T>> for S<T, i32> {
+    fn eq(&self, other: &Alias<T>) -> bool {
+        ${0:todo!()}
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
     fn test_ignore_function_body() {
         check_assist_not_applicable(
             add_missing_default_members,
@@ -1442,4 +1487,35 @@ impl Trait for () {
 }"#,
         )
     }
+
+    #[test]
+    fn test_works_inside_function() {
+        check_assist(
+            add_missing_impl_members,
+            r#"
+trait Tr {
+    fn method();
+}
+fn main() {
+    struct S;
+    impl Tr for S {
+        $0
+    }
+}
+"#,
+            r#"
+trait Tr {
+    fn method();
+}
+fn main() {
+    struct S;
+    impl Tr for S {
+        fn method() {
+        ${0:todo!()}
+    }
+    }
+}
+"#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
index 8e4ac69ae6f..5d81e8cfeac 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs
@@ -140,6 +140,31 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
             })
             .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
         ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
+    } else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) {
+        let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate());
+        let variants = enum_def.variants(ctx.db());
+
+        if len.pow(variants.len() as u32) > 256 {
+            return None;
+        }
+
+        let variants_of_enums = vec![variants.clone(); len];
+
+        let missing_pats = variants_of_enums
+            .into_iter()
+            .multi_cartesian_product()
+            .inspect(|_| cov_mark::hit!(add_missing_match_arms_lazy_computation))
+            .map(|variants| {
+                let is_hidden = variants
+                    .iter()
+                    .any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
+                let patterns = variants.into_iter().filter_map(|variant| {
+                    build_pat(ctx.db(), module, variant.clone(), ctx.config.prefer_no_std)
+                });
+                (ast::Pat::from(make::slice_pat(patterns)), is_hidden)
+            })
+            .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
+        ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive)
     } else {
         return None;
     };
@@ -266,9 +291,13 @@ fn is_variant_missing(existing_pats: &[Pat], var: &Pat) -> bool {
 fn does_pat_match_variant(pat: &Pat, var: &Pat) -> bool {
     match (pat, var) {
         (Pat::WildcardPat(_), _) => true,
+        (Pat::SlicePat(spat), Pat::SlicePat(svar)) => {
+            spat.pats().zip(svar.pats()).all(|(p, v)| does_pat_match_variant(&p, &v))
+        }
         (Pat::TuplePat(tpat), Pat::TuplePat(tvar)) => {
             tpat.fields().zip(tvar.fields()).all(|(p, v)| does_pat_match_variant(&p, &v))
         }
+        (Pat::OrPat(opat), _) => opat.pats().any(|p| does_pat_match_variant(&p, var)),
         _ => utils::does_pat_match_variant(pat, var),
     }
 }
@@ -279,7 +308,7 @@ enum ExtendedEnum {
     Enum(hir::Enum),
 }
 
-#[derive(Eq, PartialEq, Clone, Copy)]
+#[derive(Eq, PartialEq, Clone, Copy, Debug)]
 enum ExtendedVariant {
     True,
     False,
@@ -339,15 +368,30 @@ fn resolve_tuple_of_enum_def(
         .tuple_fields(sema.db)
         .iter()
         .map(|ty| {
-            ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
-                Some(Adt::Enum(e)) => Some(lift_enum(e)),
-                // For now we only handle expansion for a tuple of enums. Here
-                // we map non-enum items to None and rely on `collect` to
-                // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
-                _ => ty.is_bool().then_some(ExtendedEnum::Bool),
+            ty.autoderef(sema.db).find_map(|ty| {
+                match ty.as_adt() {
+                    Some(Adt::Enum(e)) => Some(lift_enum(e)),
+                    // For now we only handle expansion for a tuple of enums. Here
+                    // we map non-enum items to None and rely on `collect` to
+                    // convert Vec<Option<hir::Enum>> into Option<Vec<hir::Enum>>.
+                    _ => ty.is_bool().then_some(ExtendedEnum::Bool),
+                }
             })
         })
-        .collect()
+        .collect::<Option<Vec<ExtendedEnum>>>()
+        .and_then(|list| if list.is_empty() { None } else { Some(list) })
+}
+
+fn resolve_array_of_enum_def(
+    sema: &Semantics<'_, RootDatabase>,
+    expr: &ast::Expr,
+) -> Option<(ExtendedEnum, usize)> {
+    sema.type_of_expr(expr)?.adjusted().as_array(sema.db).and_then(|(ty, len)| {
+        ty.autoderef(sema.db).find_map(|ty| match ty.as_adt() {
+            Some(Adt::Enum(e)) => Some((lift_enum(e), len)),
+            _ => ty.is_bool().then_some((ExtendedEnum::Bool, len)),
+        })
+    })
 }
 
 fn build_pat(
@@ -376,7 +420,6 @@ fn build_pat(
                 }
                 ast::StructKind::Unit => make::path_pat(path),
             };
-
             Some(pat)
         }
         ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
@@ -526,6 +569,19 @@ fn foo(a: bool) {
             r#"
 fn foo(a: bool) {
     match (a, a)$0 {
+        (true | false, true) => {}
+        (true, false) => {}
+        (false, false) => {}
+    }
+}
+"#,
+        );
+
+        check_assist_not_applicable(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match (a, a)$0 {
         (true, true) => {}
         (true, false) => {}
         (false, true) => {}
@@ -560,12 +616,112 @@ fn foo(a: bool) {
     }
 
     #[test]
+    fn fill_boolean_array() {
+        check_assist(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match [a]$0 {
+    }
+}
+"#,
+            r#"
+fn foo(a: bool) {
+    match [a] {
+        $0[true] => todo!(),
+        [false] => todo!(),
+    }
+}
+"#,
+        );
+
+        check_assist(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match [a,]$0 {
+    }
+}
+"#,
+            r#"
+fn foo(a: bool) {
+    match [a,] {
+        $0[true] => todo!(),
+        [false] => todo!(),
+    }
+}
+"#,
+        );
+
+        check_assist(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match [a, a]$0 {
+        [true, true] => todo!(),
+    }
+}
+"#,
+            r#"
+fn foo(a: bool) {
+    match [a, a] {
+        [true, true] => todo!(),
+        $0[true, false] => todo!(),
+        [false, true] => todo!(),
+        [false, false] => todo!(),
+    }
+}
+"#,
+        );
+
+        check_assist(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match [a, a]$0 {
+    }
+}
+"#,
+            r#"
+fn foo(a: bool) {
+    match [a, a] {
+        $0[true, true] => todo!(),
+        [true, false] => todo!(),
+        [false, true] => todo!(),
+        [false, false] => todo!(),
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
     fn partial_fill_boolean_tuple() {
         check_assist(
             add_missing_match_arms,
             r#"
 fn foo(a: bool) {
     match (a, a)$0 {
+        (true | false, true) => {}
+    }
+}
+"#,
+            r#"
+fn foo(a: bool) {
+    match (a, a) {
+        (true | false, true) => {}
+        $0(true, false) => todo!(),
+        (false, false) => todo!(),
+    }
+}
+"#,
+        );
+
+        check_assist(
+            add_missing_match_arms,
+            r#"
+fn foo(a: bool) {
+    match (a, a)$0 {
         (false, true) => {}
     }
 }
@@ -882,6 +1038,33 @@ fn main() {
 }
 "#,
         );
+
+        check_assist(
+            add_missing_match_arms,
+            r#"
+enum E { A, B, C }
+fn main() {
+    use E::*;
+    match (A, B, C)$0 {
+        (A | B , A, A | B | C) => (),
+        (A | B | C , B | C, A | B | C) => (),
+    }
+}
+"#,
+            r#"
+enum E { A, B, C }
+fn main() {
+    use E::*;
+    match (A, B, C) {
+        (A | B , A, A | B | C) => (),
+        (A | B | C , B | C, A | B | C) => (),
+        $0(C, A, A) => todo!(),
+        (C, A, B) => todo!(),
+        (C, A, C) => todo!(),
+    }
+}
+"#,
+        )
     }
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
index 312cb65abd2..1acd5ee9728 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_comment_block.rs
@@ -107,7 +107,7 @@ fn line_to_block(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
 /// The line -> block assist can  be invoked from anywhere within a sequence of line comments.
 /// relevant_line_comments crawls backwards and forwards finding the complete sequence of comments that will
 /// be joined.
-fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
+pub(crate) fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
     // The prefix identifies the kind of comment we're dealing with
     let prefix = comment.prefix();
     let same_prefix = |c: &ast::Comment| c.prefix() == prefix;
@@ -159,7 +159,7 @@ fn relevant_line_comments(comment: &ast::Comment) -> Vec<Comment> {
 //              */
 //
 // But since such comments aren't idiomatic we're okay with this.
-fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
+pub(crate) fn line_comment_text(indentation: IndentLevel, comm: ast::Comment) -> String {
     let contents_without_prefix = comm.text().strip_prefix(comm.prefix()).unwrap();
     let contents = contents_without_prefix.strip_prefix(' ').unwrap_or(contents_without_prefix);
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
index 5bf04a3ad37..65c2479e9f2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs
@@ -30,24 +30,23 @@ use crate::{
 // ```
 pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let let_stmt: ast::LetStmt = ctx.find_node_at_offset()?;
-    let binding = find_binding(let_stmt.pat()?)?;
+    let binding = let_stmt.pat()?;
 
-    let initializer = match let_stmt.initializer() {
-        Some(ast::Expr::MatchExpr(it)) => it,
-        _ => return None,
-    };
+    let Some(ast::Expr::MatchExpr(initializer)) = let_stmt.initializer() else { return None };
     let initializer_expr = initializer.expr()?;
 
-    let (extracting_arm, diverging_arm) = match find_arms(ctx, &initializer) {
-        Some(it) => it,
-        None => return None,
-    };
+    let Some((extracting_arm, diverging_arm)) = find_arms(ctx, &initializer) else { return None };
     if extracting_arm.guard().is_some() {
         cov_mark::hit!(extracting_arm_has_guard);
         return None;
     }
 
-    let diverging_arm_expr = diverging_arm.expr()?;
+    let diverging_arm_expr = match diverging_arm.expr()? {
+        ast::Expr::BlockExpr(block) if block.modifier().is_none() && block.label().is_none() => {
+            block.to_string()
+        }
+        other => format!("{{ {other} }}"),
+    };
     let extracting_arm_pat = extracting_arm.pat()?;
     let extracted_variable = find_extracted_variable(ctx, &extracting_arm)?;
 
@@ -56,24 +55,16 @@ pub(crate) fn convert_match_to_let_else(acc: &mut Assists, ctx: &AssistContext<'
         "Convert match to let-else",
         let_stmt.syntax().text_range(),
         |builder| {
-            let extracting_arm_pat = rename_variable(&extracting_arm_pat, extracted_variable, binding);
+            let extracting_arm_pat =
+                rename_variable(&extracting_arm_pat, extracted_variable, binding);
             builder.replace(
                 let_stmt.syntax().text_range(),
-                format!("let {extracting_arm_pat} = {initializer_expr} else {{ {diverging_arm_expr} }};")
+                format!("let {extracting_arm_pat} = {initializer_expr} else {diverging_arm_expr};"),
             )
         },
     )
 }
 
-// Given a pattern, find the name introduced to the surrounding scope.
-fn find_binding(pat: ast::Pat) -> Option<ast::IdentPat> {
-    if let ast::Pat::IdentPat(ident) = pat {
-        Some(ident)
-    } else {
-        None
-    }
-}
-
 // Given a match expression, find extracting and diverging arms.
 fn find_arms(
     ctx: &AssistContext<'_>,
@@ -87,7 +78,7 @@ fn find_arms(
     let mut extracting = None;
     let mut diverging = None;
     for arm in arms {
-        if ctx.sema.type_of_expr(&arm.expr().unwrap()).unwrap().original().is_never() {
+        if ctx.sema.type_of_expr(&arm.expr()?)?.original().is_never() {
             diverging = Some(arm);
         } else {
             extracting = Some(arm);
@@ -124,7 +115,7 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti
 }
 
 // Rename `extracted` with `binding` in `pat`.
-fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::IdentPat) -> SyntaxNode {
+fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::Pat) -> SyntaxNode {
     let syntax = pat.syntax().clone_for_update();
     let extracted_syntax = syntax.covering_element(extracted.syntax().text_range());
 
@@ -136,7 +127,7 @@ fn rename_variable(pat: &ast::Pat, extracted: ast::Name, binding: ast::IdentPat)
         if let Some(name_ref) = record_pat_field.field_name() {
             ted::replace(
                 record_pat_field.syntax(),
-                ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding.into())
+                ast::make::record_pat_field(ast::make::name_ref(&name_ref.text()), binding)
                     .syntax()
                     .clone_for_update(),
             );
@@ -410,4 +401,52 @@ fn foo(opt: Option<i32>) -> Option<i32> {
     "#,
         );
     }
+
+    #[test]
+    fn complex_pattern() {
+        check_assist(
+            convert_match_to_let_else,
+            r#"
+//- minicore: option
+fn f() {
+    let (x, y) = $0match Some((0, 1)) {
+        Some(it) => it,
+        None => return,
+    };
+}
+"#,
+            r#"
+fn f() {
+    let Some((x, y)) = Some((0, 1)) else { return };
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn diverging_block() {
+        check_assist(
+            convert_match_to_let_else,
+            r#"
+//- minicore: option
+fn f() {
+    let x = $0match Some(()) {
+        Some(it) => it,
+        None => {//comment
+            println!("nope");
+            return
+        },
+    };
+}
+"#,
+            r#"
+fn f() {
+    let Some(x) = Some(()) else {//comment
+            println!("nope");
+            return
+        };
+}
+"#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
index 8d11e0bac94..9dc1da2461a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs
@@ -52,10 +52,7 @@ pub(crate) fn convert_named_struct_to_tuple_struct(
     acc: &mut Assists,
     ctx: &AssistContext<'_>,
 ) -> Option<()> {
-    let strukt = ctx
-        .find_node_at_offset::<ast::Struct>()
-        .map(Either::Left)
-        .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+    let strukt = ctx.find_node_at_offset::<Either<ast::Struct, ast::Variant>>()?;
     let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
     let record_fields = match field_list {
         ast::FieldList::RecordFieldList(it) => it,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
index b0383291e73..772e032fb29 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_struct_to_named_struct.rs
@@ -50,10 +50,7 @@ pub(crate) fn convert_tuple_struct_to_named_struct(
     acc: &mut Assists,
     ctx: &AssistContext<'_>,
 ) -> Option<()> {
-    let strukt = ctx
-        .find_node_at_offset::<ast::Struct>()
-        .map(Either::Left)
-        .or_else(|| ctx.find_node_at_offset::<ast::Variant>().map(Either::Right))?;
+    let strukt = ctx.find_node_at_offset::<Either<ast::Struct, ast::Variant>>()?;
     let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?;
     let tuple_fields = match field_list {
         ast::FieldList::TupleFieldList(it) => it,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
new file mode 100644
index 00000000000..226a5dd9fa8
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs
@@ -0,0 +1,312 @@
+use either::Either;
+use itertools::Itertools;
+use syntax::{
+    ast::{self, edit::IndentLevel, CommentPlacement, Whitespace},
+    AstToken, TextRange,
+};
+
+use crate::{
+    handlers::convert_comment_block::{line_comment_text, relevant_line_comments},
+    utils::required_hashes,
+    AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: desugar_doc_comment
+//
+// Desugars doc-comments to the attribute form.
+//
+// ```
+// /// Multi-line$0
+// /// comment
+// ```
+// ->
+// ```
+// #[doc = r"Multi-line
+// comment"]
+// ```
+pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+    let comment = ctx.find_token_at_offset::<ast::Comment>()?;
+    // Only allow doc comments
+    let Some(placement) = comment.kind().doc else { return None; };
+
+    // Only allow comments which are alone on their line
+    if let Some(prev) = comment.syntax().prev_token() {
+        if Whitespace::cast(prev).filter(|w| w.text().contains('\n')).is_none() {
+            return None;
+        }
+    }
+
+    let indentation = IndentLevel::from_token(comment.syntax()).to_string();
+
+    let (target, comments) = match comment.kind().shape {
+        ast::CommentShape::Block => (comment.syntax().text_range(), Either::Left(comment)),
+        ast::CommentShape::Line => {
+            // Find all the comments we'll be desugaring
+            let comments = relevant_line_comments(&comment);
+
+            // Establish the target of our edit based on the comments we found
+            (
+                TextRange::new(
+                    comments[0].syntax().text_range().start(),
+                    comments.last().unwrap().syntax().text_range().end(),
+                ),
+                Either::Right(comments),
+            )
+        }
+    };
+
+    acc.add(
+        AssistId("desugar_doc_comment", AssistKind::RefactorRewrite),
+        "Desugar doc-comment to attribute macro",
+        target,
+        |edit| {
+            let text = match comments {
+                Either::Left(comment) => {
+                    let text = comment.text();
+                    text[comment.prefix().len()..(text.len() - "*/".len())]
+                        .trim()
+                        .lines()
+                        .map(|l| l.strip_prefix(&indentation).unwrap_or(l))
+                        .join("\n")
+                }
+                Either::Right(comments) => {
+                    comments.into_iter().map(|c| line_comment_text(IndentLevel(0), c)).join("\n")
+                }
+            };
+
+            let hashes = "#".repeat(required_hashes(&text));
+
+            let prefix = match placement {
+                CommentPlacement::Inner => "#!",
+                CommentPlacement::Outer => "#",
+            };
+
+            let output = format!(r#"{prefix}[doc = r{hashes}"{text}"{hashes}]"#);
+
+            edit.replace(target, output)
+        },
+    )
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::tests::{check_assist, check_assist_not_applicable};
+
+    use super::*;
+
+    #[test]
+    fn single_line() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+/// line$0 comment
+fn main() {
+    foo();
+}
+"#,
+            r#"
+#[doc = r"line comment"]
+fn main() {
+    foo();
+}
+"#,
+        );
+        check_assist(
+            desugar_doc_comment,
+            r#"
+//! line$0 comment
+fn main() {
+    foo();
+}
+"#,
+            r#"
+#![doc = r"line comment"]
+fn main() {
+    foo();
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn single_line_indented() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+fn main() {
+    /// line$0 comment
+    struct Foo;
+}
+"#,
+            r#"
+fn main() {
+    #[doc = r"line comment"]
+    struct Foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn multiline() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+fn main() {
+    /// above
+    /// line$0 comment
+    ///
+    /// below
+    struct Foo;
+}
+"#,
+            r#"
+fn main() {
+    #[doc = r"above
+line comment
+
+below"]
+    struct Foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn end_of_line() {
+        check_assist_not_applicable(
+            desugar_doc_comment,
+            r#"
+fn main() { /// end-of-line$0 comment
+    struct Foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn single_line_different_kinds() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+fn main() {
+    //! different prefix
+    /// line$0 comment
+    /// below
+    struct Foo;
+}
+"#,
+            r#"
+fn main() {
+    //! different prefix
+    #[doc = r"line comment
+below"]
+    struct Foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn single_line_separate_chunks() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+/// different chunk
+
+/// line$0 comment
+/// below
+"#,
+            r#"
+/// different chunk
+
+#[doc = r"line comment
+below"]
+"#,
+        );
+    }
+
+    #[test]
+    fn block_comment() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+/**
+ hi$0 there
+*/
+"#,
+            r#"
+#[doc = r"hi there"]
+"#,
+        );
+    }
+
+    #[test]
+    fn inner_doc_block() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+/*!
+ hi$0 there
+*/
+"#,
+            r#"
+#![doc = r"hi there"]
+"#,
+        );
+    }
+
+    #[test]
+    fn block_indent() {
+        check_assist(
+            desugar_doc_comment,
+            r#"
+fn main() {
+    /*!
+    hi$0 there
+
+    ```
+      code_sample
+    ```
+    */
+}
+"#,
+            r#"
+fn main() {
+    #![doc = r"hi there
+
+```
+  code_sample
+```"]
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn end_of_line_block() {
+        check_assist_not_applicable(
+            desugar_doc_comment,
+            r#"
+fn main() {
+    foo(); /** end-of-line$0 comment */
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn regular_comment() {
+        check_assist_not_applicable(desugar_doc_comment, r#"// some$0 comment"#);
+        check_assist_not_applicable(desugar_doc_comment, r#"/* some$0 comment*/"#);
+    }
+
+    #[test]
+    fn quotes_and_escapes() {
+        check_assist(
+            desugar_doc_comment,
+            r###"/// some$0 "\ "## comment"###,
+            r####"#[doc = r###"some "\ "## comment"###]"####,
+        );
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
index 0505f5784f8..b310c2db9fa 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
@@ -1,9 +1,6 @@
 use either::Either;
 use ide_db::syntax_helpers::node_ext::walk_ty;
-use syntax::{
-    ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName},
-    match_ast,
-};
+use syntax::ast::{self, edit::IndentLevel, make, AstNode, HasGenericParams, HasName};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -31,15 +28,8 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
     let ty = ctx.find_node_at_range::<ast::Type>()?;
     let item = ty.syntax().ancestors().find_map(ast::Item::cast)?;
-    let assoc_owner = item.syntax().ancestors().nth(2).and_then(|it| {
-        match_ast! {
-            match it {
-                ast::Trait(tr) => Some(Either::Left(tr)),
-                ast::Impl(impl_) => Some(Either::Right(impl_)),
-                _ => None,
-            }
-        }
-    });
+    let assoc_owner =
+        item.syntax().ancestors().nth(2).and_then(Either::<ast::Trait, ast::Impl>::cast);
     let node = assoc_owner.as_ref().map_or_else(
         || item.syntax(),
         |impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax),
@@ -161,19 +151,17 @@ fn collect_used_generics<'gp>(
                     .and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
             ),
             ast::Type::ArrayType(ar) => {
-                if let Some(expr) = ar.expr() {
-                    if let ast::Expr::PathExpr(p) = expr {
-                        if let Some(path) = p.path() {
-                            if let Some(name_ref) = path.as_single_name_ref() {
-                                if let Some(param) = known_generics.iter().find(|gp| {
-                                    if let ast::GenericParam::ConstParam(cp) = gp {
-                                        cp.name().map_or(false, |n| n.text() == name_ref.text())
-                                    } else {
-                                        false
-                                    }
-                                }) {
-                                    generics.push(param);
+                if let Some(ast::Expr::PathExpr(p)) = ar.expr() {
+                    if let Some(path) = p.path() {
+                        if let Some(name_ref) = path.as_single_name_ref() {
+                            if let Some(param) = known_generics.iter().find(|gp| {
+                                if let ast::GenericParam::ConstParam(cp) = gp {
+                                    cp.name().map_or(false, |n| n.text() == name_ref.text())
+                                } else {
+                                    false
                                 }
+                            }) {
+                                generics.push(param);
                             }
                         }
                     }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
index 2d074a33e7f..860372941f7 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs
@@ -82,18 +82,18 @@ fn generate_trait_impl_text_from_impl(impl_: &ast::Impl, trait_text: &str, code:
     let generic_params = impl_.generic_param_list().map(|generic_params| {
         let lifetime_params =
             generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
-        let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+        let ty_or_const_params = generic_params.type_or_const_params().map(|param| {
             // remove defaults since they can't be specified in impls
             match param {
                 ast::TypeOrConstParam::Type(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::TypeParam(param))
+                    ast::GenericParam::TypeParam(param)
                 }
                 ast::TypeOrConstParam::Const(param) => {
                     let param = param.clone_for_update();
                     param.remove_default();
-                    Some(ast::GenericParam::ConstParam(param))
+                    ast::GenericParam::ConstParam(param)
                 }
             }
         });
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
index c8d0493d097..ed1b8f4e28d 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs
@@ -109,7 +109,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
                 let tail_expr_finished =
                     if is_async { make::expr_await(tail_expr) } else { tail_expr };
                 let body = make::block_expr([], Some(tail_expr_finished));
-                let f = make::fn_(vis, name, type_params, params, body, ret_type, is_async)
+                let f = make::fn_(vis, name, type_params, None, params, body, ret_type, is_async)
                     .indent(ast::edit::IndentLevel(1))
                     .clone_for_update();
 
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
index da9b0cda5b5..45b27a63ce2 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs
@@ -1,8 +1,11 @@
-use hir::{Adt, HasSource, HirDisplay, Module, Semantics, TypeInfo};
+use hir::{
+    Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
+};
 use ide_db::{
     base_db::FileId,
     defs::{Definition, NameRefClass},
     famous_defs::FamousDefs,
+    path_transform::PathTransform,
     FxHashMap, FxHashSet, RootDatabase, SnippetCap,
 };
 use stdx::to_lower_snake_case;
@@ -10,14 +13,13 @@ use syntax::{
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
-        make, AstNode, CallExpr, HasArgList, HasModuleItem,
+        make, AstNode, CallExpr, HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
     },
     SyntaxKind, SyntaxNode, TextRange, TextSize,
 };
 
 use crate::{
-    utils::convert_reference_type,
-    utils::{find_struct_impl, render_snippet, Cursor},
+    utils::{convert_reference_type, find_struct_impl, render_snippet, Cursor},
     AssistContext, AssistId, AssistKind, Assists,
 };
 
@@ -107,7 +109,7 @@ fn fn_target_info(
     match path.qualifier() {
         Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
             Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
-                get_fn_target_info(ctx, &Some(module), call.clone())
+                get_fn_target_info(ctx, Some(module), call.clone())
             }
             Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
                 if let hir::Adt::Enum(_) = adt {
@@ -125,7 +127,7 @@ fn fn_target_info(
             }
             _ => None,
         },
-        _ => get_fn_target_info(ctx, &None, call.clone()),
+        _ => get_fn_target_info(ctx, None, call.clone()),
     }
 }
 
@@ -136,7 +138,8 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     }
 
     let fn_name = call.name_ref()?;
-    let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?;
+    let receiver_ty = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references();
+    let adt = receiver_ty.as_adt()?;
 
     let current_module = ctx.sema.scope(call.syntax())?.module();
     let target_module = adt.module(ctx.sema.db);
@@ -147,8 +150,14 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
     let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
 
-    let function_builder =
-        FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
+    let function_builder = FunctionBuilder::from_method_call(
+        ctx,
+        &call,
+        &fn_name,
+        receiver_ty,
+        target_module,
+        target,
+    )?;
     let text_range = call.syntax().text_range();
     let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
     let label = format!("Generate {} method", function_builder.fn_name);
@@ -179,6 +188,7 @@ fn add_func_to_accumulator(
         let function_template = function_builder.render(adt_name.is_some());
         let mut func = function_template.to_string(ctx.config.snippet_cap);
         if let Some(name) = adt_name {
+            // FIXME: adt may have generic params.
             func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}");
         }
         builder.edit_file(file);
@@ -238,7 +248,8 @@ impl FunctionTemplate {
 struct FunctionBuilder {
     target: GeneratedFunctionTarget,
     fn_name: ast::Name,
-    type_params: Option<ast::GenericParamList>,
+    generic_param_list: Option<ast::GenericParamList>,
+    where_clause: Option<ast::WhereClause>,
     params: ast::ParamList,
     ret_type: Option<ast::RetType>,
     should_focus_return_type: bool,
@@ -260,19 +271,32 @@ impl FunctionBuilder {
         let target_module =
             target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
         let fn_name = make::name(fn_name);
-        let (type_params, params) =
-            fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?;
+        let mut necessary_generic_params = FxHashSet::default();
+        let params = fn_args(
+            ctx,
+            target_module,
+            ast::CallableExpr::Call(call.clone()),
+            &mut necessary_generic_params,
+        )?;
 
         let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
         let is_async = await_expr.is_some();
 
-        let (ret_type, should_focus_return_type) =
-            make_return_type(ctx, &ast::Expr::CallExpr(call.clone()), target_module);
+        let (ret_type, should_focus_return_type) = make_return_type(
+            ctx,
+            &ast::Expr::CallExpr(call.clone()),
+            target_module,
+            &mut necessary_generic_params,
+        );
+
+        let (generic_param_list, where_clause) =
+            fn_generic_params(ctx, necessary_generic_params, &target)?;
 
         Some(Self {
             target,
             fn_name,
-            type_params,
+            generic_param_list,
+            where_clause,
             params,
             ret_type,
             should_focus_return_type,
@@ -285,25 +309,40 @@ impl FunctionBuilder {
         ctx: &AssistContext<'_>,
         call: &ast::MethodCallExpr,
         name: &ast::NameRef,
+        receiver_ty: Type,
         target_module: Module,
         target: GeneratedFunctionTarget,
     ) -> Option<Self> {
         let needs_pub =
             !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
         let fn_name = make::name(&name.text());
-        let (type_params, params) =
-            fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?;
+        let mut necessary_generic_params = FxHashSet::default();
+        necessary_generic_params.extend(receiver_ty.generic_params(ctx.db()));
+        let params = fn_args(
+            ctx,
+            target_module,
+            ast::CallableExpr::MethodCall(call.clone()),
+            &mut necessary_generic_params,
+        )?;
 
         let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
         let is_async = await_expr.is_some();
 
-        let (ret_type, should_focus_return_type) =
-            make_return_type(ctx, &ast::Expr::MethodCallExpr(call.clone()), target_module);
+        let (ret_type, should_focus_return_type) = make_return_type(
+            ctx,
+            &ast::Expr::MethodCallExpr(call.clone()),
+            target_module,
+            &mut necessary_generic_params,
+        );
+
+        let (generic_param_list, where_clause) =
+            fn_generic_params(ctx, necessary_generic_params, &target)?;
 
         Some(Self {
             target,
             fn_name,
-            type_params,
+            generic_param_list,
+            where_clause,
             params,
             ret_type,
             should_focus_return_type,
@@ -319,7 +358,8 @@ impl FunctionBuilder {
         let mut fn_def = make::fn_(
             visibility,
             self.fn_name,
-            self.type_params,
+            self.generic_param_list,
+            self.where_clause,
             self.params,
             fn_body,
             self.ret_type,
@@ -375,6 +415,7 @@ fn make_return_type(
     ctx: &AssistContext<'_>,
     call: &ast::Expr,
     target_module: Module,
+    necessary_generic_params: &mut FxHashSet<hir::GenericParam>,
 ) -> (Option<ast::RetType>, bool) {
     let (ret_ty, should_focus_return_type) = {
         match ctx.sema.type_of_expr(call).map(TypeInfo::original) {
@@ -382,6 +423,7 @@ fn make_return_type(
             None => (Some(make::ty_placeholder()), true),
             Some(ty) if ty.is_unit() => (None, false),
             Some(ty) => {
+                necessary_generic_params.extend(ty.generic_params(ctx.db()));
                 let rendered = ty.display_source_code(ctx.db(), target_module.into());
                 match rendered {
                     Ok(rendered) => (Some(make::ty(&rendered)), false),
@@ -396,16 +438,16 @@ fn make_return_type(
 
 fn get_fn_target_info(
     ctx: &AssistContext<'_>,
-    target_module: &Option<Module>,
+    target_module: Option<Module>,
     call: CallExpr,
 ) -> Option<TargetInfo> {
     let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?;
-    Some(TargetInfo::new(*target_module, None, target, file, insert_offset))
+    Some(TargetInfo::new(target_module, None, target, file, insert_offset))
 }
 
 fn get_fn_target(
     ctx: &AssistContext<'_>,
-    target_module: &Option<Module>,
+    target_module: Option<Module>,
     call: CallExpr,
 ) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> {
     let mut file = ctx.file_id();
@@ -473,37 +515,386 @@ impl GeneratedFunctionTarget {
             GeneratedFunctionTarget::InEmptyItemList(it) => it,
         }
     }
+
+    fn parent(&self) -> SyntaxNode {
+        match self {
+            GeneratedFunctionTarget::BehindItem(it) => it.parent().expect("item without parent"),
+            GeneratedFunctionTarget::InEmptyItemList(it) => it.clone(),
+        }
+    }
 }
 
-/// Computes the type variables and arguments required for the generated function
+/// Computes parameter list for the generated function.
 fn fn_args(
     ctx: &AssistContext<'_>,
     target_module: hir::Module,
     call: ast::CallableExpr,
-) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
+    necessary_generic_params: &mut FxHashSet<hir::GenericParam>,
+) -> Option<ast::ParamList> {
     let mut arg_names = Vec::new();
     let mut arg_types = Vec::new();
     for arg in call.arg_list()?.args() {
         arg_names.push(fn_arg_name(&ctx.sema, &arg));
-        arg_types.push(fn_arg_type(ctx, target_module, &arg));
+        arg_types.push(fn_arg_type(ctx, target_module, &arg, necessary_generic_params));
     }
     deduplicate_arg_names(&mut arg_names);
     let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| {
         make::param(make::ext::simple_ident_pat(make::name(&name)).into(), make::ty(&ty))
     });
 
-    Some((
-        None,
-        make::param_list(
-            match call {
-                ast::CallableExpr::Call(_) => None,
-                ast::CallableExpr::MethodCall(_) => Some(make::self_param()),
-            },
-            params,
-        ),
+    Some(make::param_list(
+        match call {
+            ast::CallableExpr::Call(_) => None,
+            ast::CallableExpr::MethodCall(_) => Some(make::self_param()),
+        },
+        params,
     ))
 }
 
+/// Gets parameter bounds and where predicates in scope and filters out irrelevant ones. Returns
+/// `None` when it fails to get scope information.
+///
+/// See comment on `filter_unnecessary_bounds()` for what bounds we consider relevant.
+///
+/// NOTE: Generic parameters returned from this function may cause name clash at `target`. We don't
+/// currently do anything about it because it's actually easy to resolve it after the assist: just
+/// use the Rename functionality.
+fn fn_generic_params(
+    ctx: &AssistContext<'_>,
+    necessary_params: FxHashSet<hir::GenericParam>,
+    target: &GeneratedFunctionTarget,
+) -> Option<(Option<ast::GenericParamList>, Option<ast::WhereClause>)> {
+    if necessary_params.is_empty() {
+        // Not really needed but fast path.
+        return Some((None, None));
+    }
+
+    // 1. Get generic parameters (with bounds) and where predicates in scope.
+    let (generic_params, where_preds) = params_and_where_preds_in_scope(ctx);
+
+    // 2. Extract type parameters included in each bound.
+    let mut generic_params = generic_params
+        .into_iter()
+        .filter_map(|it| compute_contained_params_in_generic_param(ctx, it))
+        .collect();
+    let mut where_preds = where_preds
+        .into_iter()
+        .filter_map(|it| compute_contained_params_in_where_pred(ctx, it))
+        .collect();
+
+    // 3. Filter out unnecessary bounds.
+    filter_unnecessary_bounds(&mut generic_params, &mut where_preds, necessary_params);
+    filter_bounds_in_scope(&mut generic_params, &mut where_preds, ctx, target);
+
+    let generic_params: Vec<_> =
+        generic_params.into_iter().map(|it| it.node.clone_for_update()).collect();
+    let where_preds: Vec<_> =
+        where_preds.into_iter().map(|it| it.node.clone_for_update()).collect();
+
+    // 4. Rewrite paths
+    if let Some(param) = generic_params.first() {
+        let source_scope = ctx.sema.scope(param.syntax())?;
+        let target_scope = ctx.sema.scope(&target.parent())?;
+        if source_scope.module() != target_scope.module() {
+            let transform = PathTransform::generic_transformation(&target_scope, &source_scope);
+            let generic_params = generic_params.iter().map(|it| it.syntax());
+            let where_preds = where_preds.iter().map(|it| it.syntax());
+            transform.apply_all(generic_params.chain(where_preds));
+        }
+    }
+
+    let generic_param_list = make::generic_param_list(generic_params);
+    let where_clause =
+        if where_preds.is_empty() { None } else { Some(make::where_clause(where_preds)) };
+
+    Some((Some(generic_param_list), where_clause))
+}
+
+fn params_and_where_preds_in_scope(
+    ctx: &AssistContext<'_>,
+) -> (Vec<ast::GenericParam>, Vec<ast::WherePred>) {
+    let Some(body) = containing_body(ctx) else { return Default::default(); };
+
+    let mut generic_params = Vec::new();
+    let mut where_clauses = Vec::new();
+
+    // There are two items where generic parameters currently in scope may be declared: the item
+    // the cursor is at, and its parent (if any).
+    //
+    // We handle parent first so that their generic parameters appear first in the generic
+    // parameter list of the function we're generating.
+    let db = ctx.db();
+    if let Some(parent) = body.as_assoc_item(db).map(|it| it.container(db)) {
+        match parent {
+            hir::AssocItemContainer::Impl(it) => {
+                let (params, clauses) = get_bounds_in_scope(ctx, it);
+                generic_params.extend(params);
+                where_clauses.extend(clauses);
+            }
+            hir::AssocItemContainer::Trait(it) => {
+                let (params, clauses) = get_bounds_in_scope(ctx, it);
+                generic_params.extend(params);
+                where_clauses.extend(clauses);
+            }
+        }
+    }
+
+    // Other defs with body may inherit generic parameters from its parent, but never have their
+    // own generic parameters.
+    if let hir::DefWithBody::Function(it) = body {
+        let (params, clauses) = get_bounds_in_scope(ctx, it);
+        generic_params.extend(params);
+        where_clauses.extend(clauses);
+    }
+
+    (generic_params, where_clauses)
+}
+
+fn containing_body(ctx: &AssistContext<'_>) -> Option<hir::DefWithBody> {
+    let item: ast::Item = ctx.find_node_at_offset()?;
+    let def = match item {
+        ast::Item::Fn(it) => ctx.sema.to_def(&it)?.into(),
+        ast::Item::Const(it) => ctx.sema.to_def(&it)?.into(),
+        ast::Item::Static(it) => ctx.sema.to_def(&it)?.into(),
+        _ => return None,
+    };
+    Some(def)
+}
+
+fn get_bounds_in_scope<D>(
+    ctx: &AssistContext<'_>,
+    def: D,
+) -> (impl Iterator<Item = ast::GenericParam>, impl Iterator<Item = ast::WherePred>)
+where
+    D: HasSource,
+    D::Ast: HasGenericParams,
+{
+    // This function should be only called with `Impl`, `Trait`, or `Function`, for which it's
+    // infallible to get source ast.
+    let node = ctx.sema.source(def).unwrap().value;
+    let generic_params = node.generic_param_list().into_iter().flat_map(|it| it.generic_params());
+    let where_clauses = node.where_clause().into_iter().flat_map(|it| it.predicates());
+    (generic_params, where_clauses)
+}
+
+#[derive(Debug)]
+struct ParamBoundWithParams {
+    node: ast::GenericParam,
+    /// Generic parameter `node` introduces.
+    ///
+    /// ```text
+    /// impl<T> S<T> {
+    ///     fn f<U: Trait<T>>() {}
+    ///          ^ this
+    /// }
+    /// ```
+    ///
+    /// `U` in this example.
+    self_ty_param: hir::GenericParam,
+    /// Generic parameters contained in the trait reference of this bound.
+    ///
+    /// ```text
+    /// impl<T> S<T> {
+    ///     fn f<U: Trait<T>>() {}
+    ///             ^^^^^^^^ params in this part
+    /// }
+    /// ```
+    ///
+    /// `T` in this example.
+    other_params: FxHashSet<hir::GenericParam>,
+}
+
+#[derive(Debug)]
+struct WherePredWithParams {
+    node: ast::WherePred,
+    /// Generic parameters contained in the "self type" of this where predicate.
+    ///
+    /// ```text
+    /// Struct<T, U>: Trait<T, Assoc = V>,
+    /// ^^^^^^^^^^^^ params in this part
+    /// ```
+    ///
+    /// `T` and `U` in this example.
+    self_ty_params: FxHashSet<hir::GenericParam>,
+    /// Generic parameters contained in the trait reference of this where predicate.
+    ///
+    /// ```text
+    /// Struct<T, U>: Trait<T, Assoc = V>,
+    ///               ^^^^^^^^^^^^^^^^^^^ params in this part
+    /// ```
+    ///
+    /// `T` and `V` in this example.
+    other_params: FxHashSet<hir::GenericParam>,
+}
+
+fn compute_contained_params_in_generic_param(
+    ctx: &AssistContext<'_>,
+    node: ast::GenericParam,
+) -> Option<ParamBoundWithParams> {
+    match &node {
+        ast::GenericParam::TypeParam(ty) => {
+            let self_ty_param = ctx.sema.to_def(ty)?.into();
+
+            let other_params = ty
+                .type_bound_list()
+                .into_iter()
+                .flat_map(|it| it.bounds())
+                .flat_map(|bound| bound.syntax().descendants())
+                .filter_map(|node| filter_generic_params(ctx, node))
+                .collect();
+
+            Some(ParamBoundWithParams { node, self_ty_param, other_params })
+        }
+        ast::GenericParam::ConstParam(ct) => {
+            let self_ty_param = ctx.sema.to_def(ct)?.into();
+            Some(ParamBoundWithParams { node, self_ty_param, other_params: FxHashSet::default() })
+        }
+        ast::GenericParam::LifetimeParam(_) => {
+            // FIXME: It might be a good idea to handle lifetime parameters too.
+            None
+        }
+    }
+}
+
+fn compute_contained_params_in_where_pred(
+    ctx: &AssistContext<'_>,
+    node: ast::WherePred,
+) -> Option<WherePredWithParams> {
+    let self_ty = node.ty()?;
+    let bound_list = node.type_bound_list()?;
+
+    let self_ty_params = self_ty
+        .syntax()
+        .descendants()
+        .filter_map(|node| filter_generic_params(ctx, node))
+        .collect();
+
+    let other_params = bound_list
+        .bounds()
+        .flat_map(|bound| bound.syntax().descendants())
+        .filter_map(|node| filter_generic_params(ctx, node))
+        .collect();
+
+    Some(WherePredWithParams { node, self_ty_params, other_params })
+}
+
+fn filter_generic_params(ctx: &AssistContext<'_>, node: SyntaxNode) -> Option<hir::GenericParam> {
+    let path = ast::Path::cast(node)?;
+    match ctx.sema.resolve_path(&path)? {
+        PathResolution::TypeParam(it) => Some(it.into()),
+        PathResolution::ConstParam(it) => Some(it.into()),
+        _ => None,
+    }
+}
+
+/// Filters out irrelevant bounds from `generic_params` and `where_preds`.
+///
+/// Say we have a trait bound `Struct<T>: Trait<U>`. Given `necessary_params`, when is it relevant
+/// and when not? Some observations:
+/// - When `necessary_params` contains `T`, it's likely that we want this bound, but now we have
+/// an extra param to consider: `U`.
+/// - On the other hand, when `necessary_params` contains `U` (but not `T`), then it's unlikely
+/// that we want this bound because it doesn't really constrain `U`.
+///
+/// (FIXME?: The latter clause might be overstating. We may want to include the bound if the self
+/// type does *not* include generic params at all - like `Option<i32>: From<U>`)
+///
+/// Can we make this a bit more formal? Let's define "dependency" between generic parameters and
+/// trait bounds:
+/// - A generic parameter `T` depends on a trait bound if `T` appears in the self type (i.e. left
+/// part) of the bound.
+/// - A trait bound depends on a generic parameter `T` if `T` appears in the bound.
+///
+/// Using the notion, what we want is all the bounds that params in `necessary_params`
+/// *transitively* depend on!
+///
+/// Now it's not hard to solve: we build a dependency graph and compute all reachable nodes from
+/// nodes that represent params in `necessary_params` by usual and boring DFS.
+///
+/// The time complexity is O(|generic_params| + |where_preds| + |necessary_params|).
+fn filter_unnecessary_bounds(
+    generic_params: &mut Vec<ParamBoundWithParams>,
+    where_preds: &mut Vec<WherePredWithParams>,
+    necessary_params: FxHashSet<hir::GenericParam>,
+) {
+    // All `self_ty_param` should be unique as they were collected from `ast::GenericParamList`s.
+    let param_map: FxHashMap<hir::GenericParam, usize> =
+        generic_params.iter().map(|it| it.self_ty_param).zip(0..).collect();
+    let param_count = param_map.len();
+    let generic_params_upper_bound = param_count + generic_params.len();
+    let node_count = generic_params_upper_bound + where_preds.len();
+
+    // | node index range                        | what the node represents |
+    // |-----------------------------------------|--------------------------|
+    // | 0..param_count                          | generic parameter        |
+    // | param_count..generic_params_upper_bound | `ast::GenericParam`      |
+    // | generic_params_upper_bound..node_count  | `ast::WherePred`         |
+    let mut graph = Graph::new(node_count);
+    for (pred, pred_idx) in generic_params.iter().zip(param_count..) {
+        let param_idx = param_map[&pred.self_ty_param];
+        graph.add_edge(param_idx, pred_idx);
+        graph.add_edge(pred_idx, param_idx);
+
+        for param in &pred.other_params {
+            let param_idx = param_map[param];
+            graph.add_edge(pred_idx, param_idx);
+        }
+    }
+    for (pred, pred_idx) in where_preds.iter().zip(generic_params_upper_bound..) {
+        for param in &pred.self_ty_params {
+            let param_idx = param_map[param];
+            graph.add_edge(param_idx, pred_idx);
+            graph.add_edge(pred_idx, param_idx);
+        }
+        for param in &pred.other_params {
+            let param_idx = param_map[param];
+            graph.add_edge(pred_idx, param_idx);
+        }
+    }
+
+    let starting_nodes = necessary_params.iter().map(|param| param_map[param]);
+    let reachable = graph.compute_reachable_nodes(starting_nodes);
+
+    // Not pretty, but effective. If only there were `Vec::retain_index()`...
+    let mut idx = param_count;
+    generic_params.retain(|_| {
+        idx += 1;
+        reachable[idx - 1]
+    });
+    stdx::always!(idx == generic_params_upper_bound, "inconsistent index");
+    where_preds.retain(|_| {
+        idx += 1;
+        reachable[idx - 1]
+    });
+}
+
+/// Filters out bounds from impl if we're generating the function into the same impl we're
+/// generating from.
+fn filter_bounds_in_scope(
+    generic_params: &mut Vec<ParamBoundWithParams>,
+    where_preds: &mut Vec<WherePredWithParams>,
+    ctx: &AssistContext<'_>,
+    target: &GeneratedFunctionTarget,
+) -> Option<()> {
+    let target_impl = target.parent().ancestors().find_map(ast::Impl::cast)?;
+    let target_impl = ctx.sema.to_def(&target_impl)?;
+    // It's sufficient to test only the first element of `generic_params` because of the order of
+    // insertion (see `relevant_parmas_and_where_clauses()`).
+    let def = generic_params.first()?.self_ty_param.parent();
+    if def != hir::GenericDef::Impl(target_impl) {
+        return None;
+    }
+
+    // Now we know every element that belongs to an impl would be in scope at `target`, we can
+    // filter them out just by looking at their parent.
+    generic_params.retain(|it| !matches!(it.self_ty_param.parent(), hir::GenericDef::Impl(_)));
+    where_preds.retain(|it| {
+        it.node.syntax().parent().and_then(|it| it.parent()).and_then(ast::Impl::cast).is_none()
+    });
+
+    Some(())
+}
+
 /// Makes duplicate argument names unique by appending incrementing numbers.
 ///
 /// ```
@@ -564,17 +955,25 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri
     }
 }
 
-fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr) -> String {
+fn fn_arg_type(
+    ctx: &AssistContext<'_>,
+    target_module: hir::Module,
+    fn_arg: &ast::Expr,
+    generic_params: &mut FxHashSet<hir::GenericParam>,
+) -> String {
     fn maybe_displayed_type(
         ctx: &AssistContext<'_>,
         target_module: hir::Module,
         fn_arg: &ast::Expr,
+        generic_params: &mut FxHashSet<hir::GenericParam>,
     ) -> Option<String> {
         let ty = ctx.sema.type_of_expr(fn_arg)?.adjusted();
         if ty.is_unknown() {
             return None;
         }
 
+        generic_params.extend(ty.generic_params(ctx.db()));
+
         if ty.is_reference() || ty.is_mutable_reference() {
             let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
             convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
@@ -585,7 +984,8 @@ fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast
         }
     }
 
-    maybe_displayed_type(ctx, target_module, fn_arg).unwrap_or_else(|| String::from("_"))
+    maybe_displayed_type(ctx, target_module, fn_arg, generic_params)
+        .unwrap_or_else(|| String::from("_"))
 }
 
 /// Returns the position inside the current mod or file
@@ -640,10 +1040,11 @@ fn next_space_for_fn_in_module(
 }
 
 fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> {
-    if let Some(last_item) = impl_.assoc_item_list().and_then(|it| it.assoc_items().last()) {
+    let assoc_item_list = impl_.assoc_item_list()?;
+    if let Some(last_item) = assoc_item_list.assoc_items().last() {
         Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
     } else {
-        Some(GeneratedFunctionTarget::InEmptyItemList(impl_.assoc_item_list()?.syntax().clone()))
+        Some(GeneratedFunctionTarget::InEmptyItemList(assoc_item_list.syntax().clone()))
     }
 }
 
@@ -659,6 +1060,73 @@ fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistCon
     false
 }
 
+// This is never intended to be used as a generic graph structure. If there's ever another need of
+// graph algorithm, consider adding a library for that (and replace the following).
+/// Minimally implemented directed graph structure represented by adjacency list.
+struct Graph {
+    edges: Vec<Vec<usize>>,
+}
+
+impl Graph {
+    fn new(node_count: usize) -> Self {
+        Self { edges: vec![Vec::new(); node_count] }
+    }
+
+    fn add_edge(&mut self, from: usize, to: usize) {
+        self.edges[from].push(to);
+    }
+
+    fn edges_for(&self, node_idx: usize) -> &[usize] {
+        &self.edges[node_idx]
+    }
+
+    fn len(&self) -> usize {
+        self.edges.len()
+    }
+
+    fn compute_reachable_nodes(
+        &self,
+        starting_nodes: impl IntoIterator<Item = usize>,
+    ) -> Vec<bool> {
+        let mut visitor = Visitor::new(self);
+        for idx in starting_nodes {
+            visitor.mark_reachable(idx);
+        }
+        visitor.visited
+    }
+}
+
+struct Visitor<'g> {
+    graph: &'g Graph,
+    visited: Vec<bool>,
+    // Stack is held in this struct so we can reuse its buffer.
+    stack: Vec<usize>,
+}
+
+impl<'g> Visitor<'g> {
+    fn new(graph: &'g Graph) -> Self {
+        let visited = vec![false; graph.len()];
+        Self { graph, visited, stack: Vec::new() }
+    }
+
+    fn mark_reachable(&mut self, start_idx: usize) {
+        // non-recursive DFS
+        stdx::always!(self.stack.is_empty());
+
+        self.stack.push(start_idx);
+        while let Some(idx) = self.stack.pop() {
+            if !self.visited[idx] {
+                self.visited[idx] = true;
+                for &neighbor in self.graph.edges_for(idx) {
+                    if !self.visited[neighbor] {
+                        self.stack.push(neighbor);
+                    }
+                }
+            }
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable};
@@ -1087,21 +1555,167 @@ fn bar(baz: Baz::Bof) {
     }
 
     #[test]
-    fn add_function_with_generic_arg() {
-        // FIXME: This is wrong, generated `bar` should include generic parameter.
+    fn generate_function_with_generic_param() {
+        check_assist(
+            generate_function,
+            r"
+fn foo<T, const N: usize>(t: [T; N]) { $0bar(t) }
+",
+            r"
+fn foo<T, const N: usize>(t: [T; N]) { bar(t) }
+
+fn bar<T, const N: usize>(t: [T; N]) {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn generate_function_with_parent_generic_param() {
+        check_assist(
+            generate_function,
+            r"
+struct S<T>(T);
+impl<T> S<T> {
+    fn foo<U>(t: T, u: U) { $0bar(t, u) }
+}
+",
+            r"
+struct S<T>(T);
+impl<T> S<T> {
+    fn foo<U>(t: T, u: U) { bar(t, u) }
+}
+
+fn bar<T, U>(t: T, u: U) {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn generic_param_in_receiver_type() {
+        // FIXME: Generic parameter `T` should be part of impl, not method.
+        check_assist(
+            generate_function,
+            r"
+struct S<T>(T);
+fn foo<T, U>(s: S<T>, u: U) { s.$0foo(u) }
+",
+            r"
+struct S<T>(T);
+impl S {
+    fn foo<T, U>(&self, u: U) {
+        ${0:todo!()}
+    }
+}
+fn foo<T, U>(s: S<T>, u: U) { s.foo(u) }
+",
+        )
+    }
+
+    #[test]
+    fn generic_param_in_return_type() {
+        check_assist(
+            generate_function,
+            r"
+fn foo<T, const N: usize>() -> [T; N] { $0bar() }
+",
+            r"
+fn foo<T, const N: usize>() -> [T; N] { bar() }
+
+fn bar<T, const N: usize>() -> [T; N] {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn generate_fn_with_bounds() {
+        // FIXME: where predicates should be on next lines.
+        check_assist(
+            generate_function,
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T: A<i32>> S<T>
+where
+    T: A<i64>,
+{
+    fn foo<U>(t: T, u: U)
+    where
+        T: A<()>,
+        U: A<i32> + A<i64>,
+    {
+        $0bar(t, u)
+    }
+}
+",
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T: A<i32>> S<T>
+where
+    T: A<i64>,
+{
+    fn foo<U>(t: T, u: U)
+    where
+        T: A<()>,
+        U: A<i32> + A<i64>,
+    {
+        bar(t, u)
+    }
+}
+
+fn bar<T: A<i32>, U>(t: T, u: U) where T: A<i64>, T: A<()>, U: A<i32> + A<i64> {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn include_transitive_param_dependency() {
+        // FIXME: where predicates should be on next lines.
         check_assist(
             generate_function,
             r"
-fn foo<T>(t: T) {
-    $0bar(t)
+trait A<T> { type Assoc; }
+trait B { type Item; }
+struct S<T>(T);
+impl<T, U, V: B, W> S<(T, U, V, W)>
+where
+    T: A<U, Assoc = V>,
+    S<V::Item>: A<U, Assoc = W>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        U: A<T, Assoc = I>,
+    {
+        $0bar(u)
+    }
 }
 ",
             r"
-fn foo<T>(t: T) {
-    bar(t)
+trait A<T> { type Assoc; }
+trait B { type Item; }
+struct S<T>(T);
+impl<T, U, V: B, W> S<(T, U, V, W)>
+where
+    T: A<U, Assoc = V>,
+    S<V::Item>: A<U, Assoc = W>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        U: A<T, Assoc = I>,
+    {
+        bar(u)
+    }
 }
 
-fn bar(t: T) {
+fn bar<T, U, V: B, W, I>(u: U) where T: A<U, Assoc = V>, S<V::Item>: A<U, Assoc = W>, U: A<T, Assoc = I> {
     ${0:todo!()}
 }
 ",
@@ -1109,6 +1723,135 @@ fn bar(t: T) {
     }
 
     #[test]
+    fn irrelevant_bounds_are_filtered_out() {
+        check_assist(
+            generate_function,
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T, U, V, W> S<(T, U, V, W)>
+where
+    T: A<U>,
+    V: A<W>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        U: A<T> + A<I>,
+    {
+        $0bar(u)
+    }
+}
+",
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T, U, V, W> S<(T, U, V, W)>
+where
+    T: A<U>,
+    V: A<W>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        U: A<T> + A<I>,
+    {
+        bar(u)
+    }
+}
+
+fn bar<T, U, I>(u: U) where T: A<U>, U: A<T> + A<I> {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn params_in_trait_arg_are_not_dependency() {
+        // Even though `bar` depends on `U` and `I`, we don't have to copy these bounds:
+        // `T: A<I>` and `T: A<U>`.
+        check_assist(
+            generate_function,
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T, U> S<(T, U)>
+where
+    T: A<U>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        T: A<I>,
+        U: A<I>,
+    {
+        $0bar(u)
+    }
+}
+",
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T, U> S<(T, U)>
+where
+    T: A<U>,
+{
+    fn foo<I>(t: T, u: U)
+    where
+        T: A<I>,
+        U: A<I>,
+    {
+        bar(u)
+    }
+}
+
+fn bar<U, I>(u: U) where U: A<I> {
+    ${0:todo!()}
+}
+",
+        )
+    }
+
+    #[test]
+    fn dont_copy_bounds_already_in_scope() {
+        check_assist(
+            generate_function,
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T: A<i32>> S<T>
+where
+    T: A<usize>,
+{
+    fn foo<U: A<()>>(t: T, u: U)
+    where
+        T: A<S<i32>>,
+    {
+        Self::$0bar(t, u);
+    }
+}
+",
+            r"
+trait A<T> {}
+struct S<T>(T);
+impl<T: A<i32>> S<T>
+where
+    T: A<usize>,
+{
+    fn foo<U: A<()>>(t: T, u: U)
+    where
+        T: A<S<i32>>,
+    {
+        Self::bar(t, u);
+    }
+
+    fn bar<U: A<()>>(t: T, u: U) ${0:-> _} where T: A<S<i32>> {
+        todo!()
+    }
+}
+",
+        )
+    }
+
+    #[test]
     fn add_function_with_fn_arg() {
         // FIXME: The argument in `bar` is wrong.
         check_assist(
@@ -1290,6 +2033,50 @@ fn baz(foo: foo::Foo) {
     }
 
     #[test]
+    fn qualified_path_in_generic_bounds_uses_correct_scope() {
+        check_assist(
+            generate_function,
+            r"
+mod a {
+    pub trait A {};
+}
+pub mod b {
+    pub struct S<T>(T);
+}
+struct S<T>(T);
+impl<T> S<T>
+where
+    T: a::A,
+{
+    fn foo<U: a::A>(t: b::S<T>, u: S<U>) {
+        a::$0bar(t, u);
+    }
+}
+",
+            r"
+mod a {
+    pub trait A {}
+
+    pub(crate) fn bar<T, U: self::A>(t: crate::b::S<T>, u: crate::S<U>) ${0:-> _} where T: self::A {
+        todo!()
+    };
+}
+pub mod b {
+    pub struct S<T>(T);
+}
+struct S<T>(T);
+impl<T> S<T>
+where
+    T: a::A,
+{
+    fn foo<U: a::A>(t: b::S<T>, u: S<U>) {
+        a::bar(t, u);
+    }
+}
+",
+        )
+    }
+    #[test]
     fn add_function_in_module_containing_other_items() {
         check_assist(
             generate_function,
@@ -1607,6 +2394,26 @@ fn foo() {S::bar();}
     }
 
     #[test]
+    fn create_generic_static_method() {
+        check_assist(
+            generate_function,
+            r"
+struct S;
+fn foo<T, const N: usize>(t: [T; N]) { S::bar$0(t); }
+",
+            r"
+struct S;
+impl S {
+    fn bar<T, const N: usize>(t: [T; N]) ${0:-> _} {
+        todo!()
+    }
+}
+fn foo<T, const N: usize>(t: [T; N]) { S::bar(t); }
+",
+        )
+    }
+
+    #[test]
     fn create_static_method_within_an_impl() {
         check_assist(
             generate_function,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
index 9d03f03d201..3fc552306a6 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_macro.rs
@@ -1,3 +1,4 @@
+use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into;
 use syntax::ast::{self, AstNode};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -35,7 +36,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // ```
 pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
     let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
-    let expanded = ctx.sema.expand(&unexpanded)?.clone_for_update();
+    let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
 
     let text_range = unexpanded.syntax().text_range();
 
@@ -230,4 +231,27 @@ fn f() { let result = foo$0(); }
 "#,
         );
     }
+
+    #[test]
+    fn inline_macro_with_whitespace() {
+        check_assist(
+            inline_macro,
+            r#"
+macro_rules! whitespace {
+    () => {
+        if true {}
+    };
+}
+fn f() { whitespace$0!(); }
+"#,
+            r#"
+macro_rules! whitespace {
+    () => {
+        if true {}
+    };
+}
+fn f() { if true{}; }
+"#,
+        )
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
index 2bdbec93b1f..d7ddc5f23f5 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/merge_imports.rs
@@ -92,7 +92,7 @@ trait Merge: AstNode + Clone {
     fn try_merge_from(self, items: &mut dyn Iterator<Item = Self>) -> Option<Vec<Edit>> {
         let mut edits = Vec::new();
         let mut merged = self.clone();
-        while let Some(item) = items.next() {
+        for item in items {
             merged = merged.try_merge(&item)?;
             edits.push(Edit::Remove(item.into_either()));
         }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
index 0e3a1e652b0..d848fce4be8 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs
@@ -5,10 +5,7 @@ use syntax::{
     SyntaxKind,
 };
 
-use crate::{
-    assist_context::{AssistContext, Assists},
-    utils,
-};
+use crate::assist_context::{AssistContext, Assists};
 
 // NOTE: Code may break if the self type implements a trait that has associated const with the same
 // name, but it's pretty expensive to check that (`hir::Impl::all_for_type()`) and we assume that's
@@ -130,9 +127,7 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
 
             let const_ = const_.clone_for_update();
             const_.reindent_to(indent);
-            let mut const_text = format!("\n{indent}{const_}{fixup}");
-            utils::escape_non_snippet(&mut const_text);
-            builder.insert(insert_offset, const_text);
+            builder.insert(insert_offset, format!("\n{indent}{const_}{fixup}"));
         },
     )
 }
@@ -443,39 +438,4 @@ impl S {
 "#,
         );
     }
-
-    #[test]
-    fn moved_const_body_is_escaped() {
-        // Note that the last argument is what *lsp clients would see* rather than
-        // what users would see. Unescaping happens thereafter.
-        check_assist(
-            move_const_to_impl,
-            r#"
-struct S;
-impl S {
-    fn f() -> usize {
-        /// doc comment
-        /// \\
-        /// ${snippet}
-        const C$0: &str = "\ and $1";
-
-        C.len()
-    }
-}
-"#,
-            r#"
-struct S;
-impl S {
-    /// doc comment
-    /// \\\\
-    /// \${snippet}
-    const C: &str = "\\ and \$1";
-
-    fn f() -> usize {
-        Self::C.len()
-    }
-}
-"#,
-        )
-    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
index c9bc25b27a5..01420430bb4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/raw_string.rs
@@ -2,7 +2,7 @@ use std::borrow::Cow;
 
 use syntax::{ast, ast::IsString, AstToken, TextRange, TextSize};
 
-use crate::{AssistContext, AssistId, AssistKind, Assists};
+use crate::{utils::required_hashes, AssistContext, AssistId, AssistKind, Assists};
 
 // Assist: make_raw_string
 //
@@ -155,16 +155,6 @@ pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
     })
 }
 
-fn required_hashes(s: &str) -> usize {
-    let mut res = 0usize;
-    for idx in s.match_indices('"').map(|(i, _)| i) {
-        let (_, sub) = s.split_at(idx + 1);
-        let n_hashes = sub.chars().take_while(|c| *c == '#').count();
-        res = res.max(n_hashes + 1)
-    }
-    res
-}
-
 #[cfg(test)]
 mod tests {
     use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target};
@@ -172,17 +162,6 @@ mod tests {
     use super::*;
 
     #[test]
-    fn test_required_hashes() {
-        assert_eq!(0, required_hashes("abc"));
-        assert_eq!(0, required_hashes("###"));
-        assert_eq!(1, required_hashes("\""));
-        assert_eq!(2, required_hashes("\"#abc"));
-        assert_eq!(0, required_hashes("#abc"));
-        assert_eq!(3, required_hashes("#ab\"##c"));
-        assert_eq!(5, required_hashes("#ab\"##\"####c"));
-    }
-
-    #[test]
     fn make_raw_string_target() {
         check_assist_target(
             make_raw_string,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
index a899c7a6457..58dcaf9a221 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/reorder_fields.rs
@@ -20,10 +20,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
 // const test: Foo = Foo {foo: 1, bar: 0}
 // ```
 pub(crate) fn reorder_fields(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
-    let record = ctx
-        .find_node_at_offset::<ast::RecordExpr>()
-        .map(Either::Left)
-        .or_else(|| ctx.find_node_at_offset::<ast::RecordPat>().map(Either::Right))?;
+    let record = ctx.find_node_at_offset::<Either<ast::RecordExpr, ast::RecordPat>>()?;
 
     let path = record.as_ref().either(|it| it.path(), |it| it.path())?;
     let ranks = compute_fields_ranks(&path, ctx)?;
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
index f1ca35cafc3..4b20b35c446 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_arith_op.rs
@@ -81,7 +81,7 @@ fn replace_arith(acc: &mut Assists, ctx: &AssistContext<'_>, kind: ArithKind) ->
     let range = TextRange::new(start, end);
 
     acc.add_group(
-        &GroupLabel("replace_arith".into()),
+        &GroupLabel("Replace arithmetic...".into()),
         kind.assist_id(),
         kind.label(),
         range,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
index a6693d7d790..4cfae0c7212 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -214,7 +214,7 @@ fn impl_def_from_trait(
 
     // Generate a default `impl` function body for the derived trait.
     if let ast::AssocItem::Fn(ref func) = first_assoc_item {
-        let _ = gen_trait_fn_body(func, trait_path, adt);
+        let _ = gen_trait_fn_body(func, trait_path, adt, None);
     };
 
     Some((impl_def, first_assoc_item))
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
index 484c27387da..457559656a4 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs
@@ -13,7 +13,7 @@ use syntax::{
         edit::{AstNodeEdit, IndentLevel},
         make, HasName,
     },
-    AstNode, TextRange,
+    AstNode, TextRange, T,
 };
 
 use crate::{
@@ -96,8 +96,9 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
         cond_bodies.push((cond, body));
     }
 
-    if !pat_seen {
-        // Don't offer turning an if (chain) without patterns into a match
+    if !pat_seen && cond_bodies.len() != 1 {
+        // Don't offer turning an if (chain) without patterns into a match,
+        // unless it's a simple `if cond { .. } (else { .. })`
         return None;
     }
 
@@ -114,6 +115,11 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<'
                         Either::Left(pat) => {
                             make::match_arm(iter::once(pat), None, unwrap_trivial_block(body))
                         }
+                        Either::Right(_) if !pat_seen => make::match_arm(
+                            iter::once(make::literal_pat("true").into()),
+                            None,
+                            unwrap_trivial_block(body),
+                        ),
                         Either::Right(expr) => make::match_arm(
                             iter::once(make::wildcard_pat().into()),
                             Some(expr),
@@ -144,31 +150,36 @@ fn make_else_arm(
     else_block: Option<ast::BlockExpr>,
     conditionals: &[(Either<ast::Pat, ast::Expr>, ast::BlockExpr)],
 ) -> ast::MatchArm {
-    if let Some(else_block) = else_block {
-        let pattern = if let [(Either::Left(pat), _)] = conditionals {
-            ctx.sema
+    let (pattern, expr) = if let Some(else_block) = else_block {
+        let pattern = match conditionals {
+            [(Either::Right(_), _)] => make::literal_pat("false").into(),
+            [(Either::Left(pat), _)] => match ctx
+                .sema
                 .type_of_pat(pat)
                 .and_then(|ty| TryEnum::from_ty(&ctx.sema, &ty.adjusted()))
-                .zip(Some(pat))
-        } else {
-            None
-        };
-        let pattern = match pattern {
-            Some((it, pat)) => {
-                if does_pat_match_variant(pat, &it.sad_pattern()) {
-                    it.happy_pattern_wildcard()
-                } else if does_nested_pattern(pat) {
-                    make::wildcard_pat().into()
-                } else {
-                    it.sad_pattern()
+            {
+                Some(it) => {
+                    if does_pat_match_variant(pat, &it.sad_pattern()) {
+                        it.happy_pattern_wildcard()
+                    } else if does_nested_pattern(pat) {
+                        make::wildcard_pat().into()
+                    } else {
+                        it.sad_pattern()
+                    }
                 }
-            }
-            None => make::wildcard_pat().into(),
+                None => make::wildcard_pat().into(),
+            },
+            _ => make::wildcard_pat().into(),
         };
-        make::match_arm(iter::once(pattern), None, unwrap_trivial_block(else_block))
+        (pattern, unwrap_trivial_block(else_block))
     } else {
-        make::match_arm(iter::once(make::wildcard_pat().into()), None, make::expr_unit())
-    }
+        let pattern = match conditionals {
+            [(Either::Right(_), _)] => make::literal_pat("false").into(),
+            _ => make::wildcard_pat().into(),
+        };
+        (pattern, make::expr_unit())
+    };
+    make::match_arm(iter::once(pattern), None, expr)
 }
 
 // Assist: replace_match_with_if_let
@@ -231,7 +242,19 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<'
                 }
             }
 
-            let condition = make::expr_let(if_let_pat, scrutinee);
+            let condition = match if_let_pat {
+                ast::Pat::LiteralPat(p)
+                    if p.literal().map_or(false, |it| it.token().kind() == T![true]) =>
+                {
+                    scrutinee
+                }
+                ast::Pat::LiteralPat(p)
+                    if p.literal().map_or(false, |it| it.token().kind() == T![false]) =>
+                {
+                    make::expr_prefix(T![!], scrutinee)
+                }
+                _ => make::expr_let(if_let_pat, scrutinee).into(),
+            };
             let then_block = make_block_expr(then_expr.reset_indent());
             let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) };
             let if_let_expr = make::expr_if(
@@ -328,6 +351,58 @@ fn main() {
     }
 
     #[test]
+    fn test_if_with_match_no_else() {
+        check_assist(
+            replace_if_let_with_match,
+            r#"
+pub fn foo(foo: bool) {
+    if foo$0 {
+        self.foo();
+    }
+}
+"#,
+            r#"
+pub fn foo(foo: bool) {
+    match foo {
+        true => {
+            self.foo();
+        }
+        false => (),
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
+    fn test_if_with_match_with_else() {
+        check_assist(
+            replace_if_let_with_match,
+            r#"
+pub fn foo(foo: bool) {
+    if foo$0 {
+        self.foo();
+    } else {
+        self.bar();
+    }
+}
+"#,
+            r#"
+pub fn foo(foo: bool) {
+    match foo {
+        true => {
+            self.foo();
+        }
+        false => {
+            self.bar();
+        }
+    }
+}
+"#,
+        )
+    }
+
+    #[test]
     fn test_if_let_with_match_no_else() {
         check_assist(
             replace_if_let_with_match,
@@ -996,4 +1071,64 @@ fn main() {
 "#,
         )
     }
+
+    #[test]
+    fn test_replace_match_with_if_bool() {
+        check_assist(
+            replace_match_with_if_let,
+            r#"
+fn main() {
+    match$0 b {
+        true => (),
+        _ => code(),
+    }
+}
+"#,
+            r#"
+fn main() {
+    if b {
+        ()
+    } else {
+        code()
+    }
+}
+"#,
+        );
+        check_assist(
+            replace_match_with_if_let,
+            r#"
+fn main() {
+    match$0 b {
+        false => code(),
+        true => (),
+    }
+}
+"#,
+            r#"
+fn main() {
+    if !b {
+        code()
+    }
+}
+"#,
+        );
+        check_assist(
+            replace_match_with_if_let,
+            r#"
+fn main() {
+    match$0 b {
+        false => (),
+        true => code(),
+    }
+}
+"#,
+            r#"
+fn main() {
+    if b {
+        code()
+    }
+}
+"#,
+        )
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
index 9565f0ee6f2..db789cfa334 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unmerge_match_arm.rs
@@ -86,8 +86,7 @@ pub(crate) fn unmerge_match_arm(acc: &mut Assists, ctx: &AssistContext<'_>) -> O
                     it.prev_sibling_or_token()
                 })
                 .map(|it| it.kind())
-                .skip_while(|it| it.is_trivia())
-                .next()
+                .find(|it| !it.is_trivia())
                     == Some(T![,]);
             let has_arms_after = neighbor(&match_arm, Direction::Next).is_some();
             if !has_comma_after && !has_arms_after {
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
index 53cdac03a33..33b19a354b9 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_block.rs
@@ -2,6 +2,7 @@ use syntax::{
     ast::{
         self,
         edit::{AstNodeEdit, IndentLevel},
+        make,
     },
     AstNode, SyntaxKind, TextRange, T,
 };
@@ -37,61 +38,89 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
         parent = parent.ancestors().find(|it| ast::MatchExpr::can_cast(it.kind()))?
     }
 
-    if matches!(parent.kind(), SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT | SyntaxKind::LET_STMT)
-    {
-        return acc.add(assist_id, assist_label, target, |builder| {
+    let kind = parent.kind();
+    if matches!(kind, SyntaxKind::STMT_LIST | SyntaxKind::EXPR_STMT) {
+        acc.add(assist_id, assist_label, target, |builder| {
             builder.replace(block.syntax().text_range(), update_expr_string(block.to_string()));
-        });
-    }
-
-    let parent = ast::Expr::cast(parent)?;
-
-    match parent.clone() {
-        ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
-        ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
-        ast::Expr::IfExpr(if_expr) => {
-            let then_branch = if_expr.then_branch()?;
-            if then_branch == block {
-                if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
-                    // For `else if` blocks
-                    let ancestor_then_branch = ancestor.then_branch()?;
-
+        })
+    } else if matches!(kind, SyntaxKind::LET_STMT) {
+        let parent = ast::LetStmt::cast(parent)?;
+        let pattern = ast::Pat::cast(parent.syntax().first_child()?)?;
+        let ty = parent.ty();
+        let list = block.stmt_list()?;
+        let replaced = match list.syntax().last_child() {
+            Some(last) => {
+                let stmts: Vec<ast::Stmt> = list.statements().collect();
+                let initializer = ast::Expr::cast(last.clone())?;
+                let let_stmt = make::let_stmt(pattern, ty, Some(initializer));
+                if stmts.len() > 0 {
+                    let block = make::block_expr(stmts, None);
+                    format!(
+                        "{}\n    {}",
+                        update_expr_string(block.to_string()),
+                        let_stmt.to_string()
+                    )
+                } else {
+                    let_stmt.to_string()
+                }
+            }
+            None => {
+                let empty_tuple = make::expr_tuple([]);
+                make::let_stmt(pattern, ty, Some(empty_tuple)).to_string()
+            }
+        };
+        acc.add(assist_id, assist_label, target, |builder| {
+            builder.replace(parent.syntax().text_range(), replaced);
+        })
+    } else {
+        let parent = ast::Expr::cast(parent)?;
+        match parent.clone() {
+            ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) | ast::Expr::LoopExpr(_) => (),
+            ast::Expr::MatchExpr(_) => block = block.dedent(IndentLevel(1)),
+            ast::Expr::IfExpr(if_expr) => {
+                let then_branch = if_expr.then_branch()?;
+                if then_branch == block {
+                    if let Some(ancestor) = if_expr.syntax().parent().and_then(ast::IfExpr::cast) {
+                        // For `else if` blocks
+                        let ancestor_then_branch = ancestor.then_branch()?;
+
+                        return acc.add(assist_id, assist_label, target, |edit| {
+                            let range_to_del_else_if = TextRange::new(
+                                ancestor_then_branch.syntax().text_range().end(),
+                                l_curly_token.text_range().start(),
+                            );
+                            let range_to_del_rest = TextRange::new(
+                                then_branch.syntax().text_range().end(),
+                                if_expr.syntax().text_range().end(),
+                            );
+
+                            edit.delete(range_to_del_rest);
+                            edit.delete(range_to_del_else_if);
+                            edit.replace(
+                                target,
+                                update_expr_string_without_newline(then_branch.to_string()),
+                            );
+                        });
+                    }
+                } else {
                     return acc.add(assist_id, assist_label, target, |edit| {
-                        let range_to_del_else_if = TextRange::new(
-                            ancestor_then_branch.syntax().text_range().end(),
-                            l_curly_token.text_range().start(),
-                        );
-                        let range_to_del_rest = TextRange::new(
+                        let range_to_del = TextRange::new(
                             then_branch.syntax().text_range().end(),
-                            if_expr.syntax().text_range().end(),
+                            l_curly_token.text_range().start(),
                         );
 
-                        edit.delete(range_to_del_rest);
-                        edit.delete(range_to_del_else_if);
-                        edit.replace(
-                            target,
-                            update_expr_string_without_newline(then_branch.to_string()),
-                        );
+                        edit.delete(range_to_del);
+                        edit.replace(target, update_expr_string_without_newline(block.to_string()));
                     });
                 }
-            } else {
-                return acc.add(assist_id, assist_label, target, |edit| {
-                    let range_to_del = TextRange::new(
-                        then_branch.syntax().text_range().end(),
-                        l_curly_token.text_range().start(),
-                    );
-
-                    edit.delete(range_to_del);
-                    edit.replace(target, update_expr_string_without_newline(block.to_string()));
-                });
             }
-        }
-        _ => return None,
-    };
+            _ => return None,
+        };
 
-    acc.add(assist_id, assist_label, target, |builder| {
-        builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
-    })
+        acc.add(assist_id, assist_label, target, |builder| {
+            builder.replace(parent.syntax().text_range(), update_expr_string(block.to_string()));
+        })
+    }
 }
 
 fn update_expr_string(expr_string: String) -> String {
@@ -725,6 +754,19 @@ fn main() -> i32 {
             unwrap_block,
             r#"
 fn main() {
+    let x = {$0};
+}
+"#,
+            r#"
+fn main() {
+    let x = ();
+}
+"#,
+        );
+        check_assist(
+            unwrap_block,
+            r#"
+fn main() {
     let x = {$0
         bar
     };
@@ -736,6 +778,34 @@ fn main() {
 }
 "#,
         );
+        check_assist(
+            unwrap_block,
+            r#"
+fn main() -> i32 {
+    let _ = {$01; 2};
+}
+"#,
+            r#"
+fn main() -> i32 {
+    1;
+    let _ = 2;
+}
+"#,
+        );
+        check_assist(
+            unwrap_block,
+            r#"
+fn main() -> i32 {
+    let mut a = {$01; 2};
+}
+"#,
+            r#"
+fn main() -> i32 {
+    1;
+    let mut a = 2;
+}
+"#,
+        );
     }
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
index 7813c9f9cbe..276cf5f5dd0 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs
@@ -106,6 +106,7 @@ mod handlers {
 
     pub(crate) type Handler = fn(&mut Assists, &AssistContext<'_>) -> Option<()>;
 
+    mod add_braces;
     mod add_explicit_type;
     mod add_label_to_loop;
     mod add_lifetime_to_type;
@@ -126,6 +127,7 @@ mod handlers {
     mod convert_to_guarded_return;
     mod convert_two_arm_bool_match_to_matches_macro;
     mod convert_while_to_loop;
+    mod desugar_doc_comment;
     mod destructure_tuple_binding;
     mod expand_glob_import;
     mod extract_expressions_from_format_string;
@@ -208,6 +210,7 @@ mod handlers {
     pub(crate) fn all() -> &'static [Handler] {
         &[
             // These are alphabetic for the foolish consistency
+            add_braces::add_braces,
             add_explicit_type::add_explicit_type,
             add_label_to_loop::add_label_to_loop,
             add_missing_match_arms::add_missing_match_arms,
@@ -231,6 +234,7 @@ mod handlers {
             convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct,
             convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro,
             convert_while_to_loop::convert_while_to_loop,
+            desugar_doc_comment::desugar_doc_comment,
             destructure_tuple_binding::destructure_tuple_binding,
             expand_glob_import::expand_glob_import,
             extract_expressions_from_format_string::extract_expressions_from_format_string,
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
index 006ae4b3034..8a25e1f648a 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs
@@ -3,6 +3,31 @@
 use super::check_doc_test;
 
 #[test]
+fn doctest_add_braces() {
+    check_doc_test(
+        "add_braces",
+        r#####"
+fn foo(n: i32) -> i32 {
+    match n {
+        1 =>$0 n + 1,
+        _ => 0
+    }
+}
+"#####,
+        r#####"
+fn foo(n: i32) -> i32 {
+    match n {
+        1 => {
+            n + 1
+        },
+        _ => 0
+    }
+}
+"#####,
+    )
+}
+
+#[test]
 fn doctest_add_explicit_type() {
     check_doc_test(
         "add_explicit_type",
@@ -598,6 +623,21 @@ fn main() {
 }
 
 #[test]
+fn doctest_desugar_doc_comment() {
+    check_doc_test(
+        "desugar_doc_comment",
+        r#####"
+/// Multi-line$0
+/// comment
+"#####,
+        r#####"
+#[doc = r"Multi-line
+comment"]
+"#####,
+    )
+}
+
+#[test]
 fn doctest_expand_glob_import() {
     check_doc_test(
         "expand_glob_import",
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
index 7add6606492..f323ebcf7a3 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs
@@ -208,23 +208,6 @@ pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor
     }
 }
 
-/// Escapes text that should be rendered as-is, typically those that we're copy-pasting what the
-/// users wrote.
-///
-/// This function should only be used when the text doesn't contain snippet **AND** the text
-/// wouldn't be included in a snippet.
-pub(crate) fn escape_non_snippet(text: &mut String) {
-    // While we *can* escape `}`, we don't really have to in this specific case. We only need to
-    // escape it inside `${}` to disambiguate it from the ending token of the syntax, but after we
-    // escape every occurrence of `$`, we wouldn't have `${}` in the first place.
-    //
-    // This will break if the text contains snippet or it will be included in a snippet (hence doc
-    // comment). Compare `fn escape(buf)` in `render_snippet()` above, where the escaped text is
-    // included in a snippet.
-    stdx::replace(text, '\\', r"\\");
-    stdx::replace(text, '$', r"\$");
-}
-
 pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
     node.children_with_tokens()
         .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR))
@@ -758,3 +741,24 @@ pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgLi
     }
     make::arg_list(args)
 }
+
+/// Calculate the number of hashes required for a raw string containing `s`
+pub(crate) fn required_hashes(s: &str) -> usize {
+    let mut res = 0usize;
+    for idx in s.match_indices('"').map(|(i, _)| i) {
+        let (_, sub) = s.split_at(idx + 1);
+        let n_hashes = sub.chars().take_while(|c| *c == '#').count();
+        res = res.max(n_hashes + 1)
+    }
+    res
+}
+#[test]
+fn test_required_hashes() {
+    assert_eq!(0, required_hashes("abc"));
+    assert_eq!(0, required_hashes("###"));
+    assert_eq!(1, required_hashes("\""));
+    assert_eq!(2, required_hashes("\"#abc"));
+    assert_eq!(0, required_hashes("#abc"));
+    assert_eq!(3, required_hashes("#ab\"##c"));
+    assert_eq!(5, required_hashes("#ab\"##\"####c"));
+}
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
index d4abb51259e..808b2340595 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/gen_trait_fn_body.rs
@@ -1,5 +1,6 @@
 //! This module contains functions to generate default trait impl function bodies where possible.
 
+use hir::TraitRef;
 use syntax::{
     ast::{self, edit::AstNodeEdit, make, AstNode, BinaryOp, CmpOp, HasName, LogicOp},
     ted,
@@ -7,6 +8,8 @@ use syntax::{
 
 /// Generate custom trait bodies without default implementation where possible.
 ///
+/// If `func` is defined within an existing impl block, pass [`TraitRef`]. Otherwise pass `None`.
+///
 /// Returns `Option` so that we can use `?` rather than `if let Some`. Returning
 /// `None` means that generating a custom trait body failed, and the body will remain
 /// as `todo!` instead.
@@ -14,14 +17,15 @@ pub(crate) fn gen_trait_fn_body(
     func: &ast::Fn,
     trait_path: &ast::Path,
     adt: &ast::Adt,
+    trait_ref: Option<TraitRef>,
 ) -> Option<()> {
     match trait_path.segment()?.name_ref()?.text().as_str() {
         "Clone" => gen_clone_impl(adt, func),
         "Debug" => gen_debug_impl(adt, func),
         "Default" => gen_default_impl(adt, func),
         "Hash" => gen_hash_impl(adt, func),
-        "PartialEq" => gen_partial_eq(adt, func),
-        "PartialOrd" => gen_partial_ord(adt, func),
+        "PartialEq" => gen_partial_eq(adt, func, trait_ref),
+        "PartialOrd" => gen_partial_ord(adt, func, trait_ref),
         _ => None,
     }
 }
@@ -395,7 +399,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
 }
 
 /// Generate a `PartialEq` impl based on the fields and members of the target type.
-fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
     stdx::always!(func.name().map_or(false, |name| name.text() == "eq"));
     fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
         match expr {
@@ -423,8 +427,15 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
         ast::Pat::IdentPat(make::ident_pat(false, false, make::name(field_name)))
     }
 
-    // FIXME: return `None` if the trait carries a generic type; we can only
-    // generate this code `Self` for the time being.
+    // Check that self type and rhs type match. We don't know how to implement the method
+    // automatically otherwise.
+    if let Some(trait_ref) = trait_ref {
+        let self_ty = trait_ref.self_ty();
+        let rhs_ty = trait_ref.get_type_argument(1)?;
+        if self_ty != rhs_ty {
+            return None;
+        }
+    }
 
     let body = match adt {
         // `PartialEq` cannot be derived for unions, so no default impl can be provided.
@@ -568,7 +579,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
                 make::block_expr(None, expr).indent(ast::edit::IndentLevel(1))
             }
 
-            // No fields in the body means there's nothing to hash.
+            // No fields in the body means there's nothing to compare.
             None => {
                 let expr = make::expr_literal("true").into();
                 make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
@@ -580,7 +591,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
     Some(())
 }
 
-fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
+fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn, trait_ref: Option<TraitRef>) -> Option<()> {
     stdx::always!(func.name().map_or(false, |name| name.text() == "partial_cmp"));
     fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
         let mut arms = vec![];
@@ -605,8 +616,15 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
         make::expr_method_call(lhs, method, make::arg_list(Some(rhs)))
     }
 
-    // FIXME: return `None` if the trait carries a generic type; we can only
-    // generate this code `Self` for the time being.
+    // Check that self type and rhs type match. We don't know how to implement the method
+    // automatically otherwise.
+    if let Some(trait_ref) = trait_ref {
+        let self_ty = trait_ref.self_ty();
+        let rhs_ty = trait_ref.get_type_argument(1)?;
+        if self_ty != rhs_ty {
+            return None;
+        }
+    }
 
     let body = match adt {
         // `PartialOrd` cannot be derived for unions, so no default impl can be provided.
diff --git a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
index 11310e2f129..34ef092cfc4 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-completion/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide-completion"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -13,21 +15,23 @@ doctest = false
 cov-mark = "2.0.0-pre.1"
 itertools = "0.10.5"
 
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 smallvec = "1.10.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
 
+# local deps
+base-db.workspace = true
+ide-db.workspace = true
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
 # completions crate should depend only on the top-level `hir` package. if you need
 # something from some `hir-xxx` subpackage, reexport the API via `hir`.
-hir = { path = "../hir", version = "0.0.0" }
+hir.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
 
-test-utils = { path = "../test-utils" }
+# local deps
+test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
index 9a060857e9e..889d90095fa 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs
@@ -869,7 +869,7 @@ impl Test for T {{
         };
 
         // Enumerate some possible next siblings.
-        for next_sibling in &[
+        for next_sibling in [
             "",
             "fn other_fn() {}", // `const $0 fn` -> `const fn`
             "type OtherType = i32;",
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
index 1d03c8cc5ca..b9ab2afca2b 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/keyword.rs
@@ -86,6 +86,7 @@ fn foo(a: A) { a.$0 }
                 sn match                  match expr {}
                 sn ref                    &expr
                 sn refm                   &mut expr
+                sn unsafe                 unsafe {}
             "#]],
         );
 
@@ -110,6 +111,7 @@ fn foo() {
                 sn match                  match expr {}
                 sn ref                    &expr
                 sn refm                   &mut expr
+                sn unsafe                 unsafe {}
             "#]],
         );
     }
@@ -136,6 +138,7 @@ fn foo(a: A) { a.$0 }
                 sn match                  match expr {}
                 sn ref                    &expr
                 sn refm                   &mut expr
+                sn unsafe                 unsafe {}
             "#]],
         );
     }
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
index f4f37d77d81..90c523735da 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/postfix.rs
@@ -6,7 +6,7 @@ use hir::{Documentation, HasAttrs};
 use ide_db::{imports::insert_use::ImportScope, ty_filter::TryEnum, SnippetCap};
 use syntax::{
     ast::{self, make, AstNode, AstToken},
-    SyntaxKind::{EXPR_STMT, STMT_LIST},
+    SyntaxKind::{BLOCK_EXPR, EXPR_STMT, FOR_EXPR, IF_EXPR, LOOP_EXPR, STMT_LIST, WHILE_EXPR},
     TextRange, TextSize,
 };
 use text_edit::TextEdit;
@@ -123,6 +123,22 @@ pub(crate) fn complete_postfix(
     postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc);
     postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc);
 
+    let mut unsafe_should_be_wrapped = true;
+    if dot_receiver.syntax().kind() == BLOCK_EXPR {
+        unsafe_should_be_wrapped = false;
+        if let Some(parent) = dot_receiver.syntax().parent() {
+            if matches!(parent.kind(), IF_EXPR | WHILE_EXPR | LOOP_EXPR | FOR_EXPR) {
+                unsafe_should_be_wrapped = true;
+            }
+        }
+    };
+    let unsafe_completion_string = if unsafe_should_be_wrapped {
+        format!("unsafe {{ {receiver_text} }}")
+    } else {
+        format!("unsafe {receiver_text}")
+    };
+    postfix_snippet("unsafe", "unsafe {}", &unsafe_completion_string).add_to(acc);
+
     // The rest of the postfix completions create an expression that moves an argument,
     // so it's better to consider references now to avoid breaking the compilation
 
@@ -329,18 +345,19 @@ fn main() {
 }
 "#,
             expect![[r#"
-                sn box   Box::new(expr)
-                sn call  function(expr)
-                sn dbg   dbg!(expr)
-                sn dbgr  dbg!(&expr)
-                sn if    if expr {}
-                sn let   let
-                sn letm  let mut
-                sn match match expr {}
-                sn not   !expr
-                sn ref   &expr
-                sn refm  &mut expr
-                sn while while expr {}
+                sn box    Box::new(expr)
+                sn call   function(expr)
+                sn dbg    dbg!(expr)
+                sn dbgr   dbg!(&expr)
+                sn if     if expr {}
+                sn let    let
+                sn letm   let mut
+                sn match  match expr {}
+                sn not    !expr
+                sn ref    &expr
+                sn refm   &mut expr
+                sn unsafe unsafe {}
+                sn while  while expr {}
             "#]],
         );
     }
@@ -359,16 +376,17 @@ fn main() {
 }
 "#,
             expect![[r#"
-                sn box   Box::new(expr)
-                sn call  function(expr)
-                sn dbg   dbg!(expr)
-                sn dbgr  dbg!(&expr)
-                sn if    if expr {}
-                sn match match expr {}
-                sn not   !expr
-                sn ref   &expr
-                sn refm  &mut expr
-                sn while while expr {}
+                sn box    Box::new(expr)
+                sn call   function(expr)
+                sn dbg    dbg!(expr)
+                sn dbgr   dbg!(&expr)
+                sn if     if expr {}
+                sn match  match expr {}
+                sn not    !expr
+                sn ref    &expr
+                sn refm   &mut expr
+                sn unsafe unsafe {}
+                sn while  while expr {}
             "#]],
         );
     }
@@ -383,15 +401,16 @@ fn main() {
 }
 "#,
             expect![[r#"
-                sn box   Box::new(expr)
-                sn call  function(expr)
-                sn dbg   dbg!(expr)
-                sn dbgr  dbg!(&expr)
-                sn let   let
-                sn letm  let mut
-                sn match match expr {}
-                sn ref   &expr
-                sn refm  &mut expr
+                sn box    Box::new(expr)
+                sn call   function(expr)
+                sn dbg    dbg!(expr)
+                sn dbgr   dbg!(&expr)
+                sn let    let
+                sn letm   let mut
+                sn match  match expr {}
+                sn ref    &expr
+                sn refm   &mut expr
+                sn unsafe unsafe {}
             "#]],
         )
     }
@@ -406,18 +425,19 @@ fn main() {
 }
 "#,
             expect![[r#"
-                sn box   Box::new(expr)
-                sn call  function(expr)
-                sn dbg   dbg!(expr)
-                sn dbgr  dbg!(&expr)
-                sn if    if expr {}
-                sn let   let
-                sn letm  let mut
-                sn match match expr {}
-                sn not   !expr
-                sn ref   &expr
-                sn refm  &mut expr
-                sn while while expr {}
+                sn box    Box::new(expr)
+                sn call   function(expr)
+                sn dbg    dbg!(expr)
+                sn dbgr   dbg!(&expr)
+                sn if     if expr {}
+                sn let    let
+                sn letm   let mut
+                sn match  match expr {}
+                sn not    !expr
+                sn ref    &expr
+                sn refm   &mut expr
+                sn unsafe unsafe {}
+                sn while  while expr {}
             "#]],
         );
     }
@@ -518,6 +538,49 @@ fn main() {
     }
 
     #[test]
+    fn postfix_completion_for_unsafe() {
+        check_edit("unsafe", r#"fn main() { foo.$0 }"#, r#"fn main() { unsafe { foo } }"#);
+        check_edit("unsafe", r#"fn main() { { foo }.$0 }"#, r#"fn main() { unsafe { foo } }"#);
+        check_edit(
+            "unsafe",
+            r#"fn main() { if x { foo }.$0 }"#,
+            r#"fn main() { unsafe { if x { foo } } }"#,
+        );
+        check_edit(
+            "unsafe",
+            r#"fn main() { loop { foo }.$0 }"#,
+            r#"fn main() { unsafe { loop { foo } } }"#,
+        );
+        check_edit(
+            "unsafe",
+            r#"fn main() { if true {}.$0 }"#,
+            r#"fn main() { unsafe { if true {} } }"#,
+        );
+        check_edit(
+            "unsafe",
+            r#"fn main() { while true {}.$0 }"#,
+            r#"fn main() { unsafe { while true {} } }"#,
+        );
+        check_edit(
+            "unsafe",
+            r#"fn main() { for i in 0..10 {}.$0 }"#,
+            r#"fn main() { unsafe { for i in 0..10 {} } }"#,
+        );
+        check_edit(
+            "unsafe",
+            r#"fn main() { let x = if true {1} else {2}.$0 }"#,
+            r#"fn main() { let x = unsafe { if true {1} else {2} } }"#,
+        );
+
+        // completion will not be triggered
+        check_edit(
+            "unsafe",
+            r#"fn main() { let x = true else {panic!()}.$0}"#,
+            r#"fn main() { let x = true else {panic!()}.unsafe}"#,
+        );
+    }
+
+    #[test]
     fn custom_postfix_completion() {
         let config = CompletionConfig {
             snippets: vec![Snippet::new(
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index a0f5e81b4fb..8f6a97e1e09 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -19,6 +19,7 @@ pub struct CompletionConfig {
     pub insert_use: InsertUseConfig,
     pub prefer_no_std: bool,
     pub snippets: Vec<Snippet>,
+    pub limit: Option<usize>,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
index e34824e22ea..4bff665ab1d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
@@ -48,7 +48,9 @@ pub(super) fn expand_and_analyze(
     // make the offset point to the start of the original token, as that is what the
     // intermediate offsets calculated in expansion always points to
     let offset = offset - relative_offset;
-    let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
+    let expansion =
+        expand(sema, original_file, speculative_file, offset, fake_ident_token, relative_offset);
+
     // add the relative offset back, so that left_biased finds the proper token
     let offset = expansion.offset + relative_offset;
     let token = expansion.original_file.token_at_offset(offset).left_biased()?;
@@ -67,6 +69,7 @@ fn expand(
     mut speculative_file: SyntaxNode,
     mut offset: TextSize,
     mut fake_ident_token: SyntaxToken,
+    relative_offset: TextSize,
 ) -> ExpansionResult {
     let _p = profile::span("CompletionContext::expand");
     let mut derive_ctx = None;
@@ -97,7 +100,7 @@ fn expand(
                 // successful expansions
                 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                     let new_offset = fake_mapped_token.text_range().start();
-                    if new_offset > actual_expansion.text_range().end() {
+                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                         // offset outside of bounds from the original expansion,
                         // stop here to prevent problems from happening
                         break 'expansion;
@@ -176,7 +179,7 @@ fn expand(
                 // successful expansions
                 (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
                     let new_offset = fake_mapped_token.text_range().start();
-                    if new_offset > actual_expansion.text_range().end() {
+                    if new_offset + relative_offset > actual_expansion.text_range().end() {
                         // offset outside of bounds from the original expansion,
                         // stop here to prevent problems from happening
                         break 'expansion;
@@ -672,10 +675,10 @@ fn classify_name_ref(
         {
             if let Some(item) = ast::Item::cast(n) {
                 let is_inbetween = match &item {
-                    ast::Item::Const(it) => it.body().is_none(),
+                    ast::Item::Const(it) => it.body().is_none() && it.semicolon_token().is_none(),
                     ast::Item::Enum(it) => it.variant_list().is_none(),
                     ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
-                    ast::Item::Fn(it) => it.body().is_none(),
+                    ast::Item::Fn(it) => it.body().is_none() && it.semicolon_token().is_none(),
                     ast::Item::Impl(it) => it.assoc_item_list().is_none(),
                     ast::Item::Module(it) => {
                         it.item_list().is_none() && it.semicolon_token().is_none()
@@ -685,7 +688,7 @@ fn classify_name_ref(
                         it.field_list().is_none() && it.semicolon_token().is_none()
                     }
                     ast::Item::Trait(it) => it.assoc_item_list().is_none(),
-                    ast::Item::TypeAlias(it) => it.ty().is_none(),
+                    ast::Item::TypeAlias(it) => it.ty().is_none() && it.semicolon_token().is_none(),
                     ast::Item::Union(it) => it.record_field_list().is_none(),
                     _ => false,
                 };
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
index e48d1aecd04..d6476c10258 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/render.rs
@@ -1691,6 +1691,7 @@ fn main() {
                 sn while []
                 sn ref []
                 sn refm []
+                sn unsafe []
                 sn match []
                 sn box []
                 sn dbg []
@@ -1718,6 +1719,7 @@ fn main() {
                 me f() []
                 sn ref []
                 sn refm []
+                sn unsafe []
                 sn match []
                 sn box []
                 sn dbg []
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index abe14e48e22..540b0fd0ef7 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -75,6 +75,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
         skip_glob_imports: true,
     },
     snippets: Vec::new(),
+    limit: None,
 };
 
 pub(crate) fn completion_list(ra_fixture: &str) -> String {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
index a63ef006875..0b485eb776d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/flyimport.rs
@@ -541,9 +541,9 @@ fn main() {
 }
 "#,
         expect![[r#"
-                fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
-                ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
-            "#]],
+            ct SPECIAL_CONST (use dep::test_mod::TestTrait) DEPRECATED
+            fn weird_function() (use dep::test_mod::TestTrait) fn() DEPRECATED
+        "#]],
     );
 }
 
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
index b62b988885d..9fc731bb11d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs
@@ -215,6 +215,57 @@ fn in_trait_assoc_item_list() {
 }
 
 #[test]
+fn in_trait_assoc_fn_missing_body() {
+    check(
+        r#"trait Foo { fn function(); $0 }"#,
+        expect![[r#"
+            ma makro!(…) macro_rules! makro
+            md module
+            kw const
+            kw crate::
+            kw fn
+            kw self::
+            kw type
+            kw unsafe
+        "#]],
+    );
+}
+
+#[test]
+fn in_trait_assoc_const_missing_body() {
+    check(
+        r#"trait Foo { const CONST: (); $0 }"#,
+        expect![[r#"
+            ma makro!(…) macro_rules! makro
+            md module
+            kw const
+            kw crate::
+            kw fn
+            kw self::
+            kw type
+            kw unsafe
+        "#]],
+    );
+}
+
+#[test]
+fn in_trait_assoc_type_aliases_missing_ty() {
+    check(
+        r#"trait Foo { type Type; $0 }"#,
+        expect![[r#"
+            ma makro!(…) macro_rules! makro
+            md module
+            kw const
+            kw crate::
+            kw fn
+            kw self::
+            kw type
+            kw unsafe
+        "#]],
+    );
+}
+
+#[test]
 fn in_trait_impl_assoc_item_list() {
     check(
         r#"
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
index 9eae6f84954..92ea4d15b85 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/proc_macros.rs
@@ -24,16 +24,17 @@ fn main() {
 }
 "#,
         expect![[r#"
-            me foo() fn(&self)
-            sn box   Box::new(expr)
-            sn call  function(expr)
-            sn dbg   dbg!(expr)
-            sn dbgr  dbg!(&expr)
-            sn let   let
-            sn letm  let mut
-            sn match match expr {}
-            sn ref   &expr
-            sn refm  &mut expr
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn let    let
+            sn letm   let mut
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
         "#]],
     )
 }
@@ -54,16 +55,17 @@ fn main() {
 }
 "#,
         expect![[r#"
-            me foo() fn(&self)
-            sn box   Box::new(expr)
-            sn call  function(expr)
-            sn dbg   dbg!(expr)
-            sn dbgr  dbg!(&expr)
-            sn let   let
-            sn letm  let mut
-            sn match match expr {}
-            sn ref   &expr
-            sn refm  &mut expr
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn let    let
+            sn letm   let mut
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
         "#]],
     )
 }
@@ -86,16 +88,17 @@ impl Foo {
 fn main() {}
 "#,
         expect![[r#"
-            me foo() fn(&self)
-            sn box   Box::new(expr)
-            sn call  function(expr)
-            sn dbg   dbg!(expr)
-            sn dbgr  dbg!(&expr)
-            sn let   let
-            sn letm  let mut
-            sn match match expr {}
-            sn ref   &expr
-            sn refm  &mut expr
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn let    let
+            sn letm   let mut
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
         "#]],
     )
 }
@@ -118,16 +121,47 @@ impl Foo {
 fn main() {}
 "#,
         expect![[r#"
-            me foo() fn(&self)
-            sn box   Box::new(expr)
-            sn call  function(expr)
-            sn dbg   dbg!(expr)
-            sn dbgr  dbg!(&expr)
-            sn let   let
-            sn letm  let mut
-            sn match match expr {}
-            sn ref   &expr
-            sn refm  &mut expr
+            me foo()  fn(&self)
+            sn box    Box::new(expr)
+            sn call   function(expr)
+            sn dbg    dbg!(expr)
+            sn dbgr   dbg!(&expr)
+            sn let    let
+            sn letm   let mut
+            sn match  match expr {}
+            sn ref    &expr
+            sn refm   &mut expr
+            sn unsafe unsafe {}
         "#]],
     )
 }
+
+#[test]
+fn issue_13836_str() {
+    check(
+        r#"
+//- proc_macros: shorten
+fn main() {
+    let s = proc_macros::shorten!("text.$0");
+}
+"#,
+        expect![[r#""#]],
+    )
+}
+
+#[test]
+fn issue_13836_ident() {
+    check(
+        r#"
+//- proc_macros: shorten
+struct S;
+impl S {
+    fn foo(&self) {}
+}
+fn main() {
+    let s = proc_macros::shorten!(S.fo$0);
+}
+"#,
+        expect![[r#""#]],
+    )
+}
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
index cad4af4937d..6052b062320 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/special.rs
@@ -61,7 +61,7 @@ fn _alpha() {}
 fn completes_prelude() {
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 fn foo() { let x: $0 }
 
 //- /std/lib.rs crate:std
@@ -83,7 +83,7 @@ pub mod prelude {
 fn completes_prelude_macros() {
     check(
         r#"
-//- /main.rs crate:main deps:std
+//- /main.rs edition:2018 crate:main deps:std
 fn f() {$0}
 
 //- /std/lib.rs crate:std
@@ -117,14 +117,14 @@ fn foo() { let x: $0 }
 
 //- /core/lib.rs crate:core
 pub mod prelude {
-    pub mod rust_2018 {
+    pub mod rust_2021 {
         pub struct Option;
     }
 }
 
 //- /std/lib.rs crate:std deps:core
 pub mod prelude {
-    pub mod rust_2018 {
+    pub mod rust_2021 {
         pub struct String;
     }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
index f48cce58c6e..9672bb9b7b5 100644
--- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide-db"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -12,29 +14,32 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 tracing = "0.1.35"
-rayon = "1.5.3"
+rayon = "1.6.1"
 fst = { version = "0.4.7", default-features = false }
 rustc-hash = "1.1.0"
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 either = "1.7.0"
 itertools = "0.10.5"
 arrayvec = "0.7.2"
 indexmap = "1.9.1"
 memchr = "2.5.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-parser = { path = "../parser", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
+# local deps
+base-db.workspace = true
+limit.workspace = true
+parser.workspace = true
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir-xxx` subpackage, reexport the API via `hir`.
-hir = { path = "../hir", version = "0.0.0" }
-limit = { path = "../limit", version = "0.0.0" }
+hir.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
 xshell = "0.2.2"
 expect-test = "1.4.0"
+
+# local deps
+test-utils.workspace = true
+sourcegen.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
index 6c13c039723..ed7f04fd8e7 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/defs.rs
@@ -34,8 +34,8 @@ pub enum Definition {
     TypeAlias(TypeAlias),
     BuiltinType(BuiltinType),
     SelfType(Impl),
-    Local(Local),
     GenericParam(GenericParam),
+    Local(Local),
     Label(Label),
     DeriveHelper(DeriveHelper),
     BuiltinAttr(BuiltinAttr),
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
index 1b8f56187a0..8f12ab33409 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
@@ -185,14 +185,14 @@ mod tests {
         ];
 
         let index = LineIndex::new(text);
-        for &(offset, line, col) in &table {
+        for (offset, line, col) in table {
             assert_eq!(index.line_col(offset.into()), LineCol { line, col });
         }
 
         let text = "\nhello\nworld";
         let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
         let index = LineIndex::new(text);
-        for &(offset, line, col) in &table {
+        for (offset, line, col) in table {
             assert_eq!(index.line_col(offset.into()), LineCol { line, col });
         }
     }
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
index 12d873b4a0a..6402a84a68b 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs
@@ -33,7 +33,7 @@ use syntax::{
 /// }
 /// ```
 pub struct PathTransform<'a> {
-    generic_def: hir::GenericDef,
+    generic_def: Option<hir::GenericDef>,
     substs: Vec<ast::Type>,
     target_scope: &'a SemanticsScope<'a>,
     source_scope: &'a SemanticsScope<'a>,
@@ -49,7 +49,7 @@ impl<'a> PathTransform<'a> {
         PathTransform {
             source_scope,
             target_scope,
-            generic_def: trait_.into(),
+            generic_def: Some(trait_.into()),
             substs: get_syntactic_substs(impl_).unwrap_or_default(),
         }
     }
@@ -63,28 +63,42 @@ impl<'a> PathTransform<'a> {
         PathTransform {
             source_scope,
             target_scope,
-            generic_def: function.into(),
+            generic_def: Some(function.into()),
             substs: get_type_args_from_arg_list(generic_arg_list).unwrap_or_default(),
         }
     }
 
+    pub fn generic_transformation(
+        target_scope: &'a SemanticsScope<'a>,
+        source_scope: &'a SemanticsScope<'a>,
+    ) -> PathTransform<'a> {
+        PathTransform { source_scope, target_scope, generic_def: None, substs: Vec::new() }
+    }
+
     pub fn apply(&self, syntax: &SyntaxNode) {
         self.build_ctx().apply(syntax)
     }
 
+    pub fn apply_all<'b>(&self, nodes: impl IntoIterator<Item = &'b SyntaxNode>) {
+        let ctx = self.build_ctx();
+        for node in nodes {
+            ctx.apply(node);
+        }
+    }
+
     fn build_ctx(&self) -> Ctx<'a> {
         let db = self.source_scope.db;
         let target_module = self.target_scope.module();
         let source_module = self.source_scope.module();
         let skip = match self.generic_def {
             // this is a trait impl, so we need to skip the first type parameter -- this is a bit hacky
-            hir::GenericDef::Trait(_) => 1,
+            Some(hir::GenericDef::Trait(_)) => 1,
             _ => 0,
         };
         let substs_by_param: FxHashMap<_, _> = self
             .generic_def
-            .type_params(db)
             .into_iter()
+            .flat_map(|it| it.type_params(db))
             .skip(skip)
             // The actual list of trait type parameters may be longer than the one
             // used in the `impl` block due to trailing default type parameters.
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
index cd4a7e1554c..84d70b258ff 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/rename.rs
@@ -190,6 +190,7 @@ fn rename_mod(
 
     let InFile { file_id, value: def_source } = module.definition_source(sema.db);
     if let ModuleSource::SourceFile(..) = def_source {
+        let new_name = new_name.trim_start_matches("r#");
         let anchor = file_id.original_file(sema.db);
 
         let is_mod_rs = module.is_mod_rs(sema.db);
@@ -207,9 +208,13 @@ fn rename_mod(
         //  - Module has submodules defined in separate files
         let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
             // Go up one level since the anchor is inside the dir we're trying to rename
-            (true, _, Some(mod_name)) => Some((format!("../{mod_name}"), format!("../{new_name}"))),
+            (true, _, Some(mod_name)) => {
+                Some((format!("../{}", mod_name.unescaped()), format!("../{new_name}")))
+            }
             // The anchor is on the same level as target dir
-            (false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())),
+            (false, true, Some(mod_name)) => {
+                Some((mod_name.unescaped().to_string(), new_name.to_string()))
+            }
             _ => None,
         };
 
@@ -263,11 +268,10 @@ fn rename_reference(
         Definition::GenericParam(hir::GenericParam::LifetimeParam(_)) | Definition::Label(_)
     ) {
         match ident_kind {
-            IdentifierKind::Ident | IdentifierKind::Underscore => {
-                cov_mark::hit!(rename_not_a_lifetime_ident_ref);
+            IdentifierKind::Underscore => {
                 bail!("Invalid name `{}`: not a lifetime identifier", new_name);
             }
-            IdentifierKind::Lifetime => cov_mark::hit!(rename_lifetime),
+            _ => cov_mark::hit!(rename_lifetime),
         }
     } else {
         match ident_kind {
@@ -334,11 +338,17 @@ pub fn source_edit_from_references(
             }
             _ => false,
         };
-        if !has_emitted_edit {
-            if !edited_ranges.contains(&range.start()) {
-                edit.replace(range, new_name.to_string());
-                edited_ranges.push(range.start());
-            }
+        if !has_emitted_edit && !edited_ranges.contains(&range.start()) {
+            let (range, new_name) = match name {
+                ast::NameLike::Lifetime(_) => (
+                    TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
+                    new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
+                ),
+                _ => (range, new_name.to_owned()),
+            };
+
+            edit.replace(range, new_name);
+            edited_ranges.push(range.start());
         }
     }
 
@@ -391,19 +401,17 @@ fn source_edit_from_name_ref(
                         edit.delete(TextRange::new(s, e));
                         return true;
                     }
-                } else if init == name_ref {
-                    if field_name.text() == new_name {
-                        cov_mark::hit!(test_rename_local_put_init_shorthand);
-                        // Foo { field: local } -> Foo { field }
-                        //            ^^^^^^^ delete this
-
-                        // same names, we can use a shorthand here instead.
-                        // we do not want to erase attributes hence this range start
-                        let s = field_name.syntax().text_range().end();
-                        let e = init.syntax().text_range().end();
-                        edit.delete(TextRange::new(s, e));
-                        return true;
-                    }
+                } else if init == name_ref && field_name.text() == new_name {
+                    cov_mark::hit!(test_rename_local_put_init_shorthand);
+                    // Foo { field: local } -> Foo { field }
+                    //            ^^^^^^^ delete this
+
+                    // same names, we can use a shorthand here instead.
+                    // we do not want to erase attributes hence this range start
+                    let s = field_name.syntax().text_range().end();
+                    let e = init.syntax().text_range().end();
+                    edit.delete(TextRange::new(s, e));
+                    return true;
                 }
             }
             // init shorthand
@@ -505,7 +513,15 @@ fn source_edit_from_def(
         }
     }
     if edit.is_empty() {
-        edit.replace(range, new_name.to_string());
+        let (range, new_name) = match def {
+            Definition::GenericParam(hir::GenericParam::LifetimeParam(_))
+            | Definition::Label(_) => (
+                TextRange::new(range.start() + syntax::TextSize::from(1), range.end()),
+                new_name.strip_prefix('\'').unwrap_or(new_name).to_owned(),
+            ),
+            _ => (range, new_name.to_owned()),
+        };
+        edit.replace(range, new_name);
     }
     Ok((file_id, edit.finish()))
 }
@@ -521,14 +537,18 @@ impl IdentifierKind {
     pub fn classify(new_name: &str) -> Result<IdentifierKind> {
         match parser::LexedStr::single_token(new_name) {
             Some(res) => match res {
-                (SyntaxKind::IDENT, _) => Ok(IdentifierKind::Ident),
+                (SyntaxKind::IDENT, _) => {
+                    if let Some(inner) = new_name.strip_prefix("r#") {
+                        if matches!(inner, "self" | "crate" | "super" | "Self") {
+                            bail!("Invalid name: `{}` cannot be a raw identifier", inner);
+                        }
+                    }
+                    Ok(IdentifierKind::Ident)
+                }
                 (T![_], _) => Ok(IdentifierKind::Underscore),
                 (SyntaxKind::LIFETIME_IDENT, _) if new_name != "'static" && new_name != "'_" => {
                     Ok(IdentifierKind::Lifetime)
                 }
-                (SyntaxKind::LIFETIME_IDENT, _) => {
-                    bail!("Invalid name `{}`: not a lifetime identifier", new_name)
-                }
                 (_, Some(syntax_error)) => bail!("Invalid name `{}`: {}", new_name, syntax_error),
                 (_, None) => bail!("Invalid name `{}`: not an identifier", new_name),
             },
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index b2b0e49085c..ada2821d6b1 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -7,7 +7,9 @@
 use std::{mem, sync::Arc};
 
 use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
-use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
+use hir::{
+    AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
+};
 use memchr::memmem::Finder;
 use once_cell::unsync::Lazy;
 use parser::SyntaxKind;
@@ -311,15 +313,15 @@ impl Definition {
 
     pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> {
         FindUsages {
-            local_repr: match self {
-                Definition::Local(local) => Some(local.representative(sema.db)),
-                _ => None,
-            },
             def: self,
-            trait_assoc_def: as_trait_assoc_def(sema.db, self),
+            assoc_item_container: self.as_assoc_item(sema.db).map(|a| a.container(sema.db)),
             sema,
             scope: None,
             include_self_kw_refs: None,
+            local_repr: match self {
+                Definition::Local(local) => Some(local.representative(sema.db)),
+                _ => None,
+            },
             search_self_mod: false,
         }
     }
@@ -328,12 +330,16 @@ impl Definition {
 #[derive(Clone)]
 pub struct FindUsages<'a> {
     def: Definition,
-    /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition
-    trait_assoc_def: Option<Definition>,
     sema: &'a Semantics<'a, RootDatabase>,
     scope: Option<SearchScope>,
+    /// The container of our definition should it be an assoc item
+    assoc_item_container: Option<hir::AssocItemContainer>,
+    /// whether to search for the `Self` type of the definition
     include_self_kw_refs: Option<hir::Type>,
+    /// the local representative for the local definition we are searching for
+    /// (this is required for finding all local declarations in a or-pattern)
     local_repr: Option<hir::Local>,
+    /// whether to search for the `self` module
     search_self_mod: bool,
 }
 
@@ -380,7 +386,9 @@ impl<'a> FindUsages<'a> {
         let sema = self.sema;
 
         let search_scope = {
-            let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db);
+            // FIXME: Is the trait scope needed for trait impl assoc items?
+            let base =
+                as_trait_assoc_def(sema.db, self.def).unwrap_or(self.def).search_scope(sema.db);
             match &self.scope {
                 None => base,
                 Some(scope) => base.intersection(scope),
@@ -494,20 +502,28 @@ impl<'a> FindUsages<'a> {
         }
 
         // Search for `super` and `crate` resolving to our module
-        match self.def {
-            Definition::Module(module) => {
-                let scope = search_scope
-                    .intersection(&SearchScope::module_and_children(self.sema.db, module));
+        if let Definition::Module(module) = self.def {
+            let scope =
+                search_scope.intersection(&SearchScope::module_and_children(self.sema.db, module));
 
-                let is_crate_root =
-                    module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
-                let finder = &Finder::new("super");
+            let is_crate_root = module.is_crate_root(self.sema.db).then(|| Finder::new("crate"));
+            let finder = &Finder::new("super");
 
-                for (text, file_id, search_range) in scope_files(sema, &scope) {
-                    let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
+            for (text, file_id, search_range) in scope_files(sema, &scope) {
+                let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());
 
+                for offset in match_indices(&text, finder, search_range) {
+                    if let Some(iter) = find_nodes("super", &tree, offset) {
+                        for name_ref in iter.filter_map(ast::NameRef::cast) {
+                            if self.found_name_ref(&name_ref, sink) {
+                                return;
+                            }
+                        }
+                    }
+                }
+                if let Some(finder) = &is_crate_root {
                     for offset in match_indices(&text, finder, search_range) {
-                        if let Some(iter) = find_nodes("super", &tree, offset) {
+                        if let Some(iter) = find_nodes("crate", &tree, offset) {
                             for name_ref in iter.filter_map(ast::NameRef::cast) {
                                 if self.found_name_ref(&name_ref, sink) {
                                     return;
@@ -515,20 +531,8 @@ impl<'a> FindUsages<'a> {
                             }
                         }
                     }
-                    if let Some(finder) = &is_crate_root {
-                        for offset in match_indices(&text, finder, search_range) {
-                            if let Some(iter) = find_nodes("crate", &tree, offset) {
-                                for name_ref in iter.filter_map(ast::NameRef::cast) {
-                                    if self.found_name_ref(&name_ref, sink) {
-                                        return;
-                                    }
-                                }
-                            }
-                        }
-                    }
                 }
             }
-            _ => (),
         }
 
         // search for module `self` references in our module's definition source
@@ -655,13 +659,26 @@ impl<'a> FindUsages<'a> {
                 sink(file_id, reference)
             }
             Some(NameRefClass::Definition(def))
-                if match self.trait_assoc_def {
-                    Some(trait_assoc_def) => {
-                        // we have a trait assoc item, so force resolve all assoc items to their trait version
-                        convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
-                    }
-                    None => self.def == def,
-                } =>
+                if self.def == def
+                    // is our def a trait assoc item? then we want to find all assoc items from trait impls of our trait
+                    || matches!(self.assoc_item_container, Some(hir::AssocItemContainer::Trait(_)))
+                        && convert_to_def_in_trait(self.sema.db, def) == self.def =>
+            {
+                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
+                let reference = FileReference {
+                    range,
+                    name: ast::NameLike::NameRef(name_ref.clone()),
+                    category: ReferenceCategory::new(&def, name_ref),
+                };
+                sink(file_id, reference)
+            }
+            // FIXME: special case type aliases, we can't filter between impl and trait defs here as we lack the substitutions
+            // so we always resolve all assoc type aliases to both their trait def and impl defs
+            Some(NameRefClass::Definition(def))
+                if self.assoc_item_container.is_some()
+                    && matches!(self.def, Definition::TypeAlias(_))
+                    && convert_to_def_in_trait(self.sema.db, def)
+                        == convert_to_def_in_trait(self.sema.db, self.def) =>
             {
                 let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
                 let reference = FileReference {
@@ -752,13 +769,21 @@ impl<'a> FindUsages<'a> {
                 false
             }
             Some(NameClass::Definition(def)) if def != self.def => {
-                // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item
-                if !matches!(
-                    self.trait_assoc_def,
-                    Some(trait_assoc_def)
-                        if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
-                ) {
-                    return false;
+                match (&self.assoc_item_container, self.def) {
+                    // for type aliases we always want to reference the trait def and all the trait impl counterparts
+                    // FIXME: only until we can resolve them correctly, see FIXME above
+                    (Some(_), Definition::TypeAlias(_))
+                        if convert_to_def_in_trait(self.sema.db, def)
+                            != convert_to_def_in_trait(self.sema.db, self.def) =>
+                    {
+                        return false
+                    }
+                    (Some(_), Definition::TypeAlias(_)) => {}
+                    // We looking at an assoc item of a trait definition, so reference all the
+                    // corresponding assoc items belonging to this trait's trait implementations
+                    (Some(hir::AssocItemContainer::Trait(_)), _)
+                        if convert_to_def_in_trait(self.sema.db, def) == self.def => {}
+                    _ => return false,
                 }
                 let FileRange { file_id, range } = self.sema.original_range(name.syntax());
                 let reference = FileReference {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
index c054cc15979..a91ffd1ec4f 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/symbol_index.rs
@@ -323,10 +323,10 @@ impl Query {
                         if symbol.name != self.query {
                             continue;
                         }
-                    } else if self.case_sensitive {
-                        if self.query.chars().any(|c| !symbol.name.contains(c)) {
-                            continue;
-                        }
+                    } else if self.case_sensitive
+                        && self.query.chars().any(|c| !symbol.name.contains(c))
+                    {
+                        continue;
                     }
 
                     res.push(symbol.clone());
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
index 7e9a1125d75..e18624fcc26 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide-diagnostics"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -15,19 +17,21 @@ either = "1.7.0"
 itertools = "0.10.5"
 serde_json = "1.0.86"
 
-profile = { path = "../profile", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-hir = { path = "../hir", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
+# local deps
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
+cfg.workspace = true
+hir.workspace = true
+ide-db.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
 
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
+# local deps
+test-utils.workspace = true
+sourcegen.workspace = true
 
 [features]
 in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
index e8df6dcf285..04ce1e0feee 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs
@@ -99,76 +99,66 @@ pub(crate) fn json_in_items(
             && node.last_token().map(|x| x.kind()) == Some(SyntaxKind::R_CURLY)
         {
             let node_string = node.to_string();
-            if let Ok(it) = serde_json::from_str(&node_string) {
-                if let serde_json::Value::Object(it) = it {
-                    let import_scope = ImportScope::find_insert_use_container(node, sema)?;
-                    let range = node.text_range();
-                    let mut edit = TextEdit::builder();
-                    edit.delete(range);
-                    let mut state = State::default();
-                    let semantics_scope = sema.scope(node)?;
-                    let scope_resolve =
-                        |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
-                    let scope_has = |it| scope_resolve(it).is_some();
-                    let deserialize_resolved = scope_resolve("::serde::Deserialize");
-                    let serialize_resolved = scope_resolve("::serde::Serialize");
-                    state.has_deserialize = deserialize_resolved.is_some();
-                    state.has_serialize = serialize_resolved.is_some();
-                    state.build_struct(&it);
-                    edit.insert(range.start(), state.result);
-                    acc.push(
-                        Diagnostic::new(
-                            "json-is-not-rust",
-                            "JSON syntax is not valid as a Rust item",
-                            range,
-                        )
-                        .severity(Severity::WeakWarning)
-                        .with_fixes(Some(vec![{
-                            let mut scb = SourceChangeBuilder::new(file_id);
-                            let scope = match import_scope {
-                                ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
-                                ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
-                                ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
-                            };
-                            let current_module = semantics_scope.module();
-                            if !scope_has("Serialize") {
-                                if let Some(PathResolution::Def(it)) = serialize_resolved {
-                                    if let Some(it) = current_module.find_use_path_prefixed(
-                                        sema.db,
-                                        it,
-                                        config.insert_use.prefix_kind,
-                                        config.prefer_no_std,
-                                    ) {
-                                        insert_use(
-                                            &scope,
-                                            mod_path_to_ast(&it),
-                                            &config.insert_use,
-                                        );
-                                    }
+            if let Ok(serde_json::Value::Object(it)) = serde_json::from_str(&node_string) {
+                let import_scope = ImportScope::find_insert_use_container(node, sema)?;
+                let range = node.text_range();
+                let mut edit = TextEdit::builder();
+                edit.delete(range);
+                let mut state = State::default();
+                let semantics_scope = sema.scope(node)?;
+                let scope_resolve =
+                    |it| semantics_scope.speculative_resolve(&make::path_from_text(it));
+                let scope_has = |it| scope_resolve(it).is_some();
+                let deserialize_resolved = scope_resolve("::serde::Deserialize");
+                let serialize_resolved = scope_resolve("::serde::Serialize");
+                state.has_deserialize = deserialize_resolved.is_some();
+                state.has_serialize = serialize_resolved.is_some();
+                state.build_struct(&it);
+                edit.insert(range.start(), state.result);
+                acc.push(
+                    Diagnostic::new(
+                        "json-is-not-rust",
+                        "JSON syntax is not valid as a Rust item",
+                        range,
+                    )
+                    .severity(Severity::WeakWarning)
+                    .with_fixes(Some(vec![{
+                        let mut scb = SourceChangeBuilder::new(file_id);
+                        let scope = match import_scope {
+                            ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
+                            ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
+                            ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
+                        };
+                        let current_module = semantics_scope.module();
+                        if !scope_has("Serialize") {
+                            if let Some(PathResolution::Def(it)) = serialize_resolved {
+                                if let Some(it) = current_module.find_use_path_prefixed(
+                                    sema.db,
+                                    it,
+                                    config.insert_use.prefix_kind,
+                                    config.prefer_no_std,
+                                ) {
+                                    insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                                 }
                             }
-                            if !scope_has("Deserialize") {
-                                if let Some(PathResolution::Def(it)) = deserialize_resolved {
-                                    if let Some(it) = current_module.find_use_path_prefixed(
-                                        sema.db,
-                                        it,
-                                        config.insert_use.prefix_kind,
-                                        config.prefer_no_std,
-                                    ) {
-                                        insert_use(
-                                            &scope,
-                                            mod_path_to_ast(&it),
-                                            &config.insert_use,
-                                        );
-                                    }
+                        }
+                        if !scope_has("Deserialize") {
+                            if let Some(PathResolution::Def(it)) = deserialize_resolved {
+                                if let Some(it) = current_module.find_use_path_prefixed(
+                                    sema.db,
+                                    it,
+                                    config.insert_use.prefix_kind,
+                                    config.prefer_no_std,
+                                ) {
+                                    insert_use(&scope, mod_path_to_ast(&it), &config.insert_use);
                                 }
                             }
-                            let mut sc = scb.finish();
-                            sc.insert_source_edit(file_id, edit.finish());
-                            fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
-                        }])),
-                    );
-                }
+                        }
+                        let mut sc = scb.finish();
+                        sc.insert_source_edit(file_id, edit.finish());
+                        fix("convert_json_to_struct", "Convert JSON to struct", sc, range)
+                    }])),
+                );
             }
         }
         Some(())
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
index b363a516dd1..0b3121c765d 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs
@@ -11,10 +11,7 @@ pub(crate) fn private_assoc_item(
     d: &hir::PrivateAssocItem,
 ) -> Diagnostic {
     // FIXME: add quickfix
-    let name = match d.item.name(ctx.sema.db) {
-        Some(name) => format!("`{}` ", name),
-        None => String::new(),
-    };
+    let name = d.item.name(ctx.sema.db).map(|name| format!("`{name}` ")).unwrap_or_default();
     Diagnostic::new(
         "private-assoc-item",
         format!(
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
index be70f0ac4f7..3d45a75913a 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs
@@ -1,13 +1,15 @@
 //! Diagnostic emitted for files that aren't part of any crate.
 
-use hir::db::DefDatabase;
+use std::iter;
+
+use hir::{db::DefDatabase, InFile, ModuleSource};
 use ide_db::{
     base_db::{FileId, FileLoader, SourceDatabase, SourceDatabaseExt},
     source_change::SourceChange,
     RootDatabase,
 };
 use syntax::{
-    ast::{self, HasModuleItem, HasName},
+    ast::{self, edit::IndentLevel, HasModuleItem, HasName},
     AstNode, TextRange, TextSize,
 };
 use text_edit::TextEdit;
@@ -42,47 +44,99 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
 
     let source_root = ctx.sema.db.source_root(ctx.sema.db.file_source_root(file_id));
     let our_path = source_root.path_for_file(&file_id)?;
-    let (mut module_name, _) = our_path.name_and_extension()?;
-
-    // Candidates to look for:
-    // - `mod.rs`, `main.rs` and `lib.rs` in the same folder
-    // - `$dir.rs` in the parent folder, where `$dir` is the directory containing `self.file_id`
     let parent = our_path.parent()?;
-    let paths = {
-        let parent = if module_name == "mod" {
-            // for mod.rs we need to actually look up one higher
-            // and take the parent as our to be module name
-            let (name, _) = parent.name_and_extension()?;
-            module_name = name;
-            parent.parent()?
-        } else {
-            parent
-        };
-        let mut paths =
-            vec![parent.join("mod.rs")?, parent.join("lib.rs")?, parent.join("main.rs")?];
-
-        // `submod/bla.rs` -> `submod.rs`
-        let parent_mod = (|| {
+    let (module_name, _) = our_path.name_and_extension()?;
+    let (parent, module_name) = match module_name {
+        // for mod.rs we need to actually look up one higher
+        // and take the parent as our to be module name
+        "mod" => {
             let (name, _) = parent.name_and_extension()?;
-            parent.parent()?.join(&format!("{name}.rs"))
-        })();
-        paths.extend(parent_mod);
-        paths
+            (parent.parent()?, name.to_owned())
+        }
+        _ => (parent, module_name.to_owned()),
     };
 
-    for &parent_id in paths.iter().filter_map(|path| source_root.file_for_path(path)) {
-        for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
-            let crate_def_map = ctx.sema.db.crate_def_map(krate);
-            for (_, module) in crate_def_map.modules() {
-                if module.origin.is_inline() {
-                    // We don't handle inline `mod parent {}`s, they use different paths.
-                    continue;
-                }
+    // check crate roots, i.e. main.rs, lib.rs, ...
+    'crates: for &krate in &*ctx.sema.db.relevant_crates(file_id) {
+        let crate_def_map = ctx.sema.db.crate_def_map(krate);
+
+        let root_module = &crate_def_map[crate_def_map.root()];
+        let Some(root_file_id) = root_module.origin.file_id() else { continue };
+        let Some(crate_root_path) = source_root.path_for_file(&root_file_id) else { continue };
+        let Some(rel) = parent.strip_prefix(&crate_root_path.parent()?) else { continue };
+
+        // try resolving the relative difference of the paths as inline modules
+        let mut current = root_module;
+        for ele in rel.as_ref().components() {
+            let seg = match ele {
+                std::path::Component::Normal(seg) => seg.to_str()?,
+                std::path::Component::RootDir => continue,
+                // shouldn't occur
+                _ => continue 'crates,
+            };
+            match current.children.iter().find(|(name, _)| name.to_smol_str() == seg) {
+                Some((_, &child)) => current = &crate_def_map[child],
+                None => continue 'crates,
+            }
+            if !current.origin.is_inline() {
+                continue 'crates;
+            }
+        }
+
+        let InFile { file_id: parent_file_id, value: source } =
+            current.definition_source(ctx.sema.db);
+        let parent_file_id = parent_file_id.file_id()?;
+        return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
+    }
 
-                if module.origin.file_id() == Some(parent_id) {
-                    return make_fixes(ctx.sema.db, parent_id, module_name, file_id);
+    // if we aren't adding to a crate root, walk backwards such that we support `#[path = ...]` overrides if possible
+
+    // build all parent paths of the form `../module_name/mod.rs` and `../module_name.rs`
+    let paths = iter::successors(Some(parent.clone()), |prev| prev.parent()).filter_map(|path| {
+        let parent = path.parent()?;
+        let (name, _) = path.name_and_extension()?;
+        Some(([parent.join(&format!("{name}.rs"))?, path.join("mod.rs")?], name.to_owned()))
+    });
+    let mut stack = vec![];
+    let &parent_id =
+        paths.inspect(|(_, name)| stack.push(name.clone())).find_map(|(paths, _)| {
+            paths.into_iter().find_map(|path| source_root.file_for_path(&path))
+        })?;
+    stack.pop();
+    'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() {
+        let crate_def_map = ctx.sema.db.crate_def_map(krate);
+        let Some((_, module)) =
+            crate_def_map.modules()
+            .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline())
+        else { continue };
+
+        if stack.is_empty() {
+            return make_fixes(
+                ctx.sema.db,
+                parent_id,
+                module.definition_source(ctx.sema.db).value,
+                &module_name,
+                file_id,
+            );
+        } else {
+            // direct parent file is missing,
+            // try finding a parent that has an inline tree from here on
+            let mut current = module;
+            for s in stack.iter().rev() {
+                match module.children.iter().find(|(name, _)| name.to_smol_str() == s) {
+                    Some((_, child)) => {
+                        current = &crate_def_map[*child];
+                    }
+                    None => continue 'crates,
+                }
+                if !current.origin.is_inline() {
+                    continue 'crates;
                 }
             }
+            let InFile { file_id: parent_file_id, value: source } =
+                current.definition_source(ctx.sema.db);
+            let parent_file_id = parent_file_id.file_id()?;
+            return make_fixes(ctx.sema.db, parent_file_id, source, &module_name, file_id);
         }
     }
 
@@ -92,6 +146,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> {
 fn make_fixes(
     db: &RootDatabase,
     parent_file_id: FileId,
+    source: ModuleSource,
     new_mod_name: &str,
     added_file_id: FileId,
 ) -> Option<Vec<Assist>> {
@@ -102,14 +157,18 @@ fn make_fixes(
     let mod_decl = format!("mod {new_mod_name};");
     let pub_mod_decl = format!("pub mod {new_mod_name};");
 
-    let ast: ast::SourceFile = db.parse(parent_file_id).tree();
-
     let mut mod_decl_builder = TextEdit::builder();
     let mut pub_mod_decl_builder = TextEdit::builder();
 
+    let mut items = match &source {
+        ModuleSource::SourceFile(it) => it.items(),
+        ModuleSource::Module(it) => it.item_list()?.items(),
+        ModuleSource::BlockExpr(_) => return None,
+    };
+
     // If there's an existing `mod m;` statement matching the new one, don't emit a fix (it's
     // probably `#[cfg]`d out).
-    for item in ast.items() {
+    for item in items.clone() {
         if let ast::Item::Module(m) = item {
             if let Some(name) = m.name() {
                 if m.item_list().is_none() && name.to_string() == new_mod_name {
@@ -121,28 +180,40 @@ fn make_fixes(
     }
 
     // If there are existing `mod m;` items, append after them (after the first group of them, rather).
-    match ast.items().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
+    match items.clone().skip_while(|item| !is_outline_mod(item)).take_while(is_outline_mod).last() {
         Some(last) => {
             cov_mark::hit!(unlinked_file_append_to_existing_mods);
             let offset = last.syntax().text_range().end();
-            mod_decl_builder.insert(offset, format!("\n{mod_decl}"));
-            pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}"));
+            let indent = IndentLevel::from_node(last.syntax());
+            mod_decl_builder.insert(offset, format!("\n{indent}{mod_decl}"));
+            pub_mod_decl_builder.insert(offset, format!("\n{indent}{pub_mod_decl}"));
         }
         None => {
             // Prepend before the first item in the file.
-            match ast.items().next() {
-                Some(item) => {
+            match items.next() {
+                Some(first) => {
                     cov_mark::hit!(unlinked_file_prepend_before_first_item);
-                    let offset = item.syntax().text_range().start();
-                    mod_decl_builder.insert(offset, format!("{mod_decl}\n\n"));
-                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n"));
+                    let offset = first.syntax().text_range().start();
+                    let indent = IndentLevel::from_node(first.syntax());
+                    mod_decl_builder.insert(offset, format!("{mod_decl}\n\n{indent}"));
+                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n{indent}"));
                 }
                 None => {
                     // No items in the file, so just append at the end.
                     cov_mark::hit!(unlinked_file_empty_file);
-                    let offset = ast.syntax().text_range().end();
-                    mod_decl_builder.insert(offset, format!("{mod_decl}\n"));
-                    pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n"));
+                    let mut indent = IndentLevel::from(0);
+                    let offset = match &source {
+                        ModuleSource::SourceFile(it) => it.syntax().text_range().end(),
+                        ModuleSource::Module(it) => {
+                            indent = IndentLevel::from_node(it.syntax()) + 1;
+                            it.item_list()?.r_curly_token()?.text_range().start()
+                        }
+                        ModuleSource::BlockExpr(it) => {
+                            it.stmt_list()?.r_curly_token()?.text_range().start()
+                        }
+                    };
+                    mod_decl_builder.insert(offset, format!("{indent}{mod_decl}\n"));
+                    pub_mod_decl_builder.insert(offset, format!("{indent}{pub_mod_decl}\n"));
                 }
             }
         }
@@ -167,7 +238,6 @@ fn make_fixes(
 
 #[cfg(test)]
 mod tests {
-
     use crate::tests::{check_diagnostics, check_fix, check_fixes, check_no_fix};
 
     #[test]
@@ -333,4 +403,62 @@ mod foo;
 "#,
         );
     }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar;
+//- /bar.rs
+mod foo {
+}
+//- /bar/foo/baz.rs
+$0
+"#,
+            r#"
+mod foo {
+    mod baz;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple_modrs() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar;
+//- /bar.rs
+mod baz {
+}
+//- /bar/baz/foo/mod.rs
+$0
+"#,
+            r#"
+mod baz {
+    mod foo;
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn unlinked_file_insert_into_inline_simple_modrs_main() {
+        check_fix(
+            r#"
+//- /main.rs
+mod bar {
+}
+//- /bar/foo/mod.rs
+$0
+"#,
+            r#"
+mod bar {
+    mod foo;
+}
+"#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
index b2ed19104e2..9a984ba6bf0 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs
@@ -34,10 +34,7 @@ pub(crate) fn unresolved_proc_macro(
     let message = format!(
         "{message}: {}",
         if config_enabled {
-            match def_map.proc_macro_loading_error() {
-                Some(e) => e,
-                None => "proc macro not found in the built dylib",
-            }
+            def_map.proc_macro_loading_error().unwrap_or("proc macro not found in the built dylib")
         } else {
             match d.kind {
                 hir::MacroKind::Attr if proc_macros_enabled => {
diff --git a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
index 7be62a8d9ff..04efa7b91d8 100644
--- a/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
@@ -2,10 +2,12 @@
 name = "ide-ssr"
 version = "0.0.0"
 description = "Structural search and replace of Rust code"
-license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -14,13 +16,16 @@ doctest = false
 cov-mark = "2.0.0-pre.1"
 itertools = "0.10.5"
 
-text-edit = { path = "../text-edit", version = "0.0.0" }
-parser = { path = "../parser", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
-hir = { path = "../hir", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
+# local deps
+hir.workspace = true
+ide-db.workspace = true
+parser.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
 expect-test = "1.4.0"
+
+# local deps
+test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/ide/Cargo.toml b/src/tools/rust-analyzer/crates/ide/Cargo.toml
index 73f202630f1..414c08ff7e0 100644
--- a/src/tools/rust-analyzer/crates/ide/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/ide/Cargo.toml
@@ -2,9 +2,11 @@
 name = "ide"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -20,28 +22,31 @@ pulldown-cmark-to-cmark = "10.0.4"
 pulldown-cmark = { version = "0.9.1", default-features = false }
 url = "2.3.1"
 dot = "0.1.4"
+smallvec = "1.10.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-ide-assists = { path = "../ide-assists", version = "0.0.0" }
-ide-diagnostics = { path = "../ide-diagnostics", version = "0.0.0" }
-ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
-ide-completion = { path = "../ide-completion", version = "0.0.0" }
-
+# local deps
+cfg.workspace = true
+ide-assists.workspace = true
+ide-completion.workspace = true
+ide-db.workspace = true
+ide-diagnostics.workspace = true
+ide-ssr.workspace = true
+profile.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+text-edit.workspace = true
 # ide should depend only on the top-level `hir` package. if you need
 # something from some `hir-xxx` subpackage, reexport the API via `hir`.
-hir = { path = "../hir", version = "0.0.0" }
+hir.workspace = true
 
 [target.'cfg(not(any(target_arch = "wasm32", target_os = "emscripten")))'.dependencies]
-toolchain = { path = "../toolchain", version = "0.0.0" }
+toolchain.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
 expect-test = "1.4.0"
 
+# local deps
+test-utils.workspace = true
+
 [features]
 in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
diff --git a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
index 68fd0952b48..b23763dce86 100644
--- a/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/file_structure.rs
@@ -160,7 +160,11 @@ fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
                 let label = match target_trait {
                     None => format!("impl {}", target_type.syntax().text()),
                     Some(t) => {
-                        format!("impl {} for {}", t.syntax().text(), target_type.syntax().text(),)
+                        format!("impl {}{} for {}",
+                            it.excl_token().map(|x| x.to_string()).unwrap_or_default(),
+                            t.syntax().text(),
+                            target_type.syntax().text(),
+                        )
                     }
                 };
 
@@ -214,6 +218,29 @@ mod tests {
     }
 
     #[test]
+    fn test_negative_trait_bound() {
+        let txt = r#"impl !Unpin for Test {}"#;
+        check(
+            txt,
+            expect![[r#"
+        [
+            StructureNode {
+                parent: None,
+                label: "impl !Unpin for Test",
+                navigation_range: 16..20,
+                node_range: 0..23,
+                kind: SymbolKind(
+                    Impl,
+                ),
+                detail: None,
+                deprecated: false,
+            },
+        ]
+        "#]],
+        );
+    }
+
+    #[test]
     fn test_file_structure() {
         check(
             r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index c7130a2a4bb..e70bc2ec541 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -17,6 +17,7 @@ use crate::{
 // This is the same as `Go to Definition` with the following exceptions:
 // - outline modules will navigate to the `mod name;` item declaration
 // - trait assoc items will navigate to the assoc item of the trait declaration opposed to the trait impl
+// - fields in patterns will navigate to the field declaration of the struct, union or variant
 pub(crate) fn goto_declaration(
     db: &RootDatabase,
     position: FilePosition,
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 73fd518a9ef..93019527f44 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -1916,4 +1916,68 @@ fn main() {
 "#,
         )
     }
+
+    #[test]
+    fn query_impls_in_nearest_block() {
+        check(
+            r#"
+struct S1;
+impl S1 {
+    fn e() -> () {}
+}
+fn f1() {
+    struct S1;
+    impl S1 {
+        fn e() -> () {}
+         //^
+    }
+    fn f2() {
+        fn f3() {
+            S1::e$0();
+        }
+    }
+}
+"#,
+        );
+
+        check(
+            r#"
+struct S1;
+impl S1 {
+    fn e() -> () {}
+}
+fn f1() {
+    struct S1;
+    impl S1 {
+        fn e() -> () {}
+         //^
+    }
+    fn f2() {
+        struct S2;
+        S1::e$0();
+    }
+}
+fn f12() {
+    struct S1;
+    impl S1 {
+        fn e() -> () {}
+    }
+}
+"#,
+        );
+
+        check(
+            r#"
+struct S1;
+impl S1 {
+    fn e() -> () {}
+     //^
+}
+fn f2() {
+    struct S2;
+    S1::e$0();
+}
+"#,
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
index 55f8779eed7..c889eb930f3 100644
--- a/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/highlight_related.rs
@@ -1356,7 +1356,6 @@ fn main() {
             r#"
 trait Trait {
     fn func(self) {}
-     //^^^^
 }
 
 impl Trait for () {
@@ -1376,7 +1375,6 @@ fn main() {
             r#"
 trait Trait {
     fn func(self) {}
-     //^^^^
 }
 
 impl Trait for () {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover.rs b/src/tools/rust-analyzer/crates/ide/src/hover.rs
index b214fa12a4f..2058a4f5f19 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover.rs
@@ -15,10 +15,11 @@ use ide_db::{
     FxIndexSet, RootDatabase,
 };
 use itertools::Itertools;
-use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T};
+use syntax::{ast, AstNode, SyntaxKind::*, SyntaxNode, T};
 
 use crate::{
     doc_links::token_as_doc_comment,
+    markdown_remove::remove_markdown,
     markup::Markup,
     runnables::{runnable_fn, runnable_mod},
     FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
@@ -26,14 +27,9 @@ use crate::{
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct HoverConfig {
     pub links_in_hover: bool,
-    pub documentation: Option<HoverDocFormat>,
+    pub documentation: bool,
     pub keywords: bool,
-}
-
-impl HoverConfig {
-    fn markdown(&self) -> bool {
-        matches!(self.documentation, Some(HoverDocFormat::Markdown))
-    }
+    pub format: HoverDocFormat,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
@@ -90,19 +86,38 @@ pub struct HoverResult {
 // image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
 pub(crate) fn hover(
     db: &RootDatabase,
-    FileRange { file_id, range }: FileRange,
+    frange @ FileRange { file_id, range }: FileRange,
     config: &HoverConfig,
 ) -> Option<RangeInfo<HoverResult>> {
     let sema = &hir::Semantics::new(db);
     let file = sema.parse(file_id).syntax().clone();
+    let mut res = if range.is_empty() {
+        hover_simple(sema, FilePosition { file_id, offset: range.start() }, file, config)
+    } else {
+        hover_ranged(sema, frange, file, config)
+    }?;
 
-    if !range.is_empty() {
-        return hover_ranged(&file, range, sema, config);
+    if let HoverDocFormat::PlainText = config.format {
+        res.info.markup = remove_markdown(res.info.markup.as_str()).into();
     }
-    let offset = range.start();
+    Some(res)
+}
 
+fn hover_simple(
+    sema: &Semantics<'_, RootDatabase>,
+    FilePosition { file_id, offset }: FilePosition,
+    file: SyntaxNode,
+    config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
     let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
-        IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4,
+        IDENT
+        | INT_NUMBER
+        | LIFETIME_IDENT
+        | T![self]
+        | T![super]
+        | T![crate]
+        | T![Self]
+        | T![_] => 4,
         // index and prefix ops
         T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
         kind if kind.is_keyword() => 2,
@@ -135,19 +150,18 @@ pub(crate) fn hover(
     } else {
         sema.descend_into_macros_with_same_text(original_token.clone())
     };
+    let descended = || descended.iter();
 
-    // try lint hover
-    let result = descended
-        .iter()
+    let result = descended()
+        // try lint hover
         .find_map(|token| {
             // FIXME: Definition should include known lints and the like instead of having this special case here
             let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
             render::try_for_lint(&attr, token)
         })
-        // try item definitions
+        // try definitions
         .or_else(|| {
-            descended
-                .iter()
+            descended()
                 .filter_map(|token| {
                     let node = token.parent()?;
                     let class = IdentClass::classify_token(sema, token)?;
@@ -168,10 +182,12 @@ pub(crate) fn hover(
                 })
         })
         // try keywords
-        .or_else(|| descended.iter().find_map(|token| render::keyword(sema, config, token)))
-        // try rest item hover
+        .or_else(|| descended().find_map(|token| render::keyword(sema, config, token)))
+        // try _ hovers
+        .or_else(|| descended().find_map(|token| render::underscore(sema, config, token)))
+        // try rest pattern hover
         .or_else(|| {
-            descended.iter().find_map(|token| {
+            descended().find_map(|token| {
                 if token.kind() != DOT2 {
                     return None;
                 }
@@ -187,58 +203,24 @@ pub(crate) fn hover(
             })
         });
 
-    result
-        .map(|mut res: HoverResult| {
-            res.actions = dedupe_or_merge_hover_actions(res.actions);
-            RangeInfo::new(original_token.text_range(), res)
-        })
-        // fallback to type hover if there aren't any other suggestions
-        // this finds its own range instead of using the closest token's range
-        .or_else(|| {
-            descended.iter().find_map(|token| hover_type_fallback(sema, config, token, token))
-        })
-}
-
-pub(crate) fn hover_for_definition(
-    sema: &Semantics<'_, RootDatabase>,
-    file_id: FileId,
-    definition: Definition,
-    node: &SyntaxNode,
-    config: &HoverConfig,
-) -> Option<HoverResult> {
-    let famous_defs = match &definition {
-        Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
-        _ => None,
-    };
-    render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
-        HoverResult {
-            markup: render::process_markup(sema.db, definition, &markup, config),
-            actions: show_implementations_action(sema.db, definition)
-                .into_iter()
-                .chain(show_fn_references_action(sema.db, definition))
-                .chain(runnable_action(sema, definition, file_id))
-                .chain(goto_type_action_for_def(sema.db, definition))
-                .collect(),
-        }
+    result.map(|mut res: HoverResult| {
+        res.actions = dedupe_or_merge_hover_actions(res.actions);
+        RangeInfo::new(original_token.text_range(), res)
     })
 }
 
 fn hover_ranged(
-    file: &SyntaxNode,
-    range: syntax::TextRange,
     sema: &Semantics<'_, RootDatabase>,
+    FileRange { range, .. }: FileRange,
+    file: SyntaxNode,
     config: &HoverConfig,
 ) -> Option<RangeInfo<HoverResult>> {
     // FIXME: make this work in attributes
-    let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
-        match_ast! {
-            match it {
-                ast::Expr(expr) => Some(Either::Left(expr)),
-                ast::Pat(pat) => Some(Either::Right(pat)),
-                _ => None,
-            }
-        }
-    })?;
+    let expr_or_pat = file
+        .covering_element(range)
+        .ancestors()
+        .take_while(|it| ast::MacroCall::can_cast(it.kind()) || !ast::Item::can_cast(it.kind()))
+        .find_map(Either::<ast::Expr, ast::Pat>::cast)?;
     let res = match &expr_or_pat {
         Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr),
         Either::Left(ast::Expr::PrefixExpr(prefix_expr))
@@ -248,7 +230,7 @@ fn hover_ranged(
         }
         _ => None,
     };
-    let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat));
+    let res = res.or_else(|| render::type_info_of(sema, config, &expr_or_pat));
     res.map(|it| {
         let range = match expr_or_pat {
             Either::Left(it) => it.syntax().text_range(),
@@ -258,37 +240,31 @@ fn hover_ranged(
     })
 }
 
-fn hover_type_fallback(
+pub(crate) fn hover_for_definition(
     sema: &Semantics<'_, RootDatabase>,
+    file_id: FileId,
+    definition: Definition,
+    node: &SyntaxNode,
     config: &HoverConfig,
-    token: &SyntaxToken,
-    original_token: &SyntaxToken,
-) -> Option<RangeInfo<HoverResult>> {
-    let node =
-        token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| {
-            ast::Expr::can_cast(n.kind())
-                || ast::Pat::can_cast(n.kind())
-                || ast::Type::can_cast(n.kind())
-        })?;
-
-    let expr_or_pat = match_ast! {
-        match node {
-            ast::Expr(it) => Either::Left(it),
-            ast::Pat(it) => Either::Right(it),
-            // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve.
-            // (e.g expanding a builtin macro). So we give up here.
-            ast::MacroCall(_it) => return None,
-            _ => return None,
-        }
+) -> Option<HoverResult> {
+    let famous_defs = match &definition {
+        Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+        _ => None,
     };
-
-    let res = render::type_info(sema, config, &expr_or_pat)?;
-
-    let range = sema
-        .original_range_opt(&node)
-        .map(|frange| frange.range)
-        .unwrap_or_else(|| original_token.text_range());
-    Some(RangeInfo::new(range, res))
+    render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
+        HoverResult {
+            markup: render::process_markup(sema.db, definition, &markup, config),
+            actions: [
+                show_implementations_action(sema.db, definition),
+                show_fn_references_action(sema.db, definition),
+                runnable_action(sema, definition, file_id),
+                goto_type_action_for_def(sema.db, definition),
+            ]
+            .into_iter()
+            .flatten()
+            .collect(),
+        }
+    })
 }
 
 fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 47257f0bfad..22611cfb892 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -26,58 +26,24 @@ use syntax::{
 use crate::{
     doc_links::{remove_links, rewrite_links},
     hover::walk_and_push_ty,
-    markdown_remove::remove_markdown,
     HoverAction, HoverConfig, HoverResult, Markup,
 };
 
-pub(super) fn type_info(
+pub(super) fn type_info_of(
     sema: &Semantics<'_, RootDatabase>,
-    config: &HoverConfig,
+    _config: &HoverConfig,
     expr_or_pat: &Either<ast::Expr, ast::Pat>,
 ) -> Option<HoverResult> {
     let TypeInfo { original, adjusted } = match expr_or_pat {
         Either::Left(expr) => sema.type_of_expr(expr)?,
         Either::Right(pat) => sema.type_of_pat(pat)?,
     };
-
-    let mut res = HoverResult::default();
-    let mut targets: Vec<hir::ModuleDef> = Vec::new();
-    let mut push_new_def = |item: hir::ModuleDef| {
-        if !targets.contains(&item) {
-            targets.push(item);
-        }
-    };
-    walk_and_push_ty(sema.db, &original, &mut push_new_def);
-
-    res.markup = if let Some(adjusted_ty) = adjusted {
-        walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
-        let original = original.display(sema.db).to_string();
-        let adjusted = adjusted_ty.display(sema.db).to_string();
-        let static_text_diff_len = "Coerced to: ".len() - "Type: ".len();
-        format!(
-            "{bt_start}Type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
-            original,
-            adjusted,
-            apad = static_text_diff_len + adjusted.len().max(original.len()),
-            opad = original.len(),
-            bt_start = if config.markdown() { "```text\n" } else { "" },
-            bt_end = if config.markdown() { "```\n" } else { "" }
-        )
-        .into()
-    } else {
-        if config.markdown() {
-            Markup::fenced_block(&original.display(sema.db))
-        } else {
-            original.display(sema.db).to_string().into()
-        }
-    };
-    res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
-    Some(res)
+    type_info(sema, _config, original, adjusted)
 }
 
 pub(super) fn try_expr(
     sema: &Semantics<'_, RootDatabase>,
-    config: &HoverConfig,
+    _config: &HoverConfig,
     try_expr: &ast::TryExpr,
 ) -> Option<HoverResult> {
     let inner_ty = sema.type_of_expr(&try_expr.expr()?)?.original;
@@ -153,14 +119,12 @@ pub(super) fn try_expr(
     let ppad = static_text_len_diff.min(0).abs() as usize;
 
     res.markup = format!(
-        "{bt_start}{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n{bt_end}",
+        "```text\n{} Type: {:>pad0$}\nPropagated as: {:>pad1$}\n```\n",
         s,
         inner_ty,
         body_ty,
         pad0 = ty_len_max + tpad,
         pad1 = ty_len_max + ppad,
-        bt_start = if config.markdown() { "```text\n" } else { "" },
-        bt_end = if config.markdown() { "```\n" } else { "" }
     )
     .into();
     Some(res)
@@ -168,7 +132,7 @@ pub(super) fn try_expr(
 
 pub(super) fn deref_expr(
     sema: &Semantics<'_, RootDatabase>,
-    config: &HoverConfig,
+    _config: &HoverConfig,
     deref_expr: &ast::PrefixExpr,
 ) -> Option<HoverResult> {
     let inner_ty = sema.type_of_expr(&deref_expr.expr()?)?.original;
@@ -197,15 +161,13 @@ pub(super) fn deref_expr(
             .max(adjusted.len() + coerced_len)
             .max(inner.len() + deref_len);
         format!(
-            "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n{bt_end}",
+            "```text\nDereferenced from: {:>ipad$}\nTo type: {:>apad$}\nCoerced to: {:>opad$}\n```\n",
             inner,
             original,
             adjusted,
             ipad = max_len - deref_len,
             apad = max_len - type_len,
             opad = max_len - coerced_len,
-            bt_start = if config.markdown() { "```text\n" } else { "" },
-            bt_end = if config.markdown() { "```\n" } else { "" }
         )
         .into()
     } else {
@@ -215,13 +177,11 @@ pub(super) fn deref_expr(
         let deref_len = "Dereferenced from: ".len();
         let max_len = (original.len() + type_len).max(inner.len() + deref_len);
         format!(
-            "{bt_start}Dereferenced from: {:>ipad$}\nTo type: {:>apad$}\n{bt_end}",
+            "```text\nDereferenced from: {:>ipad$}\nTo type: {:>apad$}\n```\n",
             inner,
             original,
             ipad = max_len - deref_len,
             apad = max_len - type_len,
-            bt_start = if config.markdown() { "```text\n" } else { "" },
-            bt_end = if config.markdown() { "```\n" } else { "" }
         )
         .into()
     };
@@ -230,12 +190,54 @@ pub(super) fn deref_expr(
     Some(res)
 }
 
+pub(super) fn underscore(
+    sema: &Semantics<'_, RootDatabase>,
+    config: &HoverConfig,
+    token: &SyntaxToken,
+) -> Option<HoverResult> {
+    if token.kind() != T![_] {
+        return None;
+    }
+    let parent = token.parent()?;
+    let _it = match_ast! {
+        match parent {
+            ast::InferType(it) => it,
+            ast::UnderscoreExpr(it) => return type_info_of(sema, config, &Either::Left(ast::Expr::UnderscoreExpr(it))),
+            ast::WildcardPat(it) => return type_info_of(sema, config, &Either::Right(ast::Pat::WildcardPat(it))),
+            _ => return None,
+        }
+    };
+    // let it = infer_type.syntax().parent()?;
+    // match_ast! {
+    //     match it {
+    //         ast::LetStmt(_it) => (),
+    //         ast::Param(_it) => (),
+    //         ast::RetType(_it) => (),
+    //         ast::TypeArg(_it) => (),
+
+    //         ast::CastExpr(_it) => (),
+    //         ast::ParenType(_it) => (),
+    //         ast::TupleType(_it) => (),
+    //         ast::PtrType(_it) => (),
+    //         ast::RefType(_it) => (),
+    //         ast::ArrayType(_it) => (),
+    //         ast::SliceType(_it) => (),
+    //         ast::ForType(_it) => (),
+    //         _ => return None,
+    //     }
+    // }
+
+    // FIXME: https://github.com/rust-lang/rust-analyzer/issues/11762, this currently always returns Unknown
+    // type_info(sema, config, sema.resolve_type(&ast::Type::InferType(it))?, None)
+    None
+}
+
 pub(super) fn keyword(
     sema: &Semantics<'_, RootDatabase>,
     config: &HoverConfig,
     token: &SyntaxToken,
 ) -> Option<HoverResult> {
-    if !token.kind().is_keyword() || !config.documentation.is_some() || !config.keywords {
+    if !token.kind().is_keyword() || !config.documentation || !config.keywords {
         return None;
     }
     let parent = token.parent()?;
@@ -259,7 +261,7 @@ pub(super) fn keyword(
 /// i.e. `let S {a, ..} = S {a: 1, b: 2}`
 pub(super) fn struct_rest_pat(
     sema: &Semantics<'_, RootDatabase>,
-    config: &HoverConfig,
+    _config: &HoverConfig,
     pattern: &RecordPat,
 ) -> HoverResult {
     let missing_fields = sema.record_pattern_missing_fields(pattern);
@@ -288,11 +290,7 @@ pub(super) fn struct_rest_pat(
         // get rid of trailing comma
         s.truncate(s.len() - 2);
 
-        if config.markdown() {
-            Markup::fenced_block(&s)
-        } else {
-            s.into()
-        }
+        Markup::fenced_block(&s)
     };
     res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
     res
@@ -346,13 +344,8 @@ pub(super) fn process_markup(
     config: &HoverConfig,
 ) -> Markup {
     let markup = markup.as_str();
-    let markup = if !config.markdown() {
-        remove_markdown(markup)
-    } else if config.links_in_hover {
-        rewrite_links(db, markup, def)
-    } else {
-        remove_links(markup)
-    };
+    let markup =
+        if config.links_in_hover { rewrite_links(db, markup, def) } else { remove_links(markup) };
     Markup::from(markup)
 }
 
@@ -465,8 +458,9 @@ pub(super) fn definition(
         Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None),
     };
 
-    let docs = match config.documentation {
-        Some(_) => docs.or_else(|| {
+    let docs = docs
+        .filter(|_| config.documentation)
+        .or_else(|| {
             // docs are missing, for assoc items of trait impls try to fall back to the docs of the
             // original item of the trait
             let assoc = def.as_assoc_item(db)?;
@@ -474,13 +468,46 @@ pub(super) fn definition(
             let name = Some(assoc.name(db)?);
             let item = trait_.items(db).into_iter().find(|it| it.name(db) == name)?;
             item.docs(db)
-        }),
-        None => None,
-    };
-    let docs = docs.filter(|_| config.documentation.is_some()).map(Into::into);
+        })
+        .map(Into::into);
     markup(docs, label, mod_path)
 }
 
+fn type_info(
+    sema: &Semantics<'_, RootDatabase>,
+    _config: &HoverConfig,
+    original: hir::Type,
+    adjusted: Option<hir::Type>,
+) -> Option<HoverResult> {
+    let mut res = HoverResult::default();
+    let mut targets: Vec<hir::ModuleDef> = Vec::new();
+    let mut push_new_def = |item: hir::ModuleDef| {
+        if !targets.contains(&item) {
+            targets.push(item);
+        }
+    };
+    walk_and_push_ty(sema.db, &original, &mut push_new_def);
+
+    res.markup = if let Some(adjusted_ty) = adjusted {
+        walk_and_push_ty(sema.db, &adjusted_ty, &mut push_new_def);
+        let original = original.display(sema.db).to_string();
+        let adjusted = adjusted_ty.display(sema.db).to_string();
+        let static_text_diff_len = "Coerced to: ".len() - "Type: ".len();
+        format!(
+            "```text\nType: {:>apad$}\nCoerced to: {:>opad$}\n```\n",
+            original,
+            adjusted,
+            apad = static_text_diff_len + adjusted.len().max(original.len()),
+            opad = original.len(),
+        )
+        .into()
+    } else {
+        Markup::fenced_block(&original.display(sema.db))
+    };
+    res.actions.push(HoverAction::goto_type_from_targets(sema.db, targets));
+    Some(res)
+}
+
 fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Markup> {
     let name = attr.name(db);
     let desc = format!("#[{name}]");
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index c7f241f2fea..2830212add8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -2,7 +2,7 @@ use expect_test::{expect, Expect};
 use ide_db::base_db::{FileLoader, FileRange};
 use syntax::TextRange;
 
-use crate::{fixture, hover::HoverDocFormat, HoverConfig};
+use crate::{fixture, HoverConfig, HoverDocFormat};
 
 fn check_hover_no_result(ra_fixture: &str) {
     let (analysis, position) = fixture::position(ra_fixture);
@@ -10,8 +10,9 @@ fn check_hover_no_result(ra_fixture: &str) {
         .hover(
             &HoverConfig {
                 links_in_hover: true,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
         )
@@ -26,8 +27,9 @@ fn check(ra_fixture: &str, expect: Expect) {
         .hover(
             &HoverConfig {
                 links_in_hover: true,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
         )
@@ -47,8 +49,9 @@ fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
         .hover(
             &HoverConfig {
                 links_in_hover: false,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
         )
@@ -68,8 +71,9 @@ fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
         .hover(
             &HoverConfig {
                 links_in_hover: true,
-                documentation: Some(HoverDocFormat::PlainText),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::PlainText,
             },
             FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
         )
@@ -89,8 +93,9 @@ fn check_actions(ra_fixture: &str, expect: Expect) {
         .hover(
             &HoverConfig {
                 links_in_hover: true,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             FileRange { file_id, range: position.range_or_empty() },
         )
@@ -105,8 +110,9 @@ fn check_hover_range(ra_fixture: &str, expect: Expect) {
         .hover(
             &HoverConfig {
                 links_in_hover: false,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             range,
         )
@@ -121,8 +127,9 @@ fn check_hover_range_no_results(ra_fixture: &str) {
         .hover(
             &HoverConfig {
                 links_in_hover: false,
-                documentation: Some(HoverDocFormat::Markdown),
+                documentation: true,
                 keywords: true,
+                format: HoverDocFormat::Markdown,
             },
             range,
         )
@@ -207,37 +214,20 @@ m!(ab$0c);
 }
 
 #[test]
-fn hover_shows_type_of_an_expression() {
-    check(
-        r#"
-pub fn foo() -> u32 { 1 }
-
-fn main() {
-    let foo_test = foo()$0;
-}
-"#,
-        expect![[r#"
-            *foo()*
-            ```rust
-            u32
-            ```
-        "#]],
-    );
-}
-
-#[test]
 fn hover_remove_markdown_if_configured() {
     check_hover_no_markdown(
         r#"
 pub fn foo() -> u32 { 1 }
 
 fn main() {
-    let foo_test = foo()$0;
+    let foo_test = foo$0();
 }
 "#,
         expect![[r#"
-            *foo()*
-            u32
+            *foo*
+            test
+
+            pub fn foo() -> u32
         "#]],
     );
 }
@@ -297,33 +287,6 @@ fn main() { let foo_test = fo$0o(); }
             "#]],
     );
 
-    // Multiple candidates but results are ambiguous.
-    check(
-        r#"
-//- /a.rs
-pub fn foo() -> u32 { 1 }
-
-//- /b.rs
-pub fn foo() -> &str { "" }
-
-//- /c.rs
-pub fn foo(a: u32, b: u32) {}
-
-//- /main.rs
-mod a;
-mod b;
-mod c;
-
-fn main() { let foo_test = fo$0o(); }
-        "#,
-        expect![[r#"
-                *foo*
-                ```rust
-                {unknown}
-                ```
-            "#]],
-    );
-
     // Use literal `crate` in path
     check(
         r#"
@@ -527,6 +490,7 @@ fn hover_field_offset() {
     // Hovering over the field when instantiating
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
 struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 }
 "#,
         expect![[r#"
@@ -548,6 +512,7 @@ fn hover_shows_struct_field_info() {
     // Hovering over the field when instantiating
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
 struct Foo { field_a: u32 }
 
 fn main() {
@@ -570,6 +535,7 @@ fn main() {
     // Hovering over the field in the definition
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
 struct Foo { field_a$0: u32 }
 
 fn main() {
@@ -1184,33 +1150,19 @@ fn test_hover_through_func_in_macro_recursive() {
 macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
 macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
 fn bar() -> u32 { 0 }
-fn foo() { let a = id!([0u32, bar($0)] ); }
+fn foo() { let a = id!([0u32, bar$0()] ); }
 "#,
         expect![[r#"
-                *bar()*
-                ```rust
-                u32
-                ```
-            "#]],
-    );
-}
+            *bar*
 
-#[test]
-fn test_hover_through_literal_string_in_macro() {
-    check(
-        r#"
-macro_rules! arr { ($($tt:tt)*) => { [$($tt)*] } }
-fn foo() {
-    let mastered_for_itunes = "";
-    let _ = arr!("Tr$0acks", &mastered_for_itunes);
-}
-"#,
-        expect![[r#"
-                *"Tracks"*
-                ```rust
-                &str
-                ```
-            "#]],
+            ```rust
+            test
+            ```
+
+            ```rust
+            fn bar() -> u32
+            ```
+        "#]],
     );
 }
 
@@ -1515,6 +1467,8 @@ fn my() {}
 fn test_hover_struct_doc_comment() {
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
+
 /// This is an example
 /// multiline doc
 ///
@@ -1573,7 +1527,7 @@ fn foo() { let bar = Ba$0r; }
             ```
 
             ```rust
-            struct Bar // size = 0, align = 1
+            struct Bar
             ```
 
             ---
@@ -1602,7 +1556,7 @@ fn foo() { let bar = Ba$0r; }
             ```
 
             ```rust
-            struct Bar // size = 0, align = 1
+            struct Bar
             ```
 
             ---
@@ -1630,7 +1584,7 @@ pub struct B$0ar
             ```
 
             ```rust
-            pub struct Bar // size = 0, align = 1
+            pub struct Bar
             ```
 
             ---
@@ -1657,7 +1611,7 @@ pub struct B$0ar
             ```
 
             ```rust
-            pub struct Bar // size = 0, align = 1
+            pub struct Bar
             ```
 
             ---
@@ -2959,6 +2913,8 @@ fn main() { let foo_test = name_with_dashes::wrapper::Thing::new$0(); }
 fn hover_field_pat_shorthand_ref_match_ergonomics() {
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
+
 struct S {
     f: i32,
 }
@@ -4398,6 +4354,7 @@ fn main() {
 fn hover_intra_doc_links() {
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
 
 pub mod theitem {
     /// This is the item. Cool!
@@ -4539,7 +4496,7 @@ trait A where
 fn string_shadowed_with_inner_items() {
     check(
         r#"
-//- /main.rs crate:main deps:alloc
+//- /main.rs crate:main deps:alloc target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
 
 /// Custom `String` type.
 struct String;
@@ -5234,7 +5191,7 @@ foo_macro!(
             ```
 
             ```rust
-            pub struct Foo // size = 0, align = 1
+            pub struct Foo
             ```
 
             ---
@@ -5248,6 +5205,8 @@ foo_macro!(
 fn hover_intra_in_attr() {
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
+
 #[doc = "Doc comment for [`Foo$0`]"]
 pub struct Foo(i32);
 "#,
@@ -5368,6 +5327,8 @@ enum Enum {
 fn hover_record_variant_field() {
     check(
         r#"
+//- /main.rs target_data_layout:e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128
+
 enum Enum {
     RecordV { field$0: u32 }
 }
@@ -5573,3 +5534,81 @@ fn main() {
         "#]],
     );
 }
+
+#[test]
+fn hover_underscore_pat() {
+    check(
+        r#"
+fn main() {
+    let _$0 = 0;
+}
+"#,
+        expect![[r#"
+            *_*
+            ```rust
+            i32
+            ```
+        "#]],
+    );
+    check(
+        r#"
+fn main() {
+    let (_$0,) = (0,);
+}
+"#,
+        expect![[r#"
+            *_*
+            ```rust
+            i32
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_underscore_expr() {
+    check(
+        r#"
+fn main() {
+    _$0 = 0;
+}
+"#,
+        expect![[r#"
+            *_*
+            ```rust
+            i32
+            ```
+        "#]],
+    );
+    check(
+        r#"
+fn main() {
+    (_$0,) = (0,);
+}
+"#,
+        expect![[r#"
+            *_*
+            ```rust
+            i32
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_underscore_type() {
+    check_hover_no_result(
+        r#"
+fn main() {
+    let x: _$0 = 0;
+}
+"#,
+    );
+    check_hover_no_result(
+        r#"
+fn main() {
+    let x: (_$0,) = (0,);
+}
+"#,
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
index 48a7bbfecff..ac477339ec2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints.rs
@@ -4,13 +4,16 @@ use std::{
 };
 
 use either::Either;
-use hir::{known, HasVisibility, HirDisplay, HirWrite, ModuleDef, ModuleDefId, Semantics};
+use hir::{
+    known, HasVisibility, HirDisplay, HirDisplayError, HirWrite, ModuleDef, ModuleDefId, Semantics,
+};
 use ide_db::{base_db::FileRange, famous_defs::FamousDefs, RootDatabase};
 use itertools::Itertools;
+use smallvec::{smallvec, SmallVec};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode},
-    match_ast, NodeOrToken, SyntaxNode, TextRange, TextSize,
+    match_ast, NodeOrToken, SyntaxNode, TextRange,
 };
 
 use crate::{navigation_target::TryToNav, FileId};
@@ -28,7 +31,6 @@ mod discriminant;
 
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct InlayHintsConfig {
-    pub location_links: bool,
     pub render_colons: bool,
     pub type_hints: bool,
     pub discriminant_hints: DiscriminantHints,
@@ -83,75 +85,108 @@ pub enum AdjustmentHintsMode {
     PreferPostfix,
 }
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum InlayKind {
-    BindingModeHint,
-    ChainingHint,
-    ClosingBraceHint,
-    ClosureReturnTypeHint,
-    GenericParamListHint,
-    AdjustmentHint,
-    AdjustmentHintPostfix,
-    LifetimeHint,
-    ParameterHint,
-    TypeHint,
-    DiscriminantHint,
+    BindingMode,
+    Chaining,
+    ClosingBrace,
+    ClosureReturnType,
+    GenericParamList,
+    Adjustment,
+    AdjustmentPostfix,
+    Lifetime,
+    Parameter,
+    Type,
+    Discriminant,
     OpeningParenthesis,
     ClosingParenthesis,
 }
 
 #[derive(Debug)]
 pub struct InlayHint {
+    /// The text range this inlay hint applies to.
     pub range: TextRange,
+    /// The kind of this inlay hint. This is used to determine side and padding of the hint for
+    /// rendering purposes.
     pub kind: InlayKind,
+    /// The actual label to show in the inlay hint.
     pub label: InlayHintLabel,
-    pub tooltip: Option<InlayTooltip>,
+}
+
+impl InlayHint {
+    fn closing_paren(range: TextRange) -> InlayHint {
+        InlayHint { range, kind: InlayKind::ClosingParenthesis, label: InlayHintLabel::from(")") }
+    }
+    fn opening_paren(range: TextRange) -> InlayHint {
+        InlayHint { range, kind: InlayKind::OpeningParenthesis, label: InlayHintLabel::from("(") }
+    }
 }
 
 #[derive(Debug)]
 pub enum InlayTooltip {
     String(String),
-    HoverRanged(FileId, TextRange),
-    HoverOffset(FileId, TextSize),
+    Markdown(String),
 }
 
 #[derive(Default)]
 pub struct InlayHintLabel {
-    pub parts: Vec<InlayHintLabelPart>,
+    pub parts: SmallVec<[InlayHintLabelPart; 1]>,
 }
 
 impl InlayHintLabel {
-    pub fn as_simple_str(&self) -> Option<&str> {
-        match &*self.parts {
-            [part] => part.as_simple_str(),
-            _ => None,
+    pub fn simple(
+        s: impl Into<String>,
+        tooltip: Option<InlayTooltip>,
+        linked_location: Option<FileRange>,
+    ) -> InlayHintLabel {
+        InlayHintLabel {
+            parts: smallvec![InlayHintLabelPart { text: s.into(), linked_location, tooltip }],
         }
     }
 
     pub fn prepend_str(&mut self, s: &str) {
         match &mut *self.parts {
-            [part, ..] if part.as_simple_str().is_some() => part.text = format!("{s}{}", part.text),
-            _ => self.parts.insert(0, InlayHintLabelPart { text: s.into(), linked_location: None }),
+            [InlayHintLabelPart { text, linked_location: None, tooltip: None }, ..] => {
+                text.insert_str(0, s)
+            }
+            _ => self.parts.insert(
+                0,
+                InlayHintLabelPart { text: s.into(), linked_location: None, tooltip: None },
+            ),
         }
     }
 
     pub fn append_str(&mut self, s: &str) {
         match &mut *self.parts {
-            [.., part] if part.as_simple_str().is_some() => part.text.push_str(s),
-            _ => self.parts.push(InlayHintLabelPart { text: s.into(), linked_location: None }),
+            [.., InlayHintLabelPart { text, linked_location: None, tooltip: None }] => {
+                text.push_str(s)
+            }
+            _ => self.parts.push(InlayHintLabelPart {
+                text: s.into(),
+                linked_location: None,
+                tooltip: None,
+            }),
         }
     }
 }
 
 impl From<String> for InlayHintLabel {
     fn from(s: String) -> Self {
-        Self { parts: vec![InlayHintLabelPart { text: s, linked_location: None }] }
+        Self {
+            parts: smallvec![InlayHintLabelPart { text: s, linked_location: None, tooltip: None }],
+        }
     }
 }
 
 impl From<&str> for InlayHintLabel {
     fn from(s: &str) -> Self {
-        Self { parts: vec![InlayHintLabelPart { text: s.into(), linked_location: None }] }
+        Self {
+            parts: smallvec![InlayHintLabelPart {
+                text: s.into(),
+                linked_location: None,
+                tooltip: None
+            }],
+        }
     }
 }
 
@@ -175,25 +210,25 @@ pub struct InlayHintLabelPart {
     /// When setting this, no tooltip must be set on the containing hint, or VS Code will display
     /// them both.
     pub linked_location: Option<FileRange>,
-}
-
-impl InlayHintLabelPart {
-    pub fn as_simple_str(&self) -> Option<&str> {
-        match self {
-            Self { text, linked_location: None } => Some(text),
-            _ => None,
-        }
-    }
+    /// The tooltip to show when hovering over the inlay hint, this may invoke other actions like
+    /// hover requests to show.
+    pub tooltip: Option<InlayTooltip>,
 }
 
 impl fmt::Debug for InlayHintLabelPart {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self.as_simple_str() {
-            Some(string) => string.fmt(f),
-            None => f
+        match self {
+            Self { text, linked_location: None, tooltip: None } => text.fmt(f),
+            Self { text, linked_location, tooltip } => f
                 .debug_struct("InlayHintLabelPart")
-                .field("text", &self.text)
-                .field("linked_location", &self.linked_location)
+                .field("text", text)
+                .field("linked_location", linked_location)
+                .field(
+                    "tooltip",
+                    &tooltip.as_ref().map_or("", |it| match it {
+                        InlayTooltip::String(it) | InlayTooltip::Markdown(it) => it,
+                    }),
+                )
                 .finish(),
         }
     }
@@ -204,7 +239,6 @@ struct InlayHintLabelBuilder<'a> {
     db: &'a RootDatabase,
     result: InlayHintLabel,
     last_part: String,
-    location_link_enabled: bool,
     location: Option<FileRange>,
 }
 
@@ -216,9 +250,6 @@ impl fmt::Write for InlayHintLabelBuilder<'_> {
 
 impl HirWrite for InlayHintLabelBuilder<'_> {
     fn start_location_link(&mut self, def: ModuleDefId) {
-        if !self.location_link_enabled {
-            return;
-        }
         if self.location.is_some() {
             never!("location link is already started");
         }
@@ -230,9 +261,6 @@ impl HirWrite for InlayHintLabelBuilder<'_> {
     }
 
     fn end_location_link(&mut self) {
-        if !self.location_link_enabled {
-            return;
-        }
         self.make_new_part();
     }
 }
@@ -242,6 +270,7 @@ impl InlayHintLabelBuilder<'_> {
         self.result.parts.push(InlayHintLabelPart {
             text: take(&mut self.last_part),
             linked_location: self.location.take(),
+            tooltip: None,
         });
     }
 
@@ -262,34 +291,51 @@ fn label_of_ty(
         mut max_length: Option<usize>,
         ty: hir::Type,
         label_builder: &mut InlayHintLabelBuilder<'_>,
-    ) {
+    ) -> Result<(), HirDisplayError> {
         let iter_item_type = hint_iterator(sema, famous_defs, &ty);
         match iter_item_type {
-            Some(ty) => {
-                const LABEL_START: &str = "impl Iterator<Item = ";
+            Some((iter_trait, item, ty)) => {
+                const LABEL_START: &str = "impl ";
+                const LABEL_ITERATOR: &str = "Iterator";
+                const LABEL_MIDDLE: &str = "<";
+                const LABEL_ITEM: &str = "Item";
+                const LABEL_MIDDLE2: &str = " = ";
                 const LABEL_END: &str = ">";
 
-                max_length =
-                    max_length.map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len()));
-
-                label_builder.write_str(LABEL_START).unwrap();
-                rec(sema, famous_defs, max_length, ty, label_builder);
-                label_builder.write_str(LABEL_END).unwrap();
-            }
-            None => {
-                let _ = ty.display_truncated(sema.db, max_length).write_to(label_builder);
+                max_length = max_length.map(|len| {
+                    len.saturating_sub(
+                        LABEL_START.len()
+                            + LABEL_ITERATOR.len()
+                            + LABEL_MIDDLE.len()
+                            + LABEL_MIDDLE2.len()
+                            + LABEL_END.len(),
+                    )
+                });
+
+                label_builder.write_str(LABEL_START)?;
+                label_builder.start_location_link(ModuleDef::from(iter_trait).into());
+                label_builder.write_str(LABEL_ITERATOR)?;
+                label_builder.end_location_link();
+                label_builder.write_str(LABEL_MIDDLE)?;
+                label_builder.start_location_link(ModuleDef::from(item).into());
+                label_builder.write_str(LABEL_ITEM)?;
+                label_builder.end_location_link();
+                label_builder.write_str(LABEL_MIDDLE2)?;
+                rec(sema, famous_defs, max_length, ty, label_builder)?;
+                label_builder.write_str(LABEL_END)?;
+                Ok(())
             }
-        };
+            None => ty.display_truncated(sema.db, max_length).write_to(label_builder),
+        }
     }
 
     let mut label_builder = InlayHintLabelBuilder {
         db: sema.db,
         last_part: String::new(),
         location: None,
-        location_link_enabled: config.location_links,
         result: InlayHintLabel::default(),
     };
-    rec(sema, famous_defs, config.max_length, ty, &mut label_builder);
+    let _ = rec(sema, famous_defs, config.max_length, ty, &mut label_builder);
     let r = label_builder.finish();
     Some(r)
 }
@@ -383,11 +429,9 @@ fn hints(
                 // static type elisions
                 ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)),
                 ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)),
+                ast::Item::Enum(it) => discriminant::enum_hints(hints, famous_defs, config, file_id, it),
                 _ => None,
             },
-            ast::Variant(v) => {
-                discriminant::hints(hints, famous_defs, config, file_id, &v)
-            },
             // FIXME: fn-ptr type, dyn fn type, and trait object type elisions
             ast::Type(_) => None,
             _ => None,
@@ -395,12 +439,12 @@ fn hints(
     };
 }
 
-/// Checks if the type is an Iterator from std::iter and returns its item type.
+/// Checks if the type is an Iterator from std::iter and returns the iterator trait and the item type of the concrete iterator.
 fn hint_iterator(
     sema: &Semantics<'_, RootDatabase>,
     famous_defs: &FamousDefs<'_, '_>,
     ty: &hir::Type,
-) -> Option<hir::Type> {
+) -> Option<(hir::Trait, hir::TypeAlias, hir::Type)> {
     let db = sema.db;
     let strukt = ty.strip_references().as_adt()?;
     let krate = strukt.module(db).krate();
@@ -423,7 +467,7 @@ fn hint_iterator(
             _ => None,
         })?;
         if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
-            return Some(ty);
+            return Some((iter_trait, assoc_type_item, ty));
         }
     }
 
@@ -447,7 +491,6 @@ mod tests {
     use super::ClosureReturnTypeHints;
 
     pub(super) const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig {
-        location_links: false,
         discriminant_hints: DiscriminantHints::Never,
         render_colons: false,
         type_hints: false,
@@ -465,8 +508,6 @@ mod tests {
         max_length: None,
         closing_brace_hints_min_lines: None,
     };
-    pub(super) const DISABLED_CONFIG_WITH_LINKS: InlayHintsConfig =
-        InlayHintsConfig { location_links: true, ..DISABLED_CONFIG };
     pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
         type_hints: true,
         parameter_hints: true,
@@ -474,7 +515,7 @@ mod tests {
         closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
         binding_mode_hints: true,
         lifetime_elision_hints: LifetimeElisionHints::Always,
-        ..DISABLED_CONFIG_WITH_LINKS
+        ..DISABLED_CONFIG
     };
 
     #[track_caller]
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
index bdd7c05e008..188eb7f977b 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs
@@ -3,15 +3,19 @@
 //! let _: u32  = /* <never-to-any> */ loop {};
 //! let _: &u32 = /* &* */ &mut 0;
 //! ```
-use hir::{Adjust, AutoBorrow, Mutability, OverloadedDeref, PointerCast, Safety, Semantics};
+use hir::{Adjust, Adjustment, AutoBorrow, HirDisplay, Mutability, PointerCast, Safety, Semantics};
 use ide_db::RootDatabase;
 
+use stdx::never;
 use syntax::{
     ast::{self, make, AstNode},
     ted,
 };
 
-use crate::{AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintsConfig, InlayKind};
+use crate::{
+    AdjustmentHints, AdjustmentHintsMode, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
+    InlayTooltip,
+};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -44,27 +48,12 @@ pub(super) fn hints(
         mode_and_needs_parens_for_adjustment_hints(expr, config.adjustment_hints_mode);
 
     if needs_outer_parens {
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::OpeningParenthesis,
-            label: "(".into(),
-            tooltip: None,
-        });
+        acc.push(InlayHint::opening_paren(expr.syntax().text_range()));
     }
 
     if postfix && needs_inner_parens {
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::OpeningParenthesis,
-            label: "(".into(),
-            tooltip: None,
-        });
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::ClosingParenthesis,
-            label: ")".into(),
-            tooltip: None,
-        });
+        acc.push(InlayHint::opening_paren(expr.syntax().text_range()));
+        acc.push(InlayHint::closing_paren(expr.syntax().text_range()));
     }
 
     let (mut tmp0, mut tmp1);
@@ -76,72 +65,71 @@ pub(super) fn hints(
         &mut tmp1
     };
 
-    for adjustment in iter {
-        if adjustment.source == adjustment.target {
+    for Adjustment { source, target, kind } in iter {
+        if source == target {
             continue;
         }
 
         // FIXME: Add some nicer tooltips to each of these
-        let text = match adjustment.kind {
+        let (text, coercion) = match kind {
             Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
-                "<never-to-any>"
+                ("<never-to-any>", "never to any")
+            }
+            Adjust::Deref(_) => ("*", "dereference"),
+            Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => ("&", "borrow"),
+            Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => ("&mut ", "unique borrow"),
+            Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => {
+                ("&raw const ", "const pointer borrow")
+            }
+            Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => {
+                ("&raw mut ", "mut pointer borrow")
             }
-            Adjust::Deref(None) => "*",
-            Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*",
-            Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*",
-            Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&",
-            Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ",
-            Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ",
-            Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ",
             // some of these could be represented via `as` casts, but that's not too nice and
             // handling everything as a prefix expr makes the `(` and `)` insertion easier
             Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
                 match cast {
-                    PointerCast::ReifyFnPointer => "<fn-item-to-fn-pointer>",
-                    PointerCast::UnsafeFnPointer => "<safe-fn-pointer-to-unsafe-fn-pointer>",
+                    PointerCast::ReifyFnPointer => {
+                        ("<fn-item-to-fn-pointer>", "fn item to fn pointer")
+                    }
+                    PointerCast::UnsafeFnPointer => (
+                        "<safe-fn-pointer-to-unsafe-fn-pointer>",
+                        "safe fn pointer to unsafe fn pointer",
+                    ),
                     PointerCast::ClosureFnPointer(Safety::Unsafe) => {
-                        "<closure-to-unsafe-fn-pointer>"
+                        ("<closure-to-unsafe-fn-pointer>", "closure to unsafe fn pointer")
+                    }
+                    PointerCast::ClosureFnPointer(Safety::Safe) => {
+                        ("<closure-to-fn-pointer>", "closure to fn pointer")
+                    }
+                    PointerCast::MutToConstPointer => {
+                        ("<mut-ptr-to-const-ptr>", "mut ptr to const ptr")
                     }
-                    PointerCast::ClosureFnPointer(Safety::Safe) => "<closure-to-fn-pointer>",
-                    PointerCast::MutToConstPointer => "<mut-ptr-to-const-ptr>",
-                    PointerCast::ArrayToPointer => "<array-ptr-to-element-ptr>",
-                    PointerCast::Unsize => "<unsize>",
+                    PointerCast::ArrayToPointer => ("<array-ptr-to-element-ptr>", ""),
+                    PointerCast::Unsize => ("<unsize>", "unsize"),
                 }
             }
             _ => continue,
         };
         acc.push(InlayHint {
             range: expr.syntax().text_range(),
-            kind: if postfix {
-                InlayKind::AdjustmentHintPostfix
-            } else {
-                InlayKind::AdjustmentHint
-            },
-            label: if postfix { format!(".{}", text.trim_end()).into() } else { text.into() },
-            tooltip: None,
+            kind: if postfix { InlayKind::AdjustmentPostfix } else { InlayKind::Adjustment },
+            label: InlayHintLabel::simple(
+                if postfix { format!(".{}", text.trim_end()) } else { text.to_owned() },
+                Some(InlayTooltip::Markdown(format!(
+                    "`{}` → `{}` ({coercion} coercion)",
+                    source.display(sema.db),
+                    target.display(sema.db),
+                ))),
+                None,
+            ),
         });
     }
     if !postfix && needs_inner_parens {
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::OpeningParenthesis,
-            label: "(".into(),
-            tooltip: None,
-        });
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::ClosingParenthesis,
-            label: ")".into(),
-            tooltip: None,
-        });
+        acc.push(InlayHint::opening_paren(expr.syntax().text_range()));
+        acc.push(InlayHint::closing_paren(expr.syntax().text_range()));
     }
     if needs_outer_parens {
-        acc.push(InlayHint {
-            range: expr.syntax().text_range(),
-            kind: InlayKind::ClosingParenthesis,
-            label: ")".into(),
-            tooltip: None,
-        });
+        acc.push(InlayHint::closing_paren(expr.syntax().text_range()));
     }
     Some(())
 }
@@ -223,16 +211,21 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool,
     ted::replace(expr.syntax(), dummy_expr.syntax());
 
     let parent = dummy_expr.syntax().parent();
-    let expr = if postfix {
-        let ast::Expr::TryExpr(e) = &dummy_expr else { unreachable!() };
-        let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() };
+    let Some(expr) = (|| {
+        if postfix {
+            let ast::Expr::TryExpr(e) = &dummy_expr else { return None };
+            let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None };
 
-        e.expr().unwrap()
-    } else {
-        let ast::Expr::RefExpr(e) = &dummy_expr else { unreachable!() };
-        let Some(ast::Expr::ParenExpr(e)) = e.expr() else { unreachable!() };
+            e.expr()
+        } else {
+            let ast::Expr::RefExpr(e) = &dummy_expr else { return None };
+            let Some(ast::Expr::ParenExpr(e)) = e.expr() else { return None };
 
-        e.expr().unwrap()
+            e.expr()
+        }
+    })() else {
+        never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr);
+        return (true, true)
     };
 
     // At this point
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
index adec19c765a..4af7f9bdb73 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/bind_pat.rs
@@ -12,9 +12,7 @@ use syntax::{
     match_ast,
 };
 
-use crate::{
-    inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip,
-};
+use crate::{inlay_hints::closure_has_block_body, InlayHint, InlayHintsConfig, InlayKind};
 
 use super::label_of_ty;
 
@@ -22,7 +20,7 @@ pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
-    file_id: FileId,
+    _file_id: FileId,
     pat: &ast::IdentPat,
 ) -> Option<()> {
     if !config.type_hints {
@@ -50,12 +48,8 @@ pub(super) fn hints(
             Some(name) => name.syntax().text_range(),
             None => pat.syntax().text_range(),
         },
-        kind: InlayKind::TypeHint,
+        kind: InlayKind::Type,
         label,
-        tooltip: pat
-            .name()
-            .map(|it| it.syntax().text_range())
-            .map(|it| InlayTooltip::HoverRanged(file_id, it)),
     });
 
     Some(())
@@ -73,28 +67,23 @@ fn should_not_display_type_hint(
         return true;
     }
 
-    if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
-        if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() {
-            return true;
-        }
-    }
-
-    if config.hide_closure_initialization_hints {
-        if let Some(parent) = bind_pat.syntax().parent() {
-            if let Some(it) = ast::LetStmt::cast(parent) {
-                if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
-                    if closure_has_block_body(&closure) {
-                        return true;
-                    }
-                }
-            }
-        }
+    if sema.resolve_bind_pat_to_const(bind_pat).is_some() {
+        return true;
     }
 
     for node in bind_pat.syntax().ancestors() {
         match_ast! {
             match node {
-                ast::LetStmt(it) => return it.ty().is_some(),
+                ast::LetStmt(it) => {
+                    if config.hide_closure_initialization_hints {
+                        if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
+                            if closure_has_block_body(&closure) {
+                                return true;
+                            }
+                        }
+                    }
+                    return it.ty().is_some()
+                },
                 // FIXME: We might wanna show type hints in parameters for non-top level patterns as well
                 ast::Param(it) => return it.ty().is_some(),
                 ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
@@ -194,8 +183,7 @@ mod tests {
     use crate::{fixture, inlay_hints::InlayHintsConfig};
 
     use crate::inlay_hints::tests::{
-        check, check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS,
-        TEST_CONFIG,
+        check, check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG,
     };
     use crate::ClosureReturnTypeHints;
 
@@ -291,7 +279,7 @@ fn main() {
     fn iterator_hint_regression_issue_12674() {
         // Ensure we don't crash while solving the projection type of iterators.
         check_expect(
-            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS },
+            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
             r#"
 //- minicore: iterators
 struct S<T>(T);
@@ -322,22 +310,66 @@ fn main(a: SliceIter<'_, Container>) {
                 [
                     InlayHint {
                         range: 484..554,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "impl Iterator<Item = impl Iterator<Item = &&str>>",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "impl ",
+                            InlayHintLabelPart {
+                                text: "Iterator",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2611..2619,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            "<",
+                            InlayHintLabelPart {
+                                text: "Item",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2643..2647,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            " = impl ",
+                            InlayHintLabelPart {
+                                text: "Iterator",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2611..2619,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            "<",
+                            InlayHintLabelPart {
+                                text: "Item",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2643..2647,
+                                    },
                                 ),
-                                484..554,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            " = &&str>>",
+                        ],
                     },
                     InlayHint {
                         range: 484..485,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -350,6 +382,7 @@ fn main(a: SliceIter<'_, Container>) {
                                         range: 289..298,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "<",
                             InlayHintLabelPart {
@@ -362,17 +395,10 @@ fn main(a: SliceIter<'_, Container>) {
                                         range: 238..247,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             ">",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                484..485,
-                            ),
-                        ),
                     },
                 ]
             "#]],
@@ -537,6 +563,21 @@ fn main() {
     }
 
     #[test]
+    fn const_pats_have_no_type_hints() {
+        check_types(
+            r#"
+const FOO: usize = 0;
+
+fn main() {
+    match 0 {
+        FOO => (),
+        _ => ()
+    }
+}"#,
+        );
+    }
+
+    #[test]
     fn let_statement() {
         check_types(
             r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
index a0166d0048a..11b9cd269bf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/binding_mode.rs
@@ -7,7 +7,7 @@ use ide_db::RootDatabase;
 
 use syntax::ast::{self, AstNode};
 
-use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip};
+use crate::{InlayHint, InlayHintsConfig, InlayKind};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -40,12 +40,7 @@ pub(super) fn hints(
             (true, false) => "&",
             _ => return,
         };
-        acc.push(InlayHint {
-            range,
-            kind: InlayKind::BindingModeHint,
-            label: r.to_string().into(),
-            tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
-        });
+        acc.push(InlayHint { range, kind: InlayKind::BindingMode, label: r.to_string().into() });
     });
     match pat {
         ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => {
@@ -57,24 +52,13 @@ pub(super) fn hints(
             };
             acc.push(InlayHint {
                 range: pat.syntax().text_range(),
-                kind: InlayKind::BindingModeHint,
+                kind: InlayKind::BindingMode,
                 label: bm.to_string().into(),
-                tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
             });
         }
         ast::Pat::OrPat(pat) if !pattern_adjustments.is_empty() && outer_paren_pat.is_none() => {
-            acc.push(InlayHint {
-                range: pat.syntax().text_range(),
-                kind: InlayKind::OpeningParenthesis,
-                label: "(".into(),
-                tooltip: None,
-            });
-            acc.push(InlayHint {
-                range: pat.syntax().text_range(),
-                kind: InlayKind::ClosingParenthesis,
-                label: ")".into(),
-                tooltip: None,
-            });
+            acc.push(InlayHint::opening_paren(pat.syntax().text_range()));
+            acc.push(InlayHint::closing_paren(pat.syntax().text_range()));
         }
         _ => (),
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
index 8810d5d34db..0c54f084c19 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs
@@ -5,7 +5,7 @@ use syntax::{
     Direction, NodeOrToken, SyntaxKind, T,
 };
 
-use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip};
+use crate::{FileId, InlayHint, InlayHintsConfig, InlayKind};
 
 use super::label_of_ty;
 
@@ -13,7 +13,7 @@ pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
-    file_id: FileId,
+    _file_id: FileId,
     expr: &ast::Expr,
 ) -> Option<()> {
     if !config.chaining_hints {
@@ -59,9 +59,8 @@ pub(super) fn hints(
             }
             acc.push(InlayHint {
                 range: expr.syntax().text_range(),
-                kind: InlayKind::ChainingHint,
+                kind: InlayKind::Chaining,
                 label: label_of_ty(famous_defs, config, ty)?,
-                tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
             });
         }
     }
@@ -73,10 +72,7 @@ mod tests {
     use expect_test::expect;
 
     use crate::{
-        inlay_hints::tests::{
-            check_expect, check_with_config, DISABLED_CONFIG, DISABLED_CONFIG_WITH_LINKS,
-            TEST_CONFIG,
-        },
+        inlay_hints::tests::{check_expect, check_with_config, DISABLED_CONFIG, TEST_CONFIG},
         InlayHintsConfig,
     };
 
@@ -88,11 +84,7 @@ mod tests {
     #[test]
     fn chaining_hints_ignore_comments() {
         check_expect(
-            InlayHintsConfig {
-                type_hints: false,
-                chaining_hints: true,
-                ..DISABLED_CONFIG_WITH_LINKS
-            },
+            InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG },
             r#"
 struct A(B);
 impl A { fn into_b(self) -> B { self.0 } }
@@ -111,7 +103,7 @@ fn main() {
                 [
                     InlayHint {
                         range: 147..172,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -124,21 +116,14 @@ fn main() {
                                         range: 63..64,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                147..172,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 147..154,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -151,17 +136,10 @@ fn main() {
                                         range: 7..8,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                147..154,
-                            ),
-                        ),
                     },
                 ]
             "#]],
@@ -210,33 +188,43 @@ fn main() {
                 [
                     InlayHint {
                         range: 143..190,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "C",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "",
+                            InlayHintLabelPart {
+                                text: "C",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            0,
+                                        ),
+                                        range: 51..52,
+                                    },
                                 ),
-                                143..190,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            "",
+                        ],
                     },
                     InlayHint {
                         range: 143..179,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "B",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "",
+                            InlayHintLabelPart {
+                                text: "B",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            0,
+                                        ),
+                                        range: 29..30,
+                                    },
                                 ),
-                                143..179,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            "",
+                        ],
                     },
                 ]
             "#]],
@@ -246,7 +234,7 @@ fn main() {
     #[test]
     fn struct_access_chaining_hints() {
         check_expect(
-            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS },
+            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
             r#"
 struct A { pub b: B }
 struct B { pub c: C }
@@ -269,7 +257,7 @@ fn main() {
                 [
                     InlayHint {
                         range: 143..190,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -282,21 +270,14 @@ fn main() {
                                         range: 51..52,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                143..190,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 143..179,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -309,17 +290,10 @@ fn main() {
                                         range: 29..30,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                143..179,
-                            ),
-                        ),
                     },
                 ]
             "#]],
@@ -329,7 +303,7 @@ fn main() {
     #[test]
     fn generic_chaining_hints() {
         check_expect(
-            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS },
+            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
             r#"
 struct A<T>(T);
 struct B<T>(T);
@@ -353,7 +327,7 @@ fn main() {
                 [
                     InlayHint {
                         range: 246..283,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -366,6 +340,7 @@ fn main() {
                                         range: 23..24,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "<",
                             InlayHintLabelPart {
@@ -378,21 +353,14 @@ fn main() {
                                         range: 55..56,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "<i32, bool>>",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                246..283,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 246..265,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -405,6 +373,7 @@ fn main() {
                                         range: 7..8,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "<",
                             InlayHintLabelPart {
@@ -417,17 +386,10 @@ fn main() {
                                         range: 55..56,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "<i32, bool>>",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                246..265,
-                            ),
-                        ),
                     },
                 ]
             "#]],
@@ -437,7 +399,7 @@ fn main() {
     #[test]
     fn shorten_iterator_chaining_hints() {
         check_expect(
-            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG_WITH_LINKS },
+            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
             r#"
 //- minicore: iterators
 use core::iter;
@@ -463,52 +425,106 @@ fn main() {
                 [
                     InlayHint {
                         range: 174..241,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "impl Iterator<Item = ()>",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "impl ",
+                            InlayHintLabelPart {
+                                text: "Iterator",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2611..2619,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            "<",
+                            InlayHintLabelPart {
+                                text: "Item",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2643..2647,
+                                    },
                                 ),
-                                174..241,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            " = ()>",
+                        ],
                     },
                     InlayHint {
                         range: 174..224,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "impl Iterator<Item = ()>",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "impl ",
+                            InlayHintLabelPart {
+                                text: "Iterator",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2611..2619,
+                                    },
                                 ),
-                                174..224,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            "<",
+                            InlayHintLabelPart {
+                                text: "Item",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2643..2647,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            " = ()>",
+                        ],
                     },
                     InlayHint {
                         range: 174..206,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
-                            "impl Iterator<Item = ()>",
-                        ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
+                            "impl ",
+                            InlayHintLabelPart {
+                                text: "Iterator",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2611..2619,
+                                    },
                                 ),
-                                174..206,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                            "<",
+                            InlayHintLabelPart {
+                                text: "Item",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            1,
+                                        ),
+                                        range: 2643..2647,
+                                    },
+                                ),
+                                tooltip: "",
+                            },
+                            " = ()>",
+                        ],
                     },
                     InlayHint {
                         range: 174..189,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "&mut ",
                             InlayHintLabelPart {
@@ -521,17 +537,10 @@ fn main() {
                                         range: 24..30,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                174..189,
-                            ),
-                        ),
                     },
                 ]
             "#]],
@@ -564,7 +573,7 @@ fn main() {
                 [
                     InlayHint {
                         range: 124..130,
-                        kind: TypeHint,
+                        kind: Type,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -577,21 +586,14 @@ fn main() {
                                         range: 7..13,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                124..130,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 145..185,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -604,21 +606,14 @@ fn main() {
                                         range: 7..13,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                145..185,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 145..168,
-                        kind: ChainingHint,
+                        kind: Chaining,
                         label: [
                             "",
                             InlayHintLabelPart {
@@ -631,32 +626,28 @@ fn main() {
                                         range: 7..13,
                                     },
                                 ),
+                                tooltip: "",
                             },
                             "",
                         ],
-                        tooltip: Some(
-                            HoverRanged(
-                                FileId(
-                                    0,
-                                ),
-                                145..168,
-                            ),
-                        ),
                     },
                     InlayHint {
                         range: 222..228,
-                        kind: ParameterHint,
+                        kind: Parameter,
                         label: [
-                            "self",
-                        ],
-                        tooltip: Some(
-                            HoverOffset(
-                                FileId(
-                                    0,
+                            InlayHintLabelPart {
+                                text: "self",
+                                linked_location: Some(
+                                    FileRange {
+                                        file_id: FileId(
+                                            0,
+                                        ),
+                                        range: 42..46,
+                                    },
                                 ),
-                                42,
-                            ),
-                        ),
+                                tooltip: "",
+                            },
+                        ],
                     },
                 ]
             "#]],
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
index e340c64c54b..14c11be54ef 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closing_brace.rs
@@ -10,9 +10,7 @@ use syntax::{
     match_ast, SyntaxKind, SyntaxNode, T,
 };
 
-use crate::{
-    inlay_hints::InlayHintLabelPart, FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
-};
+use crate::{FileId, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -109,15 +107,11 @@ pub(super) fn hints(
         return None;
     }
 
-    let linked_location = config
-        .location_links
-        .then(|| name_range.map(|range| FileRange { file_id, range }))
-        .flatten();
+    let linked_location = name_range.map(|range| FileRange { file_id, range });
     acc.push(InlayHint {
         range: closing_token.text_range(),
-        kind: InlayKind::ClosingBraceHint,
-        label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] },
-        tooltip: None, // provided by label part location
+        kind: InlayKind::ClosingBrace,
+        label: InlayHintLabel::simple(label, None, linked_location),
     });
 
     None
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
index d9929beaac0..f03a18b8e96 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/closure_ret.rs
@@ -4,7 +4,7 @@ use syntax::ast::{self, AstNode};
 
 use crate::{
     inlay_hints::closure_has_block_body, ClosureReturnTypeHints, InlayHint, InlayHintsConfig,
-    InlayKind, InlayTooltip,
+    InlayKind,
 };
 
 use super::label_of_ty;
@@ -13,7 +13,7 @@ pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
     famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
-    file_id: FileId,
+    _file_id: FileId,
     closure: ast::ClosureExpr,
 ) -> Option<()> {
     if config.closure_return_type_hints == ClosureReturnTypeHints::Never {
@@ -41,9 +41,8 @@ pub(super) fn hints(
     }
     acc.push(InlayHint {
         range: param_list.syntax().text_range(),
-        kind: InlayKind::ClosureReturnTypeHint,
+        kind: InlayKind::ClosureReturnType,
         label: label_of_ty(famous_defs, config, ty)?,
-        tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
     });
     Some(())
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
index f32c4bdf288..5dd51ad11f4 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/discriminant.rs
@@ -4,27 +4,43 @@
 //!    Bar/* = 0*/,
 //! }
 //! ```
-use ide_db::{base_db::FileId, famous_defs::FamousDefs};
+use hir::Semantics;
+use ide_db::{base_db::FileId, famous_defs::FamousDefs, RootDatabase};
 use syntax::ast::{self, AstNode, HasName};
 
-use crate::{DiscriminantHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip};
+use crate::{
+    DiscriminantHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip,
+};
 
-pub(super) fn hints(
+pub(super) fn enum_hints(
     acc: &mut Vec<InlayHint>,
     FamousDefs(sema, _): &FamousDefs<'_, '_>,
     config: &InlayHintsConfig,
     _: FileId,
-    variant: &ast::Variant,
+    enum_: ast::Enum,
 ) -> Option<()> {
-    let field_list = match config.discriminant_hints {
-        DiscriminantHints::Always => variant.field_list(),
-        DiscriminantHints::Fieldless => match variant.field_list() {
-            Some(_) => return None,
-            None => None,
-        },
-        DiscriminantHints::Never => return None,
+    let enabled = match config.discriminant_hints {
+        DiscriminantHints::Always => true,
+        DiscriminantHints::Fieldless => {
+            !sema.to_def(&enum_)?.is_data_carrying(sema.db)
+                || enum_.variant_list()?.variants().any(|v| v.expr().is_some())
+        }
+        DiscriminantHints::Never => false,
     };
+    if !enabled {
+        return None;
+    }
+    for variant in enum_.variant_list()?.variants() {
+        variant_hints(acc, sema, &variant);
+    }
+    None
+}
 
+fn variant_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    variant: &ast::Variant,
+) -> Option<()> {
     if variant.eq_token().is_some() {
         return None;
     }
@@ -37,19 +53,22 @@ pub(super) fn hints(
     let d = v.eval(sema.db);
 
     acc.push(InlayHint {
-        range: match field_list {
+        range: match variant.field_list() {
             Some(field_list) => name.syntax().text_range().cover(field_list.syntax().text_range()),
             None => name.syntax().text_range(),
         },
-        kind: InlayKind::DiscriminantHint,
-        label: match &d {
-            Ok(v) => format!("{}", v).into(),
-            Err(_) => "?".into(),
-        },
-        tooltip: Some(InlayTooltip::String(match &d {
-            Ok(_) => "enum variant discriminant".into(),
-            Err(e) => format!("{e:?}").into(),
-        })),
+        kind: InlayKind::Discriminant,
+        label: InlayHintLabel::simple(
+            match &d {
+                Ok(v) => format!("{}", v),
+                Err(_) => "?".into(),
+            },
+            Some(InlayTooltip::String(match &d {
+                Ok(_) => "enum variant discriminant".into(),
+                Err(e) => format!("{e:?}").into(),
+            })),
+            None,
+        ),
     });
 
     Some(())
@@ -86,15 +105,30 @@ mod tests {
         check_discriminants(
             r#"
 enum Enum {
-    Variant,
-  //^^^^^^^0
-    Variant1,
-  //^^^^^^^^1
-    Variant2,
-  //^^^^^^^^2
-    Variant5 = 5,
-    Variant6,
-  //^^^^^^^^6
+  Variant,
+//^^^^^^^0
+  Variant1,
+//^^^^^^^^1
+  Variant2,
+//^^^^^^^^2
+  Variant5 = 5,
+  Variant6,
+//^^^^^^^^6
+}
+"#,
+        );
+        check_discriminants_fieldless(
+            r#"
+enum Enum {
+  Variant,
+//^^^^^^^0
+  Variant1,
+//^^^^^^^^1
+  Variant2,
+//^^^^^^^^2
+  Variant5 = 5,
+  Variant6,
+//^^^^^^^^6
 }
 "#,
         );
@@ -128,8 +162,22 @@ enum Enum {
 enum Enum {
     Variant(),
     Variant1,
+    Variant2 {},
+    Variant3,
+    Variant5,
+    Variant6,
+}
+"#,
+        );
+        check_discriminants_fieldless(
+            r#"
+enum Enum {
+    Variant(),
+  //^^^^^^^^^0
+    Variant1,
   //^^^^^^^^1
     Variant2 {},
+  //^^^^^^^^^^^2
     Variant3,
   //^^^^^^^^3
     Variant5 = 5,
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/fn_lifetime_fn.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/fn_lifetime_fn.rs
index 2aa5e3dc734..b7182085b31 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/fn_lifetime_fn.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/fn_lifetime_fn.rs
@@ -10,7 +10,7 @@ use syntax::{
     SyntaxToken,
 };
 
-use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints};
+use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -23,9 +23,8 @@ pub(super) fn hints(
 
     let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint {
         range: t.text_range(),
-        kind: InlayKind::LifetimeHint,
+        kind: InlayKind::Lifetime,
         label: label.into(),
-        tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
     };
 
     let param_list = func.param_list()?;
@@ -183,21 +182,19 @@ pub(super) fn hints(
             let is_empty = gpl.generic_params().next().is_none();
             acc.push(InlayHint {
                 range: angle_tok.text_range(),
-                kind: InlayKind::LifetimeHint,
+                kind: InlayKind::Lifetime,
                 label: format!(
                     "{}{}",
                     allocated_lifetimes.iter().format(", "),
                     if is_empty { "" } else { ", " }
                 )
                 .into(),
-                tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
             });
         }
         (None, allocated_lifetimes) => acc.push(InlayHint {
             range: func.name()?.syntax().text_range(),
-            kind: InlayKind::GenericParamListHint,
+            kind: InlayKind::GenericParamList,
             label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(),
-            tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
         }),
     }
     Some(())
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
index 588a0e3b6a4..1122ee2e392 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_static.rs
@@ -8,7 +8,7 @@ use syntax::{
     SyntaxKind,
 };
 
-use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints};
+use crate::{InlayHint, InlayHintsConfig, InlayKind, LifetimeElisionHints};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -32,9 +32,8 @@ pub(super) fn hints(
             let t = ty.amp_token()?;
             acc.push(InlayHint {
                 range: t.text_range(),
-                kind: InlayKind::LifetimeHint,
+                kind: InlayKind::Lifetime,
                 label: "'static".to_owned().into(),
-                tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
             });
         }
     }
diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
index ecee67632e3..9cdae632410 100644
--- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/param_name.rs
@@ -10,7 +10,7 @@ use ide_db::{base_db::FileRange, RootDatabase};
 use stdx::to_lower_snake_case;
 use syntax::ast::{self, AstNode, HasArgList, HasName, UnaryOp};
 
-use crate::{InlayHint, InlayHintsConfig, InlayKind, InlayTooltip};
+use crate::{InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind};
 
 pub(super) fn hints(
     acc: &mut Vec<InlayHint>,
@@ -43,21 +43,20 @@ pub(super) fn hints(
             !should_hide_param_name_hint(sema, &callable, param_name, arg)
         })
         .map(|(param, param_name, _, FileRange { range, .. })| {
-            let mut tooltip = None;
+            let mut linked_location = None;
             if let Some(name) = param {
                 if let hir::CallableKind::Function(f) = callable.kind() {
                     // assert the file is cached so we can map out of macros
                     if let Some(_) = sema.source(f) {
-                        tooltip = sema.original_range_opt(name.syntax());
+                        linked_location = sema.original_range_opt(name.syntax());
                     }
                 }
             }
 
             InlayHint {
                 range,
-                kind: InlayKind::ParameterHint,
-                label: param_name.into(),
-                tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
+                kind: InlayKind::Parameter,
+                label: InlayHintLabel::simple(param_name, None, linked_location),
             }
         });
 
diff --git a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
index edc48e84d72..1cfde236245 100644
--- a/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/join_lines.rs
@@ -161,10 +161,8 @@ fn remove_newline(
         }
     }
 
-    if config.join_assignments {
-        if join_assignments(edit, &prev, &next).is_some() {
-            return;
-        }
+    if config.join_assignments && join_assignments(edit, &prev, &next).is_some() {
+        return;
     }
 
     if config.unwrap_trivial_blocks {
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index 239456cb281..4ead9d4d0a8 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -82,7 +82,8 @@ pub use crate::{
     hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
     inlay_hints::{
         AdjustmentHints, AdjustmentHintsMode, ClosureReturnTypeHints, DiscriminantHints, InlayHint,
-        InlayHintLabel, InlayHintsConfig, InlayKind, InlayTooltip, LifetimeElisionHints,
+        InlayHintLabel, InlayHintLabelPart, InlayHintsConfig, InlayKind, InlayTooltip,
+        LifetimeElisionHints,
     },
     join_lines::JoinLinesConfig,
     markup::Markup,
@@ -236,7 +237,7 @@ impl Analysis {
             Ok(Vec::new()),
             false,
             CrateOrigin::CratesIo { repo: None, name: None },
-            None,
+            Err("Analysis::from_single_file has no target layout".into()),
         );
         change.change_file(file_id, Some(Arc::new(text)));
         change.set_crate_graph(crate_graph);
diff --git a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
index 3ec5c629e4f..718868c8747 100644
--- a/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/markdown_remove.rs
@@ -11,12 +11,146 @@ pub(crate) fn remove_markdown(markdown: &str) -> String {
     for event in parser {
         match event {
             Event::Text(text) | Event::Code(text) => out.push_str(&text),
-            Event::SoftBreak | Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => {
-                out.push('\n')
+            Event::SoftBreak => out.push(' '),
+            Event::HardBreak | Event::Rule | Event::End(Tag::CodeBlock(_)) => out.push('\n'),
+            Event::End(Tag::Paragraph) => {
+                out.push('\n');
+                out.push('\n');
             }
-            _ => {}
+            Event::Start(_)
+            | Event::End(_)
+            | Event::Html(_)
+            | Event::FootnoteReference(_)
+            | Event::TaskListMarker(_) => (),
         }
     }
 
+    if let Some(p) = out.rfind(|c| c != '\n') {
+        out.drain(p + 1..);
+    }
+
     out
 }
+
+#[cfg(test)]
+mod tests {
+    use expect_test::expect;
+
+    use super::*;
+
+    #[test]
+    fn smoke_test() {
+        let res = remove_markdown(
+            r##"
+A function or function pointer.
+
+Functions are the primary way code is executed within Rust. Function blocks, usually just
+called functions, can be defined in a variety of different places and be assigned many
+different attributes and modifiers.
+
+Standalone functions that just sit within a module not attached to anything else are common,
+but most functions will end up being inside [`impl`] blocks, either on another type itself, or
+as a trait impl for that type.
+
+```rust
+fn standalone_function() {
+    // code
+}
+
+pub fn public_thing(argument: bool) -> String {
+    // code
+    # "".to_string()
+}
+
+struct Thing {
+    foo: i32,
+}
+
+impl Thing {
+    pub fn new() -> Self {
+        Self {
+            foo: 42,
+        }
+    }
+}
+```
+
+In addition to presenting fixed types in the form of `fn name(arg: type, ..) -> return_type`,
+functions can also declare a list of type parameters along with trait bounds that they fall
+into.
+
+```rust
+fn generic_function<T: Clone>(x: T) -> (T, T, T) {
+    (x.clone(), x.clone(), x.clone())
+}
+
+fn generic_where<T>(x: T) -> T
+    where T: std::ops::Add<Output = T> + Copy
+{
+    x + x + x
+}
+```
+
+Declaring trait bounds in the angle brackets is functionally identical to using a `where`
+clause. It's up to the programmer to decide which works better in each situation, but `where`
+tends to be better when things get longer than one line.
+
+Along with being made public via `pub`, `fn` can also have an [`extern`] added for use in
+FFI.
+
+For more information on the various types of functions and how they're used, consult the [Rust
+book] or the [Reference].
+
+[`impl`]: keyword.impl.html
+[`extern`]: keyword.extern.html
+[Rust book]: ../book/ch03-03-how-functions-work.html
+[Reference]: ../reference/items/functions.html
+"##,
+        );
+        expect![[r#"
+            A function or function pointer.
+
+            Functions are the primary way code is executed within Rust. Function blocks, usually just called functions, can be defined in a variety of different places and be assigned many different attributes and modifiers.
+
+            Standalone functions that just sit within a module not attached to anything else are common, but most functions will end up being inside impl blocks, either on another type itself, or as a trait impl for that type.
+
+            fn standalone_function() {
+                // code
+            }
+
+            pub fn public_thing(argument: bool) -> String {
+                // code
+                # "".to_string()
+            }
+
+            struct Thing {
+                foo: i32,
+            }
+
+            impl Thing {
+                pub fn new() -> Self {
+                    Self {
+                        foo: 42,
+                    }
+                }
+            }
+
+            In addition to presenting fixed types in the form of fn name(arg: type, ..) -> return_type, functions can also declare a list of type parameters along with trait bounds that they fall into.
+
+            fn generic_function<T: Clone>(x: T) -> (T, T, T) {
+                (x.clone(), x.clone(), x.clone())
+            }
+
+            fn generic_where<T>(x: T) -> T
+                where T: std::ops::Add<Output = T> + Copy
+            {
+                x + x + x
+            }
+
+            Declaring trait bounds in the angle brackets is functionally identical to using a where clause. It's up to the programmer to decide which works better in each situation, but where tends to be better when things get longer than one line.
+
+            Along with being made public via pub, fn can also have an extern added for use in FFI.
+
+            For more information on the various types of functions and how they're used, consult the Rust book or the Reference."#]].assert_eq(&res);
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs
index 0f758cfa2d3..60fb1544a8f 100644
--- a/src/tools/rust-analyzer/crates/ide/src/references.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/references.rs
@@ -1636,4 +1636,384 @@ pub fn deri$0ve(_stream: TokenStream) -> TokenStream {}
             "#]],
         );
     }
+
+    #[test]
+    fn assoc_items_trait_def() {
+        check(
+            r#"
+trait Trait {
+    const CONST$0: usize;
+}
+
+impl Trait for () {
+    const CONST: usize = 0;
+}
+
+impl Trait for ((),) {
+    const CONST: usize = 0;
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::CONST;
+
+    let _ = T::CONST;
+}
+"#,
+            expect![[r#"
+                CONST Const FileId(0) 18..37 24..29
+
+                FileId(0) 71..76
+                FileId(0) 125..130
+                FileId(0) 183..188
+                FileId(0) 206..211
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    type TypeAlias$0;
+}
+
+impl Trait for () {
+    type TypeAlias = ();
+}
+
+impl Trait for ((),) {
+    type TypeAlias = ();
+}
+
+fn f<T: Trait>() {
+    let _: <() as Trait>::TypeAlias;
+
+    let _: T::TypeAlias;
+}
+"#,
+            expect![[r#"
+                TypeAlias TypeAlias FileId(0) 18..33 23..32
+
+                FileId(0) 66..75
+                FileId(0) 117..126
+                FileId(0) 181..190
+                FileId(0) 207..216
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    fn function$0() {}
+}
+
+impl Trait for () {
+    fn function() {}
+}
+
+impl Trait for ((),) {
+    fn function() {}
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::function;
+
+    let _ = T::function;
+}
+"#,
+            expect![[r#"
+                function Function FileId(0) 18..34 21..29
+
+                FileId(0) 65..73
+                FileId(0) 112..120
+                FileId(0) 166..174
+                FileId(0) 192..200
+            "#]],
+        );
+    }
+
+    #[test]
+    fn assoc_items_trait_impl_def() {
+        check(
+            r#"
+trait Trait {
+    const CONST: usize;
+}
+
+impl Trait for () {
+    const CONST$0: usize = 0;
+}
+
+impl Trait for ((),) {
+    const CONST: usize = 0;
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::CONST;
+
+    let _ = T::CONST;
+}
+"#,
+            expect![[r#"
+                CONST Const FileId(0) 65..88 71..76
+
+                FileId(0) 183..188
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    type TypeAlias;
+}
+
+impl Trait for () {
+    type TypeAlias$0 = ();
+}
+
+impl Trait for ((),) {
+    type TypeAlias = ();
+}
+
+fn f<T: Trait>() {
+    let _: <() as Trait>::TypeAlias;
+
+    let _: T::TypeAlias;
+}
+"#,
+            expect![[r#"
+                TypeAlias TypeAlias FileId(0) 61..81 66..75
+
+                FileId(0) 23..32
+                FileId(0) 117..126
+                FileId(0) 181..190
+                FileId(0) 207..216
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    fn function() {}
+}
+
+impl Trait for () {
+    fn function$0() {}
+}
+
+impl Trait for ((),) {
+    fn function() {}
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::function;
+
+    let _ = T::function;
+}
+"#,
+            expect![[r#"
+                function Function FileId(0) 62..78 65..73
+
+                FileId(0) 166..174
+            "#]],
+        );
+    }
+
+    #[test]
+    fn assoc_items_ref() {
+        check(
+            r#"
+trait Trait {
+    const CONST: usize;
+}
+
+impl Trait for () {
+    const CONST: usize = 0;
+}
+
+impl Trait for ((),) {
+    const CONST: usize = 0;
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::CONST$0;
+
+    let _ = T::CONST;
+}
+"#,
+            expect![[r#"
+                CONST Const FileId(0) 65..88 71..76
+
+                FileId(0) 183..188
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    type TypeAlias;
+}
+
+impl Trait for () {
+    type TypeAlias = ();
+}
+
+impl Trait for ((),) {
+    type TypeAlias = ();
+}
+
+fn f<T: Trait>() {
+    let _: <() as Trait>::TypeAlias$0;
+
+    let _: T::TypeAlias;
+}
+"#,
+            expect![[r#"
+                TypeAlias TypeAlias FileId(0) 18..33 23..32
+
+                FileId(0) 66..75
+                FileId(0) 117..126
+                FileId(0) 181..190
+                FileId(0) 207..216
+            "#]],
+        );
+        check(
+            r#"
+trait Trait {
+    fn function() {}
+}
+
+impl Trait for () {
+    fn function() {}
+}
+
+impl Trait for ((),) {
+    fn function() {}
+}
+
+fn f<T: Trait>() {
+    let _ = <()>::function$0;
+
+    let _ = T::function;
+}
+"#,
+            expect![[r#"
+                function Function FileId(0) 62..78 65..73
+
+                FileId(0) 166..174
+            "#]],
+        );
+    }
+
+    #[test]
+    fn name_clashes() {
+        check(
+            r#"
+trait Foo {
+    fn method$0(&self) -> u8;
+}
+
+struct Bar {
+    method: u8,
+}
+
+impl Foo for Bar {
+    fn method(&self) -> u8 {
+        self.method
+    }
+}
+fn method() {}
+"#,
+            expect![[r#"
+                method Function FileId(0) 16..39 19..25
+
+                FileId(0) 101..107
+            "#]],
+        );
+        check(
+            r#"
+trait Foo {
+    fn method(&self) -> u8;
+}
+
+struct Bar {
+    method$0: u8,
+}
+
+impl Foo for Bar {
+    fn method(&self) -> u8 {
+        self.method
+    }
+}
+fn method() {}
+"#,
+            expect![[r#"
+                method Field FileId(0) 60..70 60..66
+
+                FileId(0) 136..142 Read
+            "#]],
+        );
+        check(
+            r#"
+trait Foo {
+    fn method(&self) -> u8;
+}
+
+struct Bar {
+    method: u8,
+}
+
+impl Foo for Bar {
+    fn method$0(&self) -> u8 {
+        self.method
+    }
+}
+fn method() {}
+"#,
+            expect![[r#"
+                method Function FileId(0) 98..148 101..107
+
+                (no references)
+            "#]],
+        );
+        check(
+            r#"
+trait Foo {
+    fn method(&self) -> u8;
+}
+
+struct Bar {
+    method: u8,
+}
+
+impl Foo for Bar {
+    fn method(&self) -> u8 {
+        self.method$0
+    }
+}
+fn method() {}
+"#,
+            expect![[r#"
+                method Field FileId(0) 60..70 60..66
+
+                FileId(0) 136..142 Read
+            "#]],
+        );
+        check(
+            r#"
+trait Foo {
+    fn method(&self) -> u8;
+}
+
+struct Bar {
+    method: u8,
+}
+
+impl Foo for Bar {
+    fn method(&self) -> u8 {
+        self.method
+    }
+}
+fn method$0() {}
+"#,
+            expect![[r#"
+                method Function FileId(0) 151..165 154..160
+
+                (no references)
+            "#]],
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/rename.rs b/src/tools/rust-analyzer/crates/ide/src/rename.rs
index 15bdf14fb9b..8e89160ef5e 100644
--- a/src/tools/rust-analyzer/crates/ide/src/rename.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/rename.rs
@@ -13,7 +13,7 @@ use ide_db::{
 };
 use itertools::Itertools;
 use stdx::{always, never};
-use syntax::{ast, AstNode, SyntaxNode};
+use syntax::{ast, utils::is_raw_identifier, AstNode, SmolStr, SyntaxNode, TextRange, TextSize};
 
 use text_edit::TextEdit;
 
@@ -48,7 +48,13 @@ pub(crate) fn prepare_rename(
                 frange.range.contains_inclusive(position.offset)
                     && frange.file_id == position.file_id
             );
-            Ok(frange.range)
+
+            Ok(match name_like {
+                ast::NameLike::Lifetime(_) => {
+                    TextRange::new(frange.range.start() + TextSize::from(1), frange.range.end())
+                }
+                _ => frange.range,
+            })
         })
         .reduce(|acc, cur| match (acc, cur) {
             // ensure all ranges are the same
@@ -116,7 +122,11 @@ pub(crate) fn will_rename_file(
     let sema = Semantics::new(db);
     let module = sema.to_module_def(file_id)?;
     let def = Definition::Module(module);
-    let mut change = def.rename(&sema, new_name_stem).ok()?;
+    let mut change = if is_raw_identifier(new_name_stem) {
+        def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()?
+    } else {
+        def.rename(&sema, new_name_stem).ok()?
+    };
     change.file_system_edits.clear();
     Some(change)
 }
@@ -407,7 +417,7 @@ mod tests {
     #[test]
     fn test_prepare_rename_namelikes() {
         check_prepare(r"fn name$0<'lifetime>() {}", expect![[r#"3..7: name"#]]);
-        check_prepare(r"fn name<'lifetime$0>() {}", expect![[r#"8..17: 'lifetime"#]]);
+        check_prepare(r"fn name<'lifetime$0>() {}", expect![[r#"9..17: lifetime"#]]);
         check_prepare(r"fn name<'lifetime>() { name$0(); }", expect![[r#"23..27: name"#]]);
     }
 
@@ -521,15 +531,19 @@ impl Foo {
 
     #[test]
     fn test_rename_to_invalid_identifier_lifetime2() {
-        cov_mark::check!(rename_not_a_lifetime_ident_ref);
         check(
-            "foo",
+            "_",
             r#"fn main<'a>(_: &'a$0 ()) {}"#,
-            "error: Invalid name `foo`: not a lifetime identifier",
+            r#"error: Invalid name `_`: not a lifetime identifier"#,
         );
     }
 
     #[test]
+    fn test_rename_accepts_lifetime_without_apostrophe() {
+        check("foo", r#"fn main<'a>(_: &'a$0 ()) {}"#, r#"fn main<'foo>(_: &'foo ()) {}"#);
+    }
+
+    #[test]
     fn test_rename_to_underscore_invalid() {
         cov_mark::check!(rename_underscore_multiple);
         check(
@@ -549,6 +563,15 @@ impl Foo {
     }
 
     #[test]
+    fn test_rename_mod_invalid_raw_ident() {
+        check(
+            "r#self",
+            r#"mod foo$0 {}"#,
+            "error: Invalid name: `self` cannot be a raw identifier",
+        );
+    }
+
+    #[test]
     fn test_rename_for_local() {
         check(
             "k",
@@ -1277,6 +1300,143 @@ mod bar$0;
     }
 
     #[test]
+    fn test_rename_mod_to_raw_ident() {
+        check_expect(
+            "r#fn",
+            r#"
+//- /lib.rs
+mod foo$0;
+
+fn main() { foo::bar::baz(); }
+
+//- /foo.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub fn baz() {}
+"#,
+            expect![[r#"
+                SourceChange {
+                    source_file_edits: {
+                        FileId(
+                            0,
+                        ): TextEdit {
+                            indels: [
+                                Indel {
+                                    insert: "r#fn",
+                                    delete: 4..7,
+                                },
+                                Indel {
+                                    insert: "r#fn",
+                                    delete: 22..25,
+                                },
+                            ],
+                        },
+                    },
+                    file_system_edits: [
+                        MoveFile {
+                            src: FileId(
+                                1,
+                            ),
+                            dst: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "fn.rs",
+                            },
+                        },
+                        MoveDir {
+                            src: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "foo",
+                            },
+                            src_id: FileId(
+                                1,
+                            ),
+                            dst: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "fn",
+                            },
+                        },
+                    ],
+                    is_snippet: false,
+                }
+            "#]],
+        );
+    }
+
+    #[test]
+    fn test_rename_mod_from_raw_ident() {
+        // FIXME: `r#fn` in path expression is not renamed.
+        check_expect(
+            "foo",
+            r#"
+//- /lib.rs
+mod r#fn$0;
+
+fn main() { r#fn::bar::baz(); }
+
+//- /fn.rs
+pub mod bar;
+
+//- /fn/bar.rs
+pub fn baz() {}
+"#,
+            expect![[r#"
+                SourceChange {
+                    source_file_edits: {
+                        FileId(
+                            0,
+                        ): TextEdit {
+                            indels: [
+                                Indel {
+                                    insert: "foo",
+                                    delete: 4..8,
+                                },
+                            ],
+                        },
+                    },
+                    file_system_edits: [
+                        MoveFile {
+                            src: FileId(
+                                1,
+                            ),
+                            dst: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "foo.rs",
+                            },
+                        },
+                        MoveDir {
+                            src: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "fn",
+                            },
+                            src_id: FileId(
+                                1,
+                            ),
+                            dst: AnchoredPathBuf {
+                                anchor: FileId(
+                                    1,
+                                ),
+                                path: "foo",
+                            },
+                        },
+                    ],
+                    is_snippet: false,
+                }
+            "#]],
+        );
+    }
+
+    #[test]
     fn test_enum_variant_from_module_1() {
         cov_mark::check!(rename_non_local);
         check(
@@ -1832,6 +1992,31 @@ fn foo<'a>() -> &'a () {
     }
 
     #[test]
+    fn test_rename_label_new_name_without_apostrophe() {
+        check(
+            "foo",
+            r#"
+fn main() {
+    'outer$0: loop {
+        'inner: loop {
+            break 'outer;
+        }
+    }
+}
+        "#,
+            r#"
+fn main() {
+    'foo: loop {
+        'inner: loop {
+            break 'foo;
+        }
+    }
+}
+        "#,
+        );
+    }
+
+    #[test]
     fn test_self_to_self() {
         cov_mark::check!(rename_self_to_self);
         check(
diff --git a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
index f807ba30f40..a666562f101 100644
--- a/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/signature_help.rs
@@ -4,13 +4,15 @@
 use std::collections::BTreeSet;
 
 use either::Either;
-use hir::{AssocItem, GenericParam, HasAttrs, HirDisplay, Semantics, Trait};
-use ide_db::{active_parameter::callable_for_node, base_db::FilePosition};
+use hir::{
+    AssocItem, GenericParam, HasAttrs, HirDisplay, ModuleDef, PathResolution, Semantics, Trait,
+};
+use ide_db::{active_parameter::callable_for_node, base_db::FilePosition, FxIndexMap};
 use stdx::format_to;
 use syntax::{
     algo,
     ast::{self, HasArgList},
-    match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
+    match_ast, AstNode, Direction, SyntaxKind, SyntaxToken, TextRange, TextSize,
 };
 
 use crate::RootDatabase;
@@ -37,14 +39,18 @@ impl SignatureHelp {
     }
 
     fn push_call_param(&mut self, param: &str) {
-        self.push_param('(', param);
+        self.push_param("(", param);
     }
 
     fn push_generic_param(&mut self, param: &str) {
-        self.push_param('<', param);
+        self.push_param("<", param);
+    }
+
+    fn push_record_field(&mut self, param: &str) {
+        self.push_param("{ ", param);
     }
 
-    fn push_param(&mut self, opening_delim: char, param: &str) {
+    fn push_param(&mut self, opening_delim: &str, param: &str) {
         if !self.signature.ends_with(opening_delim) {
             self.signature.push_str(", ");
         }
@@ -85,6 +91,13 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio
                     }
                     return signature_help_for_generics(&sema, garg_list, token);
                 },
+                ast::RecordExpr(record) => {
+                    let cursor_outside = record.record_expr_field_list().and_then(|list| list.r_curly_token()).as_ref() == Some(&token);
+                    if cursor_outside {
+                        continue;
+                    }
+                    return signature_help_for_record_lit(&sema, record, token);
+                },
                 _ => (),
             }
         }
@@ -92,7 +105,9 @@ pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Optio
         // Stop at multi-line expressions, since the signature of the outer call is not very
         // helpful inside them.
         if let Some(expr) = ast::Expr::cast(node.clone()) {
-            if expr.syntax().text().contains_char('\n') {
+            if expr.syntax().text().contains_char('\n')
+                && expr.syntax().kind() != SyntaxKind::RECORD_EXPR
+            {
                 return None;
             }
         }
@@ -368,6 +383,86 @@ fn add_assoc_type_bindings(
     }
 }
 
+fn signature_help_for_record_lit(
+    sema: &Semantics<'_, RootDatabase>,
+    record: ast::RecordExpr,
+    token: SyntaxToken,
+) -> Option<SignatureHelp> {
+    let arg_list = record
+        .syntax()
+        .ancestors()
+        .filter_map(ast::RecordExpr::cast)
+        .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;
+
+    let active_parameter = arg_list
+        .record_expr_field_list()?
+        .fields()
+        .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+        .count();
+
+    let mut res = SignatureHelp {
+        doc: None,
+        signature: String::new(),
+        parameters: vec![],
+        active_parameter: Some(active_parameter),
+    };
+
+    let fields;
+
+    let db = sema.db;
+    let path_res = sema.resolve_path(&record.path()?)?;
+    if let PathResolution::Def(ModuleDef::Variant(variant)) = path_res {
+        fields = variant.fields(db);
+        let en = variant.parent_enum(db);
+
+        res.doc = en.docs(db).map(|it| it.into());
+        format_to!(res.signature, "enum {}::{} {{ ", en.name(db), variant.name(db));
+    } else {
+        let adt = match path_res {
+            PathResolution::SelfType(imp) => imp.self_ty(db).as_adt()?,
+            PathResolution::Def(ModuleDef::Adt(adt)) => adt,
+            _ => return None,
+        };
+
+        match adt {
+            hir::Adt::Struct(it) => {
+                fields = it.fields(db);
+                res.doc = it.docs(db).map(|it| it.into());
+                format_to!(res.signature, "struct {} {{ ", it.name(db));
+            }
+            hir::Adt::Union(it) => {
+                fields = it.fields(db);
+                res.doc = it.docs(db).map(|it| it.into());
+                format_to!(res.signature, "union {} {{ ", it.name(db));
+            }
+            _ => return None,
+        }
+    }
+
+    let mut fields =
+        fields.into_iter().map(|field| (field.name(db), Some(field))).collect::<FxIndexMap<_, _>>();
+    let mut buf = String::new();
+    for field in record.record_expr_field_list()?.fields() {
+        let Some((field, _, ty)) = sema.resolve_record_field(&field) else { continue };
+        let name = field.name(db);
+        format_to!(buf, "{name}: {}", ty.display_truncated(db, Some(20)));
+        res.push_record_field(&buf);
+        buf.clear();
+
+        if let Some(field) = fields.get_mut(&name) {
+            *field = None;
+        }
+    }
+    for (name, field) in fields {
+        let Some(field) = field else { continue };
+        format_to!(buf, "{name}: {}", field.ty(db).display_truncated(db, Some(20)));
+        res.push_record_field(&buf);
+        buf.clear();
+    }
+    res.signature.push_str(" }");
+    Some(res)
+}
+
 #[cfg(test)]
 mod tests {
     use std::iter;
@@ -1405,4 +1500,98 @@ fn take<C, Error>(
             "#]],
         );
     }
+
+    #[test]
+    fn record_literal() {
+        check(
+            r#"
+struct Strukt<T, U = ()> {
+    t: T,
+    u: U,
+    unit: (),
+}
+fn f() {
+    Strukt {
+        u: 0,
+        $0
+    }
+}
+"#,
+            expect![[r#"
+                struct Strukt { u: i32, t: T, unit: () }
+                                ------  ^^^^  --------
+            "#]],
+        );
+    }
+
+    #[test]
+    fn record_literal_nonexistent_field() {
+        check(
+            r#"
+struct Strukt {
+    a: u8,
+}
+fn f() {
+    Strukt {
+        b: 8,
+        $0
+    }
+}
+"#,
+            expect![[r#"
+                struct Strukt { a: u8 }
+                                -----
+            "#]],
+        );
+    }
+
+    #[test]
+    fn tuple_variant_record_literal() {
+        check(
+            r#"
+enum Opt {
+    Some(u8),
+}
+fn f() {
+    Opt::Some {$0}
+}
+"#,
+            expect![[r#"
+                enum Opt::Some { 0: u8 }
+                                 ^^^^^
+            "#]],
+        );
+        check(
+            r#"
+enum Opt {
+    Some(u8),
+}
+fn f() {
+    Opt::Some {0:0,$0}
+}
+"#,
+            expect![[r#"
+                enum Opt::Some { 0: u8 }
+                                 -----
+            "#]],
+        );
+    }
+
+    #[test]
+    fn record_literal_self() {
+        check(
+            r#"
+struct S { t: u8 }
+impl S {
+    fn new() -> Self {
+        Self { $0 }
+    }
+}
+        "#,
+            expect![[r#"
+                struct S { t: u8 }
+                           ^^^^^
+            "#]],
+        );
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
index a6b30ba1396..3f7f6885f61 100644
--- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/static_index.rs
@@ -16,8 +16,7 @@ use crate::{
     inlay_hints::AdjustmentHintsMode,
     moniker::{def_to_moniker, MonikerResult},
     parent_module::crates_for,
-    Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
-    TryToNav,
+    Analysis, Fold, HoverConfig, HoverResult, InlayHint, InlayHintsConfig, TryToNav,
 };
 
 /// A static representation of fully analyzed source code.
@@ -107,7 +106,6 @@ impl StaticIndex<'_> {
             .analysis
             .inlay_hints(
                 &InlayHintsConfig {
-                    location_links: true,
                     render_colons: true,
                     discriminant_hints: crate::DiscriminantHints::Fieldless,
                     type_hints: true,
@@ -138,8 +136,9 @@ impl StaticIndex<'_> {
         });
         let hover_config = HoverConfig {
             links_in_hover: true,
-            documentation: Some(HoverDocFormat::Markdown),
+            documentation: true,
             keywords: true,
+            format: crate::HoverDocFormat::Markdown,
         };
         let tokens = tokens.filter(|token| {
             matches!(
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 50371d620eb..454a250f3de 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -413,11 +413,10 @@ fn traverse(
                 let string = ast::String::cast(token);
                 let string_to_highlight = ast::String::cast(descended_token.clone());
                 if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
-                    if string.is_raw() {
-                        if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
-                        {
-                            continue;
-                        }
+                    if string.is_raw()
+                        && inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
+                    {
+                        continue;
                     }
                     highlight_format_string(hl, &string, &expanded_string, range);
                     highlight_escape_string(hl, &string, range.start());
diff --git a/src/tools/rust-analyzer/crates/ide/src/typing.rs b/src/tools/rust-analyzer/crates/ide/src/typing.rs
index eba5a485636..c7e403f6b1a 100644
--- a/src/tools/rust-analyzer/crates/ide/src/typing.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/typing.rs
@@ -205,10 +205,8 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
             if expr_stmt.semicolon_token().is_some() {
                 return None;
             }
-        } else {
-            if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
-                return None;
-            }
+        } else if !ast::StmtList::can_cast(binop.syntax().parent()?.kind()) {
+            return None;
         }
 
         let expr = binop.rhs()?;
@@ -255,6 +253,10 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
         if file.syntax().text().slice(offset..expr_range.start()).contains_char('\n') {
             return None;
         }
+        // Good indicator that we will insert into a bad spot, so bail out.
+        if expr.syntax().descendants().any(|it| it.kind() == SyntaxKind::ERROR) {
+            return None;
+        }
         let offset = let_stmt.syntax().text_range().end();
         Some(TextEdit::insert(offset, ";".to_string()))
     }
@@ -409,15 +411,14 @@ mod tests {
 
     #[test]
     fn test_semi_after_let() {
-        //     do_check(r"
-        // fn foo() {
-        //     let foo =$0
-        // }
-        // ", r"
-        // fn foo() {
-        //     let foo =;
-        // }
-        // ");
+        type_char_noop(
+            '=',
+            r"
+fn foo() {
+    let foo =$0
+}
+",
+        );
         type_char(
             '=',
             r#"
@@ -431,17 +432,25 @@ fn foo() {
 }
 "#,
         );
-        //     do_check(r"
-        // fn foo() {
-        //     let foo =$0
-        //     let bar = 1;
-        // }
-        // ", r"
-        // fn foo() {
-        //     let foo =;
-        //     let bar = 1;
-        // }
-        // ");
+        type_char_noop(
+            '=',
+            r#"
+fn foo() {
+    let difference $0(counts: &HashMap<(char, char), u64>, last: char) -> u64 {
+        // ...
+    }
+}
+"#,
+        );
+        type_char_noop(
+            '=',
+            r"
+fn foo() {
+    let foo =$0
+    let bar = 1;
+}
+",
+        );
     }
 
     #[test]
diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml
new file mode 100644
index 00000000000..c73c368a14e
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "intern"
+version = "0.0.0"
+description = "TBD"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
+[lib]
+doctest = false
+
+
+[dependencies]
+# We need to freeze the version of the crate, as the raw-api feature is considered unstable
+dashmap = { version = "=5.4.0", features = ["raw-api"] }
+hashbrown = { version = "0.12.1", default-features = false }
+once_cell = "1.17.0"
+rustc-hash = "1.1.0"
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/intern.rs b/src/tools/rust-analyzer/crates/intern/src/lib.rs
index f08521a3403..fb2903696b3 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/intern.rs
+++ b/src/tools/rust-analyzer/crates/intern/src/lib.rs
@@ -14,8 +14,6 @@ use hashbrown::HashMap;
 use once_cell::sync::OnceCell;
 use rustc_hash::FxHasher;
 
-use crate::generics::GenericParams;
-
 type InternMap<T> = DashMap<Arc<T>, (), BuildHasherDefault<FxHasher>>;
 type Guard<T> = dashmap::RwLockWriteGuard<
     'static,
@@ -204,9 +202,9 @@ pub trait Internable: Hash + Eq + 'static {
 #[doc(hidden)]
 macro_rules! _impl_internable {
     ( $($t:path),+ $(,)? ) => { $(
-        impl Internable for $t {
-            fn storage() -> &'static InternStorage<Self> {
-                static STORAGE: InternStorage<$t> = InternStorage::new();
+        impl $crate::Internable for $t {
+            fn storage() -> &'static $crate::InternStorage<Self> {
+                static STORAGE: $crate::InternStorage<$t> = $crate::InternStorage::new();
                 &STORAGE
             }
         }
@@ -215,13 +213,4 @@ macro_rules! _impl_internable {
 
 pub use crate::_impl_internable as impl_internable;
 
-impl_internable!(
-    crate::type_ref::TypeRef,
-    crate::type_ref::TraitRef,
-    crate::type_ref::TypeBound,
-    crate::path::ModPath,
-    crate::path::GenericArgs,
-    crate::attr::AttrInput,
-    GenericParams,
-    str,
-);
+impl_internable!(str,);
diff --git a/src/tools/rust-analyzer/crates/limit/Cargo.toml b/src/tools/rust-analyzer/crates/limit/Cargo.toml
index 3536f73da73..c0888690992 100644
--- a/src/tools/rust-analyzer/crates/limit/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/limit/Cargo.toml
@@ -2,9 +2,11 @@
 name = "limit"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [features]
 tracking = []
diff --git a/src/tools/rust-analyzer/crates/mbe/Cargo.toml b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
index bce2fc9a70e..280ffc219ba 100644
--- a/src/tools/rust-analyzer/crates/mbe/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/mbe/Cargo.toml
@@ -2,9 +2,11 @@
 name = "mbe"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -15,10 +17,11 @@ rustc-hash = "1.1.0"
 smallvec = "1.10.0"
 tracing = "0.1.35"
 
-syntax = { path = "../syntax", version = "0.0.0" }
-parser = { path = "../parser", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
+# local deps
+syntax.workspace = true
+parser.workspace = true
+tt.workspace = true
+stdx.workspace = true
 
 [dev-dependencies]
-test-utils = { path = "../test-utils" }
+test-utils.workspace = true
diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
index 4b750025018..894355fcbc9 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
@@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
 
 use crate::{
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    syntax_node_to_token_tree, DeclarativeMacro,
+    syntax_node_to_token_tree, tt, DeclarativeMacro,
 };
 
 #[test]
@@ -91,7 +91,14 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
                 // So we just skip any error cases and try again
                 let mut try_cnt = 0;
                 loop {
-                    let mut subtree = tt::Subtree::default();
+                    let mut subtree = tt::Subtree {
+                        delimiter: tt::Delimiter {
+                            open: tt::TokenId::UNSPECIFIED,
+                            close: tt::TokenId::UNSPECIFIED,
+                            kind: tt::DelimiterKind::Invisible,
+                        },
+                        token_trees: vec![],
+                    };
                     for op in rule.lhs.iter() {
                         collect_from_op(op, &mut subtree, &mut seed);
                     }
@@ -145,7 +152,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
             Op::Ident(it) => parent.token_trees.push(tt::Leaf::from(it.clone()).into()),
             Op::Punct(puncts) => {
                 for punct in puncts {
-                    parent.token_trees.push(tt::Leaf::from(punct.clone()).into());
+                    parent.token_trees.push(tt::Leaf::from(*punct).into());
                 }
             }
             Op::Repeat { tokens, kind, separator } => {
@@ -196,12 +203,15 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
             *seed
         }
         fn make_ident(ident: &str) -> tt::TokenTree {
-            tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
-                .into()
+            tt::Leaf::Ident(tt::Ident {
+                span: tt::TokenId::unspecified(),
+                text: SmolStr::new(ident),
+            })
+            .into()
         }
         fn make_punct(char: char) -> tt::TokenTree {
             tt::Leaf::Punct(tt::Punct {
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
                 char,
                 spacing: tt::Spacing::Alone,
             })
@@ -209,7 +219,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
         }
         fn make_literal(lit: &str) -> tt::TokenTree {
             tt::Leaf::Literal(tt::Literal {
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
                 text: SmolStr::new(lit),
             })
             .into()
@@ -219,7 +229,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
             token_trees: Option<Vec<tt::TokenTree>>,
         ) -> tt::TokenTree {
             tt::Subtree {
-                delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
+                delimiter: tt::Delimiter {
+                    open: tt::TokenId::unspecified(),
+                    close: tt::TokenId::unspecified(),
+                    kind,
+                },
                 token_trees: token_trees.unwrap_or_default(),
             }
             .into()
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander.rs b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
index 100ec6bfb93..7537dc32261 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander.rs
@@ -8,7 +8,7 @@ mod transcriber;
 use rustc_hash::FxHashMap;
 use syntax::SmolStr;
 
-use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
 
 pub(crate) fn expand_rules(
     rules: &[crate::Rule],
@@ -45,7 +45,10 @@ pub(crate) fn expand_rules(
             transcriber::transcribe(&rule.rhs, &match_.bindings);
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
-        ExpandResult::only_err(ExpandError::NoMatchingRule)
+        ExpandResult::with_err(
+            tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
+            ExpandError::NoMatchingRule,
+        )
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
index 88eae136f73..f4ea9e5c816 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs
@@ -67,6 +67,7 @@ use syntax::SmolStr;
 use crate::{
     expander::{Binding, Bindings, ExpandResult, Fragment},
     parser::{MetaVarKind, Op, RepeatKind, Separator},
+    tt,
     tt_iter::TtIter,
     ExpandError, MetaTemplate, ValueResult,
 };
@@ -75,7 +76,8 @@ impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
         // FIXME: Do we have a better way to represent an empty token ?
         // Insert an empty subtree for empty token
-        let tt = tt::Subtree::default().into();
+        let tt =
+            tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
         self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
     }
 
@@ -462,9 +464,9 @@ fn match_loop_inner<'t>(
             }
             OpDelimited::Op(Op::Subtree { tokens, delimiter }) => {
                 if let Ok(subtree) = src.clone().expect_subtree() {
-                    if subtree.delimiter_kind() == delimiter.map(|it| it.kind) {
+                    if subtree.delimiter.kind == delimiter.kind {
                         item.stack.push(item.dot);
-                        item.dot = tokens.iter_delimited(delimiter.as_ref());
+                        item.dot = tokens.iter_delimited(Some(delimiter));
                         cur_items.push(item);
                     }
                 }
@@ -663,8 +665,8 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree) -> Match {
             }
             res.add_err(ExpandError::LeftoverTokens);
 
-            if let Some(error_reover_item) = error_recover_item {
-                res.bindings = bindings_builder.build(&error_reover_item);
+            if let Some(error_recover_item) = error_recover_item {
+                res.bindings = bindings_builder.build(&error_recover_item);
             }
             return res;
         }
@@ -782,7 +784,7 @@ fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult<Opt
                             match neg {
                                 None => lit.into(),
                                 Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
-                                    delimiter: None,
+                                    delimiter: tt::Delimiter::unspecified(),
                                     token_trees: vec![neg, lit.into()],
                                 }),
                             }
@@ -810,7 +812,11 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
 }
 impl MetaTemplate {
     fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
-        OpDelimitedIter { inner: &self.0, idx: 0, delimited }
+        OpDelimitedIter {
+            inner: &self.0,
+            idx: 0,
+            delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
+        }
     }
 }
 
@@ -824,20 +830,21 @@ enum OpDelimited<'a> {
 #[derive(Debug, Clone, Copy)]
 struct OpDelimitedIter<'a> {
     inner: &'a [Op],
-    delimited: Option<&'a tt::Delimiter>,
+    delimited: &'a tt::Delimiter,
     idx: usize,
 }
 
 impl<'a> OpDelimitedIter<'a> {
     fn is_eof(&self) -> bool {
-        let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+        let len = self.inner.len()
+            + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
         self.idx >= len
     }
 
     fn peek(&self) -> Option<OpDelimited<'a>> {
-        match self.delimited {
-            None => self.inner.get(self.idx).map(OpDelimited::Op),
-            Some(_) => match self.idx {
+        match self.delimited.kind {
+            tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
+            _ => match self.idx {
                 0 => Some(OpDelimited::Open),
                 i if i == self.inner.len() + 1 => Some(OpDelimited::Close),
                 i => self.inner.get(i - 1).map(OpDelimited::Op),
@@ -860,7 +867,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
-        let len = self.inner.len() + if self.delimited.is_some() { 2 } else { 0 };
+        let len = self.inner.len()
+            + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
         let remain = len.saturating_sub(self.idx);
         (remain, Some(remain))
     }
@@ -904,7 +912,10 @@ impl<'a> TtIter<'a> {
             } else {
                 let puncts = self.expect_glued_punct()?;
                 let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
-                Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter: None, token_trees }))
+                Ok(tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: tt::Delimiter::unspecified(),
+                    token_trees,
+                }))
             }
         } else {
             self.next().ok_or(()).cloned()
@@ -919,7 +930,7 @@ impl<'a> TtIter<'a> {
         let ident = self.expect_ident_or_underscore()?;
 
         Ok(tt::Subtree {
-            delimiter: None,
+            delimiter: tt::Delimiter::unspecified(),
             token_trees: vec![
                 tt::Leaf::Punct(*punct).into(),
                 tt::Leaf::Ident(ident.clone()).into(),
diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
index db0d327bf40..dffb40d4bc8 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/expander/transcriber.rs
@@ -2,11 +2,11 @@
 //! `$ident => foo`, interpolates variables in the template, to get `fn foo() {}`
 
 use syntax::SmolStr;
-use tt::{Delimiter, Subtree};
 
 use crate::{
     expander::{Binding, Bindings, Fragment},
     parser::{MetaVarKind, Op, RepeatKind, Separator},
+    tt::{self, Delimiter},
     ExpandError, ExpandResult, MetaTemplate,
 };
 
@@ -44,22 +44,23 @@ impl Bindings {
             Binding::Missing(it) => Ok(match it {
                 MetaVarKind::Stmt => {
                     Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
-                        id: tt::TokenId::unspecified(),
+                        span: tt::TokenId::unspecified(),
                         char: ';',
                         spacing: tt::Spacing::Alone,
                     })))
                 }
                 MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
+                    delimiter: tt::Delimiter {
+                        open: tt::TokenId::unspecified(),
+                        close: tt::TokenId::unspecified(),
                         kind: tt::DelimiterKind::Brace,
-                    }),
+                    },
                     token_trees: vec![],
                 })),
                 // FIXME: Meta and Item should get proper defaults
                 MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
                     Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                        delimiter: None,
+                        delimiter: tt::Delimiter::UNSPECIFIED,
                         token_trees: vec![],
                     }))
                 }
@@ -71,19 +72,19 @@ impl Bindings {
                 | MetaVarKind::Ident => {
                     Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                         text: SmolStr::new_inline("missing"),
-                        id: tt::TokenId::unspecified(),
+                        span: tt::TokenId::unspecified(),
                     })))
                 }
                 MetaVarKind::Lifetime => {
                     Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                         text: SmolStr::new_inline("'missing"),
-                        id: tt::TokenId::unspecified(),
+                        span: tt::TokenId::unspecified(),
                     })))
                 }
                 MetaVarKind::Literal => {
                     Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                         text: SmolStr::new_inline("\"missing\""),
-                        id: tt::TokenId::unspecified(),
+                        span: tt::TokenId::unspecified(),
                     })))
                 }
             }),
@@ -138,12 +139,12 @@ fn expand_subtree(
             Op::Ident(it) => arena.push(tt::Leaf::from(it.clone()).into()),
             Op::Punct(puncts) => {
                 for punct in puncts {
-                    arena.push(tt::Leaf::from(punct.clone()).into());
+                    arena.push(tt::Leaf::from(*punct).into());
                 }
             }
             Op::Subtree { tokens, delimiter } => {
                 let ExpandResult { value: tt, err: e } =
-                    expand_subtree(ctx, tokens, *delimiter, arena);
+                    expand_subtree(ctx, tokens, Some(*delimiter), arena);
                 err = err.or(e);
                 arena.push(tt.into());
             }
@@ -170,7 +171,7 @@ fn expand_subtree(
                 arena.push(
                     tt::Leaf::Literal(tt::Literal {
                         text: index.to_string().into(),
-                        id: tt::TokenId::unspecified(),
+                        span: tt::TokenId::unspecified(),
                     })
                     .into(),
                 );
@@ -179,7 +180,13 @@ fn expand_subtree(
     }
     // drain the elements added in this instance of expand_subtree
     let tts = arena.drain(start_elements..).collect();
-    ExpandResult { value: tt::Subtree { delimiter, token_trees: tts }, err }
+    ExpandResult {
+        value: tt::Subtree {
+            delimiter: delimiter.unwrap_or_else(tt::Delimiter::unspecified),
+            token_trees: tts,
+        },
+        err,
+    }
 }
 
 fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandResult<Fragment> {
@@ -201,18 +208,25 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe
         // ```
         // We just treat it a normal tokens
         let tt = tt::Subtree {
-            delimiter: None,
+            delimiter: tt::Delimiter::UNSPECIFIED,
             token_trees: vec![
-                tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, id }).into(),
-                tt::Leaf::from(tt::Ident { text: v.clone(), id }).into(),
+                tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
+                    .into(),
+                tt::Leaf::from(tt::Ident { text: v.clone(), span: id }).into(),
             ],
         }
         .into();
         ExpandResult::ok(Fragment::Tokens(tt))
     } else {
         ctx.bindings.get(v, &mut ctx.nesting).map_or_else(
-            |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) },
-            |it| ExpandResult::ok(it),
+            |e| ExpandResult {
+                value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
+                    delimiter: tt::Delimiter::unspecified(),
+                    token_trees: vec![],
+                })),
+                err: Some(e),
+            },
+            ExpandResult::ok,
         )
     }
 }
@@ -249,7 +263,10 @@ fn expand_repeat(
                 ctx
             );
             return ExpandResult {
-                value: Fragment::Tokens(Subtree::default().into()),
+                value: Fragment::Tokens(
+                    tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }
+                        .into(),
+                ),
                 err: Some(ExpandError::LimitExceeded),
             };
         }
@@ -258,7 +275,7 @@ fn expand_repeat(
             continue;
         }
 
-        t.delimiter = None;
+        t.delimiter = tt::Delimiter::unspecified();
         push_subtree(&mut buf, t);
 
         if let Some(sep) = separator {
@@ -292,7 +309,7 @@ fn expand_repeat(
 
     // Check if it is a single token subtree without any delimiter
     // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-    let tt = tt::Subtree { delimiter: None, token_trees: buf }.into();
+    let tt = tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: buf }.into();
 
     if RepeatKind::OneOrMore == kind && counter == 0 {
         return ExpandResult {
@@ -307,11 +324,12 @@ fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
     match fragment {
         Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
         Fragment::Expr(tt::TokenTree::Subtree(mut tt)) => {
-            if tt.delimiter.is_none() {
-                tt.delimiter = Some(tt::Delimiter {
-                    id: tt::TokenId::unspecified(),
+            if tt.delimiter.kind == tt::DelimiterKind::Invisible {
+                tt.delimiter = tt::Delimiter {
+                    open: tt::TokenId::UNSPECIFIED,
+                    close: tt::TokenId::UNSPECIFIED,
                     kind: tt::DelimiterKind::Parenthesis,
-                })
+                };
             }
             buf.push(tt.into())
         }
@@ -320,8 +338,8 @@ fn push_fragment(buf: &mut Vec<tt::TokenTree>, fragment: Fragment) {
 }
 
 fn push_subtree(buf: &mut Vec<tt::TokenTree>, tt: tt::Subtree) {
-    match tt.delimiter {
-        None => buf.extend(tt.token_trees),
-        Some(_) => buf.push(tt.into()),
+    match tt.delimiter.kind {
+        tt::DelimiterKind::Invisible => buf.extend(tt.token_trees),
+        _ => buf.push(tt.into()),
     }
 }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
index 2373db97a3e..ac107a0d6d6 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs
@@ -18,6 +18,8 @@ mod to_parser_input;
 mod benchmark;
 mod token_map;
 
+use ::tt::token_id as tt;
+
 use std::fmt;
 
 use crate::{
@@ -26,8 +28,8 @@ use crate::{
 };
 
 // FIXME: we probably should re-think  `token_tree_to_syntax_node` interfaces
+pub use self::tt::{Delimiter, DelimiterKind, Punct};
 pub use ::parser::TopEntryPoint;
-pub use tt::{Delimiter, DelimiterKind, Punct};
 
 pub use crate::{
     syntax_bridge::{
@@ -125,24 +127,26 @@ impl Shift {
 
         // Find the max token id inside a subtree
         fn max_id(subtree: &tt::Subtree) -> Option<u32> {
-            let filter = |tt: &_| match tt {
-                tt::TokenTree::Subtree(subtree) => {
-                    let tree_id = max_id(subtree);
-                    match subtree.delimiter {
-                        Some(it) if it.id != tt::TokenId::unspecified() => {
-                            Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
+            let filter =
+                |tt: &_| match tt {
+                    tt::TokenTree::Subtree(subtree) => {
+                        let tree_id = max_id(subtree);
+                        if subtree.delimiter.open != tt::TokenId::unspecified() {
+                            Some(tree_id.map_or(subtree.delimiter.open.0, |t| {
+                                t.max(subtree.delimiter.open.0)
+                            }))
+                        } else {
+                            tree_id
                         }
-                        _ => tree_id,
                     }
-                }
-                tt::TokenTree::Leaf(leaf) => {
-                    let &(tt::Leaf::Ident(tt::Ident { id, .. })
-                    | tt::Leaf::Punct(tt::Punct { id, .. })
-                    | tt::Leaf::Literal(tt::Literal { id, .. })) = leaf;
+                    tt::TokenTree::Leaf(leaf) => {
+                        let &(tt::Leaf::Ident(tt::Ident { span, .. })
+                        | tt::Leaf::Punct(tt::Punct { span, .. })
+                        | tt::Leaf::Literal(tt::Literal { span, .. })) = leaf;
 
-                    (id != tt::TokenId::unspecified()).then_some(id.0)
-                }
-            };
+                        (span != tt::TokenId::unspecified()).then_some(span.0)
+                    }
+                };
             subtree.token_trees.iter().filter_map(filter).max()
         }
     }
@@ -152,14 +156,13 @@ impl Shift {
         for t in &mut tt.token_trees {
             match t {
                 tt::TokenTree::Leaf(
-                    tt::Leaf::Ident(tt::Ident { id, .. })
-                    | tt::Leaf::Punct(tt::Punct { id, .. })
-                    | tt::Leaf::Literal(tt::Literal { id, .. }),
-                ) => *id = self.shift(*id),
+                    tt::Leaf::Ident(tt::Ident { span, .. })
+                    | tt::Leaf::Punct(tt::Punct { span, .. })
+                    | tt::Leaf::Literal(tt::Literal { span, .. }),
+                ) => *span = self.shift(*span),
                 tt::TokenTree::Subtree(tt) => {
-                    if let Some(it) = tt.delimiter.as_mut() {
-                        it.id = self.shift(it.id);
-                    }
+                    tt.delimiter.open = self.shift(tt.delimiter.open);
+                    tt.delimiter.close = self.shift(tt.delimiter.close);
                     self.shift_all(tt)
                 }
             }
@@ -216,7 +219,7 @@ impl DeclarativeMacro {
         let mut src = TtIter::new(tt);
         let mut rules = Vec::new();
 
-        if Some(tt::DelimiterKind::Brace) == tt.delimiter_kind() {
+        if tt::DelimiterKind::Brace == tt.delimiter.kind {
             cov_mark::hit!(parse_macro_def_rules);
             while src.len() > 0 {
                 let rule = Rule::parse(&mut src, true)?;
@@ -325,6 +328,10 @@ impl<T, E> ValueResult<T, E> {
         Self { value, err: None }
     }
 
+    pub fn with_err(value: T, err: E) -> Self {
+        Self { value, err: Some(err) }
+    }
+
     pub fn only_err(err: E) -> Self
     where
         T: Default,
diff --git a/src/tools/rust-analyzer/crates/mbe/src/parser.rs b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
index fad905e97f4..fd3d64719ac 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/parser.rs
@@ -4,7 +4,7 @@
 use smallvec::{smallvec, SmallVec};
 use syntax::SmolStr;
 
-use crate::{tt_iter::TtIter, ParseError};
+use crate::{tt, tt_iter::TtIter, ParseError};
 
 /// Consider
 ///
@@ -54,7 +54,7 @@ pub(crate) enum Op {
     Ignore { name: SmolStr, id: tt::TokenId },
     Index { depth: u32 },
     Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
-    Subtree { tokens: MetaTemplate, delimiter: Option<tt::Delimiter> },
+    Subtree { tokens: MetaTemplate, delimiter: tt::Delimiter },
     Literal(tt::Literal),
     Punct(SmallVec<[tt::Punct; 3]>),
     Ident(tt::Ident),
@@ -126,17 +126,17 @@ fn next_op(
             src.next().expect("first token already peeked");
             // Note that the '$' itself is a valid token inside macro_rules.
             let second = match src.next() {
-                None => return Ok(Op::Punct(smallvec![p.clone()])),
+                None => return Ok(Op::Punct(smallvec![*p])),
                 Some(it) => it,
             };
             match second {
-                tt::TokenTree::Subtree(subtree) => match subtree.delimiter_kind() {
-                    Some(tt::DelimiterKind::Parenthesis) => {
+                tt::TokenTree::Subtree(subtree) => match subtree.delimiter.kind {
+                    tt::DelimiterKind::Parenthesis => {
                         let (separator, kind) = parse_repeat(src)?;
                         let tokens = MetaTemplate::parse(subtree, mode)?;
                         Op::Repeat { tokens, separator, kind }
                     }
-                    Some(tt::DelimiterKind::Brace) => match mode {
+                    tt::DelimiterKind::Brace => match mode {
                         Mode::Template => {
                             parse_metavar_expr(&mut TtIter::new(subtree)).map_err(|()| {
                                 ParseError::unexpected("invalid metavariable expression")
@@ -157,18 +157,18 @@ fn next_op(
                 tt::TokenTree::Leaf(leaf) => match leaf {
                     tt::Leaf::Ident(ident) if ident.text == "crate" => {
                         // We simply produce identifier `$crate` here. And it will be resolved when lowering ast to Path.
-                        Op::Ident(tt::Ident { text: "$crate".into(), id: ident.id })
+                        Op::Ident(tt::Ident { text: "$crate".into(), span: ident.span })
                     }
                     tt::Leaf::Ident(ident) => {
                         let kind = eat_fragment_kind(src, mode)?;
                         let name = ident.text.clone();
-                        let id = ident.id;
+                        let id = ident.span;
                         Op::Var { name, kind, id }
                     }
                     tt::Leaf::Literal(lit) if is_boolean_literal(lit) => {
                         let kind = eat_fragment_kind(src, mode)?;
                         let name = lit.text.clone();
-                        let id = lit.id;
+                        let id = lit.span;
                         Op::Var { name, kind, id }
                     }
                     tt::Leaf::Punct(punct @ tt::Punct { char: '$', .. }) => match mode {
@@ -284,7 +284,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
     let func = src.expect_ident()?;
     let args = src.expect_subtree()?;
 
-    if args.delimiter_kind() != Some(tt::DelimiterKind::Parenthesis) {
+    if args.delimiter.kind != tt::DelimiterKind::Parenthesis {
         return Err(());
     }
 
@@ -293,7 +293,7 @@ fn parse_metavar_expr(src: &mut TtIter<'_>) -> Result<Op, ()> {
     let op = match &*func.text {
         "ignore" => {
             let ident = args.expect_ident()?;
-            Op::Ignore { name: ident.text.clone(), id: ident.id }
+            Op::Ignore { name: ident.text.clone(), id: ident.span }
         }
         "index" => {
             let depth = if args.len() == 0 { 0 } else { args.expect_u32_literal()? };
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
index 5c965055634..fb531340108 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs
@@ -8,9 +8,16 @@ use syntax::{
     SyntaxKind::*,
     SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
 };
-use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
+use crate::{
+    to_parser_input::to_parser_input,
+    tt::{
+        self,
+        buffer::{Cursor, TokenBuffer},
+    },
+    tt_iter::TtIter,
+    TokenMap,
+};
 
 #[cfg(test)]
 mod tests;
@@ -74,9 +81,10 @@ pub fn token_tree_to_syntax_node(
     entry_point: parser::TopEntryPoint,
 ) -> (Parse<SyntaxNode>, TokenMap) {
     let buffer = match tt {
-        tt::Subtree { delimiter: None, token_trees } => {
-            TokenBuffer::from_tokens(token_trees.as_slice())
-        }
+        tt::Subtree {
+            delimiter: tt::Delimiter { kind: tt::DelimiterKind::Invisible, .. },
+            token_trees,
+        } => TokenBuffer::from_tokens(token_trees.as_slice()),
         _ => TokenBuffer::from_subtree(tt),
     };
     let parser_input = to_parser_input(&buffer);
@@ -87,13 +95,15 @@ pub fn token_tree_to_syntax_node(
             parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
                 tree_sink.token(kind, n_raw_tokens)
             }
+            parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+                tree_sink.float_split(has_pseudo_dot)
+            }
             parser::Step::Enter { kind } => tree_sink.start_node(kind),
             parser::Step::Exit => tree_sink.finish_node(),
             parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
         }
     }
-    let (parse, range_map) = tree_sink.finish();
-    (parse, range_map)
+    tree_sink.finish()
 }
 
 /// Convert a string to a `TokenTree`
@@ -132,7 +142,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
         res.push(match expanded.value {
             None => break,
             Some(tt @ tt::TokenTree::Leaf(_)) => {
-                tt::Subtree { delimiter: None, token_trees: vec![tt] }
+                tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![tt] }
             }
             Some(tt::TokenTree::Subtree(tt)) => tt,
         });
@@ -145,7 +155,10 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     }
 
     if iter.peek_n(0).is_some() {
-        res.push(tt::Subtree { delimiter: None, token_trees: iter.cloned().collect() });
+        res.push(tt::Subtree {
+            delimiter: tt::Delimiter::unspecified(),
+            token_trees: iter.cloned().collect(),
+        });
     }
 
     res
@@ -159,7 +172,7 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
     }
 
     let entry = StackEntry {
-        subtree: tt::Subtree { delimiter: None, ..Default::default() },
+        subtree: tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
         // never used (delimiter is `None`)
         idx: !0,
         open_range: TextRange::empty(TextSize::of('.')),
@@ -186,7 +199,7 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
                         if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
                             sub.token_trees.get_mut(2)
                         {
-                            lit.id = id
+                            lit.span = id
                         }
                     }
                     tt
@@ -199,13 +212,14 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
                 assert_eq!(range.len(), TextSize::of('.'));
             }
 
-            if let Some(delim) = subtree.delimiter {
-                let expected = match delim.kind {
-                    tt::DelimiterKind::Parenthesis => T![')'],
-                    tt::DelimiterKind::Brace => T!['}'],
-                    tt::DelimiterKind::Bracket => T![']'],
-                };
+            let expected = match subtree.delimiter.kind {
+                tt::DelimiterKind::Parenthesis => Some(T![')']),
+                tt::DelimiterKind::Brace => Some(T!['}']),
+                tt::DelimiterKind::Bracket => Some(T![']']),
+                tt::DelimiterKind::Invisible => None,
+            };
 
+            if let Some(expected) = expected {
                 if kind == expected {
                     if let Some(entry) = stack.pop() {
                         conv.id_alloc().close_delim(entry.idx, Some(range));
@@ -223,9 +237,11 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
             };
 
             if let Some(kind) = delim {
-                let mut subtree = tt::Subtree::default();
                 let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
-                subtree.delimiter = Some(tt::Delimiter { id, kind });
+                let subtree = tt::Subtree {
+                    delimiter: tt::Delimiter { open: id, close: tt::TokenId::UNSPECIFIED, kind },
+                    token_trees: vec![],
+                };
                 stack.push(StackEntry { subtree, idx, open_range: range });
                 continue;
             }
@@ -240,13 +256,20 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
                     panic!("Token from lexer must be single char: token = {token:#?}");
                 }
             };
-            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
-                .into()
+            tt::Leaf::from(tt::Punct {
+                char,
+                spacing,
+                span: conv.id_alloc().alloc(range, synth_id),
+            })
+            .into()
         } else {
             macro_rules! make_leaf {
                 ($i:ident) => {
-                    tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
-                        .into()
+                    tt::$i {
+                        span: conv.id_alloc().alloc(range, synth_id),
+                        text: token.to_text(conv),
+                    }
+                    .into()
                 };
             }
             let leaf: tt::Leaf = match kind {
@@ -261,14 +284,14 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
                     let apostrophe = tt::Leaf::from(tt::Punct {
                         char: '\'',
                         spacing: tt::Spacing::Joint,
-                        id: conv.id_alloc().alloc(r, synth_id),
+                        span: conv.id_alloc().alloc(r, synth_id),
                     });
                     result.push(apostrophe.into());
 
                     let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                     let ident = tt::Leaf::from(tt::Ident {
                         text: SmolStr::new(&token.to_text(conv)[1..]),
-                        id: conv.id_alloc().alloc(r, synth_id),
+                        span: conv.id_alloc().alloc(r, synth_id),
                     });
                     result.push(ident.into());
                     continue;
@@ -289,11 +312,12 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
 
         conv.id_alloc().close_delim(entry.idx, None);
         let leaf: tt::Leaf = tt::Punct {
-            id: conv.id_alloc().alloc(entry.open_range, None),
-            char: match entry.subtree.delimiter.unwrap().kind {
+            span: conv.id_alloc().alloc(entry.open_range, None),
+            char: match entry.subtree.delimiter.kind {
                 tt::DelimiterKind::Parenthesis => '(',
                 tt::DelimiterKind::Brace => '{',
                 tt::DelimiterKind::Bracket => '[',
+                tt::DelimiterKind::Invisible => '$',
             },
             spacing: tt::Spacing::Alone,
         }
@@ -373,10 +397,11 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
         token_trees.push(mk_punct('!'));
     }
     token_trees.push(tt::TokenTree::from(tt::Subtree {
-        delimiter: Some(tt::Delimiter {
+        delimiter: tt::Delimiter {
+            open: tt::TokenId::UNSPECIFIED,
+            close: tt::TokenId::UNSPECIFIED,
             kind: tt::DelimiterKind::Bracket,
-            id: tt::TokenId::unspecified(),
-        }),
+        },
         token_trees: meta_tkns,
     }));
 
@@ -386,7 +411,7 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
     fn mk_ident(s: &str) -> tt::TokenTree {
         tt::TokenTree::from(tt::Leaf::from(tt::Ident {
             text: s.into(),
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }))
     }
 
@@ -394,12 +419,12 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
         tt::TokenTree::from(tt::Leaf::from(tt::Punct {
             char: c,
             spacing: tt::Spacing::Alone,
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }))
     }
 
     fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
-        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
+        let lit = tt::Literal { text: doc_comment_text(comment), span: tt::TokenId::unspecified() };
 
         tt::TokenTree::from(tt::Leaf::from(lit))
     }
@@ -761,18 +786,56 @@ impl<'a> TtTreeSink<'a> {
     }
 }
 
-fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
+fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
     let texts = match d {
         tt::DelimiterKind::Parenthesis => "()",
         tt::DelimiterKind::Brace => "{}",
         tt::DelimiterKind::Bracket => "[]",
+        tt::DelimiterKind::Invisible => return None,
     };
 
     let idx = closing as usize;
-    &texts[idx..texts.len() - (1 - idx)]
+    Some(&texts[idx..texts.len() - (1 - idx)])
 }
 
 impl<'a> TtTreeSink<'a> {
+    /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
+    /// This occurs when a float literal is used as a field access.
+    fn float_split(&mut self, has_pseudo_dot: bool) {
+        let (text, _span) = match self.cursor.token_tree() {
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(lit), _)) => {
+                (lit.text.as_str(), lit.span)
+            }
+            _ => unreachable!(),
+        };
+        match text.split_once('.') {
+            Some((left, right)) => {
+                assert!(!left.is_empty());
+                self.inner.start_node(SyntaxKind::NAME_REF);
+                self.inner.token(SyntaxKind::INT_NUMBER, left);
+                self.inner.finish_node();
+
+                // here we move the exit up, the original exit has been deleted in process
+                self.inner.finish_node();
+
+                self.inner.token(SyntaxKind::DOT, ".");
+
+                if has_pseudo_dot {
+                    assert!(right.is_empty(), "{left}.{right}");
+                } else {
+                    self.inner.start_node(SyntaxKind::NAME_REF);
+                    self.inner.token(SyntaxKind::INT_NUMBER, right);
+                    self.inner.finish_node();
+
+                    // the parser creates an unbalanced start node, we are required to close it here
+                    self.inner.finish_node();
+                }
+            }
+            None => unreachable!(),
+        }
+        self.cursor = self.cursor.bump();
+    }
+
     fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         if kind == LIFETIME_IDENT {
             n_tokens = 2;
@@ -790,13 +853,16 @@ impl<'a> TtTreeSink<'a> {
                     Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                         // Mark the range if needed
                         let (text, id) = match leaf {
-                            tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
+                            tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.span),
                             tt::Leaf::Punct(punct) => {
                                 assert!(punct.char.is_ascii());
                                 tmp = punct.char as u8;
-                                (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
+                                (
+                                    std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(),
+                                    punct.span,
+                                )
                             }
-                            tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
+                            tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.span),
                         };
                         let range = TextRange::at(self.text_pos, TextSize::of(text));
                         self.token_map.insert(id, range);
@@ -805,10 +871,10 @@ impl<'a> TtTreeSink<'a> {
                     }
                     Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                         self.cursor = self.cursor.subtree().unwrap();
-                        match subtree.delimiter {
-                            Some(d) => {
-                                self.open_delims.insert(d.id, self.text_pos);
-                                delim_to_str(d.kind, false)
+                        match delim_to_str(subtree.delimiter.kind, false) {
+                            Some(it) => {
+                                self.open_delims.insert(subtree.delimiter.open, self.text_pos);
+                                it
                             }
                             None => continue,
                         }
@@ -816,15 +882,21 @@ impl<'a> TtTreeSink<'a> {
                     None => {
                         let parent = self.cursor.end().unwrap();
                         self.cursor = self.cursor.bump();
-                        match parent.delimiter {
-                            Some(d) => {
-                                if let Some(open_delim) = self.open_delims.get(&d.id) {
+                        match delim_to_str(parent.delimiter.kind, true) {
+                            Some(it) => {
+                                if let Some(open_delim) =
+                                    self.open_delims.get(&parent.delimiter.open)
+                                {
                                     let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                     let close_range =
                                         TextRange::at(self.text_pos, TextSize::of('('));
-                                    self.token_map.insert_delim(d.id, open_range, close_range);
+                                    self.token_map.insert_delim(
+                                        parent.delimiter.open,
+                                        open_range,
+                                        close_range,
+                                    );
                                 }
-                                delim_to_str(d.kind, true)
+                                it
                             }
                             None => continue,
                         }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
index c1a60836558..fa0125f3e9e 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge/tests.rs
@@ -29,8 +29,8 @@ fn check_punct_spacing(fixture: &str) {
     let mut cursor = buf.begin();
     while !cursor.eof() {
         while let Some(token_tree) = cursor.token_tree() {
-            if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree {
-                if let Some(expected) = annotations.remove(id) {
+            if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, span, .. }), _) = token_tree {
+                if let Some(expected) = annotations.remove(span) {
                     assert_eq!(expected, *spacing);
                 }
             }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
index 7013aa58b55..051e20b3a3f 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/to_parser_input.rs
@@ -2,7 +2,8 @@
 //! format that works for our parser.
 
 use syntax::{SyntaxKind, SyntaxKind::*, T};
-use tt::buffer::TokenBuffer;
+
+use crate::tt::buffer::TokenBuffer;
 
 pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
     let mut res = parser::Input::default();
@@ -44,6 +45,13 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
                             .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &lit));
 
                         res.push(kind);
+
+                        if kind == FLOAT_NUMBER && !inner_text.ends_with('.') {
+                            // Tag the token as joint if it is float with a fractional part
+                            // we use this jointness to inform the parser about what token split
+                            // event to emit when we encounter a float literal in a field access
+                            res.was_joint();
+                        }
                     }
                     tt::Leaf::Ident(ident) => match ident.text.as_ref() {
                         "_" => res.push(T![_]),
@@ -70,23 +78,25 @@ pub(crate) fn to_parser_input(buffer: &TokenBuffer<'_>) -> parser::Input {
                 cursor.bump()
             }
             Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
-                if let Some(d) = subtree.delimiter_kind() {
-                    res.push(match d {
-                        tt::DelimiterKind::Parenthesis => T!['('],
-                        tt::DelimiterKind::Brace => T!['{'],
-                        tt::DelimiterKind::Bracket => T!['['],
-                    });
+                if let Some(kind) = match subtree.delimiter.kind {
+                    tt::DelimiterKind::Parenthesis => Some(T!['(']),
+                    tt::DelimiterKind::Brace => Some(T!['{']),
+                    tt::DelimiterKind::Bracket => Some(T!['[']),
+                    tt::DelimiterKind::Invisible => None,
+                } {
+                    res.push(kind);
                 }
                 cursor.subtree().unwrap()
             }
             None => match cursor.end() {
                 Some(subtree) => {
-                    if let Some(d) = subtree.delimiter_kind() {
-                        res.push(match d {
-                            tt::DelimiterKind::Parenthesis => T![')'],
-                            tt::DelimiterKind::Brace => T!['}'],
-                            tt::DelimiterKind::Bracket => T![']'],
-                        })
+                    if let Some(kind) = match subtree.delimiter.kind {
+                        tt::DelimiterKind::Parenthesis => Some(T![')']),
+                        tt::DelimiterKind::Brace => Some(T!['}']),
+                        tt::DelimiterKind::Bracket => Some(T![']']),
+                        tt::DelimiterKind::Invisible => None,
+                    } {
+                        res.push(kind);
                     }
                     cursor.bump()
                 }
diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
index bee7b5de6ac..f744481f3ae 100644
--- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
+++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs
@@ -3,9 +3,8 @@
 
 use smallvec::{smallvec, SmallVec};
 use syntax::SyntaxKind;
-use tt::buffer::TokenBuffer;
 
-use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
+use crate::{to_parser_input::to_parser_input, tt, ExpandError, ExpandResult};
 
 #[derive(Debug, Clone)]
 pub(crate) struct TtIter<'a> {
@@ -114,7 +113,7 @@ impl<'a> TtIter<'a> {
             ('.', '.', Some('.' | '=')) | ('<', '<', Some('=')) | ('>', '>', Some('=')) => {
                 let _ = self.next().unwrap();
                 let _ = self.next().unwrap();
-                Ok(smallvec![first, second.clone(), third.unwrap().clone()])
+                Ok(smallvec![first, *second, *third.unwrap()])
             }
             ('-' | '!' | '*' | '/' | '&' | '%' | '^' | '+' | '<' | '=' | '>' | '|', '=', _)
             | ('-' | '=' | '>', '>', _)
@@ -125,7 +124,7 @@ impl<'a> TtIter<'a> {
             | ('<', '<', _)
             | ('|', '|', _) => {
                 let _ = self.next().unwrap();
-                Ok(smallvec![first, second.clone()])
+                Ok(smallvec![first, *second])
             }
             _ => Ok(smallvec![first]),
         }
@@ -135,7 +134,7 @@ impl<'a> TtIter<'a> {
         &mut self,
         entry_point: parser::PrefixEntryPoint,
     ) -> ExpandResult<Option<tt::TokenTree>> {
-        let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
+        let buffer = tt::buffer::TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
         let tree_traversal = entry_point.parse(&parser_input);
 
@@ -151,6 +150,11 @@ impl<'a> TtIter<'a> {
                         cursor = cursor.bump_subtree();
                     }
                 }
+                parser::Step::FloatSplit { .. } => {
+                    // FIXME: We need to split the tree properly here, but mutating the token trees
+                    // in the buffer is somewhat tricky to pull off.
+                    cursor = cursor.bump_subtree();
+                }
                 parser::Step::Enter { .. } | parser::Step::Exit => (),
                 parser::Step::Error { .. } => error = true,
             }
@@ -167,19 +171,18 @@ impl<'a> TtIter<'a> {
 
         if cursor.is_root() {
             while curr != cursor {
-                if let Some(token) = curr.token_tree() {
-                    res.push(token);
-                }
+                let Some(token) = curr.token_tree() else { break };
+                res.push(token.cloned());
                 curr = curr.bump();
             }
         }
+
         self.inner = self.inner.as_slice()[res.len()..].iter();
         let res = match res.len() {
-            1 => Some(res[0].cloned()),
-            0 => None,
+            0 | 1 => res.pop(),
             _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: None,
-                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
+                delimiter: tt::Delimiter::unspecified(),
+                token_trees: res,
             })),
         };
         ExpandResult { value: res, err }
diff --git a/src/tools/rust-analyzer/crates/parser/Cargo.toml b/src/tools/rust-analyzer/crates/parser/Cargo.toml
index d1420de8937..08359133f1a 100644
--- a/src/tools/rust-analyzer/crates/parser/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/parser/Cargo.toml
@@ -2,18 +2,22 @@
 name = "parser"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
 
 [dependencies]
 drop_bomb = "0.1.5"
-rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
-limit = { path = "../limit", version = "0.0.0" }
+rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" }
+
+limit.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
-sourcegen = { path = "../sourcegen" }
+
+sourcegen.workspace = true
diff --git a/src/tools/rust-analyzer/crates/parser/src/event.rs b/src/tools/rust-analyzer/crates/parser/src/event.rs
index b0e70e79430..577eb0967b4 100644
--- a/src/tools/rust-analyzer/crates/parser/src/event.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/event.rs
@@ -74,7 +74,13 @@ pub(crate) enum Event {
         kind: SyntaxKind,
         n_raw_tokens: u8,
     },
-
+    /// When we parse `foo.0.0` or `foo. 0. 0` the lexer will hand us a float literal
+    /// instead of an integer literal followed by a dot as the lexer has no contextual knowledge.
+    /// This event instructs whatever consumes the events to split the float literal into
+    /// the corresponding parts.
+    FloatSplitHack {
+        ends_in_dot: bool,
+    },
     Error {
         msg: String,
     },
@@ -125,6 +131,11 @@ pub(super) fn process(mut events: Vec<Event>) -> Output {
             Event::Token { kind, n_raw_tokens } => {
                 res.token(kind, n_raw_tokens);
             }
+            Event::FloatSplitHack { ends_in_dot } => {
+                res.float_split_hack(ends_in_dot);
+                let ev = mem::replace(&mut events[i + 1], Event::tombstone());
+                assert!(matches!(ev, Event::Finish), "{ev:?}");
+            }
             Event::Error { msg } => res.error(msg),
         }
     }
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
index 8932330b825..7516ac3c4bd 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs
@@ -379,7 +379,7 @@ fn postfix_expr(
             // }
             T!['('] if allow_calls => call_expr(p, lhs),
             T!['['] if allow_calls => index_expr(p, lhs),
-            T![.] => match postfix_dot_expr(p, lhs) {
+            T![.] => match postfix_dot_expr::<false>(p, lhs) {
                 Ok(it) => it,
                 Err(it) => {
                     lhs = it;
@@ -393,35 +393,44 @@ fn postfix_expr(
         block_like = BlockLike::NotBlock;
     }
     return (lhs, block_like);
+}
 
-    fn postfix_dot_expr(
-        p: &mut Parser<'_>,
-        lhs: CompletedMarker,
-    ) -> Result<CompletedMarker, CompletedMarker> {
+fn postfix_dot_expr<const FLOAT_RECOVERY: bool>(
+    p: &mut Parser<'_>,
+    lhs: CompletedMarker,
+) -> Result<CompletedMarker, CompletedMarker> {
+    if !FLOAT_RECOVERY {
         assert!(p.at(T![.]));
-        if p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])) {
-            return Ok(method_call_expr(p, lhs));
-        }
+    }
+    let nth1 = if FLOAT_RECOVERY { 0 } else { 1 };
+    let nth2 = if FLOAT_RECOVERY { 1 } else { 2 };
 
-        // test await_expr
-        // fn foo() {
-        //     x.await;
-        //     x.0.await;
-        //     x.0().await?.hello();
-        // }
-        if p.nth(1) == T![await] {
-            let m = lhs.precede(p);
-            p.bump(T![.]);
-            p.bump(T![await]);
-            return Ok(m.complete(p, AWAIT_EXPR));
-        }
+    if p.nth(nth1) == IDENT && (p.nth(nth2) == T!['('] || p.nth_at(nth2, T![::])) {
+        return Ok(method_call_expr::<FLOAT_RECOVERY>(p, lhs));
+    }
 
-        if p.at(T![..=]) || p.at(T![..]) {
-            return Err(lhs);
+    // test await_expr
+    // fn foo() {
+    //     x.await;
+    //     x.0.await;
+    //     x.0().await?.hello();
+    //     x.0.0.await;
+    //     x.0. await;
+    // }
+    if p.nth(nth1) == T![await] {
+        let m = lhs.precede(p);
+        if !FLOAT_RECOVERY {
+            p.bump(T![.]);
         }
+        p.bump(T![await]);
+        return Ok(m.complete(p, AWAIT_EXPR));
+    }
 
-        Ok(field_expr(p, lhs))
+    if p.at(T![..=]) || p.at(T![..]) {
+        return Err(lhs);
     }
+
+    field_expr::<FLOAT_RECOVERY>(p, lhs)
 }
 
 // test call_expr
@@ -455,11 +464,22 @@ fn index_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
 // fn foo() {
 //     x.foo();
 //     y.bar::<T>(1, 2,);
+//     x.0.0.call();
+//     x.0. call();
 // }
-fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
-    assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
+fn method_call_expr<const FLOAT_RECOVERY: bool>(
+    p: &mut Parser<'_>,
+    lhs: CompletedMarker,
+) -> CompletedMarker {
+    if FLOAT_RECOVERY {
+        assert!(p.nth(0) == IDENT && (p.nth(1) == T!['('] || p.nth_at(1, T![::])));
+    } else {
+        assert!(p.at(T![.]) && p.nth(1) == IDENT && (p.nth(2) == T!['('] || p.nth_at(2, T![::])));
+    }
     let m = lhs.precede(p);
-    p.bump_any();
+    if !FLOAT_RECOVERY {
+        p.bump(T![.]);
+    }
     name_ref(p);
     generic_args::opt_generic_arg_list(p, true);
     if p.at(T!['(']) {
@@ -472,21 +492,35 @@ fn method_call_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker
 // fn foo() {
 //     x.foo;
 //     x.0.bar;
+//     x.0.1;
+//     x.0. bar;
 //     x.0();
 // }
-fn field_expr(p: &mut Parser<'_>, lhs: CompletedMarker) -> CompletedMarker {
-    assert!(p.at(T![.]));
+fn field_expr<const FLOAT_RECOVERY: bool>(
+    p: &mut Parser<'_>,
+    lhs: CompletedMarker,
+) -> Result<CompletedMarker, CompletedMarker> {
+    if !FLOAT_RECOVERY {
+        assert!(p.at(T![.]));
+    }
     let m = lhs.precede(p);
-    p.bump(T![.]);
+    if !FLOAT_RECOVERY {
+        p.bump(T![.]);
+    }
     if p.at(IDENT) || p.at(INT_NUMBER) {
         name_ref_or_index(p);
     } else if p.at(FLOAT_NUMBER) {
-        // FIXME: How to recover and instead parse INT + T![.]?
-        p.bump_any();
+        return match p.split_float(m) {
+            (true, m) => {
+                let lhs = m.complete(p, FIELD_EXPR);
+                postfix_dot_expr::<true>(p, lhs)
+            }
+            (false, m) => Ok(m.complete(p, FIELD_EXPR)),
+        };
     } else {
         p.error("expected field name or number");
     }
-    m.complete(p, FIELD_EXPR)
+    Ok(m.complete(p, FIELD_EXPR))
 }
 
 // test try_expr
diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
index efa3997353b..a23f900b738 100644
--- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs
@@ -152,7 +152,7 @@ pub(super) fn atom_expr(
             m.complete(p, BLOCK_EXPR)
         }
 
-        T![static] | T![async] | T![move] | T![|] => closure_expr(p),
+        T![const] | T![static] | T![async] | T![move] | T![|] => closure_expr(p),
         T![for] if la == T![<] => closure_expr(p),
         T![for] => for_expr(p, None),
 
@@ -255,7 +255,7 @@ fn array_expr(p: &mut Parser<'_>) -> CompletedMarker {
 // }
 fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
     assert!(match p.current() {
-        T![static] | T![async] | T![move] | T![|] => true,
+        T![const] | T![static] | T![async] | T![move] | T![|] => true,
         T![for] => p.nth(1) == T![<],
         _ => false,
     });
@@ -265,7 +265,9 @@ fn closure_expr(p: &mut Parser<'_>) -> CompletedMarker {
     if p.at(T![for]) {
         types::for_binder(p);
     }
-
+    // test const_closure
+    // fn main() { let cl = const || _ = 0; }
+    p.eat(T![const]);
     p.eat(T![static]);
     p.eat(T![async]);
     p.eat(T![move]);
diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
index b48921f1917..100deff462d 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs
@@ -82,6 +82,7 @@ impl<'a> LexedStr<'a> {
     pub fn text(&self, i: usize) -> &str {
         self.range_text(i..i + 1)
     }
+
     pub fn range_text(&self, r: ops::Range<usize>) -> &str {
         assert!(r.start < r.end && r.end <= self.len());
         let lo = self.start[r.start] as usize;
@@ -216,6 +217,10 @@ impl<'a> Converter<'a> {
                 rustc_lexer::TokenKind::Caret => T![^],
                 rustc_lexer::TokenKind::Percent => T![%],
                 rustc_lexer::TokenKind::Unknown => ERROR,
+                rustc_lexer::TokenKind::UnknownPrefix => {
+                    err = "unknown literal prefix";
+                    IDENT
+                }
             }
         };
 
diff --git a/src/tools/rust-analyzer/crates/parser/src/lib.rs b/src/tools/rust-analyzer/crates/parser/src/lib.rs
index 87be4792773..8c5aed0232b 100644
--- a/src/tools/rust-analyzer/crates/parser/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/lib.rs
@@ -102,10 +102,14 @@ impl TopEntryPoint {
                 match step {
                     Step::Enter { .. } => depth += 1,
                     Step::Exit => depth -= 1,
+                    Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+                        depth -= 1 + !has_pseudo_dot as usize
+                    }
                     Step::Token { .. } | Step::Error { .. } => (),
                 }
             }
             assert!(!first, "no tree at all");
+            assert_eq!(depth, 0, "unbalanced tree");
         }
 
         res
diff --git a/src/tools/rust-analyzer/crates/parser/src/output.rs b/src/tools/rust-analyzer/crates/parser/src/output.rs
index 6ca841cfe07..41d4c68b2d7 100644
--- a/src/tools/rust-analyzer/crates/parser/src/output.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/output.rs
@@ -25,53 +25,88 @@ pub struct Output {
 #[derive(Debug)]
 pub enum Step<'a> {
     Token { kind: SyntaxKind, n_input_tokens: u8 },
+    FloatSplit { ends_in_dot: bool },
     Enter { kind: SyntaxKind },
     Exit,
     Error { msg: &'a str },
 }
 
 impl Output {
+    const EVENT_MASK: u32 = 0b1;
+    const TAG_MASK: u32 = 0x0000_00F0;
+    const N_INPUT_TOKEN_MASK: u32 = 0x0000_FF00;
+    const KIND_MASK: u32 = 0xFFFF_0000;
+
+    const ERROR_SHIFT: u32 = Self::EVENT_MASK.trailing_ones();
+    const TAG_SHIFT: u32 = Self::TAG_MASK.trailing_zeros();
+    const N_INPUT_TOKEN_SHIFT: u32 = Self::N_INPUT_TOKEN_MASK.trailing_zeros();
+    const KIND_SHIFT: u32 = Self::KIND_MASK.trailing_zeros();
+
+    const TOKEN_EVENT: u8 = 0;
+    const ENTER_EVENT: u8 = 1;
+    const EXIT_EVENT: u8 = 2;
+    const SPLIT_EVENT: u8 = 3;
+
     pub fn iter(&self) -> impl Iterator<Item = Step<'_>> {
         self.event.iter().map(|&event| {
-            if event & 0b1 == 0 {
-                return Step::Error { msg: self.error[(event as usize) >> 1].as_str() };
+            if event & Self::EVENT_MASK == 0 {
+                return Step::Error {
+                    msg: self.error[(event as usize) >> Self::ERROR_SHIFT].as_str(),
+                };
             }
-            let tag = ((event & 0x0000_00F0) >> 4) as u8;
+            let tag = ((event & Self::TAG_MASK) >> Self::TAG_SHIFT) as u8;
             match tag {
-                0 => {
-                    let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
-                    let n_input_tokens = ((event & 0x0000_FF00) >> 8) as u8;
+                Self::TOKEN_EVENT => {
+                    let kind: SyntaxKind =
+                        (((event & Self::KIND_MASK) >> Self::KIND_SHIFT) as u16).into();
+                    let n_input_tokens =
+                        ((event & Self::N_INPUT_TOKEN_MASK) >> Self::N_INPUT_TOKEN_SHIFT) as u8;
                     Step::Token { kind, n_input_tokens }
                 }
-                1 => {
-                    let kind: SyntaxKind = (((event & 0xFFFF_0000) >> 16) as u16).into();
+                Self::ENTER_EVENT => {
+                    let kind: SyntaxKind =
+                        (((event & Self::KIND_MASK) >> Self::KIND_SHIFT) as u16).into();
                     Step::Enter { kind }
                 }
-                2 => Step::Exit,
+                Self::EXIT_EVENT => Step::Exit,
+                Self::SPLIT_EVENT => {
+                    Step::FloatSplit { ends_in_dot: event & Self::N_INPUT_TOKEN_MASK != 0 }
+                }
                 _ => unreachable!(),
             }
         })
     }
 
     pub(crate) fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
-        let e = ((kind as u16 as u32) << 16) | ((n_tokens as u32) << 8) | 1;
+        let e = ((kind as u16 as u32) << Self::KIND_SHIFT)
+            | ((n_tokens as u32) << Self::N_INPUT_TOKEN_SHIFT)
+            | Self::EVENT_MASK;
         self.event.push(e)
     }
 
+    pub(crate) fn float_split_hack(&mut self, ends_in_dot: bool) {
+        let e = (Self::SPLIT_EVENT as u32) << Self::TAG_SHIFT
+            | ((ends_in_dot as u32) << Self::N_INPUT_TOKEN_SHIFT)
+            | Self::EVENT_MASK;
+        self.event.push(e);
+    }
+
     pub(crate) fn enter_node(&mut self, kind: SyntaxKind) {
-        let e = ((kind as u16 as u32) << 16) | (1 << 4) | 1;
+        let e = ((kind as u16 as u32) << Self::KIND_SHIFT)
+            | ((Self::ENTER_EVENT as u32) << Self::TAG_SHIFT)
+            | Self::EVENT_MASK;
         self.event.push(e)
     }
 
     pub(crate) fn leave_node(&mut self) {
-        let e = 2 << 4 | 1;
+        let e = (Self::EXIT_EVENT as u32) << Self::TAG_SHIFT | Self::EVENT_MASK;
         self.event.push(e)
     }
 
     pub(crate) fn error(&mut self, error: String) {
         let idx = self.error.len();
         self.error.push(error);
-        let e = (idx as u32) << 1;
+        let e = (idx as u32) << Self::ERROR_SHIFT;
         self.event.push(e);
     }
 }
diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs
index 48aecb35be1..280416ae7c9 100644
--- a/src/tools/rust-analyzer/crates/parser/src/parser.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs
@@ -181,6 +181,35 @@ impl<'t> Parser<'t> {
         self.do_bump(kind, 1);
     }
 
+    /// Advances the parser by one token
+    pub(crate) fn split_float(&mut self, mut marker: Marker) -> (bool, Marker) {
+        assert!(self.at(SyntaxKind::FLOAT_NUMBER));
+        // we have parse `<something>.`
+        // `<something>`.0.1
+        // here we need to insert an extra event
+        //
+        // `<something>`. 0. 1;
+        // here we need to change the follow up parse, the return value will cause us to emulate a dot
+        // the actual splitting happens later
+        let ends_in_dot = !self.inp.is_joint(self.pos);
+        if !ends_in_dot {
+            let new_marker = self.start();
+            let idx = marker.pos as usize;
+            match &mut self.events[idx] {
+                Event::Start { forward_parent, kind } => {
+                    *kind = SyntaxKind::FIELD_EXPR;
+                    *forward_parent = Some(new_marker.pos - marker.pos);
+                }
+                _ => unreachable!(),
+            }
+            marker.bomb.defuse();
+            marker = new_marker;
+        };
+        self.pos += 1 as usize;
+        self.push_event(Event::FloatSplitHack { ends_in_dot });
+        (ends_in_dot, marker)
+    }
+
     /// Advances the parser by one token, remapping its kind.
     /// This is useful to create contextual keywords from
     /// identifiers. For example, the lexer creates a `union`
diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
index 2be4050d135..47e4adcbbe6 100644
--- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs
@@ -43,7 +43,16 @@ impl<'a> LexedStr<'a> {
                         res.was_joint();
                     }
                     res.push(kind);
+                    // Tag the token as joint if it is float with a fractional part
+                    // we use this jointness to inform the parser about what token split
+                    // event to emit when we encounter a float literal in a field access
+                    if kind == SyntaxKind::FLOAT_NUMBER {
+                        if !self.text(i).ends_with('.') {
+                            res.was_joint();
+                        }
+                    }
                 }
+
                 was_joint = true;
             }
         }
@@ -63,6 +72,9 @@ impl<'a> LexedStr<'a> {
                 Step::Token { kind, n_input_tokens: n_raw_tokens } => {
                     builder.token(kind, n_raw_tokens)
                 }
+                Step::FloatSplit { ends_in_dot: has_pseudo_dot } => {
+                    builder.float_split(has_pseudo_dot)
+                }
                 Step::Enter { kind } => builder.enter(kind),
                 Step::Exit => builder.exit(),
                 Step::Error { msg } => {
@@ -109,6 +121,16 @@ impl Builder<'_, '_> {
         self.do_token(kind, n_tokens as usize);
     }
 
+    fn float_split(&mut self, has_pseudo_dot: bool) {
+        match mem::replace(&mut self.state, State::Normal) {
+            State::PendingEnter => unreachable!(),
+            State::PendingExit => (self.sink)(StrStep::Exit),
+            State::Normal => (),
+        }
+        self.eat_trivias();
+        self.do_float_split(has_pseudo_dot);
+    }
+
     fn enter(&mut self, kind: SyntaxKind) {
         match mem::replace(&mut self.state, State::Normal) {
             State::PendingEnter => {
@@ -164,6 +186,37 @@ impl Builder<'_, '_> {
         self.pos += n_tokens;
         (self.sink)(StrStep::Token { kind, text });
     }
+
+    fn do_float_split(&mut self, has_pseudo_dot: bool) {
+        let text = &self.lexed.range_text(self.pos..self.pos + 1);
+        self.pos += 1;
+        match text.split_once('.') {
+            Some((left, right)) => {
+                assert!(!left.is_empty());
+                (self.sink)(StrStep::Enter { kind: SyntaxKind::NAME_REF });
+                (self.sink)(StrStep::Token { kind: SyntaxKind::INT_NUMBER, text: left });
+                (self.sink)(StrStep::Exit);
+
+                // here we move the exit up, the original exit has been deleted in process
+                (self.sink)(StrStep::Exit);
+
+                (self.sink)(StrStep::Token { kind: SyntaxKind::DOT, text: "." });
+
+                if has_pseudo_dot {
+                    assert!(right.is_empty(), "{left}.{right}");
+                    self.state = State::Normal;
+                } else {
+                    (self.sink)(StrStep::Enter { kind: SyntaxKind::NAME_REF });
+                    (self.sink)(StrStep::Token { kind: SyntaxKind::INT_NUMBER, text: right });
+                    (self.sink)(StrStep::Exit);
+
+                    // the parser creates an unbalanced start node, we are required to close it here
+                    self.state = State::PendingExit;
+                }
+            }
+            None => unreachable!(),
+        }
+    }
 }
 
 fn n_attached_trivias<'a>(
diff --git a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
index e626b4f27e0..40f92e58804 100644
--- a/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
+++ b/src/tools/rust-analyzer/crates/parser/src/tests/prefix_entries.rs
@@ -51,6 +51,9 @@ fn expr() {
     check(PrefixEntryPoint::Expr, "-1", "-1");
     check(PrefixEntryPoint::Expr, "fn foo() {}", "fn");
     check(PrefixEntryPoint::Expr, "#[attr] ()", "#[attr] ()");
+    check(PrefixEntryPoint::Expr, "foo.0", "foo.0");
+    check(PrefixEntryPoint::Expr, "foo.0.1", "foo.0.1");
+    check(PrefixEntryPoint::Expr, "foo.0. foo", "foo.0. foo");
 }
 
 #[test]
@@ -88,6 +91,7 @@ fn check(entry: PrefixEntryPoint, input: &str, prefix: &str) {
     for step in entry.parse(&input).iter() {
         match step {
             Step::Token { n_input_tokens, .. } => n_tokens += n_input_tokens as usize,
+            Step::FloatSplit { .. } => n_tokens += 1,
             Step::Enter { .. } | Step::Exit | Step::Error { .. } => (),
         }
     }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
index 8498724b9ef..dd27dc48964 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rast
@@ -41,6 +41,39 @@ SOURCE_FILE
           SEMICOLON ";"
         WHITESPACE "\n    "
         EXPR_STMT
+          FIELD_EXPR
+            FIELD_EXPR
+              PATH_EXPR
+                PATH
+                  PATH_SEGMENT
+                    NAME_REF
+                      IDENT "x"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            NAME_REF
+              INT_NUMBER "1"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          FIELD_EXPR
+            FIELD_EXPR
+              PATH_EXPR
+                PATH
+                  PATH_SEGMENT
+                    NAME_REF
+                      IDENT "x"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            WHITESPACE " "
+            NAME_REF
+              IDENT "bar"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
           CALL_EXPR
             FIELD_EXPR
               PATH_EXPR
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
index b8da2ddc309..98dbe45a7ec 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0011_field_expr.rs
@@ -1,5 +1,7 @@
 fn foo() {
     x.foo;
     x.0.bar;
+    x.0.1;
+    x.0. bar;
     x.0();
 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
index dcbcfe1231e..b28b8eb673a 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rast
@@ -58,6 +58,49 @@ SOURCE_FILE
               COMMA ","
               R_PAREN ")"
           SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          METHOD_CALL_EXPR
+            FIELD_EXPR
+              FIELD_EXPR
+                PATH_EXPR
+                  PATH
+                    PATH_SEGMENT
+                      NAME_REF
+                        IDENT "x"
+                DOT "."
+                NAME_REF
+                  INT_NUMBER "0"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            NAME_REF
+              IDENT "call"
+            ARG_LIST
+              L_PAREN "("
+              R_PAREN ")"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          METHOD_CALL_EXPR
+            FIELD_EXPR
+              PATH_EXPR
+                PATH
+                  PATH_SEGMENT
+                    NAME_REF
+                      IDENT "x"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            WHITESPACE " "
+            NAME_REF
+              IDENT "call"
+            ARG_LIST
+              L_PAREN "("
+              R_PAREN ")"
+          SEMICOLON ";"
         WHITESPACE "\n"
         R_CURLY "}"
   WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
index 1a3aa35ae8e..48bb6381e80 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0107_method_call_expr.rs
@@ -1,4 +1,6 @@
 fn foo() {
     x.foo();
     y.bar::<T>(1, 2,);
+    x.0.0.call();
+    x.0. call();
 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
index 9d37ada0da8..af713a22072 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rast
@@ -65,6 +65,41 @@ SOURCE_FILE
               L_PAREN "("
               R_PAREN ")"
           SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          AWAIT_EXPR
+            FIELD_EXPR
+              FIELD_EXPR
+                PATH_EXPR
+                  PATH
+                    PATH_SEGMENT
+                      NAME_REF
+                        IDENT "x"
+                DOT "."
+                NAME_REF
+                  INT_NUMBER "0"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            AWAIT_KW "await"
+          SEMICOLON ";"
+        WHITESPACE "\n    "
+        EXPR_STMT
+          AWAIT_EXPR
+            FIELD_EXPR
+              PATH_EXPR
+                PATH
+                  PATH_SEGMENT
+                    NAME_REF
+                      IDENT "x"
+              DOT "."
+              NAME_REF
+                INT_NUMBER "0"
+            DOT "."
+            WHITESPACE " "
+            AWAIT_KW "await"
+          SEMICOLON ";"
         WHITESPACE "\n"
         R_CURLY "}"
   WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
index d2ba89ca607..fe9a3211bb1 100644
--- a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0137_await_expr.rs
@@ -2,4 +2,6 @@ fn foo() {
     x.await;
     x.0.await;
     x.0().await?.hello();
+    x.0.0.await;
+    x.0. await;
 }
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast
new file mode 100644
index 00000000000..06442a1d0f1
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rast
@@ -0,0 +1,42 @@
+SOURCE_FILE
+  FN
+    FN_KW "fn"
+    WHITESPACE " "
+    NAME
+      IDENT "main"
+    PARAM_LIST
+      L_PAREN "("
+      R_PAREN ")"
+    WHITESPACE " "
+    BLOCK_EXPR
+      STMT_LIST
+        L_CURLY "{"
+        WHITESPACE " "
+        LET_STMT
+          LET_KW "let"
+          WHITESPACE " "
+          IDENT_PAT
+            NAME
+              IDENT "cl"
+          WHITESPACE " "
+          EQ "="
+          WHITESPACE " "
+          CLOSURE_EXPR
+            CONST_KW "const"
+            WHITESPACE " "
+            PARAM_LIST
+              PIPE "|"
+              PIPE "|"
+            WHITESPACE " "
+            BIN_EXPR
+              UNDERSCORE_EXPR
+                UNDERSCORE "_"
+              WHITESPACE " "
+              EQ "="
+              WHITESPACE " "
+              LITERAL
+                INT_NUMBER "0"
+          SEMICOLON ";"
+        WHITESPACE " "
+        R_CURLY "}"
+  WHITESPACE "\n"
diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs
new file mode 100644
index 00000000000..0c05cc70bd3
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0205_const_closure.rs
@@ -0,0 +1 @@
+fn main() { let cl = const || _ = 0; }
diff --git a/src/tools/rust-analyzer/crates/paths/Cargo.toml b/src/tools/rust-analyzer/crates/paths/Cargo.toml
index d23a63d2a97..e24e6eceffb 100644
--- a/src/tools/rust-analyzer/crates/paths/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/paths/Cargo.toml
@@ -2,9 +2,11 @@
 name = "paths"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index f261f3def45..28469b83246 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -2,15 +2,17 @@
 name = "proc-macro-api"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
 
 [dependencies]
-object = { version = "0.29.0", default-features = false, features = [
+object = { version = "0.30.2", default-features = false, features = [
     "std",
     "read_core",
     "elf",
@@ -21,11 +23,12 @@ serde = { version = "1.0.137", features = ["derive"] }
 serde_json = { version = "1.0.81", features = ["unbounded_depth"] }
 tracing = "0.1.37"
 memmap2 = "0.5.4"
-snap = "1.0.5"
+snap = "1.1.0"
 
-paths = { path = "../paths", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
+# local deps
+paths.workspace = true
+tt.workspace = true
+stdx.workspace = true
+profile.workspace = true
 # Intentionally *not* depend on anything salsa-related
-# base-db = { path = "../base-db", version = "0.0.0" }
+# base-db.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index 7921fda331e..90d06967e8f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -19,7 +19,8 @@ use std::{
 };
 
 use serde::{Deserialize, Serialize};
-use tt::Subtree;
+
+use ::tt::token_id as tt;
 
 use crate::{
     msg::{ExpandMacro, FlatTree, PanicMessage},
@@ -70,7 +71,7 @@ impl MacroDylib {
 
 /// A handle to a specific macro (a `#[proc_macro]` annotated function).
 ///
-/// It exists withing a context of a specific [`ProcMacroProcess`] -- currently
+/// It exists within a context of a specific [`ProcMacroProcess`] -- currently
 /// we share a single expander process for all macros.
 #[derive(Debug, Clone)]
 pub struct ProcMacro {
@@ -114,14 +115,14 @@ impl ProcMacroServer {
     /// Spawns an external process as the proc macro server and returns a client connected to it.
     pub fn spawn(
         process_path: AbsPathBuf,
-        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+        args: impl IntoIterator<Item = impl AsRef<OsStr>> + Clone,
     ) -> io::Result<ProcMacroServer> {
         let process = ProcMacroProcessSrv::run(process_path, args)?;
         Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
     }
 
     pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
-        let _p = profile::span("ProcMacroClient::by_dylib_path");
+        let _p = profile::span("ProcMacroClient::load_dylib");
         let macros =
             self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
 
@@ -151,10 +152,10 @@ impl ProcMacro {
 
     pub fn expand(
         &self,
-        subtree: &Subtree,
-        attr: Option<&Subtree>,
+        subtree: &tt::Subtree,
+        attr: Option<&tt::Subtree>,
         env: Vec<(String, String)>,
-    ) -> Result<Result<Subtree, PanicMessage>, ServerError> {
+    ) -> Result<Result<tt::Subtree, PanicMessage>, ServerError> {
         let current_dir = env
             .iter()
             .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
@@ -173,7 +174,7 @@ impl ProcMacro {
         let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
         match response {
             msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)),
-            msg::Response::ListMacros { .. } => {
+            msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => {
                 Err(ServerError { message: "unexpected response".to_string(), io: None })
             }
         }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
index f9c2b9fda3a..4040efe93f0 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg.rs
@@ -12,16 +12,21 @@ use crate::ProcMacroKind;
 
 pub use crate::msg::flat::FlatTree;
 
+pub const NO_VERSION_CHECK_VERSION: u32 = 0;
+pub const CURRENT_API_VERSION: u32 = 1;
+
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
     ListMacros { dylib_path: PathBuf },
     ExpandMacro(ExpandMacro),
+    ApiVersionCheck {},
 }
 
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Response {
     ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
     ExpandMacro(Result<FlatTree, PanicMessage>),
+    ApiVersionCheck(u32),
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -107,27 +112,31 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use tt::*;
+    use crate::tt::*;
 
     fn fixture_token_tree() -> Subtree {
-        let mut subtree = Subtree::default();
+        let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() };
         subtree
             .token_trees
-            .push(TokenTree::Leaf(Ident { text: "struct".into(), id: TokenId(0) }.into()));
+            .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into()));
         subtree
             .token_trees
-            .push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
+            .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into()));
         subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal {
             text: "Foo".into(),
-            id: TokenId::unspecified(),
+            span: TokenId::unspecified(),
         })));
         subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct {
             char: '@',
-            id: TokenId::unspecified(),
+            span: TokenId::unspecified(),
             spacing: Spacing::Joint,
         })));
         subtree.token_trees.push(TokenTree::Subtree(Subtree {
-            delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
+            delimiter: Delimiter {
+                open: TokenId(2),
+                close: TokenId::UNSPECIFIED,
+                kind: DelimiterKind::Brace,
+            },
             token_trees: vec![],
         }));
         subtree
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
index b178c46263e..fd3202e0b28 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/msg/flat.rs
@@ -38,7 +38,8 @@
 use std::collections::{HashMap, VecDeque};
 
 use serde::{Deserialize, Serialize};
-use tt::TokenId;
+
+use crate::tt::{self, TokenId};
 
 #[derive(Serialize, Deserialize, Debug)]
 pub struct FlatTree {
@@ -52,7 +53,7 @@ pub struct FlatTree {
 
 struct SubtreeRepr {
     id: tt::TokenId,
-    kind: Option<tt::DelimiterKind>,
+    kind: tt::DelimiterKind,
     tt: [u32; 2],
 }
 
@@ -124,19 +125,19 @@ impl FlatTree {
 impl SubtreeRepr {
     fn write(self) -> [u32; 4] {
         let kind = match self.kind {
-            None => 0,
-            Some(tt::DelimiterKind::Parenthesis) => 1,
-            Some(tt::DelimiterKind::Brace) => 2,
-            Some(tt::DelimiterKind::Bracket) => 3,
+            tt::DelimiterKind::Invisible => 0,
+            tt::DelimiterKind::Parenthesis => 1,
+            tt::DelimiterKind::Brace => 2,
+            tt::DelimiterKind::Bracket => 3,
         };
         [self.id.0, kind, self.tt[0], self.tt[1]]
     }
     fn read([id, kind, lo, len]: [u32; 4]) -> SubtreeRepr {
         let kind = match kind {
-            0 => None,
-            1 => Some(tt::DelimiterKind::Parenthesis),
-            2 => Some(tt::DelimiterKind::Brace),
-            3 => Some(tt::DelimiterKind::Bracket),
+            0 => tt::DelimiterKind::Invisible,
+            1 => tt::DelimiterKind::Parenthesis,
+            2 => tt::DelimiterKind::Brace,
+            3 => tt::DelimiterKind::Bracket,
             other => panic!("bad kind {other}"),
         };
         SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
@@ -216,7 +217,7 @@ impl<'a> Writer<'a> {
                     tt::Leaf::Literal(lit) => {
                         let idx = self.literal.len() as u32;
                         let text = self.intern(&lit.text);
-                        self.literal.push(LiteralRepr { id: lit.id, text });
+                        self.literal.push(LiteralRepr { id: lit.span, text });
                         idx << 2 | 0b01
                     }
                     tt::Leaf::Punct(punct) => {
@@ -224,14 +225,14 @@ impl<'a> Writer<'a> {
                         self.punct.push(PunctRepr {
                             char: punct.char,
                             spacing: punct.spacing,
-                            id: punct.id,
+                            id: punct.span,
                         });
                         idx << 2 | 0b10
                     }
                     tt::Leaf::Ident(ident) => {
                         let idx = self.ident.len() as u32;
                         let text = self.intern(&ident.text);
-                        self.ident.push(IdentRepr { id: ident.id, text });
+                        self.ident.push(IdentRepr { id: ident.span, text });
                         idx << 2 | 0b11
                     }
                 },
@@ -243,8 +244,8 @@ impl<'a> Writer<'a> {
 
     fn enqueue(&mut self, subtree: &'a tt::Subtree) -> u32 {
         let idx = self.subtree.len();
-        let delimiter_id = subtree.delimiter.map_or(TokenId::unspecified(), |it| it.id);
-        let delimiter_kind = subtree.delimiter.map(|it| it.kind);
+        let delimiter_id = subtree.delimiter.open;
+        let delimiter_kind = subtree.delimiter.kind;
         self.subtree.push(SubtreeRepr { id: delimiter_id, kind: delimiter_kind, tt: [!0, !0] });
         self.work.push_back((idx, subtree));
         idx as u32
@@ -276,7 +277,11 @@ impl Reader {
             let repr = &self.subtree[i];
             let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize];
             let s = tt::Subtree {
-                delimiter: repr.kind.map(|kind| tt::Delimiter { id: repr.id, kind }),
+                delimiter: tt::Delimiter {
+                    open: repr.id,
+                    close: TokenId::UNSPECIFIED,
+                    kind: repr.kind,
+                },
                 token_trees: token_trees
                     .iter()
                     .copied()
@@ -291,7 +296,7 @@ impl Reader {
                                 let repr = &self.literal[idx];
                                 tt::Leaf::Literal(tt::Literal {
                                     text: self.text[repr.text as usize].as_str().into(),
-                                    id: repr.id,
+                                    span: repr.id,
                                 })
                                 .into()
                             }
@@ -300,7 +305,7 @@ impl Reader {
                                 tt::Leaf::Punct(tt::Punct {
                                     char: repr.char,
                                     spacing: repr.spacing,
-                                    id: repr.id,
+                                    span: repr.id,
                                 })
                                 .into()
                             }
@@ -308,7 +313,7 @@ impl Reader {
                                 let repr = &self.ident[idx];
                                 tt::Leaf::Ident(tt::Ident {
                                     text: self.text[repr.text as usize].as_str().into(),
-                                    id: repr.id,
+                                    span: repr.id,
                                 })
                                 .into()
                             }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index 54dcb17f4e8..1ccbd780fdd 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -10,7 +10,7 @@ use paths::{AbsPath, AbsPathBuf};
 use stdx::JodChild;
 
 use crate::{
-    msg::{Message, Request, Response},
+    msg::{Message, Request, Response, CURRENT_API_VERSION},
     ProcMacroKind, ServerError,
 };
 
@@ -19,19 +19,53 @@ pub(crate) struct ProcMacroProcessSrv {
     _process: Process,
     stdin: ChildStdin,
     stdout: BufReader<ChildStdout>,
+    version: u32,
 }
 
 impl ProcMacroProcessSrv {
     pub(crate) fn run(
         process_path: AbsPathBuf,
-        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+        args: impl IntoIterator<Item = impl AsRef<OsStr>> + Clone,
     ) -> io::Result<ProcMacroProcessSrv> {
-        let mut process = Process::run(process_path, args)?;
-        let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+        let create_srv = |null_stderr| {
+            let mut process = Process::run(process_path.clone(), args.clone(), null_stderr)?;
+            let (stdin, stdout) = process.stdio().expect("couldn't access child stdio");
+
+            io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 })
+        };
+        let mut srv = create_srv(true)?;
+        tracing::info!("sending version check");
+        match srv.version_check() {
+            Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new(
+                io::ErrorKind::Other,
+                format!(
+                    "proc-macro server's api version ({}) is newer than rust-analyzer's ({})",
+                    v, CURRENT_API_VERSION
+                ),
+            )),
+            Ok(v) => {
+                tracing::info!("got version {v}");
+                srv = create_srv(false)?;
+                srv.version = v;
+                Ok(srv)
+            }
+            Err(e) => {
+                tracing::info!(%e, "proc-macro version check failed, restarting and assuming version 0");
+                create_srv(false)
+            }
+        }
+    }
 
-        let srv = ProcMacroProcessSrv { _process: process, stdin, stdout };
+    pub(crate) fn version_check(&mut self) -> Result<u32, ServerError> {
+        let request = Request::ApiVersionCheck {};
+        let response = self.send_task(request)?;
 
-        Ok(srv)
+        match response {
+            Response::ApiVersionCheck(version) => Ok(version),
+            Response::ExpandMacro { .. } | Response::ListMacros { .. } => {
+                Err(ServerError { message: "unexpected response".to_string(), io: None })
+            }
+        }
     }
 
     pub(crate) fn find_proc_macros(
@@ -44,7 +78,7 @@ impl ProcMacroProcessSrv {
 
         match response {
             Response::ListMacros(it) => Ok(it),
-            Response::ExpandMacro { .. } => {
+            Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. } => {
                 Err(ServerError { message: "unexpected response".to_string(), io: None })
             }
         }
@@ -65,9 +99,10 @@ impl Process {
     fn run(
         path: AbsPathBuf,
         args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+        null_stderr: bool,
     ) -> io::Result<Process> {
         let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().into()).collect();
-        let child = JodChild(mk_child(&path, args)?);
+        let child = JodChild(mk_child(&path, args, null_stderr)?);
         Ok(Process { child })
     }
 
@@ -83,13 +118,14 @@ impl Process {
 fn mk_child(
     path: &AbsPath,
     args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+    null_stderr: bool,
 ) -> io::Result<Child> {
     Command::new(path.as_os_str())
         .args(args)
         .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
         .stdin(Stdio::piped())
         .stdout(Stdio::piped())
-        .stderr(Stdio::inherit())
+        .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() })
         .spawn()
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
index 7991e125ab8..c402bc02253 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv-cli/Cargo.toml
@@ -2,12 +2,14 @@
 name = "proc-macro-srv-cli"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [dependencies]
-proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" }
+proc-macro-srv.workspace = true
 
 [features]
 sysroot-abi = ["proc-macro-srv/sysroot-abi"]
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
index a136abc12b7..f7f07cfcb2e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
@@ -2,15 +2,17 @@
 name = "proc-macro-srv"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
 
 [dependencies]
-object = { version = "0.29.0", default-features = false, features = [
+object = { version = "0.30.2", default-features = false, features = [
     "std",
     "read_core",
     "elf",
@@ -20,16 +22,16 @@ object = { version = "0.29.0", default-features = false, features = [
 libloading = "0.7.3"
 memmap2 = "0.5.4"
 
-tt = { path = "../tt", version = "0.0.0" }
-mbe = { path = "../mbe", version = "0.0.0" }
-paths = { path = "../paths", version = "0.0.0" }
-proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+tt.workspace = true
+mbe.workspace = true
+paths.workspace = true
+proc-macro-api.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
 
 # used as proc macro test targets
-proc-macro-test = { path = "../proc-macro-test" }
+proc-macro-test.workspace = true
 
 [features]
 sysroot-abi = []
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
deleted file mode 100644
index 1c91ac0fa1b..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/mod.rs
+++ /dev/null
@@ -1,104 +0,0 @@
-//! Macro ABI for version 1.58 of rustc
-
-#[allow(dead_code)]
-#[doc(hidden)]
-mod proc_macro;
-
-#[allow(dead_code)]
-#[doc(hidden)]
-mod ra_server;
-
-use libloading::Library;
-use proc_macro_api::ProcMacroKind;
-
-use super::PanicMessage;
-
-pub(crate) struct Abi {
-    exported_macros: Vec<proc_macro::bridge::client::ProcMacro>,
-}
-
-impl From<proc_macro::bridge::PanicMessage> for PanicMessage {
-    fn from(p: proc_macro::bridge::PanicMessage) -> Self {
-        Self { message: p.as_str().map(|s| s.to_string()) }
-    }
-}
-
-impl Abi {
-    pub unsafe fn from_lib(lib: &Library, symbol_name: String) -> Result<Abi, libloading::Error> {
-        let macros: libloading::Symbol<'_, &&[proc_macro::bridge::client::ProcMacro]> =
-            lib.get(symbol_name.as_bytes())?;
-        Ok(Self { exported_macros: macros.to_vec() })
-    }
-
-    pub fn expand(
-        &self,
-        macro_name: &str,
-        macro_body: &tt::Subtree,
-        attributes: Option<&tt::Subtree>,
-    ) -> Result<tt::Subtree, PanicMessage> {
-        let parsed_body = ra_server::TokenStream::with_subtree(macro_body.clone());
-
-        let parsed_attributes = attributes.map_or(ra_server::TokenStream::new(), |attr| {
-            ra_server::TokenStream::with_subtree(attr.clone())
-        });
-
-        for proc_macro in &self.exported_macros {
-            match proc_macro {
-                proc_macro::bridge::client::ProcMacro::CustomDerive {
-                    trait_name, client, ..
-                } if *trait_name == macro_name => {
-                    let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
-                        ra_server::RustAnalyzer::default(),
-                        parsed_body,
-                        true,
-                    );
-                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
-                }
-                proc_macro::bridge::client::ProcMacro::Bang { name, client }
-                    if *name == macro_name =>
-                {
-                    let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
-                        ra_server::RustAnalyzer::default(),
-                        parsed_body,
-                        true,
-                    );
-                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
-                }
-                proc_macro::bridge::client::ProcMacro::Attr { name, client }
-                    if *name == macro_name =>
-                {
-                    let res = client.run(
-                        &proc_macro::bridge::server::SameThread,
-                        ra_server::RustAnalyzer::default(),
-                        parsed_attributes,
-                        parsed_body,
-                        true,
-                    );
-                    return res.map(|it| it.into_subtree()).map_err(PanicMessage::from);
-                }
-                _ => continue,
-            }
-        }
-
-        Err(proc_macro::bridge::PanicMessage::String("Nothing to expand".to_string()).into())
-    }
-
-    pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
-        self.exported_macros
-            .iter()
-            .map(|proc_macro| match proc_macro {
-                proc_macro::bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
-                    (trait_name.to_string(), ProcMacroKind::CustomDerive)
-                }
-                proc_macro::bridge::client::ProcMacro::Bang { name, .. } => {
-                    (name.to_string(), ProcMacroKind::FuncLike)
-                }
-                proc_macro::bridge::client::ProcMacro::Attr { name, .. } => {
-                    (name.to_string(), ProcMacroKind::Attr)
-                }
-            })
-            .collect()
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
deleted file mode 100644
index d82669d3e23..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/buffer.rs
+++ /dev/null
@@ -1,143 +0,0 @@
-//! Buffer management for same-process client<->server communication.
-
-use std::io::{self, Write};
-use std::mem;
-use std::ops::{Deref, DerefMut};
-use std::slice;
-
-#[repr(C)]
-pub struct Buffer<T: Copy> {
-    data: *mut T,
-    len: usize,
-    capacity: usize,
-    reserve: extern "C" fn(Buffer<T>, usize) -> Buffer<T>,
-    drop: extern "C" fn(Buffer<T>),
-}
-
-unsafe impl<T: Copy + Sync> Sync for Buffer<T> {}
-unsafe impl<T: Copy + Send> Send for Buffer<T> {}
-
-impl<T: Copy> Default for Buffer<T> {
-    fn default() -> Self {
-        Self::from(vec![])
-    }
-}
-
-impl<T: Copy> Deref for Buffer<T> {
-    type Target = [T];
-    fn deref(&self) -> &[T] {
-        unsafe { slice::from_raw_parts(self.data as *const T, self.len) }
-    }
-}
-
-impl<T: Copy> DerefMut for Buffer<T> {
-    fn deref_mut(&mut self) -> &mut [T] {
-        unsafe { slice::from_raw_parts_mut(self.data, self.len) }
-    }
-}
-
-impl<T: Copy> Buffer<T> {
-    pub(super) fn new() -> Self {
-        Self::default()
-    }
-
-    pub(super) fn clear(&mut self) {
-        self.len = 0;
-    }
-
-    pub(super) fn take(&mut self) -> Self {
-        mem::take(self)
-    }
-
-    // We have the array method separate from extending from a slice. This is
-    // because in the case of small arrays, codegen can be more efficient
-    // (avoiding a memmove call). With extend_from_slice, LLVM at least
-    // currently is not able to make that optimization.
-    pub(super) fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
-        if xs.len() > (self.capacity - self.len) {
-            let b = self.take();
-            *self = (b.reserve)(b, xs.len());
-        }
-        unsafe {
-            xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
-            self.len += xs.len();
-        }
-    }
-
-    pub(super) fn extend_from_slice(&mut self, xs: &[T]) {
-        if xs.len() > (self.capacity - self.len) {
-            let b = self.take();
-            *self = (b.reserve)(b, xs.len());
-        }
-        unsafe {
-            xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len());
-            self.len += xs.len();
-        }
-    }
-
-    pub(super) fn push(&mut self, v: T) {
-        // The code here is taken from Vec::push, and we know that reserve()
-        // will panic if we're exceeding isize::MAX bytes and so there's no need
-        // to check for overflow.
-        if self.len == self.capacity {
-            let b = self.take();
-            *self = (b.reserve)(b, 1);
-        }
-        unsafe {
-            *self.data.add(self.len) = v;
-            self.len += 1;
-        }
-    }
-}
-
-impl Write for Buffer<u8> {
-    fn write(&mut self, xs: &[u8]) -> io::Result<usize> {
-        self.extend_from_slice(xs);
-        Ok(xs.len())
-    }
-
-    fn write_all(&mut self, xs: &[u8]) -> io::Result<()> {
-        self.extend_from_slice(xs);
-        Ok(())
-    }
-
-    fn flush(&mut self) -> io::Result<()> {
-        Ok(())
-    }
-}
-
-impl<T: Copy> Drop for Buffer<T> {
-    fn drop(&mut self) {
-        let b = self.take();
-        (b.drop)(b);
-    }
-}
-
-impl<T: Copy> From<Vec<T>> for Buffer<T> {
-    fn from(mut v: Vec<T>) -> Self {
-        let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity());
-        mem::forget(v);
-
-        // This utility function is nested in here because it can *only*
-        // be safely called on `Buffer`s created by *this* `proc_macro`.
-        fn to_vec<T: Copy>(b: Buffer<T>) -> Vec<T> {
-            unsafe {
-                let Buffer { data, len, capacity, .. } = b;
-                mem::forget(b);
-                Vec::from_raw_parts(data, len, capacity)
-            }
-        }
-
-        extern "C" fn reserve<T: Copy>(b: Buffer<T>, additional: usize) -> Buffer<T> {
-            let mut v = to_vec(b);
-            v.reserve(additional);
-            Buffer::from(v)
-        }
-
-        extern "C" fn drop<T: Copy>(b: Buffer<T>) {
-            mem::drop(to_vec(b));
-        }
-
-        Buffer { data, len, capacity, reserve, drop }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
deleted file mode 100644
index e78842f5c37..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/client.rs
+++ /dev/null
@@ -1,485 +0,0 @@
-//! Client-side types.
-
-use super::*;
-
-macro_rules! define_handles {
-    (
-        'owned: $($oty:ident,)*
-        'interned: $($ity:ident,)*
-    ) => {
-        #[repr(C)]
-        #[allow(non_snake_case)]
-        pub struct HandleCounters {
-            $($oty: AtomicUsize,)*
-            $($ity: AtomicUsize,)*
-        }
-
-        impl HandleCounters {
-            // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
-            // a wrapper `fn` pointer, once `const fn` can reference `static`s.
-            extern "C" fn get() -> &'static Self {
-                static COUNTERS: HandleCounters = HandleCounters {
-                    $($oty: AtomicUsize::new(1),)*
-                    $($ity: AtomicUsize::new(1),)*
-                };
-                &COUNTERS
-            }
-        }
-
-        // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
-        #[repr(C)]
-        #[allow(non_snake_case)]
-        pub(super) struct HandleStore<S: server::Types> {
-            $($oty: handle::OwnedStore<S::$oty>,)*
-            $($ity: handle::InternedStore<S::$ity>,)*
-        }
-
-        impl<S: server::Types> HandleStore<S> {
-            pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
-                HandleStore {
-                    $($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
-                    $($ity: handle::InternedStore::new(&handle_counters.$ity),)*
-                }
-            }
-        }
-
-        $(
-            #[repr(C)]
-            pub(crate) struct $oty(handle::Handle);
-
-            // Forward `Drop::drop` to the inherent `drop` method.
-            impl Drop for $oty {
-                fn drop(&mut self) {
-                    $oty(self.0).drop();
-                }
-            }
-
-            impl<S> Encode<S> for $oty {
-                fn encode(self, w: &mut Writer, s: &mut S) {
-                    let handle = self.0;
-                    mem::forget(self);
-                    handle.encode(w, s);
-                }
-            }
-
-            impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
-                for Marked<S::$oty, $oty>
-            {
-                fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
-                    s.$oty.take(handle::Handle::decode(r, &mut ()))
-                }
-            }
-
-            impl<S> Encode<S> for &$oty {
-                fn encode(self, w: &mut Writer, s: &mut S) {
-                    self.0.encode(w, s);
-                }
-            }
-
-            impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
-                for &'s Marked<S::$oty, $oty>
-            {
-                fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
-                    &s.$oty[handle::Handle::decode(r, &mut ())]
-                }
-            }
-
-            impl<S> Encode<S> for &mut $oty {
-                fn encode(self, w: &mut Writer, s: &mut S) {
-                    self.0.encode(w, s);
-                }
-            }
-
-            impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
-                for &'s mut Marked<S::$oty, $oty>
-            {
-                fn decode(
-                    r: &mut Reader<'_>,
-                    s: &'s mut HandleStore<server::MarkedTypes<S>>
-                ) -> Self {
-                    &mut s.$oty[handle::Handle::decode(r, &mut ())]
-                }
-            }
-
-            impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
-                for Marked<S::$oty, $oty>
-            {
-                fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
-                    s.$oty.alloc(self).encode(w, s);
-                }
-            }
-
-            impl<S> DecodeMut<'_, '_, S> for $oty {
-                fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-                    $oty(handle::Handle::decode(r, s))
-                }
-            }
-        )*
-
-        $(
-            #[repr(C)]
-            #[derive(Copy, Clone, PartialEq, Eq, Hash)]
-            pub(crate) struct $ity(handle::Handle);
-
-            impl<S> Encode<S> for $ity {
-                fn encode(self, w: &mut Writer, s: &mut S) {
-                    self.0.encode(w, s);
-                }
-            }
-
-            impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
-                for Marked<S::$ity, $ity>
-            {
-                fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
-                    s.$ity.copy(handle::Handle::decode(r, &mut ()))
-                }
-            }
-
-            impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
-                for Marked<S::$ity, $ity>
-            {
-                fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
-                    s.$ity.alloc(self).encode(w, s);
-                }
-            }
-
-            impl<S> DecodeMut<'_, '_, S> for $ity {
-                fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-                    $ity(handle::Handle::decode(r, s))
-                }
-            }
-        )*
-    }
-}
-define_handles! {
-    'owned:
-    FreeFunctions,
-    TokenStream,
-    TokenStreamBuilder,
-    TokenStreamIter,
-    Group,
-    Literal,
-    SourceFile,
-    MultiSpan,
-    Diagnostic,
-
-    'interned:
-    Punct,
-    Ident,
-    Span,
-}
-
-// FIXME(eddyb) generate these impls by pattern-matching on the
-// names of methods - also could use the presence of `fn drop`
-// to distinguish between 'owned and 'interned, above.
-// Alternatively, special 'modes" could be listed of types in with_api
-// instead of pattern matching on methods, here and in server decl.
-
-impl Clone for TokenStream {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
-impl Clone for TokenStreamIter {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
-impl Clone for Group {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
-impl Clone for Literal {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
-impl fmt::Debug for Literal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Literal")
-            // format the kind without quotes, as in `kind: Float`
-            .field("kind", &format_args!("{}", &self.debug_kind()))
-            .field("symbol", &self.symbol())
-            // format `Some("...")` on one line even in {:#?} mode
-            .field("suffix", &format_args!("{:?}", &self.suffix()))
-            .field("span", &self.span())
-            .finish()
-    }
-}
-
-impl Clone for SourceFile {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
-impl fmt::Debug for Span {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.debug())
-    }
-}
-
-macro_rules! define_client_side {
-    ($($name:ident {
-        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
-    }),* $(,)?) => {
-        $(impl $name {
-            $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* {
-                Bridge::with(|bridge| {
-                    let mut b = bridge.cached_buffer.take();
-
-                    b.clear();
-                    api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ());
-                    reverse_encode!(b; $($arg),*);
-
-                    b = bridge.dispatch.call(b);
-
-                    let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ());
-
-                    bridge.cached_buffer = b;
-
-                    r.unwrap_or_else(|e| panic::resume_unwind(e.into()))
-                })
-            })*
-        })*
-    }
-}
-with_api!(self, self, define_client_side);
-
-enum BridgeState<'a> {
-    /// No server is currently connected to this client.
-    NotConnected,
-
-    /// A server is connected and available for requests.
-    Connected(Bridge<'a>),
-
-    /// Access to the bridge is being exclusively acquired
-    /// (e.g., during `BridgeState::with`).
-    InUse,
-}
-
-enum BridgeStateL {}
-
-impl<'a> scoped_cell::ApplyL<'a> for BridgeStateL {
-    type Out = BridgeState<'a>;
-}
-
-thread_local! {
-    static BRIDGE_STATE: scoped_cell::ScopedCell<BridgeStateL> =
-        scoped_cell::ScopedCell::new(BridgeState::NotConnected);
-}
-
-impl BridgeState<'_> {
-    /// Take exclusive control of the thread-local
-    /// `BridgeState`, and pass it to `f`, mutably.
-    /// The state will be restored after `f` exits, even
-    /// by panic, including modifications made to it by `f`.
-    ///
-    /// N.B., while `f` is running, the thread-local state
-    /// is `BridgeState::InUse`.
-    fn with<R>(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R {
-        BRIDGE_STATE.with(|state| {
-            state.replace(BridgeState::InUse, |mut state| {
-                // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone
-                f(&mut state)
-            })
-        })
-    }
-}
-
-impl Bridge<'_> {
-    pub(crate) fn is_available() -> bool {
-        BridgeState::with(|state| match state {
-            BridgeState::Connected(_) | BridgeState::InUse => true,
-            BridgeState::NotConnected => false,
-        })
-    }
-
-    fn enter<R>(self, f: impl FnOnce() -> R) -> R {
-        let force_show_panics = self.force_show_panics;
-        // Hide the default panic output within `proc_macro` expansions.
-        // NB. the server can't do this because it may use a different libstd.
-        static HIDE_PANICS_DURING_EXPANSION: Once = Once::new();
-        HIDE_PANICS_DURING_EXPANSION.call_once(|| {
-            let prev = panic::take_hook();
-            panic::set_hook(Box::new(move |info| {
-                let show = BridgeState::with(|state| match state {
-                    BridgeState::NotConnected => true,
-                    BridgeState::Connected(_) | BridgeState::InUse => force_show_panics,
-                });
-                if show {
-                    prev(info)
-                }
-            }));
-        });
-
-        BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f))
-    }
-
-    fn with<R>(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R {
-        BridgeState::with(|state| match state {
-            BridgeState::NotConnected => {
-                panic!("procedural macro API is used outside of a procedural macro");
-            }
-            BridgeState::InUse => {
-                panic!("procedural macro API is used while it's already in use");
-            }
-            BridgeState::Connected(bridge) => f(bridge),
-        })
-    }
-}
-
-/// A client-side "global object" (usually a function pointer),
-/// which may be using a different `proc_macro` from the one
-/// used by the server, but can be interacted with compatibly.
-///
-/// N.B., `F` must have FFI-friendly memory layout (e.g., a pointer).
-/// The call ABI of function pointers used for `F` doesn't
-/// need to match between server and client, since it's only
-/// passed between them and (eventually) called by the client.
-#[repr(C)]
-#[derive(Copy, Clone)]
-pub struct Client<F> {
-    // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of
-    // a wrapper `fn` pointer, once `const fn` can reference `static`s.
-    pub(super) get_handle_counters: extern "C" fn() -> &'static HandleCounters,
-    pub(super) run: extern "C" fn(Bridge<'_>, F) -> Buffer<u8>,
-    pub(super) f: F,
-}
-
-/// Client-side helper for handling client panics, entering the bridge,
-/// deserializing input and serializing output.
-// FIXME(eddyb) maybe replace `Bridge::enter` with this?
-fn run_client<A: for<'a, 's> DecodeMut<'a, 's, ()>, R: Encode<()>>(
-    mut bridge: Bridge<'_>,
-    f: impl FnOnce(A) -> R,
-) -> Buffer<u8> {
-    // The initial `cached_buffer` contains the input.
-    let mut b = bridge.cached_buffer.take();
-
-    panic::catch_unwind(panic::AssertUnwindSafe(|| {
-        bridge.enter(|| {
-            let reader = &mut &b[..];
-            let input = A::decode(reader, &mut ());
-
-            // Put the `cached_buffer` back in the `Bridge`, for requests.
-            Bridge::with(|bridge| bridge.cached_buffer = b.take());
-
-            let output = f(input);
-
-            // Take the `cached_buffer` back out, for the output value.
-            b = Bridge::with(|bridge| bridge.cached_buffer.take());
-
-            // HACK(eddyb) Separate encoding a success value (`Ok(output)`)
-            // from encoding a panic (`Err(e: PanicMessage)`) to avoid
-            // having handles outside the `bridge.enter(|| ...)` scope, and
-            // to catch panics that could happen while encoding the success.
-            //
-            // Note that panics should be impossible beyond this point, but
-            // this is defensively trying to avoid any accidental panicking
-            // reaching the `extern "C"` (which should `abort` but might not
-            // at the moment, so this is also potentially preventing UB).
-            b.clear();
-            Ok::<_, ()>(output).encode(&mut b, &mut ());
-        })
-    }))
-    .map_err(PanicMessage::from)
-    .unwrap_or_else(|e| {
-        b.clear();
-        Err::<(), _>(e).encode(&mut b, &mut ());
-    });
-    b
-}
-
-impl Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
-    pub fn expand1(f: fn(super::super::TokenStream) -> super::super::TokenStream) -> Self {
-        extern "C" fn run(
-            bridge: Bridge<'_>,
-            f: impl FnOnce(super::super::TokenStream) -> super::super::TokenStream,
-        ) -> Buffer<u8> {
-            run_client(bridge, |input| f(super::super::TokenStream(input)).0)
-        }
-        Client { get_handle_counters: HandleCounters::get, run, f }
-    }
-}
-
-impl Client<fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream> {
-    pub fn expand2(
-        f: fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
-    ) -> Self {
-        extern "C" fn run(
-            bridge: Bridge<'_>,
-            f: impl FnOnce(
-                super::super::TokenStream,
-                super::super::TokenStream,
-            ) -> super::super::TokenStream,
-        ) -> Buffer<u8> {
-            run_client(bridge, |(input, input2)| {
-                f(super::super::TokenStream(input), super::super::TokenStream(input2)).0
-            })
-        }
-        Client { get_handle_counters: HandleCounters::get, run, f }
-    }
-}
-
-#[repr(C)]
-#[derive(Copy, Clone)]
-pub enum ProcMacro {
-    CustomDerive {
-        trait_name: &'static str,
-        attributes: &'static [&'static str],
-        client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
-    },
-
-    Attr {
-        name: &'static str,
-        client: Client<
-            fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
-        >,
-    },
-
-    Bang {
-        name: &'static str,
-        client: Client<fn(super::super::TokenStream) -> super::super::TokenStream>,
-    },
-}
-
-impl ProcMacro {
-    pub fn name(&self) -> &'static str {
-        match self {
-            ProcMacro::CustomDerive { trait_name, .. } => trait_name,
-            ProcMacro::Attr { name, .. } => name,
-            ProcMacro::Bang { name, .. } => name,
-        }
-    }
-
-    pub fn custom_derive(
-        trait_name: &'static str,
-        attributes: &'static [&'static str],
-        expand: fn(super::super::TokenStream) -> super::super::TokenStream,
-    ) -> Self {
-        ProcMacro::CustomDerive { trait_name, attributes, client: Client::expand1(expand) }
-    }
-
-    pub fn attr(
-        name: &'static str,
-        expand: fn(
-            super::super::TokenStream,
-            super::super::TokenStream,
-        ) -> super::super::TokenStream,
-    ) -> Self {
-        ProcMacro::Attr { name, client: Client::expand2(expand) }
-    }
-
-    pub fn bang(
-        name: &'static str,
-        expand: fn(super::super::TokenStream) -> super::super::TokenStream,
-    ) -> Self {
-        ProcMacro::Bang { name, client: Client::expand1(expand) }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
deleted file mode 100644
index 5be71cc3d70..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/closure.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-//! Closure type (equivalent to `&mut dyn FnMut(A) -> R`) that's `repr(C)`.
-
-#[repr(C)]
-pub struct Closure<'a, A, R> {
-    call: unsafe extern "C" fn(&mut Env, A) -> R,
-    env: &'a mut Env,
-}
-
-struct Env;
-
-impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> {
-    fn from(f: &'a mut F) -> Self {
-        unsafe extern "C" fn call<A, R, F: FnMut(A) -> R>(env: &mut Env, arg: A) -> R {
-            (*(env as *mut _ as *mut F))(arg)
-        }
-        Closure { call: call::<A, R, F>, env: unsafe { &mut *(f as *mut _ as *mut Env) } }
-    }
-}
-
-impl<'a, A, R> Closure<'a, A, R> {
-    pub fn call(&mut self, arg: A) -> R {
-        unsafe { (self.call)(self.env, arg) }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
deleted file mode 100644
index bcbb8681247..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/handle.rs
+++ /dev/null
@@ -1,70 +0,0 @@
-//! Server-side handles and storage for per-handle data.
-
-use std::collections::{BTreeMap, HashMap};
-use std::hash::Hash;
-use std::num::NonZeroU32;
-use std::ops::{Index, IndexMut};
-use std::sync::atomic::{AtomicUsize, Ordering};
-
-pub(super) type Handle = NonZeroU32;
-
-pub(super) struct OwnedStore<T: 'static> {
-    counter: &'static AtomicUsize,
-    data: BTreeMap<Handle, T>,
-}
-
-impl<T> OwnedStore<T> {
-    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
-        // Ensure the handle counter isn't 0, which would panic later,
-        // when `NonZeroU32::new` (aka `Handle::new`) is called in `alloc`.
-        assert_ne!(counter.load(Ordering::SeqCst), 0);
-
-        OwnedStore { counter, data: BTreeMap::new() }
-    }
-}
-
-impl<T> OwnedStore<T> {
-    pub(super) fn alloc(&mut self, x: T) -> Handle {
-        let counter = self.counter.fetch_add(1, Ordering::SeqCst);
-        let handle = Handle::new(counter as u32).expect("`proc_macro` handle counter overflowed");
-        assert!(self.data.insert(handle, x).is_none());
-        handle
-    }
-
-    pub(super) fn take(&mut self, h: Handle) -> T {
-        self.data.remove(&h).expect("use-after-free in `proc_macro` handle")
-    }
-}
-
-impl<T> Index<Handle> for OwnedStore<T> {
-    type Output = T;
-    fn index(&self, h: Handle) -> &T {
-        self.data.get(&h).expect("use-after-free in `proc_macro` handle")
-    }
-}
-
-impl<T> IndexMut<Handle> for OwnedStore<T> {
-    fn index_mut(&mut self, h: Handle) -> &mut T {
-        self.data.get_mut(&h).expect("use-after-free in `proc_macro` handle")
-    }
-}
-
-pub(super) struct InternedStore<T: 'static> {
-    owned: OwnedStore<T>,
-    interner: HashMap<T, Handle>,
-}
-
-impl<T: Copy + Eq + Hash> InternedStore<T> {
-    pub(super) fn new(counter: &'static AtomicUsize) -> Self {
-        InternedStore { owned: OwnedStore::new(counter), interner: HashMap::new() }
-    }
-
-    pub(super) fn alloc(&mut self, x: T) -> Handle {
-        let owned = &mut self.owned;
-        *self.interner.entry(x).or_insert_with(|| owned.alloc(x))
-    }
-
-    pub(super) fn copy(&mut self, h: Handle) -> T {
-        self.owned[h]
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
deleted file mode 100644
index b7968c529c3..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/mod.rs
+++ /dev/null
@@ -1,429 +0,0 @@
-//! Internal interface for communicating between a `proc_macro` client
-//! (a proc macro crate) and a `proc_macro` server (a compiler front-end).
-//!
-//! Serialization (with C ABI buffers) and unique integer handles are employed
-//! to allow safely interfacing between two copies of `proc_macro` built
-//! (from the same source) by different compilers with potentially mismatching
-//! Rust ABIs (e.g., stage0/bin/rustc vs stage1/bin/rustc during bootstrap).
-
-#![deny(unsafe_code)]
-
-pub use super::{Delimiter, Level, LineColumn, Spacing};
-use std::fmt;
-use std::hash::Hash;
-use std::marker;
-use std::mem;
-use std::ops::Bound;
-use std::panic;
-use std::sync::atomic::AtomicUsize;
-use std::sync::Once;
-use std::thread;
-
-/// Higher-order macro describing the server RPC API, allowing automatic
-/// generation of type-safe Rust APIs, both client-side and server-side.
-///
-/// `with_api!(MySelf, my_self, my_macro)` expands to:
-/// ```rust,ignore (pseudo-code)
-/// my_macro! {
-///     // ...
-///     Literal {
-///         // ...
-///         fn character(ch: char) -> MySelf::Literal;
-///         // ...
-///         fn span(my_self: &MySelf::Literal) -> MySelf::Span;
-///         fn set_span(my_self: &mut MySelf::Literal, span: MySelf::Span);
-///     },
-///     // ...
-/// }
-/// ```
-///
-/// The first two arguments serve to customize the arguments names
-/// and argument/return types, to enable several different usecases:
-///
-/// If `my_self` is just `self`, then each `fn` signature can be used
-/// as-is for a method. If it's anything else (`self_` in practice),
-/// then the signatures don't have a special `self` argument, and
-/// can, therefore, have a different one introduced.
-///
-/// If `MySelf` is just `Self`, then the types are only valid inside
-/// a trait or a trait impl, where the trait has associated types
-/// for each of the API types. If non-associated types are desired,
-/// a module name (`self` in practice) can be used instead of `Self`.
-macro_rules! with_api {
-    ($S:ident, $self:ident, $m:ident) => {
-        $m! {
-            FreeFunctions {
-                fn drop($self: $S::FreeFunctions);
-                fn track_env_var(var: &str, value: Option<&str>);
-                fn track_path(path: &str);
-            },
-            TokenStream {
-                fn drop($self: $S::TokenStream);
-                fn clone($self: &$S::TokenStream) -> $S::TokenStream;
-                fn new() -> $S::TokenStream;
-                fn is_empty($self: &$S::TokenStream) -> bool;
-                fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
-                fn from_str(src: &str) -> $S::TokenStream;
-                fn to_string($self: &$S::TokenStream) -> String;
-                fn from_token_tree(
-                    tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
-                ) -> $S::TokenStream;
-                fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
-            },
-            TokenStreamBuilder {
-                fn drop($self: $S::TokenStreamBuilder);
-                fn new() -> $S::TokenStreamBuilder;
-                fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
-                fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
-            },
-            TokenStreamIter {
-                fn drop($self: $S::TokenStreamIter);
-                fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
-                fn next(
-                    $self: &mut $S::TokenStreamIter,
-                ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
-            },
-            Group {
-                fn drop($self: $S::Group);
-                fn clone($self: &$S::Group) -> $S::Group;
-                fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
-                fn delimiter($self: &$S::Group) -> Delimiter;
-                fn stream($self: &$S::Group) -> $S::TokenStream;
-                fn span($self: &$S::Group) -> $S::Span;
-                fn span_open($self: &$S::Group) -> $S::Span;
-                fn span_close($self: &$S::Group) -> $S::Span;
-                fn set_span($self: &mut $S::Group, span: $S::Span);
-            },
-            Punct {
-                fn new(ch: char, spacing: Spacing) -> $S::Punct;
-                fn as_char($self: $S::Punct) -> char;
-                fn spacing($self: $S::Punct) -> Spacing;
-                fn span($self: $S::Punct) -> $S::Span;
-                fn with_span($self: $S::Punct, span: $S::Span) -> $S::Punct;
-            },
-            Ident {
-                fn new(string: &str, span: $S::Span, is_raw: bool) -> $S::Ident;
-                fn span($self: $S::Ident) -> $S::Span;
-                fn with_span($self: $S::Ident, span: $S::Span) -> $S::Ident;
-            },
-            Literal {
-                fn drop($self: $S::Literal);
-                fn clone($self: &$S::Literal) -> $S::Literal;
-                fn from_str(s: &str) -> Result<$S::Literal, ()>;
-                fn to_string($self: &$S::Literal) -> String;
-                fn debug_kind($self: &$S::Literal) -> String;
-                fn symbol($self: &$S::Literal) -> String;
-                fn suffix($self: &$S::Literal) -> Option<String>;
-                fn integer(n: &str) -> $S::Literal;
-                fn typed_integer(n: &str, kind: &str) -> $S::Literal;
-                fn float(n: &str) -> $S::Literal;
-                fn f32(n: &str) -> $S::Literal;
-                fn f64(n: &str) -> $S::Literal;
-                fn string(string: &str) -> $S::Literal;
-                fn character(ch: char) -> $S::Literal;
-                fn byte_string(bytes: &[u8]) -> $S::Literal;
-                fn span($self: &$S::Literal) -> $S::Span;
-                fn set_span($self: &mut $S::Literal, span: $S::Span);
-                fn subspan(
-                    $self: &$S::Literal,
-                    start: Bound<usize>,
-                    end: Bound<usize>,
-                ) -> Option<$S::Span>;
-            },
-            SourceFile {
-                fn drop($self: $S::SourceFile);
-                fn clone($self: &$S::SourceFile) -> $S::SourceFile;
-                fn eq($self: &$S::SourceFile, other: &$S::SourceFile) -> bool;
-                fn path($self: &$S::SourceFile) -> String;
-                fn is_real($self: &$S::SourceFile) -> bool;
-            },
-            MultiSpan {
-                fn drop($self: $S::MultiSpan);
-                fn new() -> $S::MultiSpan;
-                fn push($self: &mut $S::MultiSpan, span: $S::Span);
-            },
-            Diagnostic {
-                fn drop($self: $S::Diagnostic);
-                fn new(level: Level, msg: &str, span: $S::MultiSpan) -> $S::Diagnostic;
-                fn sub(
-                    $self: &mut $S::Diagnostic,
-                    level: Level,
-                    msg: &str,
-                    span: $S::MultiSpan,
-                );
-                fn emit($self: $S::Diagnostic);
-            },
-            Span {
-                fn debug($self: $S::Span) -> String;
-                fn def_site() -> $S::Span;
-                fn call_site() -> $S::Span;
-                fn mixed_site() -> $S::Span;
-                fn source_file($self: $S::Span) -> $S::SourceFile;
-                fn parent($self: $S::Span) -> Option<$S::Span>;
-                fn source($self: $S::Span) -> $S::Span;
-                fn start($self: $S::Span) -> LineColumn;
-                fn end($self: $S::Span) -> LineColumn;
-                fn before($self: $S::Span) -> $S::Span;
-                fn after($self: $S::Span) -> $S::Span;
-                fn join($self: $S::Span, other: $S::Span) -> Option<$S::Span>;
-                fn resolved_at($self: $S::Span, at: $S::Span) -> $S::Span;
-                fn source_text($self: $S::Span) -> Option<String>;
-                fn save_span($self: $S::Span) -> usize;
-                fn recover_proc_macro_span(id: usize) -> $S::Span;
-            },
-        }
-    };
-}
-
-// FIXME(eddyb) this calls `encode` for each argument, but in reverse,
-// to avoid borrow conflicts from borrows started by `&mut` arguments.
-macro_rules! reverse_encode {
-    ($writer:ident;) => {};
-    ($writer:ident; $first:ident $(, $rest:ident)*) => {
-        reverse_encode!($writer; $($rest),*);
-        $first.encode(&mut $writer, &mut ());
-    }
-}
-
-// FIXME(eddyb) this calls `decode` for each argument, but in reverse,
-// to avoid borrow conflicts from borrows started by `&mut` arguments.
-macro_rules! reverse_decode {
-    ($reader:ident, $s:ident;) => {};
-    ($reader:ident, $s:ident; $first:ident: $first_ty:ty $(, $rest:ident: $rest_ty:ty)*) => {
-        reverse_decode!($reader, $s; $($rest: $rest_ty),*);
-        let $first = <$first_ty>::decode(&mut $reader, $s);
-    }
-}
-
-#[allow(unsafe_code)]
-mod buffer;
-#[forbid(unsafe_code)]
-pub mod client;
-#[allow(unsafe_code)]
-mod closure;
-#[forbid(unsafe_code)]
-mod handle;
-#[macro_use]
-#[forbid(unsafe_code)]
-mod rpc;
-#[allow(unsafe_code)]
-mod scoped_cell;
-#[forbid(unsafe_code)]
-pub mod server;
-
-use buffer::Buffer;
-pub use rpc::PanicMessage;
-use rpc::{Decode, DecodeMut, Encode, Reader, Writer};
-
-/// An active connection between a server and a client.
-/// The server creates the bridge (`Bridge::run_server` in `server.rs`),
-/// then passes it to the client through the function pointer in the `run`
-/// field of `client::Client`. The client holds its copy of the `Bridge`
-/// in TLS during its execution (`Bridge::{enter, with}` in `client.rs`).
-#[repr(C)]
-pub struct Bridge<'a> {
-    /// Reusable buffer (only `clear`-ed, never shrunk), primarily
-    /// used for making requests, but also for passing input to client.
-    cached_buffer: Buffer<u8>,
-
-    /// Server-side function that the client uses to make requests.
-    dispatch: closure::Closure<'a, Buffer<u8>, Buffer<u8>>,
-
-    /// If 'true', always invoke the default panic hook
-    force_show_panics: bool,
-}
-
-#[forbid(unsafe_code)]
-#[allow(non_camel_case_types)]
-mod api_tags {
-    use super::rpc::{DecodeMut, Encode, Reader, Writer};
-
-    macro_rules! declare_tags {
-        ($($name:ident {
-            $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)*
-        }),* $(,)?) => {
-            $(
-                pub(super) enum $name {
-                    $($method),*
-                }
-                rpc_encode_decode!(enum $name { $($method),* });
-            )*
-
-
-            pub(super) enum Method {
-                $($name($name)),*
-            }
-            rpc_encode_decode!(enum Method { $($name(m)),* });
-        }
-    }
-    with_api!(self, self, declare_tags);
-}
-
-/// Helper to wrap associated types to allow trait impl dispatch.
-/// That is, normally a pair of impls for `T::Foo` and `T::Bar`
-/// can overlap, but if the impls are, instead, on types like
-/// `Marked<T::Foo, Foo>` and `Marked<T::Bar, Bar>`, they can't.
-trait Mark {
-    type Unmarked;
-    fn mark(unmarked: Self::Unmarked) -> Self;
-}
-
-/// Unwrap types wrapped by `Mark::mark` (see `Mark` for details).
-trait Unmark {
-    type Unmarked;
-    fn unmark(self) -> Self::Unmarked;
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-struct Marked<T, M> {
-    value: T,
-    _marker: marker::PhantomData<M>,
-}
-
-impl<T, M> Mark for Marked<T, M> {
-    type Unmarked = T;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        Marked { value: unmarked, _marker: marker::PhantomData }
-    }
-}
-impl<T, M> Unmark for Marked<T, M> {
-    type Unmarked = T;
-    fn unmark(self) -> Self::Unmarked {
-        self.value
-    }
-}
-impl<'a, T, M> Unmark for &'a Marked<T, M> {
-    type Unmarked = &'a T;
-    fn unmark(self) -> Self::Unmarked {
-        &self.value
-    }
-}
-impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
-    type Unmarked = &'a mut T;
-    fn unmark(self) -> Self::Unmarked {
-        &mut self.value
-    }
-}
-
-impl<T: Mark> Mark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark)
-    }
-}
-impl<T: Unmark> Unmark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark)
-    }
-}
-
-impl<T: Mark, E: Mark> Mark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark).map_err(E::mark)
-    }
-}
-impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark).map_err(E::unmark)
-    }
-}
-
-macro_rules! mark_noop {
-    ($($ty:ty),* $(,)?) => {
-        $(
-            impl Mark for $ty {
-                type Unmarked = Self;
-                fn mark(unmarked: Self::Unmarked) -> Self {
-                    unmarked
-                }
-            }
-            impl Unmark for $ty {
-                type Unmarked = Self;
-                fn unmark(self) -> Self::Unmarked {
-                    self
-                }
-            }
-        )*
-    }
-}
-mark_noop! {
-    (),
-    bool,
-    char,
-    &'_ [u8],
-    &'_ str,
-    String,
-    usize,
-    Delimiter,
-    Level,
-    LineColumn,
-    Spacing,
-    Bound<usize>,
-}
-
-rpc_encode_decode!(
-    enum Delimiter {
-        Parenthesis,
-        Brace,
-        Bracket,
-        None,
-    }
-);
-rpc_encode_decode!(
-    enum Level {
-        Error,
-        Warning,
-        Note,
-        Help,
-    }
-);
-rpc_encode_decode!(struct LineColumn { line, column });
-rpc_encode_decode!(
-    enum Spacing {
-        Alone,
-        Joint,
-    }
-);
-
-#[derive(Clone)]
-pub enum TokenTree<G, P, I, L> {
-    Group(G),
-    Punct(P),
-    Ident(I),
-    Literal(L),
-}
-
-impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        match unmarked {
-            TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
-            TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
-            TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
-            TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
-        }
-    }
-}
-impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        match self {
-            TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
-            TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
-            TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
-            TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
-        }
-    }
-}
-
-rpc_encode_decode!(
-    enum TokenTree<G, P, I, L> {
-        Group(tt),
-        Punct(tt),
-        Ident(tt),
-        Literal(tt),
-    }
-);
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
deleted file mode 100644
index d50564d01a5..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/rpc.rs
+++ /dev/null
@@ -1,305 +0,0 @@
-//! Serialization for client-server communication.
-
-use std::any::Any;
-use std::char;
-use std::io::Write;
-use std::num::NonZeroU32;
-use std::ops::Bound;
-use std::str;
-
-pub(super) type Writer = super::buffer::Buffer<u8>;
-
-pub(super) trait Encode<S>: Sized {
-    fn encode(self, w: &mut Writer, s: &mut S);
-}
-
-pub(super) type Reader<'a> = &'a [u8];
-
-pub(super) trait Decode<'a, 's, S>: Sized {
-    fn decode(r: &mut Reader<'a>, s: &'s S) -> Self;
-}
-
-pub(super) trait DecodeMut<'a, 's, S>: Sized {
-    fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self;
-}
-
-macro_rules! rpc_encode_decode {
-    (le $ty:ty) => {
-        impl<S> Encode<S> for $ty {
-            fn encode(self, w: &mut Writer, _: &mut S) {
-                w.extend_from_array(&self.to_le_bytes());
-            }
-        }
-
-        impl<S> DecodeMut<'_, '_, S> for $ty {
-            fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
-                const N: usize = ::std::mem::size_of::<$ty>();
-
-                let mut bytes = [0; N];
-                bytes.copy_from_slice(&r[..N]);
-                *r = &r[N..];
-
-                Self::from_le_bytes(bytes)
-            }
-        }
-    };
-    (struct $name:ident { $($field:ident),* $(,)? }) => {
-        impl<S> Encode<S> for $name {
-            fn encode(self, w: &mut Writer, s: &mut S) {
-                $(self.$field.encode(w, s);)*
-            }
-        }
-
-        impl<S> DecodeMut<'_, '_, S> for $name {
-            fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-                $name {
-                    $($field: DecodeMut::decode(r, s)),*
-                }
-            }
-        }
-    };
-    (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => {
-        impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
-            fn encode(self, w: &mut Writer, s: &mut S) {
-                // HACK(eddyb): `Tag` enum duplicated between the
-                // two impls as there's no other place to stash it.
-                #[allow(non_upper_case_globals)]
-                mod tag {
-                    #[repr(u8)] enum Tag { $($variant),* }
-
-                    $(pub const $variant: u8 = Tag::$variant as u8;)*
-                }
-
-                match self {
-                    $($name::$variant $(($field))* => {
-                        tag::$variant.encode(w, s);
-                        $($field.encode(w, s);)*
-                    })*
-                }
-            }
-        }
-
-        impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
-            for $name $(<$($T),+>)?
-        {
-            fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
-                // HACK(eddyb): `Tag` enum duplicated between the
-                // two impls as there's no other place to stash it.
-                #[allow(non_upper_case_globals)]
-                mod tag {
-                    #[repr(u8)] enum Tag { $($variant),* }
-
-                    $(pub const $variant: u8 = Tag::$variant as u8;)*
-                }
-
-                match u8::decode(r, s) {
-                    $(tag::$variant => {
-                        $(let $field = DecodeMut::decode(r, s);)*
-                        $name::$variant $(($field))*
-                    })*
-                    _ => unreachable!(),
-                }
-            }
-        }
-    }
-}
-
-impl<S> Encode<S> for () {
-    fn encode(self, _: &mut Writer, _: &mut S) {}
-}
-
-impl<S> DecodeMut<'_, '_, S> for () {
-    fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {}
-}
-
-impl<S> Encode<S> for u8 {
-    fn encode(self, w: &mut Writer, _: &mut S) {
-        w.push(self);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for u8 {
-    fn decode(r: &mut Reader<'_>, _: &mut S) -> Self {
-        let x = r[0];
-        *r = &r[1..];
-        x
-    }
-}
-
-rpc_encode_decode!(le u32);
-rpc_encode_decode!(le usize);
-
-impl<S> Encode<S> for bool {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        (self as u8).encode(w, s);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for bool {
-    fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-        match u8::decode(r, s) {
-            0 => false,
-            1 => true,
-            _ => unreachable!(),
-        }
-    }
-}
-
-impl<S> Encode<S> for char {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        (self as u32).encode(w, s);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for char {
-    fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-        char::from_u32(u32::decode(r, s)).unwrap()
-    }
-}
-
-impl<S> Encode<S> for NonZeroU32 {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self.get().encode(w, s);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for NonZeroU32 {
-    fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-        Self::new(u32::decode(r, s)).unwrap()
-    }
-}
-
-impl<S, A: Encode<S>, B: Encode<S>> Encode<S> for (A, B) {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self.0.encode(w, s);
-        self.1.encode(w, s);
-    }
-}
-
-impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S>
-    for (A, B)
-{
-    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
-        (DecodeMut::decode(r, s), DecodeMut::decode(r, s))
-    }
-}
-
-rpc_encode_decode!(
-    enum Bound<T> {
-        Included(x),
-        Excluded(x),
-        Unbounded,
-    }
-);
-
-rpc_encode_decode!(
-    enum Option<T> {
-        None,
-        Some(x),
-    }
-);
-
-rpc_encode_decode!(
-    enum Result<T, E> {
-        Ok(x),
-        Err(e),
-    }
-);
-
-impl<S> Encode<S> for &[u8] {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self.len().encode(w, s);
-        w.write_all(self).unwrap();
-    }
-}
-
-impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] {
-    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
-        let len = usize::decode(r, s);
-        let xs = &r[..len];
-        *r = &r[len..];
-        xs
-    }
-}
-
-impl<S> Encode<S> for &str {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self.as_bytes().encode(w, s);
-    }
-}
-
-impl<'a, S> DecodeMut<'a, '_, S> for &'a str {
-    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
-        str::from_utf8(<&[u8]>::decode(r, s)).unwrap()
-    }
-}
-
-impl<S> Encode<S> for String {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self[..].encode(w, s);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for String {
-    fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-        <&str>::decode(r, s).to_string()
-    }
-}
-
-/// Simplified version of panic payloads, ignoring
-/// types other than `&'static str` and `String`.
-pub enum PanicMessage {
-    StaticStr(&'static str),
-    String(String),
-    Unknown,
-}
-
-impl From<Box<dyn Any + Send>> for PanicMessage {
-    fn from(payload: Box<dyn Any + Send + 'static>) -> Self {
-        if let Some(s) = payload.downcast_ref::<&'static str>() {
-            return PanicMessage::StaticStr(s);
-        }
-        if let Ok(s) = payload.downcast::<String>() {
-            return PanicMessage::String(*s);
-        }
-        PanicMessage::Unknown
-    }
-}
-
-impl Into<Box<dyn Any + Send>> for PanicMessage {
-    fn into(self) -> Box<dyn Any + Send> {
-        match self {
-            PanicMessage::StaticStr(s) => Box::new(s),
-            PanicMessage::String(s) => Box::new(s),
-            PanicMessage::Unknown => {
-                struct UnknownPanicMessage;
-                Box::new(UnknownPanicMessage)
-            }
-        }
-    }
-}
-
-impl PanicMessage {
-    pub fn as_str(&self) -> Option<&str> {
-        match self {
-            PanicMessage::StaticStr(s) => Some(s),
-            PanicMessage::String(s) => Some(s),
-            PanicMessage::Unknown => None,
-        }
-    }
-}
-
-impl<S> Encode<S> for PanicMessage {
-    fn encode(self, w: &mut Writer, s: &mut S) {
-        self.as_str().encode(w, s);
-    }
-}
-
-impl<S> DecodeMut<'_, '_, S> for PanicMessage {
-    fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
-        match Option::<String>::decode(r, s) {
-            Some(s) => PanicMessage::String(s),
-            None => PanicMessage::Unknown,
-        }
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
deleted file mode 100644
index b0c2e5b9c26..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/scoped_cell.rs
+++ /dev/null
@@ -1,81 +0,0 @@
-//! `Cell` variant for (scoped) existential lifetimes.
-
-use std::cell::Cell;
-use std::mem;
-use std::ops::{Deref, DerefMut};
-
-/// Type lambda application, with a lifetime.
-#[allow(unused_lifetimes)]
-pub trait ApplyL<'a> {
-    type Out;
-}
-
-/// Type lambda taking a lifetime, i.e., `Lifetime -> Type`.
-pub trait LambdaL: for<'a> ApplyL<'a> {}
-
-impl<T: for<'a> ApplyL<'a>> LambdaL for T {}
-
-// HACK(eddyb) work around projection limitations with a newtype
-// FIXME(#52812) replace with `&'a mut <T as ApplyL<'b>>::Out`
-pub struct RefMutL<'a, 'b, T: LambdaL>(&'a mut <T as ApplyL<'b>>::Out);
-
-impl<'a, 'b, T: LambdaL> Deref for RefMutL<'a, 'b, T> {
-    type Target = <T as ApplyL<'b>>::Out;
-    fn deref(&self) -> &Self::Target {
-        self.0
-    }
-}
-
-impl<'a, 'b, T: LambdaL> DerefMut for RefMutL<'a, 'b, T> {
-    fn deref_mut(&mut self) -> &mut Self::Target {
-        self.0
-    }
-}
-
-pub struct ScopedCell<T: LambdaL>(Cell<<T as ApplyL<'static>>::Out>);
-
-impl<T: LambdaL> ScopedCell<T> {
-    pub fn new(value: <T as ApplyL<'static>>::Out) -> Self {
-        ScopedCell(Cell::new(value))
-    }
-
-    /// Sets the value in `self` to `replacement` while
-    /// running `f`, which gets the old value, mutably.
-    /// The old value will be restored after `f` exits, even
-    /// by panic, including modifications made to it by `f`.
-    pub fn replace<'a, R>(
-        &self,
-        replacement: <T as ApplyL<'a>>::Out,
-        f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R,
-    ) -> R {
-        /// Wrapper that ensures that the cell always gets filled
-        /// (with the original state, optionally changed by `f`),
-        /// even if `f` had panicked.
-        struct PutBackOnDrop<'a, T: LambdaL> {
-            cell: &'a ScopedCell<T>,
-            value: Option<<T as ApplyL<'static>>::Out>,
-        }
-
-        impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> {
-            fn drop(&mut self) {
-                self.cell.0.set(self.value.take().unwrap());
-            }
-        }
-
-        let mut put_back_on_drop = PutBackOnDrop {
-            cell: self,
-            value: Some(self.0.replace(unsafe {
-                let erased = mem::transmute_copy(&replacement);
-                mem::forget(replacement);
-                erased
-            })),
-        };
-
-        f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
-    }
-
-    /// Sets the value in `self` to `value` while running `f`.
-    pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
-        self.replace(value, |_| f())
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
deleted file mode 100644
index 06a19791351..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/bridge/server.rs
+++ /dev/null
@@ -1,352 +0,0 @@
-//! Server-side traits.
-
-use super::*;
-
-// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
-use super::client::HandleStore;
-
-/// Declare an associated item of one of the traits below, optionally
-/// adjusting it (i.e., adding bounds to types and default bodies to methods).
-macro_rules! associated_item {
-    (type FreeFunctions) =>
-        (type FreeFunctions: 'static;);
-    (type TokenStream) =>
-        (type TokenStream: 'static + Clone;);
-    (type TokenStreamBuilder) =>
-        (type TokenStreamBuilder: 'static;);
-    (type TokenStreamIter) =>
-        (type TokenStreamIter: 'static + Clone;);
-    (type Group) =>
-        (type Group: 'static + Clone;);
-    (type Punct) =>
-        (type Punct: 'static + Copy + Eq + Hash;);
-    (type Ident) =>
-        (type Ident: 'static + Copy + Eq + Hash;);
-    (type Literal) =>
-        (type Literal: 'static + Clone;);
-    (type SourceFile) =>
-        (type SourceFile: 'static + Clone;);
-    (type MultiSpan) =>
-        (type MultiSpan: 'static;);
-    (type Diagnostic) =>
-        (type Diagnostic: 'static;);
-    (type Span) =>
-        (type Span: 'static + Copy + Eq + Hash;);
-    (fn drop(&mut self, $arg:ident: $arg_ty:ty)) =>
-        (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) });
-    (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) =>
-        (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() });
-    ($($item:tt)*) => ($($item)*;)
-}
-
-macro_rules! declare_server_traits {
-    ($($name:ident {
-        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
-    }),* $(,)?) => {
-        pub trait Types {
-            $(associated_item!(type $name);)*
-        }
-
-        $(pub trait $name: Types {
-            $(associated_item!(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)?);)*
-        })*
-
-        pub trait Server: Types $(+ $name)* {}
-        impl<S: Types $(+ $name)*> Server for S {}
-    }
-}
-with_api!(Self, self_, declare_server_traits);
-
-pub(super) struct MarkedTypes<S: Types>(S);
-
-macro_rules! define_mark_types_impls {
-    ($($name:ident {
-        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
-    }),* $(,)?) => {
-        impl<S: Types> Types for MarkedTypes<S> {
-            $(type $name = Marked<S::$name, client::$name>;)*
-        }
-
-        $(impl<S: $name> $name for MarkedTypes<S> {
-            $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? {
-                <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*))
-            })*
-        })*
-    }
-}
-with_api!(Self, self_, define_mark_types_impls);
-
-struct Dispatcher<S: Types> {
-    handle_store: HandleStore<S>,
-    server: S,
-}
-
-macro_rules! define_dispatcher_impl {
-    ($($name:ident {
-        $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
-    }),* $(,)?) => {
-        // FIXME(eddyb) `pub` only for `ExecutionStrategy` below.
-        pub trait DispatcherTrait {
-            // HACK(eddyb) these are here to allow `Self::$name` to work below.
-            $(type $name;)*
-            fn dispatch(&mut self, b: Buffer<u8>) -> Buffer<u8>;
-        }
-
-        impl<S: Server> DispatcherTrait for Dispatcher<MarkedTypes<S>> {
-            $(type $name = <MarkedTypes<S> as Types>::$name;)*
-            fn dispatch(&mut self, mut b: Buffer<u8>) -> Buffer<u8> {
-                let Dispatcher { handle_store, server } = self;
-
-                let mut reader = &b[..];
-                match api_tags::Method::decode(&mut reader, &mut ()) {
-                    $(api_tags::Method::$name(m) => match m {
-                        $(api_tags::$name::$method => {
-                            let mut call_method = || {
-                                reverse_decode!(reader, handle_store; $($arg: $arg_ty),*);
-                                $name::$method(server, $($arg),*)
-                            };
-                            // HACK(eddyb) don't use `panic::catch_unwind` in a panic.
-                            // If client and server happen to use the same `libstd`,
-                            // `catch_unwind` asserts that the panic counter was 0,
-                            // even when the closure passed to it didn't panic.
-                            let r = if thread::panicking() {
-                                Ok(call_method())
-                            } else {
-                                panic::catch_unwind(panic::AssertUnwindSafe(call_method))
-                                    .map_err(PanicMessage::from)
-                            };
-
-                            b.clear();
-                            r.encode(&mut b, handle_store);
-                        })*
-                    }),*
-                }
-                b
-            }
-        }
-    }
-}
-with_api!(Self, self_, define_dispatcher_impl);
-
-pub trait ExecutionStrategy {
-    fn run_bridge_and_client<D: Copy + Send + 'static>(
-        &self,
-        dispatcher: &mut impl DispatcherTrait,
-        input: Buffer<u8>,
-        run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
-        client_data: D,
-        force_show_panics: bool,
-    ) -> Buffer<u8>;
-}
-
-pub struct SameThread;
-
-impl ExecutionStrategy for SameThread {
-    fn run_bridge_and_client<D: Copy + Send + 'static>(
-        &self,
-        dispatcher: &mut impl DispatcherTrait,
-        input: Buffer<u8>,
-        run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
-        client_data: D,
-        force_show_panics: bool,
-    ) -> Buffer<u8> {
-        let mut dispatch = |b| dispatcher.dispatch(b);
-
-        run_client(
-            Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics },
-            client_data,
-        )
-    }
-}
-
-// NOTE(eddyb) Two implementations are provided, the second one is a bit
-// faster but neither is anywhere near as fast as same-thread execution.
-
-pub struct CrossThread1;
-
-impl ExecutionStrategy for CrossThread1 {
-    fn run_bridge_and_client<D: Copy + Send + 'static>(
-        &self,
-        dispatcher: &mut impl DispatcherTrait,
-        input: Buffer<u8>,
-        run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
-        client_data: D,
-        force_show_panics: bool,
-    ) -> Buffer<u8> {
-        use std::sync::mpsc::channel;
-
-        let (req_tx, req_rx) = channel();
-        let (res_tx, res_rx) = channel();
-
-        let join_handle = thread::spawn(move || {
-            let mut dispatch = |b| {
-                req_tx.send(b).unwrap();
-                res_rx.recv().unwrap()
-            };
-
-            run_client(
-                Bridge {
-                    cached_buffer: input,
-                    dispatch: (&mut dispatch).into(),
-                    force_show_panics,
-                },
-                client_data,
-            )
-        });
-
-        for b in req_rx {
-            res_tx.send(dispatcher.dispatch(b)).unwrap();
-        }
-
-        join_handle.join().unwrap()
-    }
-}
-
-pub struct CrossThread2;
-
-impl ExecutionStrategy for CrossThread2 {
-    fn run_bridge_and_client<D: Copy + Send + 'static>(
-        &self,
-        dispatcher: &mut impl DispatcherTrait,
-        input: Buffer<u8>,
-        run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
-        client_data: D,
-        force_show_panics: bool,
-    ) -> Buffer<u8> {
-        use std::sync::{Arc, Mutex};
-
-        enum State<T> {
-            Req(T),
-            Res(T),
-        }
-
-        let mut state = Arc::new(Mutex::new(State::Res(Buffer::new())));
-
-        let server_thread = thread::current();
-        let state2 = state.clone();
-        let join_handle = thread::spawn(move || {
-            let mut dispatch = |b| {
-                *state2.lock().unwrap() = State::Req(b);
-                server_thread.unpark();
-                loop {
-                    thread::park();
-                    if let State::Res(b) = &mut *state2.lock().unwrap() {
-                        break b.take();
-                    }
-                }
-            };
-
-            let r = run_client(
-                Bridge {
-                    cached_buffer: input,
-                    dispatch: (&mut dispatch).into(),
-                    force_show_panics,
-                },
-                client_data,
-            );
-
-            // Wake up the server so it can exit the dispatch loop.
-            drop(state2);
-            server_thread.unpark();
-
-            r
-        });
-
-        // Check whether `state2` was dropped, to know when to stop.
-        while Arc::get_mut(&mut state).is_none() {
-            thread::park();
-            let mut b = match &mut *state.lock().unwrap() {
-                State::Req(b) => b.take(),
-                _ => continue,
-            };
-            b = dispatcher.dispatch(b.take());
-            *state.lock().unwrap() = State::Res(b);
-            join_handle.thread().unpark();
-        }
-
-        join_handle.join().unwrap()
-    }
-}
-
-fn run_server<
-    S: Server,
-    I: Encode<HandleStore<MarkedTypes<S>>>,
-    O: for<'a, 's> DecodeMut<'a, 's, HandleStore<MarkedTypes<S>>>,
-    D: Copy + Send + 'static,
->(
-    strategy: &impl ExecutionStrategy,
-    handle_counters: &'static client::HandleCounters,
-    server: S,
-    input: I,
-    run_client: extern "C" fn(Bridge<'_>, D) -> Buffer<u8>,
-    client_data: D,
-    force_show_panics: bool,
-) -> Result<O, PanicMessage> {
-    let mut dispatcher =
-        Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) };
-
-    let mut b = Buffer::new();
-    input.encode(&mut b, &mut dispatcher.handle_store);
-
-    b = strategy.run_bridge_and_client(
-        &mut dispatcher,
-        b,
-        run_client,
-        client_data,
-        force_show_panics,
-    );
-
-    Result::decode(&mut &b[..], &mut dispatcher.handle_store)
-}
-
-impl client::Client<fn(super::super::TokenStream) -> super::super::TokenStream> {
-    pub fn run<S: Server>(
-        &self,
-        strategy: &impl ExecutionStrategy,
-        server: S,
-        input: S::TokenStream,
-        force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
-        let client::Client { get_handle_counters, run, f } = *self;
-        run_server(
-            strategy,
-            get_handle_counters(),
-            server,
-            <MarkedTypes<S> as Types>::TokenStream::mark(input),
-            run,
-            f,
-            force_show_panics,
-        )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
-    }
-}
-
-impl
-    client::Client<
-        fn(super::super::TokenStream, super::super::TokenStream) -> super::super::TokenStream,
-    >
-{
-    pub fn run<S: Server>(
-        &self,
-        strategy: &impl ExecutionStrategy,
-        server: S,
-        input: S::TokenStream,
-        input2: S::TokenStream,
-        force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
-        let client::Client { get_handle_counters, run, f } = *self;
-        run_server(
-            strategy,
-            get_handle_counters(),
-            server,
-            (
-                <MarkedTypes<S> as Types>::TokenStream::mark(input),
-                <MarkedTypes<S> as Types>::TokenStream::mark(input2),
-            ),
-            run,
-            f,
-            force_show_panics,
-        )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
deleted file mode 100644
index cda239f8785..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/diagnostic.rs
+++ /dev/null
@@ -1,166 +0,0 @@
-//! lib-proc-macro diagnostic
-//!
-//! Copy from <https://github.com/rust-lang/rust/blob/6050e523bae6de61de4e060facc43dc512adaccd/src/libproc_macro/diagnostic.rs>
-//! augmented with removing unstable features
-
-use super::Span;
-
-/// An enum representing a diagnostic level.
-#[derive(Copy, Clone, Debug)]
-#[non_exhaustive]
-pub enum Level {
-    /// An error.
-    Error,
-    /// A warning.
-    Warning,
-    /// A note.
-    Note,
-    /// A help message.
-    Help,
-}
-
-/// Trait implemented by types that can be converted into a set of `Span`s.
-pub trait MultiSpan {
-    /// Converts `self` into a `Vec<Span>`.
-    fn into_spans(self) -> Vec<Span>;
-}
-
-impl MultiSpan for Span {
-    fn into_spans(self) -> Vec<Span> {
-        vec![self]
-    }
-}
-
-impl MultiSpan for Vec<Span> {
-    fn into_spans(self) -> Vec<Span> {
-        self
-    }
-}
-
-impl<'a> MultiSpan for &'a [Span] {
-    fn into_spans(self) -> Vec<Span> {
-        self.to_vec()
-    }
-}
-
-/// A structure representing a diagnostic message and associated children
-/// messages.
-#[derive(Clone, Debug)]
-pub struct Diagnostic {
-    level: Level,
-    message: String,
-    spans: Vec<Span>,
-    children: Vec<Diagnostic>,
-}
-
-macro_rules! diagnostic_child_methods {
-    ($spanned:ident, $regular:ident, $level:expr) => {
-        #[doc = concat!("Adds a new child diagnostics message to `self` with the [`",
-                        stringify!($level), "`] level, and the given `spans` and `message`.")]
-        pub fn $spanned<S, T>(mut self, spans: S, message: T) -> Diagnostic
-        where
-            S: MultiSpan,
-            T: Into<String>,
-        {
-            self.children.push(Diagnostic::spanned(spans, $level, message));
-            self
-        }
-
-        #[doc = concat!("Adds a new child diagnostic message to `self` with the [`",
-                        stringify!($level), "`] level, and the given `message`.")]
-        pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
-            self.children.push(Diagnostic::new($level, message));
-            self
-        }
-    };
-}
-
-/// Iterator over the children diagnostics of a `Diagnostic`.
-#[derive(Debug, Clone)]
-pub struct Children<'a>(std::slice::Iter<'a, Diagnostic>);
-
-impl<'a> Iterator for Children<'a> {
-    type Item = &'a Diagnostic;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        self.0.next()
-    }
-}
-
-impl Diagnostic {
-    /// Creates a new diagnostic with the given `level` and `message`.
-    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
-        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
-    }
-
-    /// Creates a new diagnostic with the given `level` and `message` pointing to
-    /// the given set of `spans`.
-    pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
-    where
-        S: MultiSpan,
-        T: Into<String>,
-    {
-        Diagnostic { level, message: message.into(), spans: spans.into_spans(), children: vec![] }
-    }
-
-    diagnostic_child_methods!(span_error, error, Level::Error);
-    diagnostic_child_methods!(span_warning, warning, Level::Warning);
-    diagnostic_child_methods!(span_note, note, Level::Note);
-    diagnostic_child_methods!(span_help, help, Level::Help);
-
-    /// Returns the diagnostic `level` for `self`.
-    pub fn level(&self) -> Level {
-        self.level
-    }
-
-    /// Sets the level in `self` to `level`.
-    pub fn set_level(&mut self, level: Level) {
-        self.level = level;
-    }
-
-    /// Returns the message in `self`.
-    pub fn message(&self) -> &str {
-        &self.message
-    }
-
-    /// Sets the message in `self` to `message`.
-    pub fn set_message<T: Into<String>>(&mut self, message: T) {
-        self.message = message.into();
-    }
-
-    /// Returns the `Span`s in `self`.
-    pub fn spans(&self) -> &[Span] {
-        &self.spans
-    }
-
-    /// Sets the `Span`s in `self` to `spans`.
-    pub fn set_spans<S: MultiSpan>(&mut self, spans: S) {
-        self.spans = spans.into_spans();
-    }
-
-    /// Returns an iterator over the children diagnostics of `self`.
-    pub fn children(&self) -> Children<'_> {
-        Children(self.children.iter())
-    }
-
-    /// Emit the diagnostic.
-    pub fn emit(self) {
-        fn to_internal(spans: Vec<Span>) -> super::bridge::client::MultiSpan {
-            let mut multi_span = super::bridge::client::MultiSpan::new();
-            for span in spans {
-                multi_span.push(span.0);
-            }
-            multi_span
-        }
-
-        let mut diag = super::bridge::client::Diagnostic::new(
-            self.level,
-            &self.message[..],
-            to_internal(self.spans),
-        );
-        for c in self.children {
-            diag.sub(c.level, &c.message[..], to_internal(c.spans));
-        }
-        diag.emit();
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
deleted file mode 100644
index c5145d00e32..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/mod.rs
+++ /dev/null
@@ -1,1056 +0,0 @@
-//! A support library for macro authors when defining new macros.
-//!
-//! This library, provided by the standard distribution, provides the types
-//! consumed in the interfaces of procedurally defined macro definitions such as
-//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
-//! custom derive attributes`#[proc_macro_derive]`.
-//!
-//! See [the book] for more.
-//!
-//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
-
-#[doc(hidden)]
-pub mod bridge;
-
-mod diagnostic;
-
-pub use diagnostic::{Diagnostic, Level, MultiSpan};
-
-use std::cmp::Ordering;
-use std::ops::RangeBounds;
-use std::path::PathBuf;
-use std::str::FromStr;
-use std::{error, fmt, iter, mem};
-
-/// Determines whether proc_macro has been made accessible to the currently
-/// running program.
-///
-/// The proc_macro crate is only intended for use inside the implementation of
-/// procedural macros. All the functions in this crate panic if invoked from
-/// outside of a procedural macro, such as from a build script or unit test or
-/// ordinary Rust binary.
-///
-/// With consideration for Rust libraries that are designed to support both
-/// macro and non-macro use cases, `proc_macro::is_available()` provides a
-/// non-panicking way to detect whether the infrastructure required to use the
-/// API of proc_macro is presently available. Returns true if invoked from
-/// inside of a procedural macro, false if invoked from any other binary.
-pub fn is_available() -> bool {
-    bridge::Bridge::is_available()
-}
-
-/// The main type provided by this crate, representing an abstract stream of
-/// tokens, or, more specifically, a sequence of token trees.
-/// The type provide interfaces for iterating over those token trees and, conversely,
-/// collecting a number of token trees into one stream.
-///
-/// This is both the input and output of `#[proc_macro]`, `#[proc_macro_attribute]`
-/// and `#[proc_macro_derive]` definitions.
-#[derive(Clone)]
-pub struct TokenStream(bridge::client::TokenStream);
-
-/// Error returned from `TokenStream::from_str`.
-#[non_exhaustive]
-#[derive(Debug)]
-pub struct LexError;
-
-impl fmt::Display for LexError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str("cannot parse string into token stream")
-    }
-}
-
-impl error::Error for LexError {}
-
-/// Error returned from `TokenStream::expand_expr`.
-#[non_exhaustive]
-#[derive(Debug)]
-pub struct ExpandError;
-
-impl fmt::Display for ExpandError {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str("macro expansion failed")
-    }
-}
-
-impl error::Error for ExpandError {}
-
-impl TokenStream {
-    /// Returns an empty `TokenStream` containing no token trees.
-    pub fn new() -> TokenStream {
-        TokenStream(bridge::client::TokenStream::new())
-    }
-
-    /// Checks if this `TokenStream` is empty.
-    pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
-    }
-
-    /// Parses this `TokenStream` as an expression and attempts to expand any
-    /// macros within it. Returns the expanded `TokenStream`.
-    ///
-    /// Currently only expressions expanding to literals will succeed, although
-    /// this may be relaxed in the future.
-    ///
-    /// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
-    /// report an error, failing compilation, and/or return an `Err(..)`. The
-    /// specific behavior for any error condition, and what conditions are
-    /// considered errors, is unspecified and may change in the future.
-    pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
-        match bridge::client::TokenStream::expand_expr(&self.0) {
-            Ok(stream) => Ok(TokenStream(stream)),
-            Err(_) => Err(ExpandError),
-        }
-    }
-}
-
-/// Attempts to break the string into tokens and parse those tokens into a token stream.
-/// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
-/// or characters not existing in the language.
-/// All tokens in the parsed stream get `Span::call_site()` spans.
-///
-/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
-/// change these errors into `LexError`s later.
-impl FromStr for TokenStream {
-    type Err = LexError;
-
-    fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
-    }
-}
-
-/// Prints the token stream as a string that is supposed to be losslessly convertible back
-/// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
-/// with `Delimiter::None` delimiters and negative numeric literals.
-impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-/// Prints token in a form convenient for debugging.
-impl fmt::Debug for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str("TokenStream ")?;
-        f.debug_list().entries(self.clone()).finish()
-    }
-}
-
-impl Default for TokenStream {
-    fn default() -> Self {
-        TokenStream::new()
-    }
-}
-
-pub use quote::{quote, quote_span};
-
-/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
-            TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
-            TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
-            TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
-            TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
-        }))
-    }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = bridge::client::TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream.0));
-        TokenStream(builder.build())
-    }
-}
-
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        // FIXME(eddyb) Use an optimized implementation if/when possible.
-        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
-    }
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
-    use super::{bridge, Group, Ident, Literal, Punct, TokenStream, TokenTree};
-
-    /// An iterator over `TokenStream`'s `TokenTree`s.
-    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
-    /// and returns whole groups as token trees.
-    #[derive(Clone)]
-    pub struct IntoIter(bridge::client::TokenStreamIter);
-
-    impl Iterator for IntoIter {
-        type Item = TokenTree;
-
-        fn next(&mut self) -> Option<TokenTree> {
-            self.0.next().map(|tree| match tree {
-                bridge::TokenTree::Group(tt) => TokenTree::Group(Group(tt)),
-                bridge::TokenTree::Punct(tt) => TokenTree::Punct(Punct(tt)),
-                bridge::TokenTree::Ident(tt) => TokenTree::Ident(Ident(tt)),
-                bridge::TokenTree::Literal(tt) => TokenTree::Literal(Literal(tt)),
-            })
-        }
-    }
-
-    impl IntoIterator for TokenStream {
-        type Item = TokenTree;
-        type IntoIter = IntoIter;
-
-        fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.into_iter())
-        }
-    }
-}
-
-/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
-/// For example, `quote!(a + b)` will produce an expression, that, when evaluated, constructs
-/// the `TokenStream` `[Ident("a"), Punct('+', Alone), Ident("b")]`.
-///
-/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
-/// To quote `$` itself, use `$$`.
-//pub macro quote($($t:tt)*) {
-//[> compiler built-in <]
-//}
-
-#[doc(hidden)]
-mod quote;
-
-/// A region of source code, along with macro expansion information.
-#[derive(Copy, Clone)]
-pub struct Span(bridge::client::Span);
-
-macro_rules! diagnostic_method {
-    ($name:ident, $level:expr) => {
-        /// Creates a new `Diagnostic` with the given `message` at the span
-        /// `self`.
-        pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
-            Diagnostic::spanned(self, $level, message)
-        }
-    };
-}
-
-impl Span {
-    /// A span that resolves at the macro definition site.
-    pub fn def_site() -> Span {
-        Span(bridge::client::Span::def_site())
-    }
-
-    /// The span of the invocation of the current procedural macro.
-    /// Identifiers created with this span will be resolved as if they were written
-    /// directly at the macro call location (call-site hygiene) and other code
-    /// at the macro call site will be able to refer to them as well.
-    pub fn call_site() -> Span {
-        Span(bridge::client::Span::call_site())
-    }
-
-    /// A span that represents `macro_rules` hygiene, and sometimes resolves at the macro
-    /// definition site (local variables, labels, `$crate`) and sometimes at the macro
-    /// call site (everything else).
-    /// The span location is taken from the call-site.
-    pub fn mixed_site() -> Span {
-        Span(bridge::client::Span::mixed_site())
-    }
-
-    /// The original source file into which this span points.
-    pub fn source_file(&self) -> SourceFile {
-        SourceFile(self.0.source_file())
-    }
-
-    /// The `Span` for the tokens in the previous macro expansion from which
-    /// `self` was generated from, if any.
-    pub fn parent(&self) -> Option<Span> {
-        self.0.parent().map(Span)
-    }
-
-    /// The span for the origin source code that `self` was generated from. If
-    /// this `Span` wasn't generated from other macro expansions then the return
-    /// value is the same as `*self`.
-    pub fn source(&self) -> Span {
-        Span(self.0.source())
-    }
-
-    /// Gets the starting line/column in the source file for this span.
-    pub fn start(&self) -> LineColumn {
-        self.0.start().add_1_to_column()
-    }
-
-    /// Gets the ending line/column in the source file for this span.
-    pub fn end(&self) -> LineColumn {
-        self.0.end().add_1_to_column()
-    }
-
-    /// Creates an empty span pointing to directly before this span.
-    pub fn before(&self) -> Span {
-        Span(self.0.before())
-    }
-
-    /// Creates an empty span pointing to directly after this span.
-    pub fn after(&self) -> Span {
-        Span(self.0.after())
-    }
-
-    /// Creates a new span encompassing `self` and `other`.
-    ///
-    /// Returns `None` if `self` and `other` are from different files.
-    pub fn join(&self, other: Span) -> Option<Span> {
-        self.0.join(other.0).map(Span)
-    }
-
-    /// Creates a new span with the same line/column information as `self` but
-    /// that resolves symbols as though it were at `other`.
-    pub fn resolved_at(&self, other: Span) -> Span {
-        Span(self.0.resolved_at(other.0))
-    }
-
-    /// Creates a new span with the same name resolution behavior as `self` but
-    /// with the line/column information of `other`.
-    pub fn located_at(&self, other: Span) -> Span {
-        other.resolved_at(*self)
-    }
-
-    /// Compares to spans to see if they're equal.
-    pub fn eq(&self, other: &Span) -> bool {
-        self.0 == other.0
-    }
-
-    /// Returns the source text behind a span. This preserves the original source
-    /// code, including spaces and comments. It only returns a result if the span
-    /// corresponds to real source code.
-    ///
-    /// Note: The observable result of a macro should only rely on the tokens and
-    /// not on this source text. The result of this function is a best effort to
-    /// be used for diagnostics only.
-    pub fn source_text(&self) -> Option<String> {
-        self.0.source_text()
-    }
-
-    // Used by the implementation of `Span::quote`
-    #[doc(hidden)]
-    pub fn save_span(&self) -> usize {
-        self.0.save_span()
-    }
-
-    // Used by the implementation of `Span::quote`
-    #[doc(hidden)]
-    pub fn recover_proc_macro_span(id: usize) -> Span {
-        Span(bridge::client::Span::recover_proc_macro_span(id))
-    }
-
-    diagnostic_method!(error, Level::Error);
-    diagnostic_method!(warning, Level::Warning);
-    diagnostic_method!(note, Level::Note);
-    diagnostic_method!(help, Level::Help);
-}
-
-/// Prints a span in a form convenient for debugging.
-impl fmt::Debug for Span {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-/// A line-column pair representing the start or end of a `Span`.
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct LineColumn {
-    /// The 1-indexed line in the source file on which the span starts or ends (inclusive).
-    pub line: usize,
-    /// The 1-indexed column (number of bytes in UTF-8 encoding) in the source
-    /// file on which the span starts or ends (inclusive).
-    pub column: usize,
-}
-
-impl LineColumn {
-    fn add_1_to_column(self) -> Self {
-        LineColumn { line: self.line, column: self.column + 1 }
-    }
-}
-
-impl Ord for LineColumn {
-    fn cmp(&self, other: &Self) -> Ordering {
-        self.line.cmp(&other.line).then(self.column.cmp(&other.column))
-    }
-}
-
-impl PartialOrd for LineColumn {
-    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        Some(self.cmp(other))
-    }
-}
-
-/// The source file of a given `Span`.
-#[derive(Clone)]
-pub struct SourceFile(bridge::client::SourceFile);
-
-impl SourceFile {
-    /// Gets the path to this source file.
-    ///
-    /// ### Note
-    /// If the code span associated with this `SourceFile` was generated by an external macro, this
-    /// macro, this might not be an actual path on the filesystem. Use [`is_real`] to check.
-    ///
-    /// Also note that even if `is_real` returns `true`, if `--remap-path-prefix` was passed on
-    /// the command line, the path as given might not actually be valid.
-    ///
-    /// [`is_real`]: Self::is_real
-    pub fn path(&self) -> PathBuf {
-        PathBuf::from(self.0.path())
-    }
-
-    /// Returns `true` if this source file is a real source file, and not generated by an external
-    /// macro's expansion.
-    pub fn is_real(&self) -> bool {
-        // This is a hack until intercrate spans are implemented and we can have real source files
-        // for spans generated in external macros.
-        // https://github.com/rust-lang/rust/pull/43604#issuecomment-333334368
-        self.0.is_real()
-    }
-}
-
-impl fmt::Debug for SourceFile {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("SourceFile")
-            .field("path", &self.path())
-            .field("is_real", &self.is_real())
-            .finish()
-    }
-}
-
-impl PartialEq for SourceFile {
-    fn eq(&self, other: &Self) -> bool {
-        self.0.eq(&other.0)
-    }
-}
-
-impl Eq for SourceFile {}
-
-/// A single token or a delimited sequence of token trees (e.g., `[1, (), ..]`).
-#[derive(Clone)]
-pub enum TokenTree {
-    /// A token stream surrounded by bracket delimiters.
-    Group(Group),
-    /// An identifier.
-    Ident(Ident),
-    /// A single punctuation character (`+`, `,`, `$`, etc.).
-    Punct(Punct),
-    /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
-    Literal(Literal),
-}
-
-impl TokenTree {
-    /// Returns the span of this tree, delegating to the `span` method of
-    /// the contained token or a delimited stream.
-    pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Group(ref t) => t.span(),
-            TokenTree::Ident(ref t) => t.span(),
-            TokenTree::Punct(ref t) => t.span(),
-            TokenTree::Literal(ref t) => t.span(),
-        }
-    }
-
-    /// Configures the span for *only this token*.
-    ///
-    /// Note that if this token is a `Group` then this method will not configure
-    /// the span of each of the internal tokens, this will simply delegate to
-    /// the `set_span` method of each variant.
-    pub fn set_span(&mut self, span: Span) {
-        match *self {
-            TokenTree::Group(ref mut t) => t.set_span(span),
-            TokenTree::Ident(ref mut t) => t.set_span(span),
-            TokenTree::Punct(ref mut t) => t.set_span(span),
-            TokenTree::Literal(ref mut t) => t.set_span(span),
-        }
-    }
-}
-
-/// Prints token tree in a form convenient for debugging.
-impl fmt::Debug for TokenTree {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // Each of these has the name in the struct type in the derived debug,
-        // so don't bother with an extra layer of indirection
-        match *self {
-            TokenTree::Group(ref tt) => tt.fmt(f),
-            TokenTree::Ident(ref tt) => tt.fmt(f),
-            TokenTree::Punct(ref tt) => tt.fmt(f),
-            TokenTree::Literal(ref tt) => tt.fmt(f),
-        }
-    }
-}
-
-impl From<Group> for TokenTree {
-    fn from(g: Group) -> TokenTree {
-        TokenTree::Group(g)
-    }
-}
-
-impl From<Ident> for TokenTree {
-    fn from(g: Ident) -> TokenTree {
-        TokenTree::Ident(g)
-    }
-}
-
-impl From<Punct> for TokenTree {
-    fn from(g: Punct) -> TokenTree {
-        TokenTree::Punct(g)
-    }
-}
-
-impl From<Literal> for TokenTree {
-    fn from(g: Literal) -> TokenTree {
-        TokenTree::Literal(g)
-    }
-}
-
-/// Prints the token tree as a string that is supposed to be losslessly convertible back
-/// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
-/// with `Delimiter::None` delimiters and negative numeric literals.
-impl fmt::Display for TokenTree {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-/// A delimited token stream.
-///
-/// A `Group` internally contains a `TokenStream` which is surrounded by `Delimiter`s.
-#[derive(Clone)]
-pub struct Group(bridge::client::Group);
-
-/// Describes how a sequence of token trees is delimited.
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum Delimiter {
-    /// `( ... )`
-    Parenthesis,
-    /// `{ ... }`
-    Brace,
-    /// `[ ... ]`
-    Bracket,
-    /// `Ø ... Ø`
-    /// An implicit delimiter, that may, for example, appear around tokens coming from a
-    /// "macro variable" `$var`. It is important to preserve operator priorities in cases like
-    /// `$var * 3` where `$var` is `1 + 2`.
-    /// Implicit delimiters might not survive roundtrip of a token stream through a string.
-    None,
-}
-
-impl Group {
-    /// Creates a new `Group` with the given delimiter and token stream.
-    ///
-    /// This constructor will set the span for this group to
-    /// `Span::call_site()`. To change the span you can use the `set_span`
-    /// method below.
-    pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
-        Group(bridge::client::Group::new(delimiter, stream.0))
-    }
-
-    /// Returns the delimiter of this `Group`
-    pub fn delimiter(&self) -> Delimiter {
-        self.0.delimiter()
-    }
-
-    /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
-    ///
-    /// Note that the returned token stream does not include the delimiter
-    /// returned above.
-    pub fn stream(&self) -> TokenStream {
-        TokenStream(self.0.stream())
-    }
-
-    /// Returns the span for the delimiters of this token stream, spanning the
-    /// entire `Group`.
-    ///
-    /// ```text
-    /// pub fn span(&self) -> Span {
-    ///            ^^^^^^^
-    /// ```
-    pub fn span(&self) -> Span {
-        Span(self.0.span())
-    }
-
-    /// Returns the span pointing to the opening delimiter of this group.
-    ///
-    /// ```text
-    /// pub fn span_open(&self) -> Span {
-    ///                 ^
-    /// ```
-    pub fn span_open(&self) -> Span {
-        Span(self.0.span_open())
-    }
-
-    /// Returns the span pointing to the closing delimiter of this group.
-    ///
-    /// ```text
-    /// pub fn span_close(&self) -> Span {
-    ///                        ^
-    /// ```
-    pub fn span_close(&self) -> Span {
-        Span(self.0.span_close())
-    }
-
-    /// Configures the span for this `Group`'s delimiters, but not its internal
-    /// tokens.
-    ///
-    /// This method will **not** set the span of all the internal tokens spanned
-    /// by this group, but rather it will only set the span of the delimiter
-    /// tokens at the level of the `Group`.
-    pub fn set_span(&mut self, span: Span) {
-        self.0.set_span(span.0);
-    }
-}
-
-/// Prints the group as a string that should be losslessly convertible back
-/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
-/// with `Delimiter::None` delimiters.
-impl fmt::Display for Group {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-impl fmt::Debug for Group {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Group")
-            .field("delimiter", &self.delimiter())
-            .field("stream", &self.stream())
-            .field("span", &self.span())
-            .finish()
-    }
-}
-
-/// A `Punct` is a single punctuation character such as `+`, `-` or `#`.
-///
-/// Multi-character operators like `+=` are represented as two instances of `Punct` with different
-/// forms of `Spacing` returned.
-#[derive(Clone)]
-pub struct Punct(bridge::client::Punct);
-
-/// Describes whether a `Punct` is followed immediately by another `Punct` ([`Spacing::Joint`]) or
-/// by a different token or whitespace ([`Spacing::Alone`]).
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub enum Spacing {
-    /// A `Punct` is not immediately followed by another `Punct`.
-    /// E.g. `+` is `Alone` in `+ =`, `+ident` and `+()`.
-    Alone,
-    /// A `Punct` is immediately followed by another `Punct`.
-    /// E.g. `+` is `Joint` in `+=` and `++`.
-    ///
-    /// Additionally, single quote `'` can join with identifiers to form lifetimes: `'ident`.
-    Joint,
-}
-
-impl Punct {
-    /// Creates a new `Punct` from the given character and spacing.
-    /// The `ch` argument must be a valid punctuation character permitted by the language,
-    /// otherwise the function will panic.
-    ///
-    /// The returned `Punct` will have the default span of `Span::call_site()`
-    /// which can be further configured with the `set_span` method below.
-    pub fn new(ch: char, spacing: Spacing) -> Punct {
-        Punct(bridge::client::Punct::new(ch, spacing))
-    }
-
-    /// Returns the value of this punctuation character as `char`.
-    pub fn as_char(&self) -> char {
-        self.0.as_char()
-    }
-
-    /// Returns the spacing of this punctuation character, indicating whether it's immediately
-    /// followed by another `Punct` in the token stream, so they can potentially be combined into
-    /// a multi-character operator (`Joint`), or it's followed by some other token or whitespace
-    /// (`Alone`) so the operator has certainly ended.
-    pub fn spacing(&self) -> Spacing {
-        self.0.spacing()
-    }
-
-    /// Returns the span for this punctuation character.
-    pub fn span(&self) -> Span {
-        Span(self.0.span())
-    }
-
-    /// Configure the span for this punctuation character.
-    pub fn set_span(&mut self, span: Span) {
-        self.0 = self.0.with_span(span.0);
-    }
-}
-
-/// Prints the punctuation character as a string that should be losslessly convertible
-/// back into the same character.
-impl fmt::Display for Punct {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-impl fmt::Debug for Punct {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Punct")
-            .field("ch", &self.as_char())
-            .field("spacing", &self.spacing())
-            .field("span", &self.span())
-            .finish()
-    }
-}
-
-impl PartialEq<char> for Punct {
-    fn eq(&self, rhs: &char) -> bool {
-        self.as_char() == *rhs
-    }
-}
-
-impl PartialEq<Punct> for char {
-    fn eq(&self, rhs: &Punct) -> bool {
-        *self == rhs.as_char()
-    }
-}
-
-/// An identifier (`ident`).
-#[derive(Clone)]
-pub struct Ident(bridge::client::Ident);
-
-impl Ident {
-    /// Creates a new `Ident` with the given `string` as well as the specified
-    /// `span`.
-    /// The `string` argument must be a valid identifier permitted by the
-    /// language (including keywords, e.g. `self` or `fn`). Otherwise, the function will panic.
-    ///
-    /// Note that `span`, currently in rustc, configures the hygiene information
-    /// for this identifier.
-    ///
-    /// As of this time `Span::call_site()` explicitly opts-in to "call-site" hygiene
-    /// meaning that identifiers created with this span will be resolved as if they were written
-    /// directly at the location of the macro call, and other code at the macro call site will be
-    /// able to refer to them as well.
-    ///
-    /// Later spans like `Span::def_site()` will allow to opt-in to "definition-site" hygiene
-    /// meaning that identifiers created with this span will be resolved at the location of the
-    /// macro definition and other code at the macro call site will not be able to refer to them.
-    ///
-    /// Due to the current importance of hygiene this constructor, unlike other
-    /// tokens, requires a `Span` to be specified at construction.
-    pub fn new(string: &str, span: Span) -> Ident {
-        Ident(bridge::client::Ident::new(string, span.0, false))
-    }
-
-    /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
-    /// The `string` argument be a valid identifier permitted by the language
-    /// (including keywords, e.g. `fn`). Keywords which are usable in path segments
-    /// (e.g. `self`, `super`) are not supported, and will cause a panic.
-    pub fn new_raw(string: &str, span: Span) -> Ident {
-        Ident(bridge::client::Ident::new(string, span.0, true))
-    }
-
-    /// Returns the span of this `Ident`, encompassing the entire string returned
-    /// by [`to_string`](Self::to_string).
-    pub fn span(&self) -> Span {
-        Span(self.0.span())
-    }
-
-    /// Configures the span of this `Ident`, possibly changing its hygiene context.
-    pub fn set_span(&mut self, span: Span) {
-        self.0 = self.0.with_span(span.0);
-    }
-}
-
-/// Prints the identifier as a string that should be losslessly convertible
-/// back into the same identifier.
-impl fmt::Display for Ident {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-impl fmt::Debug for Ident {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Ident")
-            .field("ident", &self.to_string())
-            .field("span", &self.span())
-            .finish()
-    }
-}
-
-/// A literal string (`"hello"`), byte string (`b"hello"`),
-/// character (`'a'`), byte character (`b'a'`), an integer or floating point number
-/// with or without a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
-/// Boolean literals like `true` and `false` do not belong here, they are `Ident`s.
-#[derive(Clone)]
-pub struct Literal(bridge::client::Literal);
-
-macro_rules! suffixed_int_literals {
-    ($($name:ident => $kind:ident,)*) => ($(
-        /// Creates a new suffixed integer literal with the specified value.
-        ///
-        /// This function will create an integer like `1u32` where the integer
-        /// value specified is the first part of the token and the integral is
-        /// also suffixed at the end.
-        /// Literals created from negative numbers might not survive round-trips through
-        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-        ///
-        /// Literals created through this method have the `Span::call_site()`
-        /// span by default, which can be configured with the `set_span` method
-        /// below.
-        pub fn $name(n: $kind) -> Literal {
-            Literal(bridge::client::Literal::typed_integer(&n.to_string(), stringify!($kind)))
-        }
-    )*)
-}
-
-macro_rules! unsuffixed_int_literals {
-    ($($name:ident => $kind:ident,)*) => ($(
-        /// Creates a new unsuffixed integer literal with the specified value.
-        ///
-        /// This function will create an integer like `1` where the integer
-        /// value specified is the first part of the token. No suffix is
-        /// specified on this token, meaning that invocations like
-        /// `Literal::i8_unsuffixed(1)` are equivalent to
-        /// `Literal::u32_unsuffixed(1)`.
-        /// Literals created from negative numbers might not survive rountrips through
-        /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-        ///
-        /// Literals created through this method have the `Span::call_site()`
-        /// span by default, which can be configured with the `set_span` method
-        /// below.
-        pub fn $name(n: $kind) -> Literal {
-            Literal(bridge::client::Literal::integer(&n.to_string()))
-        }
-    )*)
-}
-
-impl Literal {
-    suffixed_int_literals! {
-        u8_suffixed => u8,
-        u16_suffixed => u16,
-        u32_suffixed => u32,
-        u64_suffixed => u64,
-        u128_suffixed => u128,
-        usize_suffixed => usize,
-        i8_suffixed => i8,
-        i16_suffixed => i16,
-        i32_suffixed => i32,
-        i64_suffixed => i64,
-        i128_suffixed => i128,
-        isize_suffixed => isize,
-    }
-
-    unsuffixed_int_literals! {
-        u8_unsuffixed => u8,
-        u16_unsuffixed => u16,
-        u32_unsuffixed => u32,
-        u64_unsuffixed => u64,
-        u128_unsuffixed => u128,
-        usize_unsuffixed => usize,
-        i8_unsuffixed => i8,
-        i16_unsuffixed => i16,
-        i32_unsuffixed => i32,
-        i64_unsuffixed => i64,
-        i128_unsuffixed => i128,
-        isize_unsuffixed => isize,
-    }
-
-    /// Creates a new unsuffixed floating-point literal.
-    ///
-    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
-    /// the float's value is emitted directly into the token but no suffix is
-    /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers might not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-    ///
-    /// # Panics
-    ///
-    /// This function requires that the specified float is finite, for
-    /// example if it is infinity or NaN this function will panic.
-    pub fn f32_unsuffixed(n: f32) -> Literal {
-        if !n.is_finite() {
-            panic!("Invalid float literal {n}");
-        }
-        let mut repr = n.to_string();
-        if !repr.contains('.') {
-            repr.push_str(".0");
-        }
-        Literal(bridge::client::Literal::float(&repr))
-    }
-
-    /// Creates a new suffixed floating-point literal.
-    ///
-    /// This constructor will create a literal like `1.0f32` where the value
-    /// specified is the preceding part of the token and `f32` is the suffix of
-    /// the token. This token will always be inferred to be an `f32` in the
-    /// compiler.
-    /// Literals created from negative numbers might not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-    ///
-    /// # Panics
-    ///
-    /// This function requires that the specified float is finite, for
-    /// example if it is infinity or NaN this function will panic.
-    pub fn f32_suffixed(n: f32) -> Literal {
-        if !n.is_finite() {
-            panic!("Invalid float literal {n}");
-        }
-        Literal(bridge::client::Literal::f32(&n.to_string()))
-    }
-
-    /// Creates a new unsuffixed floating-point literal.
-    ///
-    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
-    /// the float's value is emitted directly into the token but no suffix is
-    /// used, so it may be inferred to be a `f64` later in the compiler.
-    /// Literals created from negative numbers might not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-    ///
-    /// # Panics
-    ///
-    /// This function requires that the specified float is finite, for
-    /// example if it is infinity or NaN this function will panic.
-    pub fn f64_unsuffixed(n: f64) -> Literal {
-        if !n.is_finite() {
-            panic!("Invalid float literal {n}");
-        }
-        let mut repr = n.to_string();
-        if !repr.contains('.') {
-            repr.push_str(".0");
-        }
-        Literal(bridge::client::Literal::float(&repr))
-    }
-
-    /// Creates a new suffixed floating-point literal.
-    ///
-    /// This constructor will create a literal like `1.0f64` where the value
-    /// specified is the preceding part of the token and `f64` is the suffix of
-    /// the token. This token will always be inferred to be an `f64` in the
-    /// compiler.
-    /// Literals created from negative numbers might not survive rountrips through
-    /// `TokenStream` or strings and may be broken into two tokens (`-` and positive literal).
-    ///
-    /// # Panics
-    ///
-    /// This function requires that the specified float is finite, for
-    /// example if it is infinity or NaN this function will panic.
-    pub fn f64_suffixed(n: f64) -> Literal {
-        if !n.is_finite() {
-            panic!("Invalid float literal {n}");
-        }
-        Literal(bridge::client::Literal::f64(&n.to_string()))
-    }
-
-    /// String literal.
-    pub fn string(string: &str) -> Literal {
-        Literal(bridge::client::Literal::string(string))
-    }
-
-    /// Character literal.
-    pub fn character(ch: char) -> Literal {
-        Literal(bridge::client::Literal::character(ch))
-    }
-
-    /// Byte string literal.
-    pub fn byte_string(bytes: &[u8]) -> Literal {
-        Literal(bridge::client::Literal::byte_string(bytes))
-    }
-
-    /// Returns the span encompassing this literal.
-    pub fn span(&self) -> Span {
-        Span(self.0.span())
-    }
-
-    /// Configures the span associated for this literal.
-    pub fn set_span(&mut self, span: Span) {
-        self.0.set_span(span.0);
-    }
-
-    /// Returns a `Span` that is a subset of `self.span()` containing only the
-    /// source bytes in range `range`. Returns `None` if the would-be trimmed
-    /// span is outside the bounds of `self`.
-    // FIXME(SergioBenitez): check that the byte range starts and ends at a
-    // UTF-8 boundary of the source. otherwise, it's likely that a panic will
-    // occur elsewhere when the source text is printed.
-    // FIXME(SergioBenitez): there is no way for the user to know what
-    // `self.span()` actually maps to, so this method can currently only be
-    // called blindly. For example, `to_string()` for the character 'c' returns
-    // "'\u{63}'"; there is no way for the user to know whether the source text
-    // was 'c' or whether it was '\u{63}'.
-    pub fn subspan<R: RangeBounds<usize>>(&self, range: R) -> Option<Span> {
-        self.0.subspan(range.start_bound().cloned(), range.end_bound().cloned()).map(Span)
-    }
-}
-
-/// Parse a single literal from its stringified representation.
-///
-/// In order to parse successfully, the input string must not contain anything
-/// but the literal token. Specifically, it must not contain whitespace or
-/// comments in addition to the literal.
-///
-/// The resulting literal token will have a `Span::call_site()` span.
-///
-/// NOTE: some errors may cause panics instead of returning `LexError`. We
-/// reserve the right to change these errors into `LexError`s later.
-impl FromStr for Literal {
-    type Err = LexError;
-
-    fn from_str(src: &str) -> Result<Self, LexError> {
-        match bridge::client::Literal::from_str(src) {
-            Ok(literal) => Ok(Literal(literal)),
-            Err(()) => Err(LexError),
-        }
-    }
-}
-
-/// Prints the literal as a string that should be losslessly convertible
-/// back into the same literal (except for possible rounding for floating point literals).
-impl fmt::Display for Literal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
-    }
-}
-
-impl fmt::Debug for Literal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-/// Tracked access to environment variables.
-pub mod tracked_env {
-    use std::env::{self, VarError};
-    use std::ffi::OsStr;
-
-    /// Retrieve an environment variable and add it to build dependency info.
-    /// Build system executing the compiler will know that the variable was accessed during
-    /// compilation, and will be able to rerun the build when the value of that variable changes.
-    /// Besides the dependency tracking this function should be equivalent to `env::var` from the
-    /// standard library, except that the argument must be UTF-8.
-    pub fn var<K: AsRef<OsStr> + AsRef<str>>(key: K) -> Result<String, VarError> {
-        let key: &str = key.as_ref();
-        let value = env::var(key);
-        super::bridge::client::FreeFunctions::track_env_var(key, value.as_deref().ok());
-        value
-    }
-}
-
-/// Tracked access to additional files.
-pub mod tracked_path {
-
-    /// Track a file explicitly.
-    ///
-    /// Commonly used for tracking asset preprocessing.
-    pub fn path<P: AsRef<str>>(path: P) {
-        let path: &str = path.as_ref();
-        super::bridge::client::FreeFunctions::track_path(path);
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
deleted file mode 100644
index b539ab9c0c6..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/proc_macro/quote.rs
+++ /dev/null
@@ -1,140 +0,0 @@
-//! # Quasiquoter
-//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
-
-//! This quasiquoter uses macros 2.0 hygiene to reliably access
-//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
-
-use super::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
-
-macro_rules! quote_tt {
-    (($($t:tt)*)) => { Group::new(Delimiter::Parenthesis, quote!($($t)*)) };
-    ([$($t:tt)*]) => { Group::new(Delimiter::Bracket, quote!($($t)*)) };
-    ({$($t:tt)*}) => { Group::new(Delimiter::Brace, quote!($($t)*)) };
-    (,) => { Punct::new(',', Spacing::Alone) };
-    (.) => { Punct::new('.', Spacing::Alone) };
-    (:) => { Punct::new(':', Spacing::Alone) };
-    (;) => { Punct::new(';', Spacing::Alone) };
-    (!) => { Punct::new('!', Spacing::Alone) };
-    (<) => { Punct::new('<', Spacing::Alone) };
-    (>) => { Punct::new('>', Spacing::Alone) };
-    (&) => { Punct::new('&', Spacing::Alone) };
-    (=) => { Punct::new('=', Spacing::Alone) };
-    ($i:ident) => { Ident::new(stringify!($i), Span::def_site()) };
-}
-
-macro_rules! quote_ts {
-    ((@ $($t:tt)*)) => { $($t)* };
-    (::) => {
-        [
-            TokenTree::from(Punct::new(':', Spacing::Joint)),
-            TokenTree::from(Punct::new(':', Spacing::Alone)),
-        ].iter()
-            .cloned()
-            .map(|mut x| {
-                x.set_span(Span::def_site());
-                x
-            })
-            .collect::<TokenStream>()
-    };
-    ($t:tt) => { TokenTree::from(quote_tt!($t)) };
-}
-
-/// Simpler version of the real `quote!` macro, implemented solely
-/// through `macro_rules`, for bootstrapping the real implementation
-/// (see the `quote` function), which does not have access to the
-/// real `quote!` macro due to the `proc_macro` crate not being
-/// able to depend on itself.
-///
-/// Note: supported tokens are a subset of the real `quote!`, but
-/// unquoting is different: instead of `$x`, this uses `(@ expr)`.
-macro_rules! quote {
-    () => { TokenStream::new() };
-    ($($t:tt)*) => {
-        [
-            $(TokenStream::from(quote_ts!($t)),)*
-        ].iter().cloned().collect::<TokenStream>()
-    };
-}
-
-/// Quote a `TokenStream` into a `TokenStream`.
-/// This is the actual implementation of the `quote!()` proc macro.
-///
-/// It is loaded by the compiler in `register_builtin_macros`.
-pub fn quote(stream: TokenStream) -> TokenStream {
-    if stream.is_empty() {
-        return quote!(crate::TokenStream::new());
-    }
-    let proc_macro_crate = quote!(crate);
-    let mut after_dollar = false;
-    let tokens = stream
-        .into_iter()
-        .filter_map(|tree| {
-            if after_dollar {
-                after_dollar = false;
-                match tree {
-                    TokenTree::Ident(_) => {
-                        return Some(quote!(Into::<crate::TokenStream>::into(
-                        Clone::clone(&(@ tree))),));
-                    }
-                    TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
-                    _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
-                }
-            } else if let TokenTree::Punct(ref tt) = tree {
-                if tt.as_char() == '$' {
-                    after_dollar = true;
-                    return None;
-                }
-            }
-
-            Some(quote!(crate::TokenStream::from((@ match tree {
-                TokenTree::Punct(tt) => quote!(crate::TokenTree::Punct(crate::Punct::new(
-                    (@ TokenTree::from(Literal::character(tt.as_char()))),
-                    (@ match tt.spacing() {
-                        Spacing::Alone => quote!(crate::Spacing::Alone),
-                        Spacing::Joint => quote!(crate::Spacing::Joint),
-                    }),
-                ))),
-                TokenTree::Group(tt) => quote!(crate::TokenTree::Group(crate::Group::new(
-                    (@ match tt.delimiter() {
-                        Delimiter::Parenthesis => quote!(crate::Delimiter::Parenthesis),
-                        Delimiter::Brace => quote!(crate::Delimiter::Brace),
-                        Delimiter::Bracket => quote!(crate::Delimiter::Bracket),
-                        Delimiter::None => quote!(crate::Delimiter::None),
-                    }),
-                    (@ quote(tt.stream())),
-                ))),
-                TokenTree::Ident(tt) => quote!(crate::TokenTree::Ident(crate::Ident::new(
-                    (@ TokenTree::from(Literal::string(&tt.to_string()))),
-                    (@ quote_span(proc_macro_crate.clone(), tt.span())),
-                ))),
-                TokenTree::Literal(tt) => quote!(crate::TokenTree::Literal({
-                    let mut iter = (@ TokenTree::from(Literal::string(&tt.to_string())))
-                        .parse::<crate::TokenStream>()
-                        .unwrap()
-                        .into_iter();
-                    if let (Some(crate::TokenTree::Literal(mut lit)), None) =
-                        (iter.next(), iter.next())
-                    {
-                        lit.set_span((@ quote_span(proc_macro_crate.clone(), tt.span())));
-                        lit
-                    } else {
-                        unreachable!()
-                    }
-                }))
-            })),))
-        })
-        .collect::<TokenStream>();
-
-    if after_dollar {
-        panic!("unexpected trailing `$` in `quote!`");
-    }
-
-    quote!([(@ tokens)].iter().cloned().collect::<crate::TokenStream>())
-}
-
-/// Quote a `Span` into a `TokenStream`.
-/// This is needed to implement a custom quoter.
-pub fn quote_span(proc_macro_crate: TokenStream, span: Span) -> TokenStream {
-    let id = span.save_span();
-    quote!((@ proc_macro_crate ) ::Span::recover_proc_macro_span((@ TokenTree::from(Literal::usize_unsuffixed(id)))))
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
deleted file mode 100644
index 22d4ad94f77..00000000000
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_58/ra_server.rs
+++ /dev/null
@@ -1,822 +0,0 @@
-//! Rustc proc-macro server implementation with tt
-//!
-//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
-//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
-//! we could provide any TokenStream implementation.
-//! The original idea from fedochet is using proc-macro2 as backend,
-//! we use tt instead for better integration with RA.
-//!
-//! FIXME: No span and source file information is implemented yet
-
-use super::proc_macro::bridge::{self, server};
-
-use std::collections::HashMap;
-use std::hash::Hash;
-use std::ops::Bound;
-use std::{ascii, vec::IntoIter};
-
-type Group = tt::Subtree;
-type TokenTree = tt::TokenTree;
-type Punct = tt::Punct;
-type Spacing = tt::Spacing;
-type Literal = tt::Literal;
-type Span = tt::TokenId;
-
-#[derive(Debug, Clone)]
-pub struct TokenStream {
-    pub token_trees: Vec<TokenTree>,
-}
-
-impl TokenStream {
-    pub fn new() -> Self {
-        TokenStream { token_trees: Default::default() }
-    }
-
-    pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
-            TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
-        } else {
-            TokenStream { token_trees: subtree.token_trees }
-        }
-    }
-
-    pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
-    }
-
-    pub fn is_empty(&self) -> bool {
-        self.token_trees.is_empty()
-    }
-}
-
-/// Creates a token stream containing a single token tree.
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream { token_trees: vec![tree] }
-    }
-}
-
-/// Collects a number of token trees into a single stream.
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
-    }
-}
-
-/// A "flattening" operation on token streams, collects token trees
-/// from multiple token streams into a single stream.
-impl FromIterator<TokenStream> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream));
-        builder.build()
-    }
-}
-
-impl Extend<TokenTree> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        for item in streams {
-            for tkn in item {
-                match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
-                        self.token_trees.extend(subtree.token_trees);
-                    }
-                    _ => {
-                        self.token_trees.push(tkn);
-                    }
-                }
-            }
-        }
-    }
-}
-
-#[derive(Clone)]
-pub struct SourceFile {
-    // FIXME stub
-}
-
-type Level = super::proc_macro::Level;
-type LineColumn = super::proc_macro::LineColumn;
-
-/// A structure representing a diagnostic message and associated children
-/// messages.
-#[derive(Clone, Debug)]
-pub struct Diagnostic {
-    level: Level,
-    message: String,
-    spans: Vec<Span>,
-    children: Vec<Diagnostic>,
-}
-
-impl Diagnostic {
-    /// Creates a new diagnostic with the given `level` and `message`.
-    pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
-        Diagnostic { level, message: message.into(), spans: vec![], children: vec![] }
-    }
-}
-
-// Rustc Server Ident has to be `Copyable`
-// We use a stub here for bypassing
-#[derive(Hash, Eq, PartialEq, Copy, Clone)]
-pub struct IdentId(u32);
-
-#[derive(Clone, Hash, Eq, PartialEq)]
-struct IdentData(tt::Ident);
-
-#[derive(Default)]
-struct IdentInterner {
-    idents: HashMap<IdentData, u32>,
-    ident_data: Vec<IdentData>,
-}
-
-impl IdentInterner {
-    fn intern(&mut self, data: &IdentData) -> u32 {
-        if let Some(index) = self.idents.get(data) {
-            return *index;
-        }
-
-        let index = self.idents.len() as u32;
-        self.ident_data.push(data.clone());
-        self.idents.insert(data.clone(), index);
-        index
-    }
-
-    fn get(&self, index: u32) -> &IdentData {
-        &self.ident_data[index as usize]
-    }
-
-    #[allow(unused)]
-    fn get_mut(&mut self, index: u32) -> &mut IdentData {
-        self.ident_data.get_mut(index as usize).expect("Should be consistent")
-    }
-}
-
-pub struct TokenStreamBuilder {
-    acc: TokenStream,
-}
-
-/// Public implementation details for the `TokenStream` type, such as iterators.
-pub mod token_stream {
-    use std::str::FromStr;
-
-    use super::{TokenStream, TokenTree};
-
-    /// An iterator over `TokenStream`'s `TokenTree`s.
-    /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
-    /// and returns whole groups as token trees.
-    impl IntoIterator for TokenStream {
-        type Item = TokenTree;
-        type IntoIter = super::IntoIter<TokenTree>;
-
-        fn into_iter(self) -> Self::IntoIter {
-            self.token_trees.into_iter()
-        }
-    }
-
-    type LexError = String;
-
-    /// Attempts to break the string into tokens and parse those tokens into a token stream.
-    /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters
-    /// or characters not existing in the language.
-    /// All tokens in the parsed stream get `Span::call_site()` spans.
-    ///
-    /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
-    /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
-
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
-
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
-            Ok(TokenStream::with_subtree(subtree))
-        }
-    }
-
-    impl ToString for TokenStream {
-        fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
-        }
-    }
-
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
-        tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
-            token_trees: subtree
-                .token_trees
-                .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
-                .collect(),
-        }
-    }
-
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
-        match tt {
-            tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
-            }
-            tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
-            }
-        }
-    }
-
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
-        match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
-            }
-        }
-    }
-}
-
-impl TokenStreamBuilder {
-    fn new() -> TokenStreamBuilder {
-        TokenStreamBuilder { acc: TokenStream::new() }
-    }
-
-    fn push(&mut self, stream: TokenStream) {
-        self.acc.extend(stream.into_iter())
-    }
-
-    fn build(self) -> TokenStream {
-        self.acc
-    }
-}
-
-pub struct FreeFunctions;
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    trees: IntoIter<TokenTree>,
-}
-
-#[derive(Default)]
-pub struct RustAnalyzer {
-    ident_interner: IdentInterner,
-    // FIXME: store span information here.
-}
-
-impl server::Types for RustAnalyzer {
-    type FreeFunctions = FreeFunctions;
-    type TokenStream = TokenStream;
-    type TokenStreamBuilder = TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
-    type Group = Group;
-    type Punct = Punct;
-    type Ident = IdentId;
-    type Literal = Literal;
-    type SourceFile = SourceFile;
-    type Diagnostic = Diagnostic;
-    type Span = Span;
-    type MultiSpan = Vec<Span>;
-}
-
-impl server::FreeFunctions for RustAnalyzer {
-    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
-        // FIXME: track env var accesses
-        // https://github.com/rust-lang/rust/pull/71858
-    }
-    fn track_path(&mut self, _path: &str) {}
-}
-
-impl server::TokenStream for RustAnalyzer {
-    fn new(&mut self) -> Self::TokenStream {
-        Self::TokenStream::new()
-    }
-
-    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
-        stream.is_empty()
-    }
-    fn from_str(&mut self, src: &str) -> Self::TokenStream {
-        use std::str::FromStr;
-
-        Self::TokenStream::from_str(src).expect("cannot parse string")
-    }
-    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
-        stream.to_string()
-    }
-    fn from_token_tree(
-        &mut self,
-        tree: bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
-    ) -> Self::TokenStream {
-        match tree {
-            bridge::TokenTree::Group(group) => {
-                let tree = TokenTree::from(group);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Ident(IdentId(index)) => {
-                let IdentData(ident) = self.ident_interner.get(index).clone();
-                let ident: tt::Ident = ident;
-                let leaf = tt::Leaf::from(ident);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Literal(literal) => {
-                let leaf = tt::Leaf::from(literal);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-
-            bridge::TokenTree::Punct(p) => {
-                let leaf = tt::Leaf::from(p);
-                let tree = TokenTree::from(leaf);
-                Self::TokenStream::from_iter(vec![tree])
-            }
-        }
-    }
-
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        let trees: Vec<TokenTree> = stream.into_iter().collect();
-        TokenStreamIter { trees: trees.into_iter() }
-    }
-
-    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
-        Ok(self_.clone())
-    }
-}
-
-impl server::TokenStreamBuilder for RustAnalyzer {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        Self::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream)
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
-        builder.build()
-    }
-}
-
-impl server::TokenStreamIter for RustAnalyzer {
-    fn next(
-        &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<bridge::TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        iter.trees.next().map(|tree| match tree {
-            TokenTree::Subtree(group) => bridge::TokenTree::Group(group),
-            TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
-                bridge::TokenTree::Ident(IdentId(self.ident_interner.intern(&IdentData(ident))))
-            }
-            TokenTree::Leaf(tt::Leaf::Literal(literal)) => bridge::TokenTree::Literal(literal),
-            TokenTree::Leaf(tt::Leaf::Punct(punct)) => bridge::TokenTree::Punct(punct),
-        })
-    }
-}
-
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
-    let kind = match d {
-        bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
-        bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
-        bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
-    };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
-}
-
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
-    }
-}
-
-fn spacing_to_internal(spacing: bridge::Spacing) -> Spacing {
-    match spacing {
-        bridge::Spacing::Alone => Spacing::Alone,
-        bridge::Spacing::Joint => Spacing::Joint,
-    }
-}
-
-fn spacing_to_external(spacing: Spacing) -> bridge::Spacing {
-    match spacing {
-        Spacing::Alone => bridge::Spacing::Alone,
-        Spacing::Joint => bridge::Spacing::Joint,
-    }
-}
-
-impl server::Group for RustAnalyzer {
-    fn new(&mut self, delimiter: bridge::Delimiter, stream: Self::TokenStream) -> Self::Group {
-        Self::Group { delimiter: delim_to_internal(delimiter), token_trees: stream.token_trees }
-    }
-    fn delimiter(&mut self, group: &Self::Group) -> bridge::Delimiter {
-        delim_to_external(group.delimiter)
-    }
-
-    // NOTE: Return value of do not include delimiter
-    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        TokenStream { token_trees: group.token_trees.clone() }
-    }
-
-    fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        if let Some(delim) = &mut group.delimiter {
-            delim.id = span;
-        }
-    }
-
-    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-
-    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
-    }
-}
-
-impl server::Punct for RustAnalyzer {
-    fn new(&mut self, ch: char, spacing: bridge::Spacing) -> Self::Punct {
-        tt::Punct {
-            char: ch,
-            spacing: spacing_to_internal(spacing),
-            id: tt::TokenId::unspecified(),
-        }
-    }
-    fn as_char(&mut self, punct: Self::Punct) -> char {
-        punct.char
-    }
-    fn spacing(&mut self, punct: Self::Punct) -> bridge::Spacing {
-        spacing_to_external(punct.spacing)
-    }
-    fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.id
-    }
-    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        tt::Punct { id: span, ..punct }
-    }
-}
-
-impl server::Ident for RustAnalyzer {
-    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw(
-            string.into(),
-            span,
-            is_raw,
-        ))))
-    }
-
-    fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
-    }
-    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
-        let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
-        IdentId(self.ident_interner.intern(&new))
-    }
-}
-
-impl server::Literal for RustAnalyzer {
-    fn debug_kind(&mut self, _literal: &Self::Literal) -> String {
-        // r-a: debug_kind and suffix are unsupported; corresponding client code has been changed to not call these.
-        // They must still be present to be ABI-compatible and work with upstream proc_macro.
-        "".to_owned()
-    }
-    fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
-    }
-    fn symbol(&mut self, literal: &Self::Literal) -> String {
-        literal.text.to_string()
-    }
-    fn suffix(&mut self, _literal: &Self::Literal) -> Option<String> {
-        None
-    }
-
-    fn to_string(&mut self, literal: &Self::Literal) -> String {
-        literal.to_string()
-    }
-
-    fn integer(&mut self, n: &str) -> Self::Literal {
-        let n = match n.parse::<i128>() {
-            Ok(n) => n.to_string(),
-            Err(_) => n.parse::<u128>().unwrap().to_string(),
-        };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
-        macro_rules! def_suffixed_integer {
-            ($kind:ident, $($ty:ty),*) => {
-                match $kind {
-                    $(
-                        stringify!($ty) => {
-                            let n: $ty = n.parse().unwrap();
-                            format!(concat!("{}", stringify!($ty)), n)
-                        }
-                    )*
-                    _ => unimplemented!("unknown args for typed_integer: n {}, kind {}", n, $kind),
-                }
-            }
-        }
-
-        let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
-
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn float(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let mut text = f64::to_string(&n);
-        if !text.contains('.') {
-            text += ".0"
-        }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f32(&mut self, n: &str) -> Self::Literal {
-        let n: f32 = n.parse().unwrap();
-        let text = format!("{n}f32");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn f64(&mut self, n: &str) -> Self::Literal {
-        let n: f64 = n.parse().unwrap();
-        let text = format!("{n}f64");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn string(&mut self, string: &str) -> Self::Literal {
-        let mut escaped = String::new();
-        for ch in string.chars() {
-            escaped.extend(ch.escape_debug());
-        }
-        Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
-        let string = bytes
-            .iter()
-            .cloned()
-            .flat_map(ascii::escape_default)
-            .map(Into::<char>::into)
-            .collect::<String>();
-
-        Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() }
-    }
-
-    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
-    }
-
-    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
-    }
-
-    fn subspan(
-        &mut self,
-        _literal: &Self::Literal,
-        _start: Bound<usize>,
-        _end: Bound<usize>,
-    ) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-}
-
-impl server::SourceFile for RustAnalyzer {
-    // FIXME these are all stubs
-    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
-        true
-    }
-    fn path(&mut self, _file: &Self::SourceFile) -> String {
-        String::new()
-    }
-    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
-        true
-    }
-}
-
-impl server::Diagnostic for RustAnalyzer {
-    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
-        let mut diag = Diagnostic::new(level, msg);
-        diag.spans = spans;
-        diag
-    }
-
-    fn sub(
-        &mut self,
-        _diag: &mut Self::Diagnostic,
-        _level: Level,
-        _msg: &str,
-        _spans: Self::MultiSpan,
-    ) {
-        // FIXME handle diagnostic
-        //
-    }
-
-    fn emit(&mut self, _diag: Self::Diagnostic) {
-        // FIXME handle diagnostic
-        // diag.emit()
-    }
-}
-
-impl server::Span for RustAnalyzer {
-    fn debug(&mut self, span: Self::Span) -> String {
-        format!("{:?}", span.0)
-    }
-    fn def_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::def_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn call_site(&mut self) -> Self::Span {
-        // MySpan(self.span_interner.intern(&MySpanData(Span::call_site())))
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
-        SourceFile {}
-    }
-    fn save_span(&mut self, _span: Self::Span) -> usize {
-        // FIXME stub
-        0
-    }
-    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
-        // FIXME stub
-        tt::TokenId::unspecified()
-    }
-    /// Recent feature, not yet in the proc_macro
-    ///
-    /// See PR:
-    /// https://github.com/rust-lang/rust/pull/55780
-    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
-        None
-    }
-
-    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
-        // FIXME handle span
-        None
-    }
-    fn source(&mut self, span: Self::Span) -> Self::Span {
-        // FIXME handle span
-        span
-    }
-    fn start(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn end(&mut self, _span: Self::Span) -> LineColumn {
-        // FIXME handle span
-        LineColumn { line: 0, column: 0 }
-    }
-    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
-        // Just return the first span again, because some macros will unwrap the result.
-        Some(first)
-    }
-    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn mixed_site(&mut self) -> Self::Span {
-        // FIXME handle span
-        tt::TokenId::unspecified()
-    }
-
-    fn after(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-
-    fn before(&mut self, _self_: Self::Span) -> Self::Span {
-        tt::TokenId::unspecified()
-    }
-}
-
-impl server::MultiSpan for RustAnalyzer {
-    fn new(&mut self) -> Self::MultiSpan {
-        // FIXME handle span
-        vec![]
-    }
-
-    fn push(&mut self, other: &mut Self::MultiSpan, span: Self::Span) {
-        //TODP
-        other.push(span)
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::super::proc_macro::bridge::server::Literal;
-    use super::*;
-
-    #[test]
-    fn test_ra_server_literals() {
-        let mut srv = RustAnalyzer { ident_interner: IdentInterner::default() };
-        assert_eq!(srv.integer("1234").text, "1234");
-
-        assert_eq!(srv.typed_integer("12", "u8").text, "12u8");
-        assert_eq!(srv.typed_integer("255", "u16").text, "255u16");
-        assert_eq!(srv.typed_integer("1234", "u32").text, "1234u32");
-        assert_eq!(srv.typed_integer("15846685", "u64").text, "15846685u64");
-        assert_eq!(srv.typed_integer("15846685258", "u128").text, "15846685258u128");
-        assert_eq!(srv.typed_integer("156788984", "usize").text, "156788984usize");
-        assert_eq!(srv.typed_integer("127", "i8").text, "127i8");
-        assert_eq!(srv.typed_integer("255", "i16").text, "255i16");
-        assert_eq!(srv.typed_integer("1234", "i32").text, "1234i32");
-        assert_eq!(srv.typed_integer("15846685", "i64").text, "15846685i64");
-        assert_eq!(srv.typed_integer("15846685258", "i128").text, "15846685258i128");
-        assert_eq!(srv.float("0").text, "0.0");
-        assert_eq!(srv.float("15684.5867").text, "15684.5867");
-        assert_eq!(srv.f32("15684.58").text, "15684.58f32");
-        assert_eq!(srv.f64("15684.58").text, "15684.58f64");
-
-        assert_eq!(srv.string("hello_world").text, "\"hello_world\"");
-        assert_eq!(srv.character('c').text, "'c'");
-        assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
-
-        // u128::max
-        assert_eq!(
-            srv.integer("340282366920938463463374607431768211455").text,
-            "340282366920938463463374607431768211455"
-        );
-        // i128::min
-        assert_eq!(
-            srv.integer("-170141183460469231731687303715884105728").text,
-            "-170141183460469231731687303715884105728"
-        );
-    }
-
-    #[test]
-    fn test_ra_server_to_string() {
-        let s = TokenStream {
-            token_trees: vec![
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                    text: "T".into(),
-                    id: tt::TokenId::unspecified(),
-                })),
-                tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
-                        kind: tt::DelimiterKind::Brace,
-                    }),
-                    token_trees: vec![],
-                }),
-            ],
-        };
-
-        assert_eq!(s.to_string(), "struct T {}");
-    }
-
-    #[test]
-    fn test_ra_server_from_str() {
-        use std::str::FromStr;
-        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
-                kind: tt::DelimiterKind::Parenthesis,
-            }),
-            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "a".into(),
-                id: tt::TokenId::unspecified(),
-            }))],
-        });
-
-        let t1 = TokenStream::from_str("(a)").unwrap();
-        assert_eq!(t1.token_trees.len(), 1);
-        assert_eq!(t1.token_trees[0], subtree_paren_a);
-
-        let t2 = TokenStream::from_str("(a);").unwrap();
-        assert_eq!(t2.token_trees.len(), 2);
-        assert_eq!(t2.token_trees[0], subtree_paren_a);
-
-        let underscore = TokenStream::from_str("_").unwrap();
-        assert_eq!(
-            underscore.token_trees[0],
-            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
-                text: "_".into(),
-                id: tt::TokenId::unspecified(),
-            }))
-        );
-    }
-}
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
index 76e89e3191a..93805c89354 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/mod.rs
@@ -11,6 +11,7 @@ mod ra_server;
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
+use super::tt;
 use super::PanicMessage;
 
 pub use ra_server::TokenStream;
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
index 7ab1f421daf..89bd10da5e4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/proc_macro/mod.rs
@@ -124,8 +124,8 @@ impl FromStr for TokenStream {
 /// into the same token stream (modulo spans), except for possibly `TokenTree::Group`s
 /// with `Delimiter::None` delimiters and negative numeric literals.
 impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
@@ -572,8 +572,8 @@ impl From<Literal> for TokenTree {
 /// into the same token tree (modulo spans), except for possibly `TokenTree::Group`s
 /// with `Delimiter::None` delimiters and negative numeric literals.
 impl fmt::Display for TokenTree {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
@@ -669,8 +669,8 @@ impl Group {
 /// into the same group (modulo spans), except for possibly `TokenTree::Group`s
 /// with `Delimiter::None` delimiters.
 impl fmt::Display for Group {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
@@ -743,8 +743,8 @@ impl Punct {
 /// Prints the punctuation character as a string that should be losslessly convertible
 /// back into the same character.
 impl fmt::Display for Punct {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
@@ -821,8 +821,8 @@ impl Ident {
 /// Prints the identifier as a string that should be losslessly convertible
 /// back into the same identifier.
 impl fmt::Display for Ident {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
@@ -1064,8 +1064,8 @@ impl FromStr for Literal {
 /// Prints the literal as a string that should be losslessly convertible
 /// back into the same literal (except for possible rounding for floating point literals).
 impl fmt::Display for Literal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&self.to_string())
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        unimplemented!()
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
index f82f20c37bc..30baf3a13f5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_1_63/ra_server.rs
@@ -15,6 +15,8 @@ use std::hash::Hash;
 use std::ops::Bound;
 use std::{ascii, vec::IntoIter};
 
+use crate::tt;
+
 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;
 type Punct = tt::Punct;
@@ -33,7 +35,7 @@ impl TokenStream {
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
+        if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
             TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
         } else {
             TokenStream { token_trees: subtree.token_trees }
@@ -41,7 +43,7 @@ impl TokenStream {
     }
 
     pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
     }
 
     pub fn is_empty(&self) -> bool {
@@ -84,7 +86,9 @@ impl Extend<TokenStream> for TokenStream {
         for item in streams {
             for tkn in item {
                 match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                    tt::TokenTree::Subtree(subtree)
+                        if subtree.delimiter.kind == tt::DelimiterKind::Invisible =>
+                    {
                         self.token_trees.extend(subtree.token_trees);
                     }
                     _ => {
@@ -165,7 +169,7 @@ pub struct TokenStreamBuilder {
 pub mod token_stream {
     use std::str::FromStr;
 
-    use super::{TokenStream, TokenTree};
+    use super::{tt, TokenStream, TokenTree};
 
     /// An iterator over `TokenStream`'s `TokenTree`s.
     /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
@@ -202,15 +206,17 @@ pub mod token_stream {
 
     impl ToString for TokenStream {
         fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
+            ::tt::pretty(&self.token_trees)
         }
     }
 
     fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
         tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::UNSPECIFIED,
+                close: tt::TokenId::UNSPECIFIED,
+                ..subtree.delimiter
+            },
             token_trees: subtree
                 .token_trees
                 .into_iter()
@@ -233,13 +239,13 @@ pub mod token_stream {
     fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
         match leaf {
             tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
             }
             tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
             }
             tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
             }
         }
     }
@@ -389,22 +395,22 @@ impl server::TokenStream for RustAnalyzer {
     }
 }
 
-fn delim_to_internal(d: bridge::Delimiter) -> Option<tt::Delimiter> {
+fn delim_to_internal(d: bridge::Delimiter) -> tt::Delimiter {
     let kind = match d {
         bridge::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
         bridge::Delimiter::Brace => tt::DelimiterKind::Brace,
         bridge::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        bridge::Delimiter::None => return None,
+        bridge::Delimiter::None => tt::DelimiterKind::Invisible,
     };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+    tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind }
 }
 
-fn delim_to_external(d: Option<tt::Delimiter>) -> bridge::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => bridge::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => bridge::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => bridge::Delimiter::Bracket,
-        None => bridge::Delimiter::None,
+fn delim_to_external(d: tt::Delimiter) -> bridge::Delimiter {
+    match d.kind {
+        tt::DelimiterKind::Parenthesis => bridge::Delimiter::Parenthesis,
+        tt::DelimiterKind::Brace => bridge::Delimiter::Brace,
+        tt::DelimiterKind::Bracket => bridge::Delimiter::Bracket,
+        tt::DelimiterKind::Invisible => bridge::Delimiter::None,
     }
 }
 
@@ -443,23 +449,19 @@ impl server::Group for RustAnalyzer {
     }
 
     fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.open
     }
 
     fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        if let Some(delim) = &mut group.delimiter {
-            delim.id = span;
-        }
+        group.delimiter.open = span;
     }
 
     fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.open
     }
 
     fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        // FIXME we only store one `TokenId` for the delimiters
-        group.delimiter.map(|it| it.id).unwrap_or_else(tt::TokenId::unspecified)
+        group.delimiter.close
     }
 }
 
@@ -468,7 +470,7 @@ impl server::Punct for RustAnalyzer {
         tt::Punct {
             char: ch,
             spacing: spacing_to_internal(spacing),
-            id: tt::TokenId::unspecified(),
+            span: tt::TokenId::unspecified(),
         }
     }
     fn as_char(&mut self, punct: Self::Punct) -> char {
@@ -478,28 +480,27 @@ impl server::Punct for RustAnalyzer {
         spacing_to_external(punct.spacing)
     }
     fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.id
+        punct.span
     }
     fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        tt::Punct { id: span, ..punct }
+        tt::Punct { span: span, ..punct }
     }
 }
 
 impl server::Ident for RustAnalyzer {
     fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
-        IdentId(self.ident_interner.intern(&IdentData(tt::Ident::new_with_is_raw(
-            string.into(),
+        IdentId(self.ident_interner.intern(&IdentData(tt::Ident {
+            text: if is_raw { ::tt::SmolStr::from_iter(["r#", string]) } else { string.into() },
             span,
-            is_raw,
-        ))))
+        })))
     }
 
     fn span(&mut self, ident: Self::Ident) -> Self::Span {
-        self.ident_interner.get(ident.0).0.id
+        self.ident_interner.get(ident.0).0.span
     }
     fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
         let data = self.ident_interner.get(ident.0);
-        let new = IdentData(tt::Ident { id: span, ..data.0.clone() });
+        let new = IdentData(tt::Ident { span: span, ..data.0.clone() });
         IdentId(self.ident_interner.intern(&new))
     }
 }
@@ -511,7 +512,7 @@ impl server::Literal for RustAnalyzer {
         "".to_owned()
     }
     fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
-        Ok(Literal { text: s.into(), id: tt::TokenId::unspecified() })
+        Ok(Literal { text: s.into(), span: tt::TokenId::unspecified() })
     }
     fn symbol(&mut self, literal: &Self::Literal) -> String {
         literal.text.to_string()
@@ -529,7 +530,7 @@ impl server::Literal for RustAnalyzer {
             Ok(n) => n.to_string(),
             Err(_) => n.parse::<u128>().unwrap().to_string(),
         };
-        Literal { text: n.into(), id: tt::TokenId::unspecified() }
+        Literal { text: n.into(), span: tt::TokenId::unspecified() }
     }
 
     fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
@@ -549,7 +550,7 @@ impl server::Literal for RustAnalyzer {
 
         let text = def_suffixed_integer! {kind, u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize};
 
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn float(&mut self, n: &str) -> Self::Literal {
@@ -558,19 +559,19 @@ impl server::Literal for RustAnalyzer {
         if !text.contains('.') {
             text += ".0"
         }
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn f32(&mut self, n: &str) -> Self::Literal {
         let n: f32 = n.parse().unwrap();
         let text = format!("{n}f32");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn f64(&mut self, n: &str) -> Self::Literal {
         let n: f64 = n.parse().unwrap();
         let text = format!("{n}f64");
-        Literal { text: text.into(), id: tt::TokenId::unspecified() }
+        Literal { text: text.into(), span: tt::TokenId::unspecified() }
     }
 
     fn string(&mut self, string: &str) -> Self::Literal {
@@ -578,11 +579,11 @@ impl server::Literal for RustAnalyzer {
         for ch in string.chars() {
             escaped.extend(ch.escape_debug());
         }
-        Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("\"{escaped}\"").into(), span: tt::TokenId::unspecified() }
     }
 
     fn character(&mut self, ch: char) -> Self::Literal {
-        Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("'{ch}'").into(), span: tt::TokenId::unspecified() }
     }
 
     fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
@@ -593,15 +594,15 @@ impl server::Literal for RustAnalyzer {
             .map(Into::<char>::into)
             .collect::<String>();
 
-        Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() }
+        Literal { text: format!("b\"{string}\"").into(), span: tt::TokenId::unspecified() }
     }
 
     fn span(&mut self, literal: &Self::Literal) -> Self::Span {
-        literal.id
+        literal.span
     }
 
     fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
-        literal.id = span;
+        literal.span = span;
     }
 
     fn subspan(
@@ -784,17 +785,18 @@ mod tests {
             token_trees: vec![
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                 })),
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "T".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                 })),
                 tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
+                    delimiter: tt::Delimiter {
+                        open: tt::TokenId::unspecified(),
+                        close: tt::TokenId::unspecified(),
                         kind: tt::DelimiterKind::Brace,
-                    }),
+                    },
                     token_trees: vec![],
                 }),
             ],
@@ -807,13 +809,14 @@ mod tests {
     fn test_ra_server_from_str() {
         use std::str::FromStr;
         let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::unspecified(),
+                close: tt::TokenId::unspecified(),
                 kind: tt::DelimiterKind::Parenthesis,
-            }),
+            },
             token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "a".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
             }))],
         });
 
@@ -830,7 +833,7 @@ mod tests {
             underscore.token_trees[0],
             tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "_".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
             }))
         );
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
index 243972b0499..0a3b8866a7f 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/mod.rs
@@ -9,7 +9,7 @@ mod ra_server;
 use libloading::Library;
 use proc_macro_api::ProcMacroKind;
 
-use super::PanicMessage;
+use super::{tt, PanicMessage};
 
 pub use ra_server::TokenStream;
 
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
index 068f79f824d..d258a024729 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server.rs
@@ -22,6 +22,8 @@ pub use symbol::*;
 
 use std::ops::Bound;
 
+use crate::tt;
+
 type Group = tt::Subtree;
 type TokenTree = tt::TokenTree;
 type Punct = tt::Punct;
@@ -108,8 +110,9 @@ impl server::TokenStream for RustAnalyzer {
 
             bridge::TokenTree::Ident(ident) => {
                 let text = ident.sym.text();
-                let text = if ident.is_raw { tt::SmolStr::from_iter(["r#", &text]) } else { text };
-                let ident: tt::Ident = tt::Ident { text, id: ident.span };
+                let text =
+                    if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
+                let ident: tt::Ident = tt::Ident { text, span: ident.span };
                 let leaf = tt::Leaf::from(ident);
                 let tree = TokenTree::from(leaf);
                 Self::TokenStream::from_iter(vec![tree])
@@ -118,9 +121,9 @@ impl server::TokenStream for RustAnalyzer {
             bridge::TokenTree::Literal(literal) => {
                 let literal = LiteralFormatter(literal);
                 let text = literal
-                    .with_stringify_parts(|parts| tt::SmolStr::from_iter(parts.iter().copied()));
+                    .with_stringify_parts(|parts| ::tt::SmolStr::from_iter(parts.iter().copied()));
 
-                let literal = tt::Literal { text, id: literal.0.span };
+                let literal = tt::Literal { text, span: literal.0.span };
                 let leaf = tt::Leaf::from(literal);
                 let tree = TokenTree::from(leaf);
                 Self::TokenStream::from_iter(vec![tree])
@@ -130,7 +133,7 @@ impl server::TokenStream for RustAnalyzer {
                 let punct = tt::Punct {
                     char: p.ch as char,
                     spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
-                    id: p.span,
+                    span: p.span,
                 };
                 let leaf = tt::Leaf::from(punct);
                 let tree = TokenTree::from(leaf);
@@ -184,7 +187,7 @@ impl server::TokenStream for RustAnalyzer {
                     bridge::TokenTree::Ident(bridge::Ident {
                         sym: Symbol::intern(ident.text.trim_start_matches("r#")),
                         is_raw: ident.text.starts_with("r#"),
-                        span: ident.id,
+                        span: ident.span,
                     })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
@@ -194,14 +197,14 @@ impl server::TokenStream for RustAnalyzer {
                         symbol: Symbol::intern(&lit.text),
                         // FIXME: handle suffixes
                         suffix: None,
-                        span: lit.id,
+                        span: lit.span,
                     })
                 }
                 tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
                     bridge::TokenTree::Punct(bridge::Punct {
                         ch: punct.char as u8,
                         joint: punct.spacing == Spacing::Joint,
-                        span: punct.id,
+                        span: punct.span,
                     })
                 }
                 tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
@@ -211,31 +214,29 @@ impl server::TokenStream for RustAnalyzer {
                     } else {
                         Some(subtree.token_trees.into_iter().collect())
                     },
-                    span: bridge::DelimSpan::from_single(
-                        subtree.delimiter.map_or(Span::unspecified(), |del| del.id),
-                    ),
+                    span: bridge::DelimSpan::from_single(subtree.delimiter.open),
                 }),
             })
             .collect()
     }
 }
 
-fn delim_to_internal(d: proc_macro::Delimiter) -> Option<tt::Delimiter> {
+fn delim_to_internal(d: proc_macro::Delimiter) -> tt::Delimiter {
     let kind = match d {
         proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
         proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
         proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
-        proc_macro::Delimiter::None => return None,
+        proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
     };
-    Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind })
+    tt::Delimiter { open: tt::TokenId::unspecified(), close: tt::TokenId::unspecified(), kind }
 }
 
-fn delim_to_external(d: Option<tt::Delimiter>) -> proc_macro::Delimiter {
-    match d.map(|it| it.kind) {
-        Some(tt::DelimiterKind::Parenthesis) => proc_macro::Delimiter::Parenthesis,
-        Some(tt::DelimiterKind::Brace) => proc_macro::Delimiter::Brace,
-        Some(tt::DelimiterKind::Bracket) => proc_macro::Delimiter::Bracket,
-        None => proc_macro::Delimiter::None,
+fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter {
+    match d.kind {
+        tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
+        tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
+        tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
+        tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
     }
 }
 
@@ -349,7 +350,7 @@ impl server::Server for RustAnalyzer {
     }
 
     fn intern_symbol(ident: &str) -> Self::Symbol {
-        Symbol::intern(&tt::SmolStr::from(ident))
+        Symbol::intern(&::tt::SmolStr::from(ident))
     }
 
     fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
@@ -413,17 +414,18 @@ mod tests {
             token_trees: vec![
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "struct".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                 })),
                 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                     text: "T".into(),
-                    id: tt::TokenId::unspecified(),
+                    span: tt::TokenId::unspecified(),
                 })),
                 tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: Some(tt::Delimiter {
-                        id: tt::TokenId::unspecified(),
+                    delimiter: tt::Delimiter {
+                        open: tt::TokenId::unspecified(),
+                        close: tt::TokenId::unspecified(),
                         kind: tt::DelimiterKind::Brace,
-                    }),
+                    },
                     token_trees: vec![],
                 }),
             ],
@@ -436,13 +438,14 @@ mod tests {
     fn test_ra_server_from_str() {
         use std::str::FromStr;
         let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
-            delimiter: Some(tt::Delimiter {
-                id: tt::TokenId::unspecified(),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::unspecified(),
+                close: tt::TokenId::unspecified(),
                 kind: tt::DelimiterKind::Parenthesis,
-            }),
+            },
             token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "a".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
             }))],
         });
 
@@ -459,7 +462,7 @@ mod tests {
             underscore.token_trees[0],
             tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                 text: "_".into(),
-                id: tt::TokenId::unspecified(),
+                span: tt::TokenId::unspecified(),
             }))
         );
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
index 113bb52c1af..d091d431900 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/abi_sysroot/ra_server/token_stream.rs
@@ -1,6 +1,6 @@
 //! TokenStream implementation used by sysroot ABI
 
-use tt::TokenTree;
+use crate::tt::{self, TokenTree};
 
 #[derive(Debug, Default, Clone)]
 pub struct TokenStream {
@@ -13,7 +13,7 @@ impl TokenStream {
     }
 
     pub fn with_subtree(subtree: tt::Subtree) -> Self {
-        if subtree.delimiter.is_some() {
+        if subtree.delimiter.kind != tt::DelimiterKind::Invisible {
             TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] }
         } else {
             TokenStream { token_trees: subtree.token_trees }
@@ -21,7 +21,7 @@ impl TokenStream {
     }
 
     pub fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: None, token_trees: self.token_trees }
+        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
     }
 
     pub fn is_empty(&self) -> bool {
@@ -64,7 +64,9 @@ impl Extend<TokenStream> for TokenStream {
         for item in streams {
             for tkn in item {
                 match tkn {
-                    tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
+                    tt::TokenTree::Subtree(subtree)
+                        if subtree.delimiter.kind == tt::DelimiterKind::Invisible =>
+                    {
                         self.token_trees.extend(subtree.token_trees);
                     }
                     _ => {
@@ -84,7 +86,7 @@ pub struct TokenStreamBuilder {
 pub mod token_stream {
     use std::str::FromStr;
 
-    use super::{TokenStream, TokenTree};
+    use super::{tt, TokenStream, TokenTree};
 
     /// An iterator over `TokenStream`'s `TokenTree`s.
     /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups,
@@ -121,15 +123,17 @@ pub mod token_stream {
 
     impl ToString for TokenStream {
         fn to_string(&self) -> String {
-            tt::pretty(&self.token_trees)
+            ::tt::pretty(&self.token_trees)
         }
     }
 
     fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
         tt::Subtree {
-            delimiter: subtree
-                .delimiter
-                .map(|d| tt::Delimiter { id: tt::TokenId::unspecified(), ..d }),
+            delimiter: tt::Delimiter {
+                open: tt::TokenId::UNSPECIFIED,
+                close: tt::TokenId::UNSPECIFIED,
+                ..subtree.delimiter
+            },
             token_trees: subtree
                 .token_trees
                 .into_iter()
@@ -152,13 +156,13 @@ pub mod token_stream {
     fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
         match leaf {
             tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { id: tt::TokenId::unspecified(), ..lit })
+                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
             }
             tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { id: tt::TokenId::unspecified(), ..punct })
+                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
             }
             tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), ..ident })
+                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
             }
         }
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
index 5b8aca4d816..04be39cffa4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/abis/mod.rs
@@ -23,7 +23,6 @@
 //! for the relevant versions of the rust compiler
 //!
 
-mod abi_1_58;
 mod abi_1_63;
 #[cfg(feature = "sysroot-abi")]
 mod abi_sysroot;
@@ -36,13 +35,14 @@ include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
 pub(crate) use abi_sysroot::TokenStream as TestTokenStream;
 
 use super::dylib::LoadProcMacroDylibError;
-pub(crate) use abi_1_58::Abi as Abi_1_58;
 pub(crate) use abi_1_63::Abi as Abi_1_63;
 #[cfg(feature = "sysroot-abi")]
 pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
 use libloading::Library;
 use proc_macro_api::{ProcMacroKind, RustCInfo};
 
+use crate::tt;
+
 pub struct PanicMessage {
     message: Option<String>,
 }
@@ -54,7 +54,6 @@ impl PanicMessage {
 }
 
 pub(crate) enum Abi {
-    Abi1_58(Abi_1_58),
     Abi1_63(Abi_1_63),
     #[cfg(feature = "sysroot-abi")]
     AbiSysroot(Abi_Sysroot),
@@ -109,10 +108,6 @@ impl Abi {
         // FIXME: this should use exclusive ranges when they're stable
         // https://github.com/rust-lang/rust/issues/37854
         match (info.version.0, info.version.1) {
-            (1, 58..=62) => {
-                let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
-                Ok(Abi::Abi1_58(inner))
-            }
             (1, 63) => {
                 let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
                 Ok(Abi::Abi1_63(inner))
@@ -128,7 +123,6 @@ impl Abi {
         attributes: Option<&tt::Subtree>,
     ) -> Result<tt::Subtree, PanicMessage> {
         match self {
-            Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
             Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
             #[cfg(feature = "sysroot-abi")]
             Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
@@ -137,7 +131,6 @@ impl Abi {
 
     pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
         match self {
-            Self::Abi1_58(abi) => abi.list_macros(),
             Self::Abi1_63(abi) => abi.list_macros(),
             #[cfg(feature = "sysroot-abi")]
             Self::AbiSysroot(abi) => abi.list_macros(),
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
index f1e131c135d..05168feb629 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/cli.rs
@@ -15,6 +15,9 @@ pub fn run() -> io::Result<()> {
                 msg::Response::ListMacros(srv.list_macros(&dylib_path))
             }
             msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)),
+            msg::Request::ApiVersionCheck {} => {
+                msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION)
+            }
         };
         write_response(res)?
     }
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
index 0722cd89d72..89ffd1f493e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/dylib.rs
@@ -13,6 +13,8 @@ use object::Object;
 use paths::AbsPath;
 use proc_macro_api::{read_dylib_info, ProcMacroKind};
 
+use crate::tt;
+
 use super::abis::Abi;
 
 const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
index 2eb939a7ce5..ee70fe7d4f5 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
@@ -37,6 +37,8 @@ use proc_macro_api::{
     ProcMacroKind,
 };
 
+use ::tt::token_id as tt;
+
 #[derive(Default)]
 pub(crate) struct ProcMacroSrv {
     expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
index 1ccc170f422..04a0ae7bc72 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-srv/src/tests/mod.rs
@@ -8,7 +8,7 @@ use expect_test::expect;
 
 #[test]
 fn test_derive_empty() {
-    assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+    assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 4294967295 4294967295"]);
 }
 
 #[test]
@@ -17,10 +17,10 @@ fn test_derive_error() {
         "DeriveError",
         r#"struct S;"#,
         expect![[r##"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   compile_error 4294967295
               PUNCH   ! [alone] 4294967295
-              SUBTREE () 4294967295
+              SUBTREE () 4294967295 4294967295
                 LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
               PUNCH   ; [alone] 4294967295"##]],
     );
@@ -32,14 +32,14 @@ fn test_fn_like_macro_noop() {
         "fn_like_noop",
         r#"ident, 0, 1, []"#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   ident 4294967295
               PUNCH   , [alone] 4294967295
               LITERAL 0 4294967295
               PUNCH   , [alone] 4294967295
               LITERAL 1 4294967295
               PUNCH   , [alone] 4294967295
-              SUBTREE [] 4294967295"#]],
+              SUBTREE [] 4294967295 4294967295"#]],
     );
 }
 
@@ -49,10 +49,10 @@ fn test_fn_like_macro_clone_ident_subtree() {
         "fn_like_clone_tokens",
         r#"ident, []"#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   ident 4294967295
               PUNCH   , [alone] 4294967295
-              SUBTREE [] 4294967295"#]],
+              SUBTREE [] 4294967295 4294967295"#]],
     );
 }
 
@@ -62,7 +62,7 @@ fn test_fn_like_macro_clone_raw_ident() {
         "fn_like_clone_tokens",
         "r#async",
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   r#async 4294967295"#]],
     );
 }
@@ -73,7 +73,7 @@ fn test_fn_like_mk_literals() {
         "fn_like_mk_literals",
         r#""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               LITERAL b"byte_string" 4294967295
               LITERAL 'c' 4294967295
               LITERAL "string" 4294967295
@@ -90,7 +90,7 @@ fn test_fn_like_mk_idents() {
         "fn_like_mk_idents",
         r#""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   standard 4294967295
               IDENT   r#raw 4294967295"#]],
     );
@@ -102,7 +102,7 @@ fn test_fn_like_macro_clone_literals() {
         "fn_like_clone_tokens",
         r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
         expect![[r#"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               LITERAL 1u16 4294967295
               PUNCH   , [alone] 4294967295
               LITERAL 2_u32 4294967295
@@ -126,10 +126,10 @@ fn test_attr_macro() {
         r#"mod m {}"#,
         r#"some arguments"#,
         expect![[r##"
-            SUBTREE $
+            SUBTREE $$ 4294967295 4294967295
               IDENT   compile_error 4294967295
               PUNCH   ! [alone] 4294967295
-              SUBTREE () 4294967295
+              SUBTREE () 4294967295 4294967295
                 LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
               PUNCH   ; [alone] 4294967295"##]],
     );
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
index d2a79f91074..77b4afd7d7e 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/Cargo.toml
@@ -1,15 +1,20 @@
 [package]
 name = "proc-macro-test"
 version = "0.0.0"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
 publish = false
 
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
+
 [lib]
 doctest = false
 
 [build-dependencies]
-proc-macro-test-impl = { path = "imp", version = "0.0.0" }
-toolchain = { path = "../toolchain", version = "0.0.0" }
 cargo_metadata = "0.15.0"
+
+proc-macro-test-impl = { path = "imp", version = "0.0.0" }
+
+# local deps
+toolchain.workspace = true
diff --git a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
index 1bd14070e90..2a36737cef0 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-test/imp/Cargo.toml
@@ -3,7 +3,6 @@ name = "proc-macro-test-impl"
 version = "0.0.0"
 license = "MIT OR Apache-2.0"
 edition = "2021"
-rust-version = "1.65"
 publish = false
 
 [lib]
diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml
index 01d1735bf78..6273ea51db8 100644
--- a/src/tools/rust-analyzer/crates/profile/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml
@@ -2,15 +2,17 @@
 name = "profile"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
 
 [dependencies]
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 cfg-if = "1.0.0"
 libc = "0.2.135"
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
diff --git a/src/tools/rust-analyzer/crates/profile/src/lib.rs b/src/tools/rust-analyzer/crates/profile/src/lib.rs
index 7ca3c7d6295..e7fc3d970bf 100644
--- a/src/tools/rust-analyzer/crates/profile/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/profile/src/lib.rs
@@ -26,7 +26,7 @@ pub use countme::Count;
 
 thread_local!(static IN_SCOPE: RefCell<bool> = RefCell::new(false));
 
-/// Allows to check if the current code is withing some dynamic scope, can be
+/// Allows to check if the current code is within some dynamic scope, can be
 /// useful during debugging to figure out why a function is called.
 pub struct Scope {
     prev: bool,
diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
index 39902a53214..22d6a6e7895 100644
--- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml
@@ -2,9 +2,11 @@
 name = "project-model"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -19,12 +21,13 @@ serde_json = "1.0.86"
 anyhow = "1.0.62"
 la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
 
-cfg = { path = "../cfg", version = "0.0.0" }
-base-db = { path = "../base-db", version = "0.0.0" }
-toolchain = { path = "../toolchain", version = "0.0.0" }
-paths = { path = "../paths", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
+# local deps
+base-db.workspace = true
+cfg.workspace = true
+paths.workspace = true
+profile.workspace = true
+stdx.workspace = true
+toolchain.workspace = true
 
 [dev-dependencies]
 expect-test = "1.4.0"
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index 467cf091787..fdc7859eb90 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -96,6 +96,7 @@ pub struct CargoConfig {
     pub target: Option<String>,
     /// Sysroot loading behavior
     pub sysroot: Option<RustcSource>,
+    pub sysroot_src: Option<AbsPathBuf>,
     /// rustc private crate source
     pub rustc_source: Option<RustcSource>,
     /// crates to disable `#[cfg(test)]` on
diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
index e2f09bad2de..9b6a71db811 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs
@@ -25,7 +25,7 @@ mod sysroot;
 mod workspace;
 mod rustc_cfg;
 mod build_scripts;
-mod target_data_layout;
+pub mod target_data_layout;
 
 #[cfg(test)]
 mod tests;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
index 9af0eafe9fd..4b2448e47f1 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/project_json.rs
@@ -4,6 +4,50 @@
 //! idea here is that people who do not use Cargo, can instead teach their build
 //! system to generate `rust-project.json` which can be ingested by
 //! rust-analyzer.
+//!
+//! This short file is a somewhat big conceptual piece of the architecture of
+//! rust-analyzer, so it's worth elaborating on the underlying ideas and
+//! motivation.
+//!
+//! For rust-analyzer to function, it needs some information about the project.
+//! Specifically, it maintains an in-memory data structure which lists all the
+//! crates (compilation units) and dependencies between them. This is necessary
+//! a global singleton, as we do want, eg, find usages to always search across
+//! the whole project, rather than just in the "current" crate.
+//!
+//! Normally, we get this "crate graph" by calling `cargo metadata
+//! --message-format=json` for each cargo workspace and merging results. This
+//! works for your typical cargo project, but breaks down for large folks who
+//! have a monorepo with an infinite amount of Rust code which is built with bazel or
+//! some such.
+//!
+//! To support this use case, we need to make _something_ configurable. To avoid
+//! a [midlayer mistake](https://lwn.net/Articles/336262/), we allow configuring
+//! the lowest possible layer. `ProjectJson` is essentially a hook to just set
+//! that global singleton in-memory data structure. It is optimized for power,
+//! not for convenience (you'd be using cargo anyway if you wanted nice things,
+//! right? :)
+//!
+//! `rust-project.json` also isn't necessarily a file. Architecturally, we support
+//! any convenient way to specify this data, which today is:
+//!
+//! * file on disk
+//! * a field in the config (ie, you can send a JSON request with the contents
+//!   of rust-project.json to rust-analyzer, no need to write anything to disk)
+//!
+//! Another possible thing we don't do today, but which would be totally valid,
+//! is to add an extension point to the VS Code extension to register a custom
+//! project.
+//!
+//! In general, it is assumed that if you are going to use `rust-project.json`,
+//! you'd write a fair bit of custom code gluing your build system to ra through
+//! this JSON format. This logic can take form of a VS Code extension, or a
+//! proxy process which injects data into "configure" LSP request, or maybe just
+//! a simple build system rule to generate the file.
+//!
+//! In particular, the logic for lazily loading parts of the monorepo as the
+//! user explores them belongs to that extension (it's totally valid to change
+//! rust-project.json over time via configuration request!)
 
 use std::path::PathBuf;
 
diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
index 8d5ab0061e5..328d2fbcf31 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs
@@ -7,6 +7,7 @@
 use std::{env, fs, iter, ops, path::PathBuf, process::Command};
 
 use anyhow::{format_err, Result};
+use base_db::CrateName;
 use la_arena::{Arena, Idx};
 use paths::{AbsPath, AbsPathBuf};
 use rustc_hash::FxHashMap;
@@ -50,14 +51,16 @@ impl Sysroot {
         &self.src_root
     }
 
-    pub fn public_deps(&self) -> impl Iterator<Item = (&'static str, SysrootCrate, bool)> + '_ {
+    pub fn public_deps(&self) -> impl Iterator<Item = (CrateName, SysrootCrate, bool)> + '_ {
         // core is added as a dependency before std in order to
         // mimic rustcs dependency order
         ["core", "alloc", "std"]
             .into_iter()
             .zip(iter::repeat(true))
             .chain(iter::once(("test", false)))
-            .filter_map(move |(name, prelude)| Some((name, self.by_name(name)?, prelude)))
+            .filter_map(move |(name, prelude)| {
+                Some((CrateName::new(name).unwrap(), self.by_name(name)?, prelude))
+            })
     }
 
     pub fn proc_macro(&self) -> Option<SysrootCrate> {
@@ -67,8 +70,13 @@ impl Sysroot {
     pub fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ {
         self.crates.iter().map(|(id, _data)| id)
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.crates.is_empty()
+    }
 }
 
+// FIXME: Expose a builder api as loading the sysroot got way too modular and complicated.
 impl Sysroot {
     /// Attempts to discover the toolchain's sysroot from the given `dir`.
     pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
@@ -76,8 +84,17 @@ impl Sysroot {
         let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
         let sysroot_src_dir =
             discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
-        let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
-        Ok(res)
+        Ok(Sysroot::load(sysroot_dir, sysroot_src_dir))
+    }
+
+    pub fn discover_with_src_override(
+        dir: &AbsPath,
+        extra_env: &FxHashMap<String, String>,
+        src: AbsPathBuf,
+    ) -> Result<Sysroot> {
+        tracing::debug!("discovering sysroot for {}", dir.display());
+        let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
+        Ok(Sysroot::load(sysroot_dir, src))
     }
 
     pub fn discover_rustc(
@@ -94,11 +111,10 @@ impl Sysroot {
         let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
             format_err!("can't load standard library from sysroot {}", sysroot_dir.display())
         })?;
-        let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
-        Ok(res)
+        Ok(Sysroot::load(sysroot_dir, sysroot_src_dir))
     }
 
-    pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
+    pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Sysroot {
         let mut sysroot =
             Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
 
@@ -149,14 +165,14 @@ impl Sysroot {
             } else {
                 ""
             };
-            anyhow::bail!(
+            tracing::error!(
                 "could not find libcore in sysroot path `{}`{}",
                 sysroot.src_root.as_path().display(),
                 var_note,
             );
         }
 
-        Ok(sysroot)
+        sysroot
     }
 
     fn by_name(&self, name: &str) -> Option<SysrootCrate> {
diff --git a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
index 40cf47c3f55..42c06ad0ed3 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs
@@ -1,15 +1,16 @@
 //! Runs `rustc --print target-spec-json` to get the target_data_layout.
 use std::process::Command;
 
+use anyhow::Result;
 use rustc_hash::FxHashMap;
 
 use crate::{utf8_stdout, ManifestPath};
 
-pub(super) fn get(
+pub fn get(
     cargo_toml: Option<&ManifestPath>,
     target: Option<&str>,
     extra_env: &FxHashMap<String, String>,
-) -> Option<String> {
+) -> Result<String> {
     let output = (|| {
         if let Some(cargo_toml) = cargo_toml {
             let mut cmd = Command::new(toolchain::rustc());
@@ -28,13 +29,13 @@ pub(super) fn get(
         // using unstable cargo features failed, fall back to using plain rustc
         let mut cmd = Command::new(toolchain::rustc());
         cmd.envs(extra_env)
-            .args(["-Z", "unstable-options", "rustc", "--print", "target-spec-json"])
+            .args(["-Z", "unstable-options", "--print", "target-spec-json"])
             .env("RUSTC_BOOTSTRAP", "1");
         if let Some(target) = target {
             cmd.args(["--target", target]);
         }
         utf8_stdout(cmd)
-    })()
-    .ok()?;
-    Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned())
+    })()?;
+    (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))()
+        .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output"))
 }
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 2bb9ebf998b..9e9691d11e8 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -29,7 +29,7 @@ fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGr
         rustc_cfg: Vec::new(),
         cfg_overrides,
         toolchain: None,
-        target_layout: None,
+        target_layout: Err("target_data_layout not loaded".into()),
     };
     to_crate_graph(project_workspace)
 }
@@ -81,7 +81,7 @@ fn get_fake_sysroot() -> Sysroot {
     // fake sysroot, so we give them both the same path:
     let sysroot_dir = AbsPathBuf::assert(sysroot_path);
     let sysroot_src_dir = sysroot_dir.clone();
-    Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
+    Sysroot::load(sysroot_dir, sysroot_src_dir)
 }
 
 fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
@@ -151,7 +151,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                                 "debug_assertions",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -221,7 +223,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                                 "debug_assertions",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -300,7 +304,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                                 "debug_assertions",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -379,7 +385,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                                 "debug_assertions",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -467,7 +475,9 @@ fn cargo_hello_world_project_model_with_wildcard_overrides() {
                                 "feature=use_std",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -553,7 +563,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -625,7 +637,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -706,7 +720,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -787,7 +803,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -875,7 +893,9 @@ fn cargo_hello_world_project_model_with_selective_overrides() {
                                 "feature=use_std",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -952,7 +972,9 @@ fn cargo_hello_world_project_model() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -1024,7 +1046,9 @@ fn cargo_hello_world_project_model() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -1105,7 +1129,9 @@ fn cargo_hello_world_project_model() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -1186,7 +1212,9 @@ fn cargo_hello_world_project_model() {
                                 "test",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -1274,7 +1302,9 @@ fn cargo_hello_world_project_model() {
                                 "feature=use_std",
                             ],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "target_data_layout not loaded",
+                        ),
                         env: Env {
                             entries: {
                                 "CARGO_PKG_LICENSE": "",
@@ -1327,7 +1357,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             1,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1343,7 +1373,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1372,7 +1404,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             2,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1388,7 +1420,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1407,7 +1441,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             3,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1423,7 +1457,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1442,7 +1478,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             4,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1458,7 +1494,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1477,7 +1515,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             5,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1493,7 +1531,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1522,7 +1562,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             6,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1538,7 +1578,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1557,7 +1599,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             7,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1573,7 +1615,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1665,7 +1709,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             8,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1681,7 +1725,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1700,7 +1746,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             9,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1716,7 +1762,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1735,7 +1783,7 @@ fn rust_project_hello_world_project_model() {
                         root_file_id: FileId(
                             10,
                         ),
-                        edition: Edition2018,
+                        edition: Edition2021,
                         version: None,
                         display_name: Some(
                             CrateDisplayName {
@@ -1751,7 +1799,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
@@ -1786,7 +1836,9 @@ fn rust_project_hello_world_project_model() {
                         potential_cfg_options: CfgOptions(
                             [],
                         ),
-                        target_layout: None,
+                        target_layout: Err(
+                            "rust-project.json projects have no target layout set",
+                        ),
                         env: Env {
                             entries: {},
                         },
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index e2382aa37e8..2a11f1e8eb8 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -7,7 +7,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc};
 use anyhow::{format_err, Context, Result};
 use base_db::{
     CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
-    FileId, LangCrateOrigin, ProcMacroLoadResult,
+    FileId, LangCrateOrigin, ProcMacroLoadResult, TargetLayoutLoadResult,
 };
 use cfg::{CfgDiff, CfgOptions};
 use paths::{AbsPath, AbsPathBuf};
@@ -63,7 +63,7 @@ pub struct PackageRoot {
     pub exclude: Vec<AbsPathBuf>,
 }
 
-#[derive(Clone, Eq, PartialEq)]
+#[derive(Clone)]
 pub enum ProjectWorkspace {
     /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
     Cargo {
@@ -79,11 +79,10 @@ pub enum ProjectWorkspace {
         rustc_cfg: Vec<CfgFlag>,
         cfg_overrides: CfgOverrides,
         toolchain: Option<Version>,
-        target_layout: Option<String>,
+        target_layout: Result<String, String>,
     },
     /// Project workspace was manually specified using a `rust-project.json` file.
     Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
-
     // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
     // That's not the end user experience we should strive for.
     // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
@@ -163,7 +162,7 @@ impl ProjectWorkspace {
                     project_json,
                     config.target.as_deref(),
                     &config.extra_env,
-                )?
+                )
             }
             ProjectManifest::CargoToml(cargo_toml) => {
                 let cargo_version = utf8_stdout({
@@ -191,24 +190,52 @@ impl ProjectWorkspace {
                 })?;
                 let cargo = CargoWorkspace::new(meta);
 
-                let sysroot = match &config.sysroot {
-                    Some(RustcSource::Path(path)) => {
-                        Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| {
-                            format!("Failed to find sysroot at {}.", path.display())
-                        })?)
+                let sysroot = match (&config.sysroot, &config.sysroot_src) {
+                    (Some(RustcSource::Path(path)), None) => {
+                        match Sysroot::with_sysroot_dir(path.clone()) {
+                            Ok(it) => Some(it),
+                            Err(e) => {
+                                tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
+                                None
+                            }
+                        }
                     }
-                    Some(RustcSource::Discover) => Some(
-                        Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context(
-                            || {
-                                format!(
-                            "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
-                            cargo_toml.display()
-                        )
-                            },
-                        )?,
-                    ),
-                    None => None,
+                    (Some(RustcSource::Discover), None) => {
+                        match Sysroot::discover(cargo_toml.parent(), &config.extra_env) {
+                            Ok(it) => Some(it),
+                            Err(e) => {
+                                tracing::error!(
+                                    %e,
+                                    "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
+                                    cargo_toml.display()
+                                );
+                                None
+                            }
+                        }
+                    }
+                    (Some(RustcSource::Path(sysroot)), Some(sysroot_src)) => {
+                        Some(Sysroot::load(sysroot.clone(), sysroot_src.clone()))
+                    }
+                    (Some(RustcSource::Discover), Some(sysroot_src)) => {
+                        match Sysroot::discover_with_src_override(
+                            cargo_toml.parent(),
+                            &config.extra_env,
+                            sysroot_src.clone(),
+                        ) {
+                            Ok(it) => Some(it),
+                            Err(e) => {
+                                tracing::error!(
+                                    %e,
+                                    "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
+                                    cargo_toml.display()
+                                );
+                                None
+                            }
+                        }
+                    }
+                    (None, _) => None,
                 };
+
                 if let Some(sysroot) = &sysroot {
                     tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
                 }
@@ -225,18 +252,22 @@ impl ProjectWorkspace {
                 }
 
                 let rustc = match rustc_dir {
-                    Some(rustc_dir) => Some({
-                        let meta = CargoWorkspace::fetch_metadata(
-                            &rustc_dir,
-                            cargo_toml.parent(),
-                            config,
-                            progress,
-                        )
-                        .with_context(|| {
-                            "Failed to read Cargo metadata for Rust sources".to_string()
-                        })?;
-                        CargoWorkspace::new(meta)
-                    }),
+                    Some(rustc_dir) => match CargoWorkspace::fetch_metadata(
+                        &rustc_dir,
+                        cargo_toml.parent(),
+                        config,
+                        progress,
+                    ) {
+                        Ok(meta) => Some(CargoWorkspace::new(meta)),
+                        Err(e) => {
+                            tracing::error!(
+                                %e,
+                                "Failed to read Cargo metadata from rustc source at {}",
+                                rustc_dir.display()
+                            );
+                            None
+                        }
+                    },
                     None => None,
                 };
 
@@ -249,6 +280,9 @@ impl ProjectWorkspace {
                     config.target.as_deref(),
                     &config.extra_env,
                 );
+                if let Err(e) = &data_layout {
+                    tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace");
+                }
                 ProjectWorkspace::Cargo {
                     cargo,
                     build_scripts: WorkspaceBuildScripts::default(),
@@ -257,7 +291,7 @@ impl ProjectWorkspace {
                     rustc_cfg,
                     cfg_overrides,
                     toolchain,
-                    target_layout: data_layout,
+                    target_layout: data_layout.map_err(|it| it.to_string()),
                 }
             }
         };
@@ -269,15 +303,14 @@ impl ProjectWorkspace {
         project_json: ProjectJson,
         target: Option<&str>,
         extra_env: &FxHashMap<String, String>,
-    ) -> Result<ProjectWorkspace> {
+    ) -> ProjectWorkspace {
         let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
-            (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
+            (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)),
             (Some(sysroot), None) => {
                 // assume sysroot is structured like rustup's and guess `sysroot_src`
                 let sysroot_src =
                     sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
-
-                Some(Sysroot::load(sysroot, sysroot_src)?)
+                Some(Sysroot::load(sysroot, sysroot_src))
             }
             (None, Some(sysroot_src)) => {
                 // assume sysroot is structured like rustup's and guess `sysroot`
@@ -285,7 +318,7 @@ impl ProjectWorkspace {
                 for _ in 0..5 {
                     sysroot.pop();
                 }
-                Some(Sysroot::load(sysroot, sysroot_src)?)
+                Some(Sysroot::load(sysroot, sysroot_src))
             }
             (None, None) => None,
         };
@@ -294,7 +327,7 @@ impl ProjectWorkspace {
         }
 
         let rustc_cfg = rustc_cfg::get(None, target, extra_env);
-        Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
+        ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg }
     }
 
     pub fn load_detached_files(
@@ -302,18 +335,29 @@ impl ProjectWorkspace {
         config: &CargoConfig,
     ) -> Result<ProjectWorkspace> {
         let sysroot = match &config.sysroot {
-            Some(RustcSource::Path(path)) => Some(
-                Sysroot::with_sysroot_dir(path.clone())
-                    .with_context(|| format!("Failed to find sysroot at {}.", path.display()))?,
-            ),
+            Some(RustcSource::Path(path)) => match Sysroot::with_sysroot_dir(path.clone()) {
+                Ok(it) => Some(it),
+                Err(e) => {
+                    tracing::error!(%e, "Failed to find sysroot at {}.", path.display());
+                    None
+                }
+            },
             Some(RustcSource::Discover) => {
                 let dir = &detached_files
                     .first()
                     .and_then(|it| it.parent())
                     .ok_or_else(|| format_err!("No detached files to load"))?;
-                Some(Sysroot::discover(dir, &config.extra_env).with_context(|| {
-                    format!("Failed to find sysroot in {}. Is rust-src installed?", dir.display())
-                })?)
+                match Sysroot::discover(dir, &config.extra_env) {
+                    Ok(it) => Some(it),
+                    Err(e) => {
+                        tracing::error!(
+                            %e,
+                            "Failed to find sysroot for {}. Is rust-src installed?",
+                            dir.display()
+                        );
+                        None
+                    }
+                }
             }
             None => None,
         };
@@ -366,7 +410,7 @@ impl ProjectWorkspace {
                 _ => None,
             })
             .collect();
-        let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
+        let outputs = &mut match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
             Ok(it) => Ok(it.into_iter()),
             // io::Error is not Clone?
             Err(e) => Err(Arc::new(e)),
@@ -417,9 +461,11 @@ impl ProjectWorkspace {
     /// The return type contains the path and whether or not
     /// the root is a member of the current workspace
     pub fn to_roots(&self) -> Vec<PackageRoot> {
-        let mk_sysroot = |sysroot: Option<&Sysroot>| {
+        let mk_sysroot = |sysroot: Option<&Sysroot>, project_root: Option<&AbsPath>| {
             sysroot.map(|sysroot| PackageRoot {
-                is_local: false,
+                // mark the sysroot as mutable if it is located inside of the project
+                is_local: project_root
+                    .map_or(false, |project_root| sysroot.src_root().starts_with(project_root)),
                 include: vec![sysroot.src_root().to_path_buf()],
                 exclude: Vec::new(),
             })
@@ -434,7 +480,7 @@ impl ProjectWorkspace {
                 })
                 .collect::<FxHashSet<_>>()
                 .into_iter()
-                .chain(mk_sysroot(sysroot.as_ref()))
+                .chain(mk_sysroot(sysroot.as_ref(), Some(project.path())))
                 .collect::<Vec<_>>(),
             ProjectWorkspace::Cargo {
                 cargo,
@@ -484,7 +530,7 @@ impl ProjectWorkspace {
                         }
                         PackageRoot { is_local, include, exclude }
                     })
-                    .chain(mk_sysroot(sysroot.as_ref()))
+                    .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root())))
                     .chain(rustc.iter().flat_map(|rustc| {
                         rustc.packages().map(move |krate| PackageRoot {
                             is_local: false,
@@ -501,7 +547,7 @@ impl ProjectWorkspace {
                     include: vec![detached_file.clone()],
                     exclude: Vec::new(),
                 })
-                .chain(mk_sysroot(sysroot.as_ref()))
+                .chain(mk_sysroot(sysroot.as_ref(), None))
                 .collect(),
         }
     }
@@ -538,9 +584,9 @@ impl ProjectWorkspace {
                 load_proc_macro,
                 load,
                 project,
-                sysroot,
+                sysroot.as_ref(),
                 extra_env,
-                None,
+                Err("rust-project.json projects have no target layout set".into()),
             ),
             ProjectWorkspace::Cargo {
                 cargo,
@@ -560,10 +606,19 @@ impl ProjectWorkspace {
                 rustc_cfg.clone(),
                 cfg_overrides,
                 build_scripts,
-                target_layout.as_deref().map(Arc::from),
+                match target_layout.as_ref() {
+                    Ok(it) => Ok(Arc::from(it.as_str())),
+                    Err(it) => Err(Arc::from(it.as_str())),
+                },
             ),
             ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
-                detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot, None)
+                detached_files_to_crate_graph(
+                    rustc_cfg.clone(),
+                    load,
+                    files,
+                    sysroot,
+                    Err("detached file projects have no target layout set".into()),
+                )
             }
         };
         if crate_graph.patch_cfg_if() {
@@ -573,6 +628,49 @@ impl ProjectWorkspace {
         }
         crate_graph
     }
+
+    pub fn eq_ignore_build_data(&self, other: &Self) -> bool {
+        match (self, other) {
+            (
+                Self::Cargo {
+                    cargo,
+                    sysroot,
+                    rustc,
+                    rustc_cfg,
+                    cfg_overrides,
+                    toolchain,
+                    build_scripts: _,
+                    target_layout: _,
+                },
+                Self::Cargo {
+                    cargo: o_cargo,
+                    sysroot: o_sysroot,
+                    rustc: o_rustc,
+                    rustc_cfg: o_rustc_cfg,
+                    cfg_overrides: o_cfg_overrides,
+                    toolchain: o_toolchain,
+                    build_scripts: _,
+                    target_layout: _,
+                },
+            ) => {
+                cargo == o_cargo
+                    && rustc == o_rustc
+                    && rustc_cfg == o_rustc_cfg
+                    && cfg_overrides == o_cfg_overrides
+                    && toolchain == o_toolchain
+                    && sysroot == o_sysroot
+            }
+            (
+                Self::Json { project, sysroot, rustc_cfg },
+                Self::Json { project: o_project, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg },
+            ) => project == o_project && rustc_cfg == o_rustc_cfg && sysroot == o_sysroot,
+            (
+                Self::DetachedFiles { files, sysroot, rustc_cfg },
+                Self::DetachedFiles { files: o_files, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg },
+            ) => files == o_files && sysroot == o_sysroot && rustc_cfg == o_rustc_cfg,
+            _ => false,
+        }
+    }
 }
 
 fn project_json_to_crate_graph(
@@ -580,9 +678,9 @@ fn project_json_to_crate_graph(
     load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     project: &ProjectJson,
-    sysroot: &Option<Sysroot>,
+    sysroot: Option<&Sysroot>,
     extra_env: &FxHashMap<String, String>,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
 ) -> CrateGraph {
     let mut crate_graph = CrateGraph::default();
     let sysroot_deps = sysroot.as_ref().map(|sysroot| {
@@ -686,7 +784,7 @@ fn cargo_to_crate_graph(
     rustc_cfg: Vec<CfgFlag>,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
 ) -> CrateGraph {
     let _p = profile::span("cargo_to_crate_graph");
     let mut crate_graph = CrateGraph::default();
@@ -852,7 +950,7 @@ fn detached_files_to_crate_graph(
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
     detached_files: &[AbsPathBuf],
     sysroot: &Option<Sysroot>,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
 ) -> CrateGraph {
     let _p = profile::span("detached_files_to_crate_graph");
     let mut crate_graph = CrateGraph::default();
@@ -917,7 +1015,7 @@ fn handle_rustc_crates(
     cfg_options: &CfgOptions,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
 ) {
     let mut rustc_pkg_crates = FxHashMap::default();
     // The root package of the rustc-dev component is rustc_driver, so we match that
@@ -1039,7 +1137,7 @@ fn add_target_crate_root(
     file_id: FileId,
     cargo_name: &str,
     is_proc_macro: bool,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
 ) -> CrateId {
     let edition = pkg.edition;
     let mut potential_cfg_options = cfg_options.clone();
@@ -1108,7 +1206,7 @@ fn sysroot_to_crate_graph(
     crate_graph: &mut CrateGraph,
     sysroot: &Sysroot,
     rustc_cfg: Vec<CfgFlag>,
-    target_layout: Option<Arc<str>>,
+    target_layout: TargetLayoutLoadResult,
     load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 ) -> (SysrootPublicDeps, Option<CrateId>) {
     let _p = profile::span("sysroot_to_crate_graph");
@@ -1150,9 +1248,7 @@ fn sysroot_to_crate_graph(
     let public_deps = SysrootPublicDeps {
         deps: sysroot
             .public_deps()
-            .map(|(name, idx, prelude)| {
-                (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude)
-            })
+            .map(|(name, idx, prelude)| (name, sysroot_crates[&idx], prelude))
             .collect::<Vec<_>>(),
     };
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
index 56f14fe1874..e3aa880d005 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
@@ -1,14 +1,15 @@
 [package]
 name = "rust-analyzer"
 version = "0.0.0"
-authors = ["rust-analyzer Team"]
 homepage = "https://github.com/rust-analyzer/rust-analyzer"
 description = "A language server for the Rust programming language"
 documentation = "https://rust-analyzer.github.io/manual.html"
-license = "MIT OR Apache-2.0"
 autobins = false
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -31,8 +32,8 @@ rustc-hash = "1.1.0"
 serde = { version = "1.0.137", features = ["derive"] }
 serde_json = { version = "1.0.81", features = ["preserve_order"] }
 threadpool = "1.8.1"
-rayon = "1.5.3"
-num_cpus = "1.13.1"
+rayon = "1.6.1"
+num_cpus = "1.15.0"
 mimalloc = { version = "0.1.30", default-features = false, optional = true }
 lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
 tracing = "0.1.35"
@@ -46,26 +47,25 @@ tracing-log = "0.1.3"
 tracing-tree = "0.2.1"
 always-assert = "0.1.2"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-flycheck = { path = "../flycheck", version = "0.0.0" }
-ide = { path = "../ide", version = "0.0.0" }
-ide-db = { path = "../ide-db", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
-project-model = { path = "../project-model", version = "0.0.0" }
-syntax = { path = "../syntax", version = "0.0.0" }
-vfs = { path = "../vfs", version = "0.0.0" }
-vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
-cfg = { path = "../cfg", version = "0.0.0" }
-toolchain = { path = "../toolchain", version = "0.0.0" }
-tt = { path = "../tt", version = "0.0.0" }
-proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
-
+cfg.workspace = true
+flycheck.workspace = true
+hir-def.workspace = true
+hir-ty.workspace = true
+hir.workspace = true
+ide-db.workspace = true
 # This should only be used in CLI
-ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
-hir = { path = "../hir", version = "0.0.0" }
-hir-def = { path = "../hir-def", version = "0.0.0" }
-hir-ty = { path = "../hir-ty", version = "0.0.0" }
-proc-macro-srv = { path = "../proc-macro-srv", version = "0.0.0" }
+ide-ssr.workspace = true
+ide.workspace = true
+proc-macro-api.workspace = true
+proc-macro-srv.workspace = true
+profile.workspace = true
+project-model.workspace = true
+stdx.workspace = true
+syntax.workspace = true
+toolchain.workspace = true
+tt.workspace = true
+vfs-notify.workspace = true
+vfs.workspace = true
 
 [target.'cfg(windows)'.dependencies]
 winapi = "0.3.9"
@@ -78,9 +78,9 @@ expect-test = "1.4.0"
 jod-thread = "0.1.2"
 xshell = "0.2.2"
 
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
-mbe = { path = "../mbe" }
+test-utils.workspace = true
+sourcegen.workspace = true
+mbe.workspace = true
 
 [features]
 jemalloc = ["jemallocator", "profile/jemalloc"]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 53710749de3..4de022b6ed6 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -10,7 +10,6 @@ mod rustc_wrapper;
 use std::{env, fs, path::Path, process};
 
 use lsp_server::Connection;
-use project_model::ProjectManifest;
 use rust_analyzer::{cli::flags, config::Config, from_json, Result};
 use vfs::AbsPathBuf;
 
@@ -168,7 +167,18 @@ fn run_server() -> Result<()> {
         }
     };
 
-    let mut config = Config::new(root_path, initialize_params.capabilities);
+    let workspace_roots = initialize_params
+        .workspace_folders
+        .map(|workspaces| {
+            workspaces
+                .into_iter()
+                .filter_map(|it| it.uri.to_file_path().ok())
+                .filter_map(|it| AbsPathBuf::try_from(it).ok())
+                .collect::<Vec<_>>()
+        })
+        .filter(|workspaces| !workspaces.is_empty())
+        .unwrap_or_else(|| vec![root_path.clone()]);
+    let mut config = Config::new(root_path, initialize_params.capabilities, workspace_roots);
     if let Some(json) = initialize_params.initialization_options {
         if let Err(e) = config.update(json) {
             use lsp_types::{
@@ -183,8 +193,6 @@ fn run_server() -> Result<()> {
         }
     }
 
-    config.client_specific_adjustments(&initialize_params.client_info);
-
     let server_capabilities = rust_analyzer::server_capabilities(&config);
 
     let initialize_result = lsp_types::InitializeResult {
@@ -204,25 +212,8 @@ fn run_server() -> Result<()> {
         tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
     }
 
-    if config.linked_projects().is_empty() && config.detached_files().is_empty() {
-        let workspace_roots = initialize_params
-            .workspace_folders
-            .map(|workspaces| {
-                workspaces
-                    .into_iter()
-                    .filter_map(|it| it.uri.to_file_path().ok())
-                    .filter_map(|it| AbsPathBuf::try_from(it).ok())
-                    .collect::<Vec<_>>()
-            })
-            .filter(|workspaces| !workspaces.is_empty())
-            .unwrap_or_else(|| vec![config.root_path().clone()]);
-
-        let discovered = ProjectManifest::discover_all(&workspace_roots);
-        tracing::info!("discovered projects: {:?}", discovered);
-        if discovered.is_empty() {
-            tracing::error!("failed to find any projects in {:?}", workspace_roots);
-        }
-        config.discovered_projects = Some(discovered);
+    if !config.has_linked_projects() && config.detached_files().is_empty() {
+        config.rediscover_workspaces();
     }
 
     rust_analyzer::main_loop(config, connection)?;
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
index 122d2e6ff1b..841861635c6 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/caps.rs
@@ -10,7 +10,8 @@ use lsp_types::{
     SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities,
     SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
     TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
-    WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
+    WorkspaceFileOperationsServerCapabilities, WorkspaceFoldersServerCapabilities,
+    WorkspaceServerCapabilities,
 };
 use serde_json::json;
 
@@ -80,7 +81,10 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
         color_provider: None,
         execute_command_provider: None,
         workspace: Some(WorkspaceServerCapabilities {
-            workspace_folders: None,
+            workspace_folders: Some(WorkspaceFoldersServerCapabilities {
+                supported: Some(true),
+                change_notifications: Some(OneOf::Left(true)),
+            }),
             file_operations: Some(WorkspaceFileOperationsServerCapabilities {
                 did_create: None,
                 will_create: None,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index 053db5fc533..93297faa664 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -15,7 +15,7 @@ use hir_def::{
     expr::ExprId,
     FunctionId,
 };
-use hir_ty::{TyExt, TypeWalk};
+use hir_ty::{Interner, TyExt, TypeFlags};
 use ide::{Analysis, AnalysisHost, LineCol, RootDatabase};
 use ide_db::base_db::{
     salsa::{self, debug::DebugQueryTable, ParallelDatabase},
@@ -33,7 +33,7 @@ use vfs::{AbsPathBuf, Vfs, VfsPath};
 
 use crate::cli::{
     flags::{self, OutputFormat},
-    load_cargo::{load_workspace, LoadCargoConfig},
+    load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice},
     print_memory_usage,
     progress_report::ProgressReport,
     report_metric, Result, Verbosity,
@@ -59,11 +59,6 @@ impl flags::AnalysisStats {
             true => None,
             false => Some(RustcSource::Discover),
         };
-        let load_cargo_config = LoadCargoConfig {
-            load_out_dirs_from_check: !self.disable_build_scripts,
-            with_proc_macro: !self.disable_proc_macros,
-            prefill_caches: false,
-        };
         let no_progress = &|_| ();
 
         let mut db_load_sw = self.stop_watch();
@@ -73,6 +68,11 @@ impl flags::AnalysisStats {
 
         let mut workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
         let metadata_time = db_load_sw.elapsed();
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: !self.disable_build_scripts,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+            prefill_caches: false,
+        };
 
         let build_scripts_time = if self.disable_build_scripts {
             None
@@ -280,12 +280,8 @@ impl flags::AnalysisStats {
                     }
                     true
                 } else {
-                    let mut is_partially_unknown = false;
-                    ty.walk(&mut |ty| {
-                        if ty.is_unknown() {
-                            is_partially_unknown = true;
-                        }
-                    });
+                    let is_partially_unknown =
+                        ty.data(Interner).flags.contains(TypeFlags::HAS_ERROR);
                     if is_partially_unknown {
                         num_exprs_partially_unknown += 1;
                     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
index fd5b3ce61f7..ff821be53d8 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs
@@ -9,7 +9,7 @@ use ide_db::base_db::SourceDatabaseExt;
 
 use crate::cli::{
     flags,
-    load_cargo::{load_workspace_at, LoadCargoConfig},
+    load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
 };
 
 impl flags::Diagnostics {
@@ -17,7 +17,7 @@ impl flags::Diagnostics {
         let cargo_config = Default::default();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: !self.disable_build_scripts,
-            with_proc_macro: !self.disable_proc_macros,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
         let (host, _vfs, _proc_macro) =
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
index 762d7d3a18e..5a958d963e4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -1,6 +1,6 @@
 //! Loads a Cargo project into a static instance of analysis, without support
 //! for incorporating changes.
-use std::{path::Path, sync::Arc};
+use std::{convert::identity, path::Path, sync::Arc};
 
 use anyhow::Result;
 use crossbeam_channel::{unbounded, Receiver};
@@ -17,10 +17,17 @@ use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
 // what otherwise would be `pub(crate)` has to be `pub` here instead.
 pub struct LoadCargoConfig {
     pub load_out_dirs_from_check: bool,
-    pub with_proc_macro: bool,
+    pub with_proc_macro_server: ProcMacroServerChoice,
     pub prefill_caches: bool,
 }
 
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ProcMacroServerChoice {
+    Sysroot,
+    Explicit(AbsPathBuf, Vec<String>),
+    None,
+}
+
 // Note: Since this function is used by external tools that use rust-analyzer as a library
 // what otherwise would be `pub(crate)` has to be `pub` here instead.
 pub fn load_workspace_at(
@@ -59,15 +66,17 @@ pub fn load_workspace(
         Box::new(loader)
     };
 
-    let proc_macro_client = if load_config.with_proc_macro {
-        let (server_path, args): (_, &[_]) = match ws.find_sysroot_proc_macro_srv() {
-            Some(server_path) => (server_path, &[]),
-            None => (AbsPathBuf::assert(std::env::current_exe()?), &["proc-macro"]),
-        };
-
-        ProcMacroServer::spawn(server_path, args).map_err(|e| e.to_string())
-    } else {
-        Err("proc macro server disabled".to_owned())
+    let proc_macro_client = match &load_config.with_proc_macro_server {
+        ProcMacroServerChoice::Sysroot => ws
+            .find_sysroot_proc_macro_srv()
+            .ok_or_else(|| "failed to find sysroot proc-macro server".to_owned())
+            .and_then(|it| {
+                ProcMacroServer::spawn(it, identity::<&[&str]>(&[])).map_err(|e| e.to_string())
+            }),
+        ProcMacroServerChoice::Explicit(path, args) => {
+            ProcMacroServer::spawn(path.clone(), args).map_err(|e| e.to_string())
+        }
+        ProcMacroServerChoice::None => Err("proc macro server disabled".to_owned()),
     };
 
     let crate_graph = ws.to_crate_graph(
@@ -157,7 +166,7 @@ mod tests {
         let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: false,
-            with_proc_macro: false,
+            with_proc_macro_server: ProcMacroServerChoice::None,
             prefill_caches: false,
         };
         let (host, _vfs, _proc_macro) =
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index af8356d041f..60a7f99ccdb 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -15,6 +15,7 @@ use lsp_types::{self, lsif};
 use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
 use vfs::{AbsPathBuf, Vfs};
 
+use crate::cli::load_cargo::ProcMacroServerChoice;
 use crate::cli::{
     flags,
     load_cargo::{load_workspace, LoadCargoConfig},
@@ -291,7 +292,7 @@ impl flags::Lsif {
         let no_progress = &|_| ();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
-            with_proc_macro: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
         let path = AbsPathBuf::assert(env::current_dir()?.join(&self.path));
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index b050d1e95ac..9a04fbea774 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -5,7 +5,10 @@ use std::{
     time::Instant,
 };
 
-use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
+use crate::{
+    cli::load_cargo::ProcMacroServerChoice,
+    line_index::{LineEndings, LineIndex, PositionEncoding},
+};
 use hir::Name;
 use ide::{
     LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
@@ -31,7 +34,7 @@ impl flags::Scip {
         let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
-            with_proc_macro: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: true,
         };
         let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index 84c48917167..3552f840a1b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -5,7 +5,7 @@ use project_model::CargoConfig;
 
 use crate::cli::{
     flags,
-    load_cargo::{load_workspace_at, LoadCargoConfig},
+    load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice},
     Result,
 };
 
@@ -15,7 +15,7 @@ impl flags::Ssr {
         let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
-            with_proc_macro: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
         let (host, vfs, _proc_macro) = load_workspace_at(
@@ -51,7 +51,7 @@ impl flags::Search {
         let cargo_config = CargoConfig::default();
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
-            with_proc_macro: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
             prefill_caches: false,
         };
         let (host, _vfs, _proc_macro) = load_workspace_at(
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index b0afbdc9a42..be09938c2c4 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -20,7 +20,7 @@ use ide_db::{
     SnippetCap,
 };
 use itertools::Itertools;
-use lsp_types::{ClientCapabilities, ClientInfo, MarkupKind};
+use lsp_types::{ClientCapabilities, MarkupKind};
 use project_model::{
     CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
     UnsetTestCrates,
@@ -117,6 +117,11 @@ config_data! {
         ///
         /// This option does not take effect until rust-analyzer is restarted.
         cargo_sysroot: Option<String>    = "\"discover\"",
+        /// Relative path to the sysroot library sources. If left unset, this will default to
+        /// `{cargo.sysroot}/lib/rustlib/src/rust/library`.
+        ///
+        /// This option does not take effect until rust-analyzer is restarted.
+        cargo_sysrootSrc: Option<String>    = "null",
         /// Compilation target override (target triple).
         // FIXME(@poliorcetics): move to multiple targets here too, but this will need more work
         // than `checkOnSave_target`
@@ -195,6 +200,8 @@ config_data! {
         completion_autoself_enable: bool        = "true",
         /// Whether to add parenthesis and argument snippets when completing function.
         completion_callable_snippets: CallableCompletionDef  = "\"fill_arguments\"",
+        /// Maximum number of completions to return. If `None`, the limit is infinite.
+        completion_limit: Option<usize> = "null",
         /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
         completion_postfix_enable: bool         = "true",
         /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
@@ -342,8 +349,6 @@ config_data! {
         inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
         /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
         inlayHints_lifetimeElisionHints_useParameterNames: bool    = "false",
-        /// Whether to use location links for parts of type mentioned in inlay hints.
-        inlayHints_locationLinks: bool                             = "true",
         /// Maximum length for inlay hints. Set to null to have an unlimited length.
         inlayHints_maxLength: Option<usize>                        = "25",
         /// Whether to show function parameter name inlay hints at the call
@@ -521,6 +526,7 @@ impl Default for ConfigData {
 #[derive(Debug, Clone)]
 pub struct Config {
     pub discovered_projects: Option<Vec<ProjectManifest>>,
+    pub workspace_roots: Vec<AbsPathBuf>,
     caps: lsp_types::ClientCapabilities,
     root_path: AbsPathBuf,
     data: ConfigData,
@@ -717,7 +723,11 @@ impl fmt::Display for ConfigUpdateError {
 }
 
 impl Config {
-    pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
+    pub fn new(
+        root_path: AbsPathBuf,
+        caps: ClientCapabilities,
+        workspace_roots: Vec<AbsPathBuf>,
+    ) -> Self {
         Config {
             caps,
             data: ConfigData::default(),
@@ -725,20 +735,17 @@ impl Config {
             discovered_projects: None,
             root_path,
             snippets: Default::default(),
+            workspace_roots,
         }
     }
 
-    pub fn client_specific_adjustments(&mut self, client_info: &Option<ClientInfo>) {
-        // FIXME: remove this when we drop support for vscode 1.65 and below
-        if let Some(client) = client_info {
-            if client.name.contains("Code") || client.name.contains("Codium") {
-                if let Some(version) = &client.version {
-                    if version.as_str() < "1.76" {
-                        self.data.inlayHints_locationLinks = false;
-                    }
-                }
-            }
+    pub fn rediscover_workspaces(&mut self) {
+        let discovered = ProjectManifest::discover_all(&self.workspace_roots);
+        tracing::info!("discovered projects: {:?}", discovered);
+        if discovered.is_empty() {
+            tracing::error!("failed to find any projects in {:?}", &self.workspace_roots);
         }
+        self.discovered_projects = Some(discovered);
     }
 
     pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
@@ -837,6 +844,9 @@ macro_rules! try_or_def {
 }
 
 impl Config {
+    pub fn has_linked_projects(&self) -> bool {
+        !self.data.linkedProjects.is_empty()
+    }
     pub fn linked_projects(&self) -> Vec<LinkedProject> {
         match self.data.linkedProjects.as_slice() {
             [] => match self.discovered_projects.as_ref() {
@@ -1004,6 +1014,10 @@ impl Config {
         self.experimental("codeActionGroup")
     }
 
+    pub fn open_server_logs(&self) -> bool {
+        self.experimental("openServerLogs")
+    }
+
     pub fn server_status_notification(&self) -> bool {
         self.experimental("serverStatusNotification")
     }
@@ -1044,7 +1058,7 @@ impl Config {
         &self.data.cargo_extraEnv
     }
 
-    pub fn check_on_save_extra_env(&self) -> FxHashMap<String, String> {
+    pub fn check_extra_env(&self) -> FxHashMap<String, String> {
         let mut extra_env = self.data.cargo_extraEnv.clone();
         extra_env.extend(self.data.check_extraEnv.clone());
         extra_env
@@ -1114,6 +1128,8 @@ impl Config {
                 RustcSource::Path(self.root_path.join(sysroot))
             }
         });
+        let sysroot_src =
+            self.data.cargo_sysrootSrc.as_ref().map(|sysroot| self.root_path.join(sysroot));
 
         CargoConfig {
             features: match &self.data.cargo_features {
@@ -1125,6 +1141,7 @@ impl Config {
             },
             target: self.data.cargo_target.clone(),
             sysroot,
+            sysroot_src,
             rustc_source,
             unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
             wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
@@ -1165,7 +1182,7 @@ impl Config {
                 FlycheckConfig::CustomCommand {
                     command,
                     args,
-                    extra_env: self.check_on_save_extra_env(),
+                    extra_env: self.check_extra_env(),
                     invocation_strategy: match self.data.check_invocationStrategy {
                         InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
                         InvocationStrategy::PerWorkspace => {
@@ -1210,7 +1227,7 @@ impl Config {
                     CargoFeaturesDef::Selected(it) => it,
                 },
                 extra_args: self.data.check_extraArgs.clone(),
-                extra_env: self.check_on_save_extra_env(),
+                extra_env: self.check_extra_env(),
                 ansi_color_output: self.color_diagnostic_output(),
             },
         }
@@ -1229,7 +1246,6 @@ impl Config {
 
     pub fn inlay_hints(&self) -> InlayHintsConfig {
         InlayHintsConfig {
-            location_links: self.data.inlayHints_locationLinks,
             render_colons: self.data.inlayHints_renderColons,
             type_hints: self.data.inlayHints_typeHints_enable,
             parameter_hints: self.data.inlayHints_parameterHints_enable,
@@ -1329,6 +1345,7 @@ impl Config {
                     .snippet_support?
             )),
             snippets: self.snippets.clone(),
+            limit: self.data.completion_limit,
         }
     }
 
@@ -1409,7 +1426,8 @@ impl Config {
     pub fn hover(&self) -> HoverConfig {
         HoverConfig {
             links_in_hover: self.data.hover_links_enable,
-            documentation: self.data.hover_documentation_enable.then(|| {
+            documentation: self.data.hover_documentation_enable,
+            format: {
                 let is_markdown = try_or_def!(self
                     .caps
                     .text_document
@@ -1425,7 +1443,7 @@ impl Config {
                 } else {
                     HoverDocFormat::PlainText
                 }
-            }),
+            },
             keywords: self.data.hover_documentation_keywords_enable,
         }
     }
@@ -1454,6 +1472,10 @@ impl Config {
         try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?)
     }
 
+    pub fn inlay_hints_refresh(&self) -> bool {
+        try_or_def!(self.caps.workspace.as_ref()?.inlay_hint.as_ref()?.refresh_support?)
+    }
+
     pub fn insert_replace_support(&self) -> bool {
         try_or_def!(
             self.caps
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
index de6ac946a68..73d2ed32984 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config/patch_old_style.rs
@@ -114,16 +114,18 @@ pub(super) fn patch_json_for_outdated_configs(json: &mut Value) {
     }
 
     // completion_addCallArgumentSnippets completion_addCallParenthesis -> completion_callable_snippets
-    let res = match (
-        copy.pointer("/completion/addCallArgumentSnippets"),
-        copy.pointer("/completion/addCallParenthesis"),
-    ) {
-        (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"),
-        (_, Some(Value::Bool(true))) => json!("add_parentheses"),
-        (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"),
-        (_, _) => return,
-    };
-    merge(json, json!({ "completion": { "callable": {"snippets": res }} }));
+    'completion: {
+        let res = match (
+            copy.pointer("/completion/addCallArgumentSnippets"),
+            copy.pointer("/completion/addCallParenthesis"),
+        ) {
+            (Some(Value::Bool(true)), Some(Value::Bool(true))) => json!("fill_arguments"),
+            (_, Some(Value::Bool(true))) => json!("add_parentheses"),
+            (Some(Value::Bool(false)), Some(Value::Bool(false))) => json!("none"),
+            (_, _) => break 'completion,
+        };
+        merge(json, json!({ "completion": { "callable": {"snippets": res }} }));
+    }
 
     // We need to do this due to the checkOnSave_enable -> checkOnSave change, as that key now can either be an object or a bool
     // checkOnSave_* -> check_*
@@ -146,3 +148,23 @@ fn merge(dst: &mut Value, src: Value) {
         (dst, src) => *dst = src,
     }
 }
+
+#[test]
+fn check_on_save_patching() {
+    let mut json = json!({ "checkOnSave": { "overrideCommand": "foo" }});
+    patch_json_for_outdated_configs(&mut json);
+    assert_eq!(
+        json,
+        json!({ "checkOnSave": { "overrideCommand": "foo" }, "check": { "overrideCommand": "foo" }})
+    );
+}
+
+#[test]
+fn check_on_save_patching_enable() {
+    let mut json = json!({ "checkOnSave": { "enable": true, "overrideCommand": "foo" }});
+    patch_json_for_outdated_configs(&mut json);
+    assert_eq!(
+        json,
+        json!({ "checkOnSave": true, "check": { "enable": true, "overrideCommand": "foo" }})
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
index acb416a0689..55b89019b47 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -534,7 +534,7 @@ mod tests {
         let (sender, _) = crossbeam_channel::unbounded();
         let state = GlobalState::new(
             sender,
-            Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()),
+            Config::new(workspace_root.to_path_buf(), ClientCapabilities::default(), Vec::new()),
         );
         let snap = state.snapshot();
         let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index c6f4e9ce07f..aca6c923570 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -179,10 +179,9 @@ impl GlobalState {
 
     pub(crate) fn process_changes(&mut self) -> bool {
         let _p = profile::span("GlobalState::process_changes");
-        // A file was added or deleted
-        let mut has_structure_changes = false;
         let mut workspace_structure_change = None;
 
+        let mut file_changes = FxHashMap::default();
         let (change, changed_files) = {
             let mut change = Change::new();
             let (vfs, line_endings_map) = &mut *self.vfs.write();
@@ -191,43 +190,56 @@ impl GlobalState {
                 return false;
             }
 
-            // important: this needs to be a stable sort, the order between changes is relevant
-            // for the same file ids
-            changed_files.sort_by_key(|file| file.file_id);
-            // We need to fix up the changed events a bit, if we have a create or modify for a file
-            // id that is followed by a delete we actually no longer observe the file text from the
-            // create or modify which may cause problems later on
-            changed_files.dedup_by(|a, b| {
+            // We need to fix up the changed events a bit. If we have a create or modify for a file
+            // id that is followed by a delete we actually skip observing the file text from the
+            // earlier event, to avoid problems later on.
+            for changed_file in &changed_files {
                 use vfs::ChangeKind::*;
 
-                if a.file_id != b.file_id {
-                    return false;
-                }
+                file_changes
+                    .entry(changed_file.file_id)
+                    .and_modify(|(change, just_created)| {
+                        // None -> Delete => keep
+                        // Create -> Delete => collapse
+                        //
+                        match (change, just_created, changed_file.change_kind) {
+                            // latter `Delete` wins
+                            (change, _, Delete) => *change = Delete,
+                            // merge `Create` with `Create` or `Modify`
+                            (Create, _, Create | Modify) => {}
+                            // collapse identical `Modify`es
+                            (Modify, _, Modify) => {}
+                            // equivalent to `Modify`
+                            (change @ Delete, just_created, Create) => {
+                                *change = Modify;
+                                *just_created = true;
+                            }
+                            // shouldn't occur, but collapse into `Create`
+                            (change @ Delete, just_created, Modify) => {
+                                *change = Create;
+                                *just_created = true;
+                            }
+                            // shouldn't occur, but collapse into `Modify`
+                            (Modify, _, Create) => {}
+                        }
+                    })
+                    .or_insert((
+                        changed_file.change_kind,
+                        matches!(changed_file.change_kind, Create),
+                    ));
+            }
 
-                match (a.change_kind, b.change_kind) {
-                    // duplicate can be merged
-                    (Create, Create) | (Modify, Modify) | (Delete, Delete) => true,
-                    // just leave the create, modify is irrelevant
-                    (Create, Modify) => {
-                        std::mem::swap(a, b);
-                        true
-                    }
-                    // modify becomes irrelevant if the file is deleted
-                    (Modify, Delete) => true,
-                    // we should fully remove this occurrence,
-                    // but leaving just a delete works as well
-                    (Create, Delete) => true,
-                    // this is equivalent to a modify
-                    (Delete, Create) => {
-                        a.change_kind = Modify;
-                        true
-                    }
-                    // can't really occur
-                    (Modify, Create) => false,
-                    (Delete, Modify) => false,
-                }
-            });
+            changed_files.extend(
+                file_changes
+                    .into_iter()
+                    .filter(|(_, (change_kind, just_created))| {
+                        !matches!((change_kind, just_created), (vfs::ChangeKind::Delete, true))
+                    })
+                    .map(|(file_id, (change_kind, _))| vfs::ChangedFile { file_id, change_kind }),
+            );
 
+            // A file was added or deleted
+            let mut has_structure_changes = false;
             for file in &changed_files {
                 if let Some(path) = vfs.file_path(file.file_id).as_path() {
                     let path = path.to_path_buf();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
index 59bdd306127..4e08bd0a724 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
@@ -29,7 +29,6 @@ use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
 use serde_json::json;
 use stdx::{format_to, never};
 use syntax::{algo, ast, AstNode, TextRange, TextSize};
-use tracing::error;
 use vfs::AbsPathBuf;
 
 use crate::{
@@ -937,8 +936,7 @@ pub(crate) fn handle_hover(
 
     let line_index = snap.file_line_index(file_range.file_id)?;
     let range = to_proto::range(&line_index, info.range);
-    let markup_kind =
-        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+    let markup_kind = snap.config.hover().format;
     let hover = lsp_ext::Hover {
         hover: lsp_types::Hover {
             contents: HoverContents::Markup(to_proto::markup_content(
@@ -1360,55 +1358,10 @@ pub(crate) fn handle_inlay_hints(
 }
 
 pub(crate) fn handle_inlay_hints_resolve(
-    snap: GlobalStateSnapshot,
-    mut hint: InlayHint,
+    _snap: GlobalStateSnapshot,
+    hint: InlayHint,
 ) -> Result<InlayHint> {
     let _p = profile::span("handle_inlay_hints_resolve");
-    let data = match hint.data.take() {
-        Some(it) => it,
-        None => return Ok(hint),
-    };
-
-    let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
-
-    match snap.url_file_version(&resolve_data.text_document.uri) {
-        Some(version) if version == resolve_data.text_document.version => {}
-        Some(version) => {
-            error!(
-                "attempted inlayHints/resolve of '{}' at version {} while server version is {}",
-                resolve_data.text_document.uri, resolve_data.text_document.version, version,
-            );
-            return Ok(hint);
-        }
-        None => {
-            error!(
-                "attempted inlayHints/resolve of unknown file '{}' at version {}",
-                resolve_data.text_document.uri, resolve_data.text_document.version,
-            );
-            return Ok(hint);
-        }
-    }
-    let file_range = from_proto::file_range_uri(
-        &snap,
-        &resolve_data.text_document.uri,
-        match resolve_data.position {
-            PositionOrRange::Position(pos) => Range::new(pos, pos),
-            PositionOrRange::Range(range) => range,
-        },
-    )?;
-    let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
-        None => return Ok(hint),
-        Some(info) => info,
-    };
-
-    let markup_kind =
-        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
-
-    // FIXME: hover actions?
-    hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
-        info.info.markup,
-        markup_kind,
-    )));
     Ok(hint)
 }
 
@@ -1516,7 +1469,8 @@ pub(crate) fn handle_semantic_tokens_full(
 
     let mut highlight_config = snap.config.highlighting_config();
     // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
-    highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded;
+    highlight_config.syntactic_name_ref_highlighting =
+        snap.workspaces.is_empty() || !snap.proc_macros_loaded;
 
     let highlights = snap.analysis.highlight(highlight_config, file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
@@ -1539,7 +1493,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
 
     let mut highlight_config = snap.config.highlighting_config();
     // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
-    highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded;
+    highlight_config.syntactic_name_ref_highlighting =
+        snap.workspaces.is_empty() || !snap.proc_macros_loaded;
 
     let highlights = snap.analysis.highlight(highlight_config, file_id)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
@@ -1570,7 +1525,12 @@ pub(crate) fn handle_semantic_tokens_range(
     let text = snap.analysis.file_text(frange.file_id)?;
     let line_index = snap.file_line_index(frange.file_id)?;
 
-    let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
+    let mut highlight_config = snap.config.highlighting_config();
+    // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
+    highlight_config.syntactic_name_ref_highlighting =
+        snap.workspaces.is_empty() || !snap.proc_macros_loaded;
+
+    let highlights = snap.analysis.highlight_range(highlight_config, frange)?;
     let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
     Ok(Some(semantic_tokens.into()))
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index 405d261db6f..e8912b90796 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -21,7 +21,7 @@ use project_model::CargoConfig;
 use test_utils::project_root;
 use vfs::{AbsPathBuf, VfsPath};
 
-use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
+use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
 
 #[test]
 fn integrated_highlighting_benchmark() {
@@ -36,7 +36,7 @@ fn integrated_highlighting_benchmark() {
     let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        with_proc_macro: false,
+        with_proc_macro_server: ProcMacroServerChoice::None,
         prefill_caches: false,
     };
 
@@ -90,7 +90,7 @@ fn integrated_completion_benchmark() {
     let cargo_config = CargoConfig::default();
     let load_cargo_config = LoadCargoConfig {
         load_out_dirs_from_check: true,
-        with_proc_macro: false,
+        with_proc_macro_server: ProcMacroServerChoice::None,
         prefill_caches: true,
     };
 
@@ -146,6 +146,7 @@ fn integrated_completion_benchmark() {
             },
             snippets: Vec::new(),
             prefer_no_std: false,
+            limit: None,
         };
         let position =
             FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -184,6 +185,7 @@ fn integrated_completion_benchmark() {
             },
             snippets: Vec::new(),
             prefer_no_std: false,
+            limit: None,
         };
         let position =
             FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
index 65620b4209b..08b2c837de3 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs
@@ -3,11 +3,11 @@
 use std::{collections::HashMap, path::PathBuf};
 
 use lsp_types::request::Request;
+use lsp_types::PositionEncodingKind;
 use lsp_types::{
     notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
     PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
 };
-use lsp_types::{PositionEncodingKind, VersionedTextDocumentIdentifier};
 use serde::{Deserialize, Serialize};
 
 pub enum AnalyzerStatus {}
@@ -151,6 +151,13 @@ impl Notification for ClearFlycheck {
     const METHOD: &'static str = "rust-analyzer/clearFlycheck";
 }
 
+pub enum OpenServerLogs {}
+
+impl Notification for OpenServerLogs {
+    type Params = ();
+    const METHOD: &'static str = "rust-analyzer/openServerLogs";
+}
+
 #[derive(Deserialize, Serialize, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct RunFlycheckParams {
@@ -568,10 +575,7 @@ pub struct CompletionResolveData {
 }
 
 #[derive(Debug, Serialize, Deserialize)]
-pub struct InlayHintResolveData {
-    pub text_document: VersionedTextDocumentIdentifier,
-    pub position: PositionOrRange,
-}
+pub struct InlayHintResolveData {}
 
 #[derive(Debug, Serialize, Deserialize)]
 pub struct CompletionImport {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
index dcaee92857a..baa77a005e2 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_utils.rs
@@ -2,12 +2,13 @@
 use std::{mem, ops::Range, sync::Arc};
 
 use lsp_server::Notification;
+use lsp_types::request::Request;
 
 use crate::{
     from_proto,
     global_state::GlobalState,
     line_index::{LineEndings, LineIndex, PositionEncoding},
-    LspError,
+    lsp_ext, LspError,
 };
 
 pub(crate) fn invalid_params_error(message: String) -> LspError {
@@ -46,20 +47,47 @@ impl GlobalState {
     /// If `additional_info` is [`Some`], appends a note to the notification telling to check the logs.
     /// This will always log `message` + `additional_info` to the server's error log.
     pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option<String>) {
-        let mut message = message;
         match additional_info {
             Some(additional_info) => {
-                tracing::error!("{}\n\n{}", &message, &additional_info);
-                if tracing::enabled!(tracing::Level::ERROR) {
-                    message.push_str("\n\nCheck the server logs for additional info.");
+                tracing::error!("{}:\n{}", &message, &additional_info);
+                match self.config.open_server_logs() && tracing::enabled!(tracing::Level::ERROR) {
+                    true => self.send_request::<lsp_types::request::ShowMessageRequest>(
+                        lsp_types::ShowMessageRequestParams {
+                            typ: lsp_types::MessageType::ERROR,
+                            message,
+                            actions: Some(vec![lsp_types::MessageActionItem {
+                                title: "Open server logs".to_owned(),
+                                properties: Default::default(),
+                            }]),
+                        },
+                        |this, resp| {
+                            let lsp_server::Response { error: None, result: Some(result), .. } = resp
+                            else { return };
+                            if let Ok(Some(_item)) = crate::from_json::<
+                                <lsp_types::request::ShowMessageRequest as lsp_types::request::Request>::Result,
+                            >(
+                                lsp_types::request::ShowMessageRequest::METHOD, &result
+                            ) {
+                                this.send_notification::<lsp_ext::OpenServerLogs>(());
+                            }
+                        },
+                    ),
+                    false => self.send_notification::<lsp_types::notification::ShowMessage>(
+                        lsp_types::ShowMessageParams {
+                            typ: lsp_types::MessageType::ERROR,
+                            message,
+                        },
+                    ),
                 }
             }
-            None => tracing::error!("{}", &message),
-        }
+            None => {
+                tracing::error!("{}", &message);
 
-        self.send_notification::<lsp_types::notification::ShowMessage>(
-            lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
-        )
+                self.send_notification::<lsp_types::notification::ShowMessage>(
+                    lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
+                );
+            }
+        }
     }
 
     /// rust-analyzer is resilient -- if it fails, this doesn't usually affect
@@ -77,7 +105,7 @@ impl GlobalState {
         let from_source_build = option_env!("POKE_RA_DEVS").is_some();
         let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
         if from_source_build || profiling_enabled {
-            self.show_message(lsp_types::MessageType::ERROR, message)
+            self.show_and_log_error(message, None);
         }
     }
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 0bc940dfe8d..346a74e270f 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -14,7 +14,7 @@ use ide_db::base_db::{SourceDatabaseExt, VfsPath};
 use itertools::Itertools;
 use lsp_server::{Connection, Notification, Request};
 use lsp_types::notification::Notification as _;
-use vfs::{ChangeKind, FileId};
+use vfs::{AbsPathBuf, ChangeKind, FileId};
 
 use crate::{
     config::Config,
@@ -287,8 +287,10 @@ impl GlobalState {
                 || self.fetch_build_data_queue.op_requested());
 
             if became_quiescent {
-                // Project has loaded properly, kick off initial flycheck
-                self.flycheck.iter().for_each(FlycheckHandle::restart);
+                if self.config.check_on_save() {
+                    // Project has loaded properly, kick off initial flycheck
+                    self.flycheck.iter().for_each(FlycheckHandle::restart);
+                }
                 if self.config.prefill_caches() {
                     self.prime_caches_queue.request_op("became quiescent".to_string());
                 }
@@ -305,13 +307,18 @@ impl GlobalState {
                 if self.config.code_lens_refresh() {
                     self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
                 }
-            }
 
-            if !was_quiescent || state_changed || memdocs_added_or_removed {
-                if self.config.publish_diagnostics() {
-                    self.update_diagnostics()
+                // Refresh inlay hints if the client supports it.
+                if self.config.inlay_hints_refresh() {
+                    self.send_request::<lsp_types::request::InlayHintRefreshRequest>((), |_, _| ());
                 }
             }
+
+            if (!was_quiescent || state_changed || memdocs_added_or_removed)
+                && self.config.publish_diagnostics()
+            {
+                self.update_diagnostics()
+            }
         }
 
         if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
@@ -604,8 +611,8 @@ impl GlobalState {
             Ok(())
         });
 
-        if let RequestDispatcher { req: Some(req), global_state: this } = &mut dispatcher {
-            if this.shutdown_requested {
+        match &mut dispatcher {
+            RequestDispatcher { req: Some(req), global_state: this } if this.shutdown_requested => {
                 this.respond(lsp_server::Response::new_err(
                     req.id.clone(),
                     lsp_server::ErrorCode::InvalidRequest as i32,
@@ -613,16 +620,7 @@ impl GlobalState {
                 ));
                 return;
             }
-
-            // Avoid flashing a bunch of unresolved references during initial load.
-            if this.workspaces.is_empty() && !this.is_quiescent() {
-                this.respond(lsp_server::Response::new_err(
-                    req.id.clone(),
-                    lsp_server::ErrorCode::ContentModified as i32,
-                    "waiting for cargo metadata or cargo check".to_owned(),
-                ));
-                return;
-            }
+            _ => (),
         }
 
         dispatcher
@@ -935,6 +933,30 @@ impl GlobalState {
 
                 Ok(())
             })?
+            .on::<lsp_types::notification::DidChangeWorkspaceFolders>(|this, params| {
+                let config = Arc::make_mut(&mut this.config);
+
+                for workspace in params.event.removed {
+                    let Ok(path) = workspace.uri.to_file_path() else { continue };
+                    let Ok(path) = AbsPathBuf::try_from(path) else { continue };
+                    let Some(position) = config.workspace_roots.iter().position(|it| it == &path) else { continue };
+                    config.workspace_roots.remove(position);
+                }
+
+                let added = params
+                    .event
+                    .added
+                    .into_iter()
+                    .filter_map(|it| it.uri.to_file_path().ok())
+                    .filter_map(|it| AbsPathBuf::try_from(it).ok());
+                config.workspace_roots.extend(added);
+                    if !config.has_linked_projects() && config.detached_files().is_empty() {
+                        config.rediscover_workspaces();
+                        this.fetch_workspaces_queue.request_op("client workspaces changed".to_string())
+                    }
+
+                Ok(())
+            })?
             .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
                 for change in params.changes {
                     if let Ok(path) = from_proto::abs_path(&change.uri) {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index 9bbce70ec0a..5ac5af94f5a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -34,6 +34,8 @@ use crate::{
     op_queue::Cause,
 };
 
+use ::tt::token_id as tt;
+
 #[derive(Debug)]
 pub(crate) enum ProjectWorkspaceProgress {
     Begin,
@@ -148,11 +150,11 @@ impl GlobalState {
                             )
                         }
                         LinkedProject::InlineJsonProject(it) => {
-                            project_model::ProjectWorkspace::load_inline(
+                            Ok(project_model::ProjectWorkspace::load_inline(
                                 it.clone(),
                                 cargo_config.target.as_deref(),
                                 &cargo_config.extra_env,
-                            )
+                            ))
                         }
                     })
                     .collect::<Vec<_>>();
@@ -212,35 +214,11 @@ impl GlobalState {
         let workspaces =
             workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
 
-        fn eq_ignore_build_data<'a>(
-            left: &'a ProjectWorkspace,
-            right: &'a ProjectWorkspace,
-        ) -> bool {
-            let key = |p: &'a ProjectWorkspace| match p {
-                ProjectWorkspace::Cargo {
-                    cargo,
-                    sysroot,
-                    rustc,
-                    rustc_cfg,
-                    cfg_overrides,
-
-                    build_scripts: _,
-                    toolchain: _,
-                    target_layout: _,
-                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
-                _ => None,
-            };
-            match (key(left), key(right)) {
-                (Some(lk), Some(rk)) => lk == rk,
-                _ => left == right,
-            }
-        }
-
         let same_workspaces = workspaces.len() == self.workspaces.len()
             && workspaces
                 .iter()
                 .zip(self.workspaces.iter())
-                .all(|(l, r)| eq_ignore_build_data(l, r));
+                .all(|(l, r)| l.eq_ignore_build_data(r));
 
         if same_workspaces {
             let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
@@ -270,7 +248,8 @@ impl GlobalState {
 
             // Here, we completely changed the workspace (Cargo.toml edit), so
             // we don't care about build-script results, they are stale.
-            self.workspaces = Arc::new(workspaces)
+            // FIXME: can we abort the build scripts here?
+            self.workspaces = Arc::new(workspaces);
         }
 
         if let FilesWatcher::Client = self.config.files().watcher {
@@ -362,7 +341,7 @@ impl GlobalState {
             let loader = &mut self.loader;
             let mem_docs = &self.mem_docs;
             let mut load = move |path: &AbsPath| {
-                let _p = profile::span("GlobalState::load");
+                let _p = profile::span("switch_workspaces::load");
                 let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                 if !mem_docs.contains(&vfs_path) {
                     let contents = loader.handle.load_sync(path);
@@ -584,10 +563,10 @@ pub(crate) fn load_proc_macro(
     path: &AbsPath,
     dummy_replace: &[Box<str>],
 ) -> ProcMacroLoadResult {
+    let server = server.map_err(ToOwned::to_owned)?;
     let res: Result<Vec<_>, String> = (|| {
         let dylib = MacroDylib::new(path.to_path_buf())
             .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
-        let server = server.map_err(ToOwned::to_owned)?;
         let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
         if vec.is_empty() {
             return Err("proc macro library returned no proc macros".to_string());
@@ -679,7 +658,7 @@ pub(crate) fn load_proc_macro(
             _: Option<&tt::Subtree>,
             _: &Env,
         ) -> Result<tt::Subtree, ProcMacroExpansionError> {
-            Ok(tt::Subtree::default())
+            Ok(tt::Subtree::empty())
         }
     }
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
index e736b2ff9a3..5bdc1bf8d9b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
@@ -9,9 +9,9 @@ use ide::{
     Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
     CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
     Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
-    InlayHintLabel, InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable,
-    Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange,
-    TextSize,
+    InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory,
+    RenameError, Runnable, Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind,
+    TextEdit, TextRange, TextSize,
 };
 use itertools::Itertools;
 use serde_json::to_value;
@@ -215,8 +215,14 @@ pub(crate) fn completion_items(
     let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
     let mut res = Vec::with_capacity(items.len());
     for item in items {
-        completion_item(&mut res, config, line_index, &tdpp, max_relevance, item)
+        completion_item(&mut res, config, line_index, &tdpp, max_relevance, item);
     }
+
+    if let Some(limit) = config.completion().limit {
+        res.sort_by(|item1, item2| item1.sort_text.cmp(&item2.sort_text));
+        res.truncate(limit);
+    }
+
     res
 }
 
@@ -431,137 +437,140 @@ pub(crate) fn inlay_hint(
     mut inlay_hint: InlayHint,
 ) -> Cancellable<lsp_types::InlayHint> {
     match inlay_hint.kind {
-        InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"),
-        InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "),
-        InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "),
-        InlayKind::DiscriminantHint => inlay_hint.label.prepend_str(" = "),
+        InlayKind::Parameter if render_colons => inlay_hint.label.append_str(":"),
+        InlayKind::Type if render_colons => inlay_hint.label.prepend_str(": "),
+        InlayKind::ClosureReturnType => inlay_hint.label.prepend_str(" -> "),
+        InlayKind::Discriminant => inlay_hint.label.prepend_str(" = "),
         _ => {}
     }
 
+    let (label, tooltip) = inlay_hint_label(snap, inlay_hint.label)?;
+
     Ok(lsp_types::InlayHint {
         position: match inlay_hint.kind {
             // before annotated thing
             InlayKind::OpeningParenthesis
-            | InlayKind::ParameterHint
-            | InlayKind::AdjustmentHint
-            | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
+            | InlayKind::Parameter
+            | InlayKind::Adjustment
+            | InlayKind::BindingMode => position(line_index, inlay_hint.range.start()),
             // after annotated thing
-            InlayKind::ClosureReturnTypeHint
-            | InlayKind::TypeHint
-            | InlayKind::DiscriminantHint
-            | InlayKind::ChainingHint
-            | InlayKind::GenericParamListHint
+            InlayKind::ClosureReturnType
+            | InlayKind::Type
+            | InlayKind::Discriminant
+            | InlayKind::Chaining
+            | InlayKind::GenericParamList
             | InlayKind::ClosingParenthesis
-            | InlayKind::AdjustmentHintPostfix
-            | InlayKind::LifetimeHint
-            | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
+            | InlayKind::AdjustmentPostfix
+            | InlayKind::Lifetime
+            | InlayKind::ClosingBrace => position(line_index, inlay_hint.range.end()),
         },
         padding_left: Some(match inlay_hint.kind {
-            InlayKind::TypeHint => !render_colons,
-            InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
+            InlayKind::Type => !render_colons,
+            InlayKind::Chaining | InlayKind::ClosingBrace => true,
             InlayKind::ClosingParenthesis
-            | InlayKind::DiscriminantHint
+            | InlayKind::Discriminant
             | InlayKind::OpeningParenthesis
-            | InlayKind::BindingModeHint
-            | InlayKind::ClosureReturnTypeHint
-            | InlayKind::GenericParamListHint
-            | InlayKind::AdjustmentHint
-            | InlayKind::AdjustmentHintPostfix
-            | InlayKind::LifetimeHint
-            | InlayKind::ParameterHint => false,
+            | InlayKind::BindingMode
+            | InlayKind::ClosureReturnType
+            | InlayKind::GenericParamList
+            | InlayKind::Adjustment
+            | InlayKind::AdjustmentPostfix
+            | InlayKind::Lifetime
+            | InlayKind::Parameter => false,
         }),
         padding_right: Some(match inlay_hint.kind {
             InlayKind::ClosingParenthesis
             | InlayKind::OpeningParenthesis
-            | InlayKind::ChainingHint
-            | InlayKind::ClosureReturnTypeHint
-            | InlayKind::GenericParamListHint
-            | InlayKind::AdjustmentHint
-            | InlayKind::AdjustmentHintPostfix
-            | InlayKind::TypeHint
-            | InlayKind::DiscriminantHint
-            | InlayKind::ClosingBraceHint => false,
-            InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"),
-            InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
+            | InlayKind::Chaining
+            | InlayKind::ClosureReturnType
+            | InlayKind::GenericParamList
+            | InlayKind::Adjustment
+            | InlayKind::AdjustmentPostfix
+            | InlayKind::Type
+            | InlayKind::Discriminant
+            | InlayKind::ClosingBrace => false,
+            InlayKind::BindingMode => {
+                matches!(&label, lsp_types::InlayHintLabel::String(s) if s != "&")
+            }
+            InlayKind::Parameter | InlayKind::Lifetime => true,
         }),
         kind: match inlay_hint.kind {
-            InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER),
-            InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
+            InlayKind::Parameter => Some(lsp_types::InlayHintKind::PARAMETER),
+            InlayKind::ClosureReturnType | InlayKind::Type | InlayKind::Chaining => {
                 Some(lsp_types::InlayHintKind::TYPE)
             }
             InlayKind::ClosingParenthesis
-            | InlayKind::DiscriminantHint
+            | InlayKind::Discriminant
             | InlayKind::OpeningParenthesis
-            | InlayKind::BindingModeHint
-            | InlayKind::GenericParamListHint
-            | InlayKind::LifetimeHint
-            | InlayKind::AdjustmentHint
-            | InlayKind::AdjustmentHintPostfix
-            | InlayKind::ClosingBraceHint => None,
+            | InlayKind::BindingMode
+            | InlayKind::GenericParamList
+            | InlayKind::Lifetime
+            | InlayKind::Adjustment
+            | InlayKind::AdjustmentPostfix
+            | InlayKind::ClosingBrace => None,
         },
         text_edits: None,
-        data: (|| match inlay_hint.tooltip {
-            Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => {
-                let uri = url(snap, file_id);
-                let line_index = snap.file_line_index(file_id).ok()?;
-
-                let text_document = lsp_types::VersionedTextDocumentIdentifier {
-                    version: snap.url_file_version(&uri)?,
-                    uri,
-                };
-                to_value(lsp_ext::InlayHintResolveData {
-                    text_document,
-                    position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
-                })
-                .ok()
-            }
-            Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
-                let uri = url(snap, file_id);
-                let text_document = lsp_types::VersionedTextDocumentIdentifier {
-                    version: snap.url_file_version(&uri)?,
-                    uri,
-                };
-                let line_index = snap.file_line_index(file_id).ok()?;
-                to_value(lsp_ext::InlayHintResolveData {
-                    text_document,
-                    position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)),
-                })
-                .ok()
-            }
-            _ => None,
-        })(),
-        tooltip: Some(match inlay_hint.tooltip {
-            Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s),
-            _ => lsp_types::InlayHintTooltip::String(inlay_hint.label.to_string()),
-        }),
-        label: inlay_hint_label(snap, inlay_hint.label)?,
+        data: None,
+        tooltip,
+        label,
     })
 }
 
 fn inlay_hint_label(
     snap: &GlobalStateSnapshot,
-    label: InlayHintLabel,
-) -> Cancellable<lsp_types::InlayHintLabel> {
-    Ok(match label.as_simple_str() {
-        Some(s) => lsp_types::InlayHintLabel::String(s.into()),
-        None => lsp_types::InlayHintLabel::LabelParts(
-            label
+    mut label: InlayHintLabel,
+) -> Cancellable<(lsp_types::InlayHintLabel, Option<lsp_types::InlayHintTooltip>)> {
+    let res = match &*label.parts {
+        [InlayHintLabelPart { linked_location: None, .. }] => {
+            let InlayHintLabelPart { text, tooltip, .. } = label.parts.pop().unwrap();
+            (
+                lsp_types::InlayHintLabel::String(text),
+                match tooltip {
+                    Some(ide::InlayTooltip::String(s)) => {
+                        Some(lsp_types::InlayHintTooltip::String(s))
+                    }
+                    Some(ide::InlayTooltip::Markdown(s)) => {
+                        Some(lsp_types::InlayHintTooltip::MarkupContent(lsp_types::MarkupContent {
+                            kind: lsp_types::MarkupKind::Markdown,
+                            value: s,
+                        }))
+                    }
+                    None => None,
+                },
+            )
+        }
+        _ => {
+            let parts = label
                 .parts
                 .into_iter()
                 .map(|part| {
-                    Ok(lsp_types::InlayHintLabelPart {
-                        value: part.text,
-                        tooltip: None,
-                        location: part
-                            .linked_location
-                            .map(|range| location(snap, range))
-                            .transpose()?,
-                        command: None,
-                    })
+                    part.linked_location.map(|range| location(snap, range)).transpose().map(
+                        |location| lsp_types::InlayHintLabelPart {
+                            value: part.text,
+                            tooltip: match part.tooltip {
+                                Some(ide::InlayTooltip::String(s)) => {
+                                    Some(lsp_types::InlayHintLabelPartTooltip::String(s))
+                                }
+                                Some(ide::InlayTooltip::Markdown(s)) => {
+                                    Some(lsp_types::InlayHintLabelPartTooltip::MarkupContent(
+                                        lsp_types::MarkupContent {
+                                            kind: lsp_types::MarkupKind::Markdown,
+                                            value: s,
+                                        },
+                                    ))
+                                }
+                                None => None,
+                            },
+                            location,
+                            command: None,
+                        },
+                    )
                 })
-                .collect::<Cancellable<Vec<_>>>()?,
-        ),
-    })
+                .collect::<Cancellable<_>>()?;
+            (lsp_types::InlayHintLabel::LabelParts(parts), None)
+        }
+    };
+    Ok(res)
 }
 
 static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index 269212ebb99..b7275df0f40 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -137,6 +137,7 @@ impl<'a> Project<'a> {
                 })),
                 ..Default::default()
             },
+            Vec::new(),
         );
         config.discovered_projects = Some(discovered_projects);
         config.update(self.config).expect("invalid config");
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
index 35b5af73192..8e3097fce42 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/tidy.rs
@@ -82,7 +82,6 @@ fn files_are_tidy() {
                 check_dbg(&path, &text);
                 check_test_attrs(&path, &text);
                 check_trailing_ws(&path, &text);
-                deny_clippy(&path, &text);
                 tidy_docs.visit(&path, &text);
                 tidy_marks.visit(&path, &text);
             }
@@ -144,32 +143,6 @@ fn check_cargo_toml(path: &Path, text: String) {
     }
 }
 
-fn deny_clippy(path: &Path, text: &str) {
-    let ignore = &[
-        // The documentation in string literals may contain anything for its own purposes
-        "ide-db/src/generated/lints.rs",
-        // The tests test clippy lint hovers
-        "ide/src/hover/tests.rs",
-        // The tests test clippy lint completions
-        "ide-completion/src/tests/attribute.rs",
-    ];
-    if ignore.iter().any(|p| path.ends_with(p)) {
-        return;
-    }
-
-    if text.contains("\u{61}llow(clippy") {
-        panic!(
-            "\n\nallowing lints is forbidden: {}.
-rust-analyzer intentionally doesn't check clippy on CI.
-You can allow lint globally via `xtask clippy`.
-See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
-
-",
-            path.display()
-        )
-    }
-}
-
 #[cfg(not(feature = "in-rust-tree"))]
 #[test]
 fn check_licenses() {
diff --git a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
index 593dc4e55b2..fb2b9ebef50 100644
--- a/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/sourcegen/Cargo.toml
@@ -2,9 +2,11 @@
 name = "sourcegen"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
index f7b7d09640f..c881f2fd3f4 100644
--- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml
@@ -2,9 +2,11 @@
 name = "stdx"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
diff --git a/src/tools/rust-analyzer/crates/stdx/src/lib.rs b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
index 51e109798d1..bd24d7d28ba 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/lib.rs
@@ -2,9 +2,9 @@
 
 #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 
+use std::io as sio;
 use std::process::Command;
 use std::{cmp::Ordering, ops, time::Instant};
-use std::{io as sio, iter};
 
 mod macros;
 pub mod hash;
@@ -39,15 +39,19 @@ Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
 }
 
 pub fn to_lower_snake_case(s: &str) -> String {
-    to_snake_case(s, char::to_ascii_lowercase)
+    to_snake_case(s, char::to_lowercase)
 }
 pub fn to_upper_snake_case(s: &str) -> String {
-    to_snake_case(s, char::to_ascii_uppercase)
+    to_snake_case(s, char::to_uppercase)
 }
 
 // Code partially taken from rust/compiler/rustc_lint/src/nonstandard_style.rs
 // commit: 9626f2b
-fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String {
+fn to_snake_case<F, I>(mut s: &str, change_case: F) -> String
+where
+    F: Fn(char) -> I,
+    I: Iterator<Item = char>,
+{
     let mut words = vec![];
 
     // Preserve leading underscores
@@ -75,7 +79,7 @@ fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String {
             }
 
             last_upper = ch.is_uppercase();
-            buf.extend(iter::once(change_case(&ch)));
+            buf.extend(change_case(ch));
         }
 
         words.push(buf);
diff --git a/src/tools/rust-analyzer/crates/stdx/src/macros.rs b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
index d91fc690cb5..1a9982fa8b2 100644
--- a/src/tools/rust-analyzer/crates/stdx/src/macros.rs
+++ b/src/tools/rust-analyzer/crates/stdx/src/macros.rs
@@ -43,5 +43,14 @@ macro_rules! impl_from {
                 }
             )*)?
         )*
+    };
+    ($($variant:ident$(<$V:ident>)?),* for $enum:ident) => {
+        $(
+            impl$(<$V>)? From<$variant$(<$V>)?> for $enum$(<$V>)? {
+                fn from(it: $variant$(<$V>)?) -> $enum$(<$V>)? {
+                    $enum::$variant(it)
+                }
+            }
+        )*
     }
 }
diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
index 00743cca559..8fc493a23f5 100644
--- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml
@@ -2,10 +2,12 @@
 name = "syntax"
 version = "0.0.0"
 description = "Comment and whitespace preserving parser for the Rust language"
-license = "MIT OR Apache-2.0"
 repository = "https://github.com/rust-lang/rust-analyzer"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -14,26 +16,26 @@ doctest = false
 cov-mark = "2.0.0-pre.1"
 itertools = "0.10.5"
 rowan = "0.15.10"
-rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+rustc_lexer = { version = "727.0.0", package = "rustc-ap-rustc_lexer" }
 rustc-hash = "1.1.0"
-once_cell = "1.15.0"
+once_cell = "1.17.0"
 indexmap = "1.9.1"
 smol_str = "0.1.23"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-text-edit = { path = "../text-edit", version = "0.0.0" }
-parser = { path = "../parser", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
+parser.workspace = true
+profile.workspace = true
+stdx.workspace = true
+text-edit.workspace = true
 
 [dev-dependencies]
-rayon = "1.5.3"
+rayon = "1.6.1"
 expect-test = "1.4.0"
 proc-macro2 = "1.0.47"
 quote = "1.0.20"
 ungrammar = "1.16.1"
 
-test-utils = { path = "../test-utils" }
-sourcegen = { path = "../sourcegen" }
+test-utils.workspace = true
+sourcegen.workspace = true
 
 [features]
 in-rust-tree = []
diff --git a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
index f295c40065d..6070222f1f1 100644
--- a/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/syntax/fuzz/Cargo.toml
@@ -1,18 +1,17 @@
-
 [package]
 name = "syntax-fuzz"
 version = "0.0.1"
 publish = false
 edition = "2021"
-rust-version = "1.65"
+rust-version = "1.66.1"
 
 [package.metadata]
 cargo-fuzz = true
 
 [dependencies]
 syntax = { path = "..", version = "0.0.0" }
-text_edit = { path = "../../text_edit", version = "0.0.0" }
-libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" }
+text-edit = { path = "../../text-edit", version = "0.0.0" }
+libfuzzer-sys = "0.4.5"
 
 # Prevent this from interfering with workspaces
 [workspace]
diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram
index 2c67586a390..36ad5fddfd0 100644
--- a/src/tools/rust-analyzer/crates/syntax/rust.ungram
+++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram
@@ -452,7 +452,7 @@ FieldExpr =
   Attr* Expr '.' NameRef
 
 ClosureExpr =
-  Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'?  ParamList RetType?
+  Attr* ('for' GenericParamList)? 'const'? 'static'? 'async'? 'move'?  ParamList RetType?
   body:Expr
 
 IfExpr =
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index 10c04575833..385a4e0a3ce 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -13,6 +13,8 @@ pub mod prec;
 
 use std::marker::PhantomData;
 
+use itertools::Either;
+
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
     SyntaxKind,
@@ -98,6 +100,34 @@ impl<N: AstNode> Iterator for AstChildren<N> {
     }
 }
 
+impl<L, R> AstNode for Either<L, R>
+where
+    L: AstNode,
+    R: AstNode,
+{
+    fn can_cast(kind: SyntaxKind) -> bool
+    where
+        Self: Sized,
+    {
+        L::can_cast(kind) || R::can_cast(kind)
+    }
+
+    fn cast(syntax: SyntaxNode) -> Option<Self>
+    where
+        Self: Sized,
+    {
+        if L::can_cast(syntax.kind()) {
+            L::cast(syntax).map(Either::Left)
+        } else {
+            R::cast(syntax).map(Either::Right)
+        }
+    }
+
+    fn syntax(&self) -> &SyntaxNode {
+        self.as_ref().either(L::syntax, R::syntax)
+    }
+}
+
 mod support {
     use super::{AstChildren, AstNode, SyntaxKind, SyntaxNode, SyntaxToken};
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
index a214a5e4462..642a3bfc35d 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs
@@ -842,6 +842,7 @@ impl ast::HasAttrs for ClosureExpr {}
 impl ClosureExpr {
     pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
     pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+    pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
     pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
     pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
     pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
index d5b3296980c..5aebe4cd9f5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs
@@ -12,7 +12,7 @@
 use itertools::Itertools;
 use stdx::{format_to, never};
 
-use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
+use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken};
 
 /// While the parent module defines basic atomic "constructors", the `ext`
 /// module defines shortcuts for common things.
@@ -111,8 +111,7 @@ pub fn name_ref(name_ref: &str) -> ast::NameRef {
     ast_from_text(&format!("fn f() {{ {raw_escape}{name_ref}; }}"))
 }
 fn raw_ident_esc(ident: &str) -> &'static str {
-    let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some();
-    if is_keyword && !matches!(ident, "self" | "crate" | "super" | "Self") {
+    if is_raw_identifier(ident) {
         "r#"
     } else {
         ""
@@ -520,6 +519,15 @@ pub fn literal_pat(lit: &str) -> ast::LiteralPat {
     }
 }
 
+pub fn slice_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::SlicePat {
+    let pats_str = pats.into_iter().join(", ");
+    return from_text(&format!("[{pats_str}]"));
+
+    fn from_text(text: &str) -> ast::SlicePat {
+        ast_from_text(&format!("fn f() {{ match () {{{text} => ()}} }}"))
+    }
+}
+
 /// Creates a tuple of patterns from an iterator of patterns.
 ///
 /// Invariant: `pats` must be length > 0
@@ -814,6 +822,7 @@ pub fn fn_(
     visibility: Option<ast::Visibility>,
     fn_name: ast::Name,
     type_params: Option<ast::GenericParamList>,
+    where_clause: Option<ast::WhereClause>,
     params: ast::ParamList,
     body: ast::BlockExpr,
     ret_type: Option<ast::RetType>,
@@ -823,6 +832,10 @@ pub fn fn_(
         Some(type_params) => format!("{type_params}"),
         None => "".into(),
     };
+    let where_clause = match where_clause {
+        Some(it) => format!("{it} "),
+        None => "".into(),
+    };
     let ret_type = match ret_type {
         Some(ret_type) => format!("{ret_type} "),
         None => "".into(),
@@ -835,7 +848,7 @@ pub fn fn_(
     let async_literal = if is_async { "async " } else { "" };
 
     ast_from_text(&format!(
-        "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{body}",
+        "{visibility}{async_literal}fn {fn_name}{type_params}{params} {ret_type}{where_clause}{body}",
     ))
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
index a08c01597d3..2e9e0bc226b 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_node.rs
@@ -48,6 +48,7 @@ impl SyntaxTreeBuilder {
     pub fn finish(self) -> Parse<SyntaxNode> {
         let (green, errors) = self.finish_raw();
         // Disable block validation, see https://github.com/rust-lang/rust-analyzer/pull/10357
+        #[allow(clippy::overly_complex_bool_expr)]
         if cfg!(debug_assertions) && false {
             let node = SyntaxNode::new_root(green.clone());
             crate::validation::validate_block_structure(&node);
diff --git a/src/tools/rust-analyzer/crates/syntax/src/utils.rs b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
index f4c02518b4c..25f34ea9d39 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/utils.rs
@@ -2,7 +2,7 @@
 
 use itertools::Itertools;
 
-use crate::{ast, match_ast, AstNode};
+use crate::{ast, match_ast, AstNode, SyntaxKind};
 
 pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
     path.syntax()
@@ -23,6 +23,11 @@ pub fn path_to_string_stripping_turbo_fish(path: &ast::Path) -> String {
         .join("::")
 }
 
+pub fn is_raw_identifier(name: &str) -> bool {
+    let is_keyword = SyntaxKind::from_keyword(name).is_some();
+    is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
+}
+
 #[cfg(test)]
 mod tests {
     use super::path_to_string_stripping_turbo_fish;
diff --git a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
index 1047373b1c7..92b1ef23e69 100644
--- a/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/test-utils/Cargo.toml
@@ -2,9 +2,11 @@
 name = "test-utils"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -15,5 +17,5 @@ dissimilar = "1.0.4"
 text-size = "1.1.0"
 rustc-hash = "1.1.0"
 
-stdx = { path = "../stdx", version = "0.0.0" }
-profile = { path = "../profile", version = "0.0.0" }
+stdx.workspace = true
+profile.workspace = true
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 3ca63fcab90..3b033e1aae5 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -28,6 +28,7 @@
 //!     generator: pin
 //!     hash:
 //!     index: sized
+//!     infallible:
 //!     iterator: option
 //!     iterators: iterator, fn
 //!     non_zero:
@@ -40,7 +41,7 @@
 //!     sized:
 //!     slice:
 //!     sync: sized
-//!     try:
+//!     try: infallible
 //!     unsize: sized
 
 pub mod marker {
@@ -105,6 +106,11 @@ pub mod marker {
         impl<T: ?Sized> Copy for &T {}
     }
     // endregion:copy
+
+    // region:fn
+    #[lang = "tuple_trait"]
+    pub trait Tuple {}
+    // endregion:fn
 }
 
 // region:default
@@ -172,6 +178,9 @@ pub mod convert {
         fn as_ref(&self) -> &T;
     }
     // endregion:as_ref
+    // region:infallible
+    pub enum Infallibe {}
+    // endregion:infallible
 }
 
 pub mod ops {
@@ -269,6 +278,24 @@ pub mod ops {
             }
         }
 
+        impl<T, I, const N: usize> Index<I> for [T; N]
+        where
+            I: SliceIndex<[T]>,
+        {
+            type Output = I::Output;
+            fn index(&self, index: I) -> &I::Output {
+                loop {}
+            }
+        }
+        impl<T, I, const N: usize> IndexMut<I> for [T; N]
+        where
+            I: SliceIndex<[T]>,
+        {
+            fn index_mut(&mut self, index: I) -> &mut I::Output {
+                loop {}
+            }
+        }
+
         pub unsafe trait SliceIndex<T: ?Sized> {
             type Output: ?Sized;
         }
@@ -325,19 +352,26 @@ pub mod ops {
 
     // region:fn
     mod function {
+        use crate::marker::Tuple;
+
         #[lang = "fn"]
         #[fundamental]
-        pub trait Fn<Args>: FnMut<Args> {}
+        pub trait Fn<Args: Tuple>: FnMut<Args> {
+            extern "rust-call" fn call(&self, args: Args) -> Self::Output;
+        }
 
         #[lang = "fn_mut"]
         #[fundamental]
-        pub trait FnMut<Args>: FnOnce<Args> {}
+        pub trait FnMut<Args: Tuple>: FnOnce<Args> {
+            extern "rust-call" fn call_mut(&mut self, args: Args) -> Self::Output;
+        }
 
         #[lang = "fn_once"]
         #[fundamental]
-        pub trait FnOnce<Args> {
+        pub trait FnOnce<Args: Tuple> {
             #[lang = "fn_once_output"]
             type Output;
+            extern "rust-call" fn call_once(self, args: Args) -> Self::Output;
         }
     }
     pub use self::function::{Fn, FnMut, FnOnce};
@@ -352,7 +386,7 @@ pub mod ops {
             #[lang = "from_residual"]
             fn from_residual(residual: R) -> Self;
         }
-        #[lang = "try"]
+        #[lang = "Try"]
         pub trait Try: FromResidual<Self::Residual> {
             type Output;
             type Residual;
diff --git a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
index 8df7e1af611..337cd234739 100644
--- a/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/text-edit/Cargo.toml
@@ -2,9 +2,11 @@
 name = "text-edit"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
diff --git a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
index a6a3ae742ae..a283f9a8842 100644
--- a/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/toolchain/Cargo.toml
@@ -2,9 +2,11 @@
 name = "toolchain"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
diff --git a/src/tools/rust-analyzer/crates/tt/Cargo.toml b/src/tools/rust-analyzer/crates/tt/Cargo.toml
index 4f2103f3a97..b8469383183 100644
--- a/src/tools/rust-analyzer/crates/tt/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/tt/Cargo.toml
@@ -2,9 +2,11 @@
 name = "tt"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -12,4 +14,4 @@ doctest = false
 [dependencies]
 smol_str = "0.1.23"
 
-stdx = { path = "../stdx", version = "0.0.0" }
+stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
index d27a7aa0d4d..0615a3763df 100644
--- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs
@@ -7,33 +7,43 @@ use crate::{Leaf, Subtree, TokenTree};
 struct EntryId(usize);
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
-struct EntryPtr(EntryId, usize);
+struct EntryPtr(
+    /// The index of the buffer containing the entry.
+    EntryId,
+    /// The index of the entry within the buffer.
+    usize,
+);
 
 /// Internal type which is used instead of `TokenTree` to represent a token tree
 /// within a `TokenBuffer`.
 #[derive(Debug)]
-enum Entry<'t> {
+enum Entry<'t, Span> {
     // Mimicking types from proc-macro.
-    Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
-    Leaf(&'t TokenTree),
-    // End entries contain a pointer to the entry from the containing
-    // token tree, or None if this is the outermost level.
+    Subtree(Option<&'t TokenTree<Span>>, &'t Subtree<Span>, EntryId),
+    Leaf(&'t TokenTree<Span>),
+    /// End entries contain a pointer to the entry from the containing
+    /// token tree, or [`None`] if this is the outermost level.
     End(Option<EntryPtr>),
 }
 
 /// A token tree buffer
 /// The safe version of `syn` [`TokenBuffer`](https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L41)
 #[derive(Debug)]
-pub struct TokenBuffer<'t> {
-    buffers: Vec<Box<[Entry<'t>]>>,
+pub struct TokenBuffer<'t, Span> {
+    buffers: Vec<Box<[Entry<'t, Span>]>>,
 }
 
-trait TokenList<'a> {
-    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
+trait TokenList<'a, Span> {
+    fn entries(
+        &self,
+    ) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>);
 }
 
-impl<'a> TokenList<'a> for &'a [TokenTree] {
-    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+impl<'a, Span> TokenList<'a, Span> for &'a [TokenTree<Span>] {
+    fn entries(
+        &self,
+    ) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>)
+    {
         // Must contain everything in tokens and then the Entry::End
         let start_capacity = self.len() + 1;
         let mut entries = Vec::with_capacity(start_capacity);
@@ -53,8 +63,11 @@ impl<'a> TokenList<'a> for &'a [TokenTree] {
     }
 }
 
-impl<'a> TokenList<'a> for &'a Subtree {
-    fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
+impl<'a, Span> TokenList<'a, Span> for &'a Subtree<Span> {
+    fn entries(
+        &self,
+    ) -> (Vec<(usize, (&'a Subtree<Span>, Option<&'a TokenTree<Span>>))>, Vec<Entry<'a, Span>>)
+    {
         // Must contain everything in tokens and then the Entry::End
         let mut entries = vec![];
         let mut children = vec![];
@@ -64,25 +77,25 @@ impl<'a> TokenList<'a> for &'a Subtree {
     }
 }
 
-impl<'t> TokenBuffer<'t> {
-    pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
+impl<'t, Span> TokenBuffer<'t, Span> {
+    pub fn from_tokens(tokens: &'t [TokenTree<Span>]) -> TokenBuffer<'t, Span> {
         Self::new(tokens)
     }
 
-    pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
+    pub fn from_subtree(subtree: &'t Subtree<Span>) -> TokenBuffer<'t, Span> {
         Self::new(subtree)
     }
 
-    fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
+    fn new<T: TokenList<'t, Span>>(tokens: T) -> TokenBuffer<'t, Span> {
         let mut buffers = vec![];
         let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
         assert_eq!(idx, 0);
         TokenBuffer { buffers }
     }
 
-    fn new_inner<T: TokenList<'t>>(
+    fn new_inner<T: TokenList<'t, Span>>(
         tokens: T,
-        buffers: &mut Vec<Box<[Entry<'t>]>>,
+        buffers: &mut Vec<Box<[Entry<'t, Span>]>>,
         next: Option<EntryPtr>,
     ) -> usize {
         let (children, mut entries) = tokens.entries();
@@ -105,25 +118,25 @@ impl<'t> TokenBuffer<'t> {
 
     /// Creates a cursor referencing the first token in the buffer and able to
     /// traverse until the end of the buffer.
-    pub fn begin(&self) -> Cursor<'_> {
+    pub fn begin(&self) -> Cursor<'_, Span> {
         Cursor::create(self, EntryPtr(EntryId(0), 0))
     }
 
-    fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_>> {
+    fn entry(&self, ptr: &EntryPtr) -> Option<&Entry<'_, Span>> {
         let id = ptr.0;
         self.buffers[id.0].get(ptr.1)
     }
 }
 
 #[derive(Debug)]
-pub enum TokenTreeRef<'a> {
-    Subtree(&'a Subtree, Option<&'a TokenTree>),
-    Leaf(&'a Leaf, &'a TokenTree),
+pub enum TokenTreeRef<'a, Span> {
+    Subtree(&'a Subtree<Span>, Option<&'a TokenTree<Span>>),
+    Leaf(&'a Leaf<Span>, &'a TokenTree<Span>),
 }
 
-impl<'a> TokenTreeRef<'a> {
-    pub fn cloned(&self) -> TokenTree {
-        match &self {
+impl<'a, Span: Clone> TokenTreeRef<'a, Span> {
+    pub fn cloned(&self) -> TokenTree<Span> {
+        match self {
             TokenTreeRef::Subtree(subtree, tt) => match tt {
                 Some(it) => (*it).clone(),
                 None => (*subtree).clone().into(),
@@ -135,20 +148,20 @@ impl<'a> TokenTreeRef<'a> {
 
 /// A safe version of `Cursor` from `syn` crate <https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125>
 #[derive(Copy, Clone, Debug)]
-pub struct Cursor<'a> {
-    buffer: &'a TokenBuffer<'a>,
+pub struct Cursor<'a, Span> {
+    buffer: &'a TokenBuffer<'a, Span>,
     ptr: EntryPtr,
 }
 
-impl<'a> PartialEq for Cursor<'a> {
-    fn eq(&self, other: &Cursor<'_>) -> bool {
+impl<'a, Span> PartialEq for Cursor<'a, Span> {
+    fn eq(&self, other: &Cursor<'_, Span>) -> bool {
         self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer)
     }
 }
 
-impl<'a> Eq for Cursor<'a> {}
+impl<'a, Span> Eq for Cursor<'a, Span> {}
 
-impl<'a> Cursor<'a> {
+impl<'a, Span> Cursor<'a, Span> {
     /// Check whether it is eof
     pub fn eof(self) -> bool {
         matches!(self.buffer.entry(&self.ptr), None | Some(Entry::End(None)))
@@ -156,7 +169,7 @@ impl<'a> Cursor<'a> {
 
     /// If the cursor is pointing at the end of a subtree, returns
     /// the parent subtree
-    pub fn end(self) -> Option<&'a Subtree> {
+    pub fn end(self) -> Option<&'a Subtree<Span>> {
         match self.entry() {
             Some(Entry::End(Some(ptr))) => {
                 let idx = ptr.1;
@@ -171,13 +184,13 @@ impl<'a> Cursor<'a> {
         }
     }
 
-    fn entry(self) -> Option<&'a Entry<'a>> {
+    fn entry(&self) -> Option<&'a Entry<'a, Span>> {
         self.buffer.entry(&self.ptr)
     }
 
     /// If the cursor is pointing at a `Subtree`, returns
     /// a cursor into that subtree
-    pub fn subtree(self) -> Option<Cursor<'a>> {
+    pub fn subtree(self) -> Option<Cursor<'a, Span>> {
         match self.entry() {
             Some(Entry::Subtree(_, _, entry_id)) => {
                 Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
@@ -187,7 +200,7 @@ impl<'a> Cursor<'a> {
     }
 
     /// If the cursor is pointing at a `TokenTree`, returns it
-    pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
+    pub fn token_tree(self) -> Option<TokenTreeRef<'a, Span>> {
         match self.entry() {
             Some(Entry::Leaf(tt)) => match tt {
                 TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, tt)),
@@ -198,12 +211,12 @@ impl<'a> Cursor<'a> {
         }
     }
 
-    fn create(buffer: &'a TokenBuffer<'_>, ptr: EntryPtr) -> Cursor<'a> {
+    fn create(buffer: &'a TokenBuffer<'_, Span>, ptr: EntryPtr) -> Cursor<'a, Span> {
         Cursor { buffer, ptr }
     }
 
     /// Bump the cursor
-    pub fn bump(self) -> Cursor<'a> {
+    pub fn bump(self) -> Cursor<'a, Span> {
         if let Some(Entry::End(exit)) = self.buffer.entry(&self.ptr) {
             match exit {
                 Some(exit) => Cursor::create(self.buffer, *exit),
@@ -216,10 +229,16 @@ impl<'a> Cursor<'a> {
 
     /// Bump the cursor, if it is a subtree, returns
     /// a cursor into that subtree
-    pub fn bump_subtree(self) -> Cursor<'a> {
+    pub fn bump_subtree(self) -> Cursor<'a, Span> {
         match self.entry() {
-            Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
-            _ => self.bump(),
+            Some(&Entry::Subtree(_, _, entry_id)) => {
+                Cursor::create(self.buffer, EntryPtr(entry_id, 0))
+            }
+            Some(Entry::End(exit)) => match exit {
+                Some(exit) => Cursor::create(self.buffer, *exit),
+                None => self,
+            },
+            _ => Cursor::create(self.buffer, EntryPtr(self.ptr.0, self.ptr.1 + 1)),
         }
     }
 
diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs b/src/tools/rust-analyzer/crates/tt/src/lib.rs
index 353b09fd8c1..b7dbc82e1d6 100644
--- a/src/tools/rust-analyzer/crates/tt/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs
@@ -16,45 +16,106 @@ pub use smol_str::SmolStr;
 /// which source tokens. We do it by assigning an distinct identity to each
 /// source token and making sure that identities are preserved during macro
 /// expansion.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TokenId(pub u32);
 
+impl fmt::Debug for TokenId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
 impl TokenId {
+    pub const UNSPECIFIED: TokenId = TokenId(!0);
     pub const fn unspecified() -> TokenId {
-        TokenId(!0)
+        Self::UNSPECIFIED
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum TokenTree {
-    Leaf(Leaf),
-    Subtree(Subtree),
-}
-impl_from!(Leaf, Subtree for TokenTree);
+pub mod token_id {
+    pub use crate::{DelimiterKind, Spacing, TokenId};
+    pub type Span = crate::TokenId;
+    pub type Subtree = crate::Subtree<Span>;
+    pub type Punct = crate::Punct<Span>;
+    pub type Delimiter = crate::Delimiter<Span>;
+    pub type Leaf = crate::Leaf<Span>;
+    pub type Ident = crate::Ident<Span>;
+    pub type Literal = crate::Literal<Span>;
+    pub type TokenTree = crate::TokenTree<Span>;
+    pub mod buffer {
+        pub type TokenBuffer<'a> = crate::buffer::TokenBuffer<'a, super::Span>;
+        pub type Cursor<'a> = crate::buffer::Cursor<'a, super::Span>;
+        pub type TokenTreeRef<'a> = crate::buffer::TokenTreeRef<'a, super::Span>;
+    }
 
-impl TokenTree {
-    pub fn empty() -> Self {
-        TokenTree::Subtree(Subtree::default())
+    impl Delimiter {
+        pub const UNSPECIFIED: Self = Self {
+            open: TokenId::UNSPECIFIED,
+            close: TokenId::UNSPECIFIED,
+            kind: DelimiterKind::Invisible,
+        };
+        pub const fn unspecified() -> Self {
+            Self::UNSPECIFIED
+        }
+    }
+    impl Subtree {
+        pub const fn empty() -> Self {
+            Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }
+        }
+    }
+    impl TokenTree {
+        pub const fn empty() -> Self {
+            Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] })
+        }
     }
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SyntaxContext(pub u32);
+
+// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+// pub struct Span {
+//     pub id: TokenId,
+//     pub ctx: SyntaxContext,
+// }
+// pub type Span = (TokenId, SyntaxContext);
+
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum Leaf {
-    Literal(Literal),
-    Punct(Punct),
-    Ident(Ident),
+pub enum TokenTree<Span> {
+    Leaf(Leaf<Span>),
+    Subtree(Subtree<Span>),
 }
-impl_from!(Literal, Punct, Ident for Leaf);
+impl_from!(Leaf<Span>, Subtree<Span> for TokenTree);
 
-#[derive(Clone, PartialEq, Eq, Hash, Default)]
-pub struct Subtree {
-    pub delimiter: Option<Delimiter>,
-    pub token_trees: Vec<TokenTree>,
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Leaf<Span> {
+    Literal(Literal<Span>),
+    Punct(Punct<Span>),
+    Ident(Ident<Span>),
+}
+
+impl<Span> Leaf<Span> {
+    pub fn span(&self) -> &Span {
+        match self {
+            Leaf::Literal(it) => &it.span,
+            Leaf::Punct(it) => &it.span,
+            Leaf::Ident(it) => &it.span,
+        }
+    }
+}
+impl_from!(Literal<Span>, Punct<Span>, Ident<Span> for Leaf);
+
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Subtree<Span> {
+    // FIXME, this should not be Option
+    pub delimiter: Delimiter<Span>,
+    pub token_trees: Vec<TokenTree<Span>>,
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub struct Delimiter {
-    pub id: TokenId,
+pub struct Delimiter<Span> {
+    pub open: Span,
+    pub close: Span,
     pub kind: DelimiterKind,
 }
 
@@ -63,19 +124,20 @@ pub enum DelimiterKind {
     Parenthesis,
     Brace,
     Bracket,
+    Invisible,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Literal {
+pub struct Literal<Span> {
     pub text: SmolStr,
-    pub id: TokenId,
+    pub span: Span,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub struct Punct {
+pub struct Punct<Span> {
     pub char: char,
     pub spacing: Spacing,
-    pub id: TokenId,
+    pub span: Span,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -85,39 +147,25 @@ pub enum Spacing {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub struct Ident {
-    /// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier.
+/// Identifier or keyword. Unlike rustc, we keep "r#" prefix when it represents a raw identifier.
+pub struct Ident<Span> {
     pub text: SmolStr,
-    pub id: TokenId,
+    pub span: Span,
 }
 
-impl Ident {
-    /// Constructor intended to be used only by proc macro server. `text` should not contain raw
-    /// identifier prefix.
-    pub fn new_with_is_raw(text: SmolStr, id: TokenId, is_raw: bool) -> Self {
-        let text = if is_raw { SmolStr::from_iter(["r#", &text]) } else { text };
-        Ident { text, id }
-    }
-}
-
-impl Leaf {
-    pub fn id(&self) -> TokenId {
-        match self {
-            Leaf::Literal(l) => l.id,
-            Leaf::Punct(p) => p.id,
-            Leaf::Ident(i) => i.id,
-        }
-    }
-}
-
-fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usize) -> fmt::Result {
+fn print_debug_subtree<Span: fmt::Debug>(
+    f: &mut fmt::Formatter<'_>,
+    subtree: &Subtree<Span>,
+    level: usize,
+) -> fmt::Result {
     let align = "  ".repeat(level);
 
-    let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) {
-        None => "$".to_string(),
-        Some((DelimiterKind::Parenthesis, id)) => format!("() {id}"),
-        Some((DelimiterKind::Brace, id)) => format!("{{}} {id}"),
-        Some((DelimiterKind::Bracket, id)) => format!("[] {id}"),
+    let Delimiter { kind, open, close } = &subtree.delimiter;
+    let aux = match kind {
+        DelimiterKind::Invisible => format!("$$ {:?} {:?}", open, close),
+        DelimiterKind::Parenthesis => format!("() {:?} {:?}", open, close),
+        DelimiterKind::Brace => format!("{{}} {:?} {:?}", open, close),
+        DelimiterKind::Bracket => format!("[] {:?} {:?}", open, close),
     };
 
     if subtree.token_trees.is_empty() {
@@ -135,21 +183,25 @@ fn print_debug_subtree(f: &mut fmt::Formatter<'_>, subtree: &Subtree, level: usi
     Ok(())
 }
 
-fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize) -> fmt::Result {
+fn print_debug_token<Span: fmt::Debug>(
+    f: &mut fmt::Formatter<'_>,
+    tkn: &TokenTree<Span>,
+    level: usize,
+) -> fmt::Result {
     let align = "  ".repeat(level);
 
     match tkn {
         TokenTree::Leaf(leaf) => match leaf {
-            Leaf::Literal(lit) => write!(f, "{align}LITERAL {} {}", lit.text, lit.id.0)?,
+            Leaf::Literal(lit) => write!(f, "{}LITERAL {} {:?}", align, lit.text, lit.span)?,
             Leaf::Punct(punct) => write!(
                 f,
-                "{}PUNCH   {} [{}] {}",
+                "{}PUNCH   {} [{}] {:?}",
                 align,
                 punct.char,
                 if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
-                punct.id.0
+                punct.span
             )?,
-            Leaf::Ident(ident) => write!(f, "{align}IDENT   {} {}", ident.text, ident.id.0)?,
+            Leaf::Ident(ident) => write!(f, "{}IDENT   {} {:?}", align, ident.text, ident.span)?,
         },
         TokenTree::Subtree(subtree) => {
             print_debug_subtree(f, subtree, level)?;
@@ -159,13 +211,13 @@ fn print_debug_token(f: &mut fmt::Formatter<'_>, tkn: &TokenTree, level: usize)
     Ok(())
 }
 
-impl fmt::Debug for Subtree {
+impl<Span: fmt::Debug> fmt::Debug for Subtree<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         print_debug_subtree(f, self, 0)
     }
 }
 
-impl fmt::Display for TokenTree {
+impl<Span> fmt::Display for TokenTree<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             TokenTree::Leaf(it) => fmt::Display::fmt(it, f),
@@ -174,13 +226,13 @@ impl fmt::Display for TokenTree {
     }
 }
 
-impl fmt::Display for Subtree {
+impl<Span> fmt::Display for Subtree<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let (l, r) = match self.delimiter_kind() {
-            Some(DelimiterKind::Parenthesis) => ("(", ")"),
-            Some(DelimiterKind::Brace) => ("{", "}"),
-            Some(DelimiterKind::Bracket) => ("[", "]"),
-            None => ("", ""),
+        let (l, r) = match self.delimiter.kind {
+            DelimiterKind::Parenthesis => ("(", ")"),
+            DelimiterKind::Brace => ("{", "}"),
+            DelimiterKind::Bracket => ("[", "]"),
+            DelimiterKind::Invisible => ("", ""),
         };
         f.write_str(l)?;
         let mut needs_space = false;
@@ -202,7 +254,7 @@ impl fmt::Display for Subtree {
     }
 }
 
-impl fmt::Display for Leaf {
+impl<Span> fmt::Display for Leaf<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Leaf::Ident(it) => fmt::Display::fmt(it, f),
@@ -212,25 +264,25 @@ impl fmt::Display for Leaf {
     }
 }
 
-impl fmt::Display for Ident {
+impl<Span> fmt::Display for Ident<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.text, f)
     }
 }
 
-impl fmt::Display for Literal {
+impl<Span> fmt::Display for Literal<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.text, f)
     }
 }
 
-impl fmt::Display for Punct {
+impl<Span> fmt::Display for Punct<Span> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         fmt::Display::fmt(&self.char, f)
     }
 }
 
-impl Subtree {
+impl<Span> Subtree<Span> {
     /// Count the number of tokens recursively
     pub fn count(&self) -> usize {
         let children_count = self
@@ -244,20 +296,16 @@ impl Subtree {
 
         self.token_trees.len() + children_count
     }
-
-    pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
-        self.delimiter.map(|it| it.kind)
-    }
 }
 
-impl Subtree {
+impl<Span> Subtree<Span> {
     /// A simple line string used for debugging
     pub fn as_debug_string(&self) -> String {
-        let delim = match self.delimiter_kind() {
-            Some(DelimiterKind::Brace) => ("{", "}"),
-            Some(DelimiterKind::Bracket) => ("[", "]"),
-            Some(DelimiterKind::Parenthesis) => ("(", ")"),
-            None => (" ", " "),
+        let delim = match self.delimiter.kind {
+            DelimiterKind::Brace => ("{", "}"),
+            DelimiterKind::Bracket => ("[", "]"),
+            DelimiterKind::Parenthesis => ("(", ")"),
+            DelimiterKind::Invisible => ("$", "$"),
         };
 
         let mut res = String::new();
@@ -275,7 +323,7 @@ impl Subtree {
                         (Leaf::Ident(_), Some(&TokenTree::Leaf(Leaf::Ident(_)))) => {
                             " ".to_string() + &s
                         }
-                        (Leaf::Punct(_), Some(&TokenTree::Leaf(Leaf::Punct(punct)))) => {
+                        (Leaf::Punct(_), Some(TokenTree::Leaf(Leaf::Punct(punct)))) => {
                             if punct.spacing == Spacing::Alone {
                                 " ".to_string() + &s
                             } else {
@@ -298,19 +346,19 @@ impl Subtree {
 
 pub mod buffer;
 
-pub fn pretty(tkns: &[TokenTree]) -> String {
-    fn tokentree_to_text(tkn: &TokenTree) -> String {
+pub fn pretty<Span>(tkns: &[TokenTree<Span>]) -> String {
+    fn tokentree_to_text<Span>(tkn: &TokenTree<Span>) -> String {
         match tkn {
             TokenTree::Leaf(Leaf::Ident(ident)) => ident.text.clone().into(),
             TokenTree::Leaf(Leaf::Literal(literal)) => literal.text.clone().into(),
             TokenTree::Leaf(Leaf::Punct(punct)) => format!("{}", punct.char),
             TokenTree::Subtree(subtree) => {
                 let content = pretty(&subtree.token_trees);
-                let (open, close) = match subtree.delimiter.map(|it| it.kind) {
-                    None => ("", ""),
-                    Some(DelimiterKind::Brace) => ("{", "}"),
-                    Some(DelimiterKind::Parenthesis) => ("(", ")"),
-                    Some(DelimiterKind::Bracket) => ("[", "]"),
+                let (open, close) = match subtree.delimiter.kind {
+                    DelimiterKind::Brace => ("{", "}"),
+                    DelimiterKind::Bracket => ("[", "]"),
+                    DelimiterKind::Parenthesis => ("(", ")"),
+                    DelimiterKind::Invisible => ("", ""),
                 };
                 format!("{open}{content}{close}")
             }
diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
index 061f3c157a8..e06b98d8118 100644
--- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml
@@ -2,9 +2,11 @@
 name = "vfs-notify"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -16,5 +18,5 @@ walkdir = "2.3.2"
 crossbeam-channel = "0.5.5"
 notify = "5.0"
 
-vfs = { path = "../vfs", version = "0.0.0" }
-paths = { path = "../paths", version = "0.0.0" }
+vfs.workspace = true
+paths.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
index e55bf6f293c..802a300060f 100644
--- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml
@@ -2,9 +2,11 @@
 name = "vfs"
 version = "0.0.0"
 description = "TBD"
-license = "MIT OR Apache-2.0"
-edition = "2021"
-rust-version = "1.65"
+
+authors.workspace = true
+edition.workspace = true
+license.workspace = true
+rust-version.workspace = true
 
 [lib]
 doctest = false
@@ -14,5 +16,5 @@ rustc-hash = "1.1.0"
 fst = "0.4.7"
 indexmap = "1.9.1"
 
-paths = { path = "../paths", version = "0.0.0" }
-stdx = { path = "../stdx", version = "0.0.0" }
+paths.workspace = true
+stdx.workspace = true
diff --git a/src/tools/rust-analyzer/crates/vfs/src/lib.rs b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
index c61f30387b7..14972d29074 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/lib.rs
@@ -75,6 +75,7 @@ pub struct Vfs {
 }
 
 /// Changed file in the [`Vfs`].
+#[derive(Debug)]
 pub struct ChangedFile {
     /// Id of the changed file
     pub file_id: FileId,
@@ -161,9 +162,9 @@ impl Vfs {
         let file_id = self.alloc_file_id(path);
         let change_kind = match (&self.get(file_id), &contents) {
             (None, None) => return false,
+            (Some(old), Some(new)) if old == new => return false,
             (None, Some(_)) => ChangeKind::Create,
             (Some(_), None) => ChangeKind::Delete,
-            (Some(old), Some(new)) if old == new => return false,
             (Some(_), Some(_)) => ChangeKind::Modify,
         };
 
diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
index b23c9f1966d..38501a8ba5a 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs
@@ -1,7 +1,7 @@
 //! Abstract-ish representation of paths for VFS.
 use std::fmt;
 
-use paths::{AbsPath, AbsPathBuf};
+use paths::{AbsPath, AbsPathBuf, RelPath};
 
 /// Path in [`Vfs`].
 ///
@@ -84,6 +84,14 @@ impl VfsPath {
         }
     }
 
+    pub fn strip_prefix(&self, other: &VfsPath) -> Option<&RelPath> {
+        match (&self.0, &other.0) {
+            (VfsPathRepr::PathBuf(lhs), VfsPathRepr::PathBuf(rhs)) => lhs.strip_prefix(rhs),
+            (VfsPathRepr::VirtualPath(lhs), VfsPathRepr::VirtualPath(rhs)) => lhs.strip_prefix(rhs),
+            (VfsPathRepr::PathBuf(_) | VfsPathRepr::VirtualPath(_), _) => None,
+        }
+    }
+
     /// Returns the `VfsPath` without its final component, if there is one.
     ///
     /// Returns [`None`] if the path is a root or prefix.
@@ -320,6 +328,13 @@ impl VirtualPath {
         self.0.starts_with(&other.0)
     }
 
+    fn strip_prefix(&self, base: &VirtualPath) -> Option<&RelPath> {
+        <_ as AsRef<std::path::Path>>::as_ref(&self.0)
+            .strip_prefix(&base.0)
+            .ok()
+            .map(RelPath::new_unchecked)
+    }
+
     /// Remove the last component of `self`.
     ///
     /// This will find the last `'/'` in `self`, and remove everything after it,
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index a4780af1a26..a794e866181 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp_ext.rs hash: 45bd7985265725c5
+lsp_ext.rs hash: ec29403e67dfd15b
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
index b33a2e79525..50e3670a7a8 100644
--- a/src/tools/rust-analyzer/docs/user/generated_config.adoc
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -99,6 +99,14 @@ Unsetting this disables sysroot loading.
 
 This option does not take effect until rust-analyzer is restarted.
 --
+[[rust-analyzer.cargo.sysrootSrc]]rust-analyzer.cargo.sysrootSrc (default: `null`)::
++
+--
+Relative path to the sysroot library sources. If left unset, this will default to
+`{cargo.sysroot}/lib/rustlib/src/rust/library`.
+
+This option does not take effect until rust-analyzer is restarted.
+--
 [[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
 +
 --
@@ -219,6 +227,11 @@ with `self` prefixed to them when inside a method.
 --
 Whether to add parenthesis and argument snippets when completing function.
 --
+[[rust-analyzer.completion.limit]]rust-analyzer.completion.limit (default: `null`)::
++
+--
+Maximum number of completions to return. If `None`, the limit is infinite.
+--
 [[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
 +
 --
@@ -486,11 +499,6 @@ Whether to show inlay type hints for elided lifetimes in function signatures.
 --
 Whether to prefer using parameter names as the name for elided lifetime hints if possible.
 --
-[[rust-analyzer.inlayHints.locationLinks]]rust-analyzer.inlayHints.locationLinks (default: `true`)::
-+
---
-Whether to use location links for parts of type mentioned in inlay hints.
---
 [[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
 +
 --
diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc
index 1a4c70575b0..cb96feeb5e5 100644
--- a/src/tools/rust-analyzer/docs/user/manual.adoc
+++ b/src/tools/rust-analyzer/docs/user/manual.adoc
@@ -232,18 +232,66 @@ The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
 $ brew install rust-analyzer
 ----
 
-=== Emacs
+=== VS Code or VSCodium in Flatpak
+
+Setting up `rust-analyzer` with a Flatpak version of Code is not trivial because of the Flatpak sandbox.
+While the sandbox can be disabled for some directories, `/usr/bin` will always be mounted under `/run/host/usr/bin`.
+This prevents access to the system's C compiler, a system-wide installation of Rust, or any other libraries you might want to link to.
+Some compilers and libraries can be acquired as Flatpak SDKs, such as `org.freedesktop.Sdk.Extension.rust-stable` or `org.freedesktop.Sdk.Extension.llvm15`.
+
+If you use a Flatpak SDK for Rust, there should be no extra steps necessary.
 
-Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
+If you want to use Flatpak in combination with `rustup`, the following steps might help:
+
+ - both Rust and `rustup` have to be installed using https://rustup.rs. Distro packages _will not_ work.
+ - you need to launch Code, open a terminal and run `echo $PATH`
+ - using https://flathub.org/apps/details/com.github.tchx84.Flatseal[Flatseal], you must add an environment variable called `PATH`.
+   Set its value to the output from above, appending `:~/.cargo/bin`, where `~` is the path to your home directory.
+   You must replace `~`, as it won't be expanded otherwise.
+ - while Flatseal is open, you must enable access to "All user files"
+
+A C compiler should already be available via `org.freedesktop.Sdk`.
+Any other tools or libraries you will need to acquire  from Flatpak.
+
+=== Emacs
 
 Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 
-Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
+To use `rust-analyzer`, you need to install and enable one of the two popular two popular LSP client implementations for Emacs, https://github.com/joaotavora/eglot[Eglot] or https://github.com/emacs-lsp/lsp-mode[LSP Mode]. Both enable `rust-analyzer` by default in rust buffers if it is available.
+
+==== Eglot
+
+Eglot is the more minimalistic and lightweight LSP client for Emacs, integrates well with existing Emacs functionality and will be built into Emacs starting from release 29.
+
+After installing Eglot, e.g. via `M-x package-install` (not needed from Emacs 29), you can enable it via the `M-x eglot` command or load it automatically in `rust-mode` via
+
+[source,emacs-lisp]
+----
+(add-hook 'rust-mode-hook 'eglot-ensure)
+----
+
+For more detailed instructions and options see the https://joaotavora.github.io/eglot[Eglot manual] (also available from Emacs via `M-x info`) and the
+https://github.com/joaotavora/eglot/blob/master/README.md[Eglot readme].
+
+Eglot does not support the rust-analyzer extensions to the language-server protocol and does not aim to do so in the future. The https://github.com/nemethf/eglot-x#rust-analyzer-extensions[eglot-x] package adds experimental support for those LSP extensions.
+
+==== LSP Mode
+
+LSP-mode is the original LSP-client for emacs. Compared to Eglot it has a larger codebase and supports more features, like LSP protocol extensions.
+With extension packages like https://github.com/emacs-lsp/lsp-mode[LSP UI] it offers a lot of visual eyecandy.
+Further it integrates well with https://github.com/emacs-lsp/dap-mode[DAP mode] for support of the Debug Adapter Protocol.
+
+You can install LSP-mode via `M-x package-install` and then run it via the `M-x lsp` command or load it automatically in rust buffers with
+
+[source,emacs-lisp]
+----
+(add-hook 'rust-mode-hook 'lsp-deferred)
+----
+
+For more information on how to set up LSP mode and its extension package see the instructions in the https://emacs-lsp.github.io/lsp-mode/page/installation[LSP mode manual].
+Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-analyzer section] for `rust-analyzer` specific options and commands, which you can optionally bind to keys.
 
-1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
-2. Set `lsp-rust-server` to `'rust-analyzer`.
-3. Run `lsp` in a Rust buffer.
-4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
+Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages.
 
 === Vim/NeoVim
 
@@ -487,6 +535,47 @@ https://docs.helix-editor.com/[Helix] supports LSP by default.
 However, it won't install `rust-analyzer` automatically.
 You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 
+=== Visual Studio 2022
+
+There are multiple rust-analyzer extensions for Visual Studio 2022 on Windows:
+
+==== rust-analyzer.vs
+
+(License: Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International)
+
+https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[Visual Studio Marketplace]
+
+https://github.com/kitamstudios/rust-analyzer/[GitHub]
+
+Support for Rust development in the Visual Studio IDE is enabled by the link:https://marketplace.visualstudio.com/items?itemName=kitamstudios.RustAnalyzer[rust-analyzer] package. Either click on the download link or install from IDE's extension manager.
+For now, link:https://visualstudio.microsoft.com/downloads/[Visual Studio 2022] is required. All editions are supported, viz. Community, Professional & Enterprise.
+The package aims to provide zero-friction installation and therefore comes loaded with most things required, including the rust-analyzer binary. If anything it needs is missing, appropriate errors / warnings will guide the user. E.g. cargo.exe needs to be on the PATH, and the package will tell you as much.
+This package is under rapid active development, so if you encounter any issues please file them at link:https://github.com/kitamstudios/rust-analyzer/[rust-analyzer.vs].
+
+==== VS_RustAnalyzer
+
+(License: GPL)
+
+https://marketplace.visualstudio.com/items?itemName=cchharris.vsrustanalyzer[Visual Studio Marketplace]
+
+https://github.com/cchharris/VS-RustAnalyzer[GitHub]
+
+==== SourceGear Rust
+
+(License: closed source)
+
+https://marketplace.visualstudio.com/items?itemName=SourceGear.SourceGearRust[Visual Studio Marketplace]
+
+https://github.com/sourcegear/rust-vs-extension[GitHub (docs, issues, discussions)]
+
+* Free (no-cost)
+* Supports all editions of Visual Studio 2022 on Windows: Community, Professional, or Enterprise
+
+=== Lapce
+
+https://lapce.dev/[Lapce] has a Rust plugin which you can install directly.
+Unfortunately, it downloads an old version of `rust-analyzer`, but you can set the server path under Settings.
+
 === Crates
 
 There is a package named `ra_ap_rust-analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically.
@@ -684,14 +773,18 @@ See https://github.com/rust-analyzer/rust-project.json-example for a small examp
 
 You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
 
-Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
+Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client.
+To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `rust-analyzer.check.overrideCommand` configuration.
+As an example, the following configuration explicitly sets `cargo check` as the `check` command.
 
 [source,json]
 ----
-{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
+{ "rust-analyzer.check.overrideCommand": ["cargo", "check", "--message-format=json"] }
 ----
 
-The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
+`check.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume.
+The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format.
+See the <<Configuration>> section for more information.
 
 == Security
 
@@ -732,6 +825,10 @@ include::./generated_assists.adoc[]
 While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
 Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
 
+=== Clippy
+
+To run `cargo clippy` instead of `cargo check`, you can set `"rust-analyzer.check.command": "clippy"`.
+
 include::./generated_diagnostic.adoc[]
 
 == Editor Features
@@ -864,7 +961,7 @@ Also note that a full runnable name is something like *run bin_or_example_name*,
 
 Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
 
-To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
+To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `"rust-analyzer.checkOnSave": false` in preferences.
 
 For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
 
diff --git a/src/tools/rust-analyzer/editors/code/language-configuration.json b/src/tools/rust-analyzer/editors/code/language-configuration.json
index b1ee0843e3e..51f0e65f4fd 100644
--- a/src/tools/rust-analyzer/editors/code/language-configuration.json
+++ b/src/tools/rust-analyzer/editors/code/language-configuration.json
@@ -35,8 +35,8 @@
     },
     "folding": {
         "markers": {
-            "start": "^\\s*//\\s*#?region\\b",
-            "end": "^\\s*//\\s*#?endregion\\b"
+            "start": "^\\s*// region:\\b",
+            "end": "^\\s*// endregion\\b"
         }
     }
 }
diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json
index 4844837a06f..ef9be380ed1 100644
--- a/src/tools/rust-analyzer/editors/code/package-lock.json
+++ b/src/tools/rust-analyzer/editors/code/package-lock.json
@@ -34,16 +34,32 @@
                 "vscode": "^1.66.0"
             }
         },
+        "node_modules/@esbuild/linux-loong64": {
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz",
+            "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==",
+            "cpu": [
+                "loong64"
+            ],
+            "dev": true,
+            "optional": true,
+            "os": [
+                "linux"
+            ],
+            "engines": {
+                "node": ">=12"
+            }
+        },
         "node_modules/@eslint/eslintrc": {
-            "version": "1.3.0",
-            "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz",
-            "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==",
+            "version": "1.4.1",
+            "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz",
+            "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==",
             "dev": true,
             "dependencies": {
                 "ajv": "^6.12.4",
                 "debug": "^4.3.2",
-                "espree": "^9.3.2",
-                "globals": "^13.15.0",
+                "espree": "^9.4.0",
+                "globals": "^13.19.0",
                 "ignore": "^5.2.0",
                 "import-fresh": "^3.2.1",
                 "js-yaml": "^4.1.0",
@@ -52,6 +68,9 @@
             },
             "engines": {
                 "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+            },
+            "funding": {
+                "url": "https://opencollective.com/eslint"
             }
         },
         "node_modules/@hpcc-js/wasm": {
@@ -66,19 +85,32 @@
             }
         },
         "node_modules/@humanwhocodes/config-array": {
-            "version": "0.9.5",
-            "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz",
-            "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==",
+            "version": "0.11.8",
+            "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
+            "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==",
             "dev": true,
             "dependencies": {
                 "@humanwhocodes/object-schema": "^1.2.1",
                 "debug": "^4.1.1",
-                "minimatch": "^3.0.4"
+                "minimatch": "^3.0.5"
             },
             "engines": {
                 "node": ">=10.10.0"
             }
         },
+        "node_modules/@humanwhocodes/module-importer": {
+            "version": "1.0.1",
+            "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
+            "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
+            "dev": true,
+            "engines": {
+                "node": ">=12.22"
+            },
+            "funding": {
+                "type": "github",
+                "url": "https://github.com/sponsors/nzakas"
+            }
+        },
         "node_modules/@humanwhocodes/object-schema": {
             "version": "1.2.1",
             "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
@@ -136,9 +168,15 @@
             "dev": true
         },
         "node_modules/@types/node": {
-            "version": "16.11.43",
-            "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.43.tgz",
-            "integrity": "sha512-GqWykok+3uocgfAJM8imbozrqLnPyTrpFlrryURQlw1EesPUCx5XxTiucWDSFF9/NUEXDuD4bnvHm8xfVGWTpQ==",
+            "version": "16.11.68",
+            "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.68.tgz",
+            "integrity": "sha512-JkRpuVz3xCNCWaeQ5EHLR/6woMbHZz/jZ7Kmc63AkU+1HxnoUugzSWMck7dsR4DvNYX8jp9wTi9K7WvnxOIQZQ==",
+            "dev": true
+        },
+        "node_modules/@types/semver": {
+            "version": "7.3.13",
+            "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz",
+            "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==",
             "dev": true
         },
         "node_modules/@types/vscode": {
@@ -148,17 +186,17 @@
             "dev": true
         },
         "node_modules/@typescript-eslint/eslint-plugin": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.30.5.tgz",
-            "integrity": "sha512-lftkqRoBvc28VFXEoRgyZuztyVUQ04JvUnATSPtIRFAccbXTWL6DEtXGYMcbg998kXw1NLUJm7rTQ9eUt+q6Ig==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.48.2.tgz",
+            "integrity": "sha512-sR0Gja9Ky1teIq4qJOl0nC+Tk64/uYdX+mi+5iB//MH8gwyx8e3SOyhEzeLZEFEEfCaLf8KJq+Bd/6je1t+CAg==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/type-utils": "5.30.5",
-                "@typescript-eslint/utils": "5.30.5",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/type-utils": "5.48.2",
+                "@typescript-eslint/utils": "5.48.2",
                 "debug": "^4.3.4",
-                "functional-red-black-tree": "^1.0.1",
                 "ignore": "^5.2.0",
+                "natural-compare-lite": "^1.4.0",
                 "regexpp": "^3.2.0",
                 "semver": "^7.3.7",
                 "tsutils": "^3.21.0"
@@ -181,14 +219,14 @@
             }
         },
         "node_modules/@typescript-eslint/parser": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.30.5.tgz",
-            "integrity": "sha512-zj251pcPXI8GO9NDKWWmygP6+UjwWmrdf9qMW/L/uQJBM/0XbU2inxe5io/234y/RCvwpKEYjZ6c1YrXERkK4Q==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.48.2.tgz",
+            "integrity": "sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/typescript-estree": "5.30.5",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/typescript-estree": "5.48.2",
                 "debug": "^4.3.4"
             },
             "engines": {
@@ -208,13 +246,13 @@
             }
         },
         "node_modules/@typescript-eslint/scope-manager": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.30.5.tgz",
-            "integrity": "sha512-NJ6F+YHHFT/30isRe2UTmIGGAiXKckCyMnIV58cE3JkHmaD6e5zyEYm5hBDv0Wbin+IC0T1FWJpD3YqHUG/Ydg==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.48.2.tgz",
+            "integrity": "sha512-zEUFfonQid5KRDKoI3O+uP1GnrFd4tIHlvs+sTJXiWuypUWMuDaottkJuR612wQfOkjYbsaskSIURV9xo4f+Fw==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/visitor-keys": "5.30.5"
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/visitor-keys": "5.48.2"
             },
             "engines": {
                 "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
@@ -225,12 +263,13 @@
             }
         },
         "node_modules/@typescript-eslint/type-utils": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.30.5.tgz",
-            "integrity": "sha512-k9+ejlv1GgwN1nN7XjVtyCgE0BTzhzT1YsQF0rv4Vfj2U9xnslBgMYYvcEYAFVdvhuEscELJsB7lDkN7WusErw==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.48.2.tgz",
+            "integrity": "sha512-QVWx7J5sPMRiOMJp5dYshPxABRoZV1xbRirqSk8yuIIsu0nvMTZesKErEA3Oix1k+uvsk8Cs8TGJ6kQ0ndAcew==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/utils": "5.30.5",
+                "@typescript-eslint/typescript-estree": "5.48.2",
+                "@typescript-eslint/utils": "5.48.2",
                 "debug": "^4.3.4",
                 "tsutils": "^3.21.0"
             },
@@ -251,9 +290,9 @@
             }
         },
         "node_modules/@typescript-eslint/types": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.30.5.tgz",
-            "integrity": "sha512-kZ80w/M2AvsbRvOr3PjaNh6qEW1LFqs2pLdo2s5R38B2HYXG8Z0PP48/4+j1QHJFL3ssHIbJ4odPRS8PlHrFfw==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.48.2.tgz",
+            "integrity": "sha512-hE7dA77xxu7ByBc6KCzikgfRyBCTst6dZQpwaTy25iMYOnbNljDT4hjhrGEJJ0QoMjrfqrx+j1l1B9/LtKeuqA==",
             "dev": true,
             "engines": {
                 "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
@@ -264,13 +303,13 @@
             }
         },
         "node_modules/@typescript-eslint/typescript-estree": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.30.5.tgz",
-            "integrity": "sha512-qGTc7QZC801kbYjAr4AgdOfnokpwStqyhSbiQvqGBLixniAKyH+ib2qXIVo4P9NgGzwyfD9I0nlJN7D91E1VpQ==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.48.2.tgz",
+            "integrity": "sha512-bibvD3z6ilnoVxUBFEgkO0k0aFvUc4Cttt0dAreEr+nrAHhWzkO83PEVVuieK3DqcgL6VAK5dkzK8XUVja5Zcg==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/visitor-keys": "5.30.5",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/visitor-keys": "5.48.2",
                 "debug": "^4.3.4",
                 "globby": "^11.1.0",
                 "is-glob": "^4.0.3",
@@ -291,17 +330,19 @@
             }
         },
         "node_modules/@typescript-eslint/utils": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.30.5.tgz",
-            "integrity": "sha512-o4SSUH9IkuA7AYIfAvatldovurqTAHrfzPApOZvdUq01hHojZojCFXx06D/aFpKCgWbMPRdJBWAC3sWp3itwTA==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.48.2.tgz",
+            "integrity": "sha512-2h18c0d7jgkw6tdKTlNaM7wyopbLRBiit8oAxoP89YnuBOzCZ8g8aBCaCqq7h208qUTroL7Whgzam7UY3HVLow==",
             "dev": true,
             "dependencies": {
                 "@types/json-schema": "^7.0.9",
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/typescript-estree": "5.30.5",
+                "@types/semver": "^7.3.12",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/typescript-estree": "5.48.2",
                 "eslint-scope": "^5.1.1",
-                "eslint-utils": "^3.0.0"
+                "eslint-utils": "^3.0.0",
+                "semver": "^7.3.7"
             },
             "engines": {
                 "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
@@ -315,12 +356,12 @@
             }
         },
         "node_modules/@typescript-eslint/visitor-keys": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.30.5.tgz",
-            "integrity": "sha512-D+xtGo9HUMELzWIUqcQc0p2PO4NyvTrgIOK/VnSH083+8sq0tiLozNRKuLarwHYGRuA6TVBQSuuLwJUDWd3aaA==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.48.2.tgz",
+            "integrity": "sha512-z9njZLSkwmjFWUelGEwEbdf4NwKvfHxvGC0OcGN1Hp/XNDIcJ7D5DpPNPv6x6/mFvc1tQHsaWmpD/a4gOvvCJQ==",
             "dev": true,
             "dependencies": {
-                "@typescript-eslint/types": "5.30.5",
+                "@typescript-eslint/types": "5.48.2",
                 "eslint-visitor-keys": "^3.3.0"
             },
             "engines": {
@@ -332,9 +373,9 @@
             }
         },
         "node_modules/@vscode/test-electron": {
-            "version": "2.1.5",
-            "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.1.5.tgz",
-            "integrity": "sha512-O/ioqFpV+RvKbRykX2ItYPnbcZ4Hk5V0rY4uhQjQTLhGL9WZUvS7exzuYQCCI+ilSqJpctvxq2llTfGXf9UnnA==",
+            "version": "2.2.2",
+            "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.2.2.tgz",
+            "integrity": "sha512-s5d2VtMySvff0UgqkJ0BMCr1es+qREE194EAodGIefq518W53ifvv69e80l9e2MrYJEqUUKwukE/w3H9o15YEw==",
             "dev": true,
             "dependencies": {
                 "http-proxy-agent": "^4.0.1",
@@ -343,13 +384,13 @@
                 "unzipper": "^0.10.11"
             },
             "engines": {
-                "node": ">=8.9.3"
+                "node": ">=16"
             }
         },
         "node_modules/acorn": {
-            "version": "8.7.1",
-            "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
-            "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
+            "version": "8.8.1",
+            "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz",
+            "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==",
             "dev": true,
             "bin": {
                 "acorn": "bin/acorn"
@@ -812,9 +853,9 @@
             }
         },
         "node_modules/d3": {
-            "version": "7.6.1",
-            "resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
-            "integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
+            "version": "7.8.2",
+            "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.2.tgz",
+            "integrity": "sha512-WXty7qOGSHb7HR7CfOzwN1Gw04MUOzN8qh9ZUsvwycIMb4DYMpY9xczZ6jUorGtO6bR9BPMPaueIKwiDxu9uiQ==",
             "dependencies": {
                 "d3-array": "3",
                 "d3-axis": "3",
@@ -852,9 +893,9 @@
             }
         },
         "node_modules/d3-array": {
-            "version": "3.2.0",
-            "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
-            "integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
+            "version": "3.2.2",
+            "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.2.tgz",
+            "integrity": "sha512-yEEyEAbDrF8C6Ob2myOBLjwBLck1Z89jMGFee0oPsn95GqjerpaOA4ch+vc2l0FNFFwMD5N7OCSEN5eAlsUbgQ==",
             "dependencies": {
                 "internmap": "1 - 2"
             },
@@ -905,9 +946,9 @@
             }
         },
         "node_modules/d3-contour": {
-            "version": "4.0.0",
-            "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
-            "integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
+            "version": "4.0.2",
+            "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz",
+            "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==",
             "dependencies": {
                 "d3-array": "^3.2.0"
             },
@@ -1011,9 +1052,9 @@
             }
         },
         "node_modules/d3-geo": {
-            "version": "3.0.1",
-            "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.0.1.tgz",
-            "integrity": "sha512-Wt23xBych5tSy9IYAM1FR2rWIBFWa52B/oF/GYe5zbdHrg08FU8+BuI6X4PvTwPDdqdAdq04fuWJpELtsaEjeA==",
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.0.tgz",
+            "integrity": "sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA==",
             "dependencies": {
                 "d3-array": "2.5.0 - 3"
             },
@@ -1129,20 +1170,20 @@
             }
         },
         "node_modules/d3-shape": {
-            "version": "3.1.0",
-            "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.1.0.tgz",
-            "integrity": "sha512-tGDh1Muf8kWjEDT/LswZJ8WF85yDZLvVJpYU9Nq+8+yW1Z5enxrmXOhTArlkaElU+CTn0OTVNli+/i+HP45QEQ==",
+            "version": "3.2.0",
+            "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
+            "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
             "dependencies": {
-                "d3-path": "1 - 3"
+                "d3-path": "^3.1.0"
             },
             "engines": {
                 "node": ">=12"
             }
         },
         "node_modules/d3-time": {
-            "version": "3.0.0",
-            "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.0.0.tgz",
-            "integrity": "sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ==",
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
+            "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
             "dependencies": {
                 "d3-array": "2 - 3"
             },
@@ -1369,9 +1410,9 @@
             }
         },
         "node_modules/entities": {
-            "version": "4.3.1",
-            "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz",
-            "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==",
+            "version": "4.4.0",
+            "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz",
+            "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==",
             "dev": true,
             "engines": {
                 "node": ">=0.12"
@@ -1381,9 +1422,9 @@
             }
         },
         "node_modules/esbuild": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz",
-            "integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz",
+            "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==",
             "dev": true,
             "hasInstallScript": true,
             "bin": {
@@ -1393,32 +1434,33 @@
                 "node": ">=12"
             },
             "optionalDependencies": {
-                "esbuild-android-64": "0.14.48",
-                "esbuild-android-arm64": "0.14.48",
-                "esbuild-darwin-64": "0.14.48",
-                "esbuild-darwin-arm64": "0.14.48",
-                "esbuild-freebsd-64": "0.14.48",
-                "esbuild-freebsd-arm64": "0.14.48",
-                "esbuild-linux-32": "0.14.48",
-                "esbuild-linux-64": "0.14.48",
-                "esbuild-linux-arm": "0.14.48",
-                "esbuild-linux-arm64": "0.14.48",
-                "esbuild-linux-mips64le": "0.14.48",
-                "esbuild-linux-ppc64le": "0.14.48",
-                "esbuild-linux-riscv64": "0.14.48",
-                "esbuild-linux-s390x": "0.14.48",
-                "esbuild-netbsd-64": "0.14.48",
-                "esbuild-openbsd-64": "0.14.48",
-                "esbuild-sunos-64": "0.14.48",
-                "esbuild-windows-32": "0.14.48",
-                "esbuild-windows-64": "0.14.48",
-                "esbuild-windows-arm64": "0.14.48"
+                "@esbuild/linux-loong64": "0.14.54",
+                "esbuild-android-64": "0.14.54",
+                "esbuild-android-arm64": "0.14.54",
+                "esbuild-darwin-64": "0.14.54",
+                "esbuild-darwin-arm64": "0.14.54",
+                "esbuild-freebsd-64": "0.14.54",
+                "esbuild-freebsd-arm64": "0.14.54",
+                "esbuild-linux-32": "0.14.54",
+                "esbuild-linux-64": "0.14.54",
+                "esbuild-linux-arm": "0.14.54",
+                "esbuild-linux-arm64": "0.14.54",
+                "esbuild-linux-mips64le": "0.14.54",
+                "esbuild-linux-ppc64le": "0.14.54",
+                "esbuild-linux-riscv64": "0.14.54",
+                "esbuild-linux-s390x": "0.14.54",
+                "esbuild-netbsd-64": "0.14.54",
+                "esbuild-openbsd-64": "0.14.54",
+                "esbuild-sunos-64": "0.14.54",
+                "esbuild-windows-32": "0.14.54",
+                "esbuild-windows-64": "0.14.54",
+                "esbuild-windows-arm64": "0.14.54"
             }
         },
         "node_modules/esbuild-android-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz",
-            "integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.54.tgz",
+            "integrity": "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==",
             "cpu": [
                 "x64"
             ],
@@ -1432,9 +1474,9 @@
             }
         },
         "node_modules/esbuild-android-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz",
-            "integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.54.tgz",
+            "integrity": "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==",
             "cpu": [
                 "arm64"
             ],
@@ -1448,9 +1490,9 @@
             }
         },
         "node_modules/esbuild-darwin-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz",
-            "integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.54.tgz",
+            "integrity": "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==",
             "cpu": [
                 "x64"
             ],
@@ -1464,9 +1506,9 @@
             }
         },
         "node_modules/esbuild-darwin-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz",
-            "integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.54.tgz",
+            "integrity": "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==",
             "cpu": [
                 "arm64"
             ],
@@ -1480,9 +1522,9 @@
             }
         },
         "node_modules/esbuild-freebsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz",
-            "integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.54.tgz",
+            "integrity": "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==",
             "cpu": [
                 "x64"
             ],
@@ -1496,9 +1538,9 @@
             }
         },
         "node_modules/esbuild-freebsd-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz",
-            "integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.54.tgz",
+            "integrity": "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==",
             "cpu": [
                 "arm64"
             ],
@@ -1512,9 +1554,9 @@
             }
         },
         "node_modules/esbuild-linux-32": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz",
-            "integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.54.tgz",
+            "integrity": "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==",
             "cpu": [
                 "ia32"
             ],
@@ -1528,9 +1570,9 @@
             }
         },
         "node_modules/esbuild-linux-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz",
-            "integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.54.tgz",
+            "integrity": "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==",
             "cpu": [
                 "x64"
             ],
@@ -1544,9 +1586,9 @@
             }
         },
         "node_modules/esbuild-linux-arm": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz",
-            "integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.54.tgz",
+            "integrity": "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==",
             "cpu": [
                 "arm"
             ],
@@ -1560,9 +1602,9 @@
             }
         },
         "node_modules/esbuild-linux-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz",
-            "integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.54.tgz",
+            "integrity": "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==",
             "cpu": [
                 "arm64"
             ],
@@ -1576,9 +1618,9 @@
             }
         },
         "node_modules/esbuild-linux-mips64le": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz",
-            "integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.54.tgz",
+            "integrity": "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==",
             "cpu": [
                 "mips64el"
             ],
@@ -1592,9 +1634,9 @@
             }
         },
         "node_modules/esbuild-linux-ppc64le": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz",
-            "integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.54.tgz",
+            "integrity": "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==",
             "cpu": [
                 "ppc64"
             ],
@@ -1608,9 +1650,9 @@
             }
         },
         "node_modules/esbuild-linux-riscv64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz",
-            "integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.54.tgz",
+            "integrity": "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==",
             "cpu": [
                 "riscv64"
             ],
@@ -1624,9 +1666,9 @@
             }
         },
         "node_modules/esbuild-linux-s390x": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz",
-            "integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.54.tgz",
+            "integrity": "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==",
             "cpu": [
                 "s390x"
             ],
@@ -1640,9 +1682,9 @@
             }
         },
         "node_modules/esbuild-netbsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz",
-            "integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.54.tgz",
+            "integrity": "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==",
             "cpu": [
                 "x64"
             ],
@@ -1656,9 +1698,9 @@
             }
         },
         "node_modules/esbuild-openbsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz",
-            "integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.54.tgz",
+            "integrity": "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==",
             "cpu": [
                 "x64"
             ],
@@ -1672,9 +1714,9 @@
             }
         },
         "node_modules/esbuild-sunos-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz",
-            "integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.54.tgz",
+            "integrity": "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==",
             "cpu": [
                 "x64"
             ],
@@ -1688,9 +1730,9 @@
             }
         },
         "node_modules/esbuild-windows-32": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz",
-            "integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.54.tgz",
+            "integrity": "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==",
             "cpu": [
                 "ia32"
             ],
@@ -1704,9 +1746,9 @@
             }
         },
         "node_modules/esbuild-windows-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz",
-            "integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.54.tgz",
+            "integrity": "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==",
             "cpu": [
                 "x64"
             ],
@@ -1720,9 +1762,9 @@
             }
         },
         "node_modules/esbuild-windows-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz",
-            "integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.54.tgz",
+            "integrity": "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==",
             "cpu": [
                 "arm64"
             ],
@@ -1756,13 +1798,15 @@
             }
         },
         "node_modules/eslint": {
-            "version": "8.19.0",
-            "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.19.0.tgz",
-            "integrity": "sha512-SXOPj3x9VKvPe81TjjUJCYlV4oJjQw68Uek+AM0X4p+33dj2HY5bpTZOgnQHcG2eAm1mtCU9uNMnJi7exU/kYw==",
+            "version": "8.32.0",
+            "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.32.0.tgz",
+            "integrity": "sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ==",
             "dev": true,
             "dependencies": {
-                "@eslint/eslintrc": "^1.3.0",
-                "@humanwhocodes/config-array": "^0.9.2",
+                "@eslint/eslintrc": "^1.4.1",
+                "@humanwhocodes/config-array": "^0.11.8",
+                "@humanwhocodes/module-importer": "^1.0.1",
+                "@nodelib/fs.walk": "^1.2.8",
                 "ajv": "^6.10.0",
                 "chalk": "^4.0.0",
                 "cross-spawn": "^7.0.2",
@@ -1772,18 +1816,21 @@
                 "eslint-scope": "^7.1.1",
                 "eslint-utils": "^3.0.0",
                 "eslint-visitor-keys": "^3.3.0",
-                "espree": "^9.3.2",
+                "espree": "^9.4.0",
                 "esquery": "^1.4.0",
                 "esutils": "^2.0.2",
                 "fast-deep-equal": "^3.1.3",
                 "file-entry-cache": "^6.0.1",
-                "functional-red-black-tree": "^1.0.1",
-                "glob-parent": "^6.0.1",
-                "globals": "^13.15.0",
+                "find-up": "^5.0.0",
+                "glob-parent": "^6.0.2",
+                "globals": "^13.19.0",
+                "grapheme-splitter": "^1.0.4",
                 "ignore": "^5.2.0",
                 "import-fresh": "^3.0.0",
                 "imurmurhash": "^0.1.4",
                 "is-glob": "^4.0.0",
+                "is-path-inside": "^3.0.3",
+                "js-sdsl": "^4.1.4",
                 "js-yaml": "^4.1.0",
                 "json-stable-stringify-without-jsonify": "^1.0.1",
                 "levn": "^0.4.1",
@@ -1794,8 +1841,7 @@
                 "regexpp": "^3.2.0",
                 "strip-ansi": "^6.0.1",
                 "strip-json-comments": "^3.1.0",
-                "text-table": "^0.2.0",
-                "v8-compile-cache": "^2.0.3"
+                "text-table": "^0.2.0"
             },
             "bin": {
                 "eslint": "bin/eslint.js"
@@ -1808,9 +1854,9 @@
             }
         },
         "node_modules/eslint-config-prettier": {
-            "version": "8.5.0",
-            "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz",
-            "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==",
+            "version": "8.6.0",
+            "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz",
+            "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==",
             "dev": true,
             "bin": {
                 "eslint-config-prettier": "bin/cli.js"
@@ -1891,17 +1937,20 @@
             }
         },
         "node_modules/espree": {
-            "version": "9.3.2",
-            "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz",
-            "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==",
+            "version": "9.4.1",
+            "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz",
+            "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==",
             "dev": true,
             "dependencies": {
-                "acorn": "^8.7.1",
+                "acorn": "^8.8.0",
                 "acorn-jsx": "^5.3.2",
                 "eslint-visitor-keys": "^3.3.0"
             },
             "engines": {
                 "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
+            },
+            "funding": {
+                "url": "https://opencollective.com/eslint"
             }
         },
         "node_modules/esquery": {
@@ -1980,9 +2029,9 @@
             "dev": true
         },
         "node_modules/fast-glob": {
-            "version": "3.2.11",
-            "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz",
-            "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==",
+            "version": "3.2.12",
+            "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz",
+            "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==",
             "dev": true,
             "dependencies": {
                 "@nodelib/fs.stat": "^2.0.2",
@@ -2020,9 +2069,9 @@
             "dev": true
         },
         "node_modules/fastq": {
-            "version": "1.13.0",
-            "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz",
-            "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==",
+            "version": "1.15.0",
+            "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz",
+            "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==",
             "dev": true,
             "dependencies": {
                 "reusify": "^1.0.4"
@@ -2061,6 +2110,22 @@
                 "node": ">=8"
             }
         },
+        "node_modules/find-up": {
+            "version": "5.0.0",
+            "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+            "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+            "dev": true,
+            "dependencies": {
+                "locate-path": "^6.0.0",
+                "path-exists": "^4.0.0"
+            },
+            "engines": {
+                "node": ">=10"
+            },
+            "funding": {
+                "url": "https://github.com/sponsors/sindresorhus"
+            }
+        },
         "node_modules/flat-cache": {
             "version": "3.0.4",
             "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
@@ -2075,15 +2140,15 @@
             }
         },
         "node_modules/flatted": {
-            "version": "3.2.6",
-            "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz",
-            "integrity": "sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==",
+            "version": "3.2.7",
+            "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
+            "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
             "dev": true
         },
         "node_modules/follow-redirects": {
-            "version": "1.15.1",
-            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
-            "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
+            "version": "1.15.2",
+            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
+            "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
             "dev": true,
             "funding": [
                 {
@@ -2145,12 +2210,6 @@
             "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
             "dev": true
         },
-        "node_modules/functional-red-black-tree": {
-            "version": "1.0.1",
-            "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
-            "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==",
-            "dev": true
-        },
         "node_modules/get-caller-file": {
             "version": "2.0.5",
             "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@@ -2160,9 +2219,9 @@
             }
         },
         "node_modules/get-intrinsic": {
-            "version": "1.1.2",
-            "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz",
-            "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==",
+            "version": "1.2.0",
+            "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
+            "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
             "dev": true,
             "dependencies": {
                 "function-bind": "^1.1.1",
@@ -2212,9 +2271,9 @@
             }
         },
         "node_modules/globals": {
-            "version": "13.16.0",
-            "resolved": "https://registry.npmjs.org/globals/-/globals-13.16.0.tgz",
-            "integrity": "sha512-A1lrQfpNF+McdPOnnFqY3kSN0AFTy485bTi1bkLk4mVPODIUEcSfhHgRqA+QdXPksrSTTztYXx37NFV+GpGk3Q==",
+            "version": "13.19.0",
+            "resolved": "https://registry.npmjs.org/globals/-/globals-13.19.0.tgz",
+            "integrity": "sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==",
             "dev": true,
             "dependencies": {
                 "type-fest": "^0.20.2"
@@ -2252,6 +2311,12 @@
             "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
             "dev": true
         },
+        "node_modules/grapheme-splitter": {
+            "version": "1.0.4",
+            "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz",
+            "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==",
+            "dev": true
+        },
         "node_modules/has": {
             "version": "1.0.3",
             "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
@@ -2375,9 +2440,9 @@
             ]
         },
         "node_modules/ignore": {
-            "version": "5.2.0",
-            "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz",
-            "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==",
+            "version": "5.2.4",
+            "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
+            "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
             "dev": true,
             "engines": {
                 "node": ">= 4"
@@ -2488,6 +2553,15 @@
                 "node": ">=0.12.0"
             }
         },
+        "node_modules/is-path-inside": {
+            "version": "3.0.3",
+            "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
+            "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
+            "dev": true,
+            "engines": {
+                "node": ">=8"
+            }
+        },
         "node_modules/isarray": {
             "version": "1.0.0",
             "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
@@ -2500,6 +2574,16 @@
             "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
             "dev": true
         },
+        "node_modules/js-sdsl": {
+            "version": "4.3.0",
+            "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz",
+            "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==",
+            "dev": true,
+            "funding": {
+                "type": "opencollective",
+                "url": "https://opencollective.com/js-sdsl"
+            }
+        },
         "node_modules/js-yaml": {
             "version": "4.1.0",
             "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
@@ -2572,6 +2656,21 @@
             "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==",
             "dev": true
         },
+        "node_modules/locate-path": {
+            "version": "6.0.0",
+            "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+            "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+            "dev": true,
+            "dependencies": {
+                "p-locate": "^5.0.0"
+            },
+            "engines": {
+                "node": ">=10"
+            },
+            "funding": {
+                "url": "https://github.com/sponsors/sindresorhus"
+            }
+        },
         "node_modules/lodash.merge": {
             "version": "4.6.2",
             "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
@@ -2678,10 +2777,13 @@
             }
         },
         "node_modules/minimist": {
-            "version": "1.2.6",
-            "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
-            "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
-            "dev": true
+            "version": "1.2.7",
+            "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz",
+            "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==",
+            "dev": true,
+            "funding": {
+                "url": "https://github.com/sponsors/ljharb"
+            }
         },
         "node_modules/mkdirp": {
             "version": "0.5.6",
@@ -2725,10 +2827,16 @@
             "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
             "dev": true
         },
+        "node_modules/natural-compare-lite": {
+            "version": "1.4.0",
+            "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz",
+            "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==",
+            "dev": true
+        },
         "node_modules/node-abi": {
-            "version": "3.22.0",
-            "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.22.0.tgz",
-            "integrity": "sha512-u4uAs/4Zzmp/jjsD9cyFYDXeISfUWaAVWshPmDZOFOv4Xl4SbzTXm53I04C2uRueYJ+0t5PEtLH/owbn2Npf/w==",
+            "version": "3.31.0",
+            "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz",
+            "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==",
             "dev": true,
             "dependencies": {
                 "semver": "^7.3.5"
@@ -2756,9 +2864,9 @@
             }
         },
         "node_modules/object-inspect": {
-            "version": "1.12.2",
-            "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
-            "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
+            "version": "1.12.3",
+            "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
+            "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
             "dev": true,
             "funding": {
                 "url": "https://github.com/sponsors/ljharb"
@@ -2819,6 +2927,36 @@
                 "node": ">= 6"
             }
         },
+        "node_modules/p-limit": {
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+            "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+            "dev": true,
+            "dependencies": {
+                "yocto-queue": "^0.1.0"
+            },
+            "engines": {
+                "node": ">=10"
+            },
+            "funding": {
+                "url": "https://github.com/sponsors/sindresorhus"
+            }
+        },
+        "node_modules/p-locate": {
+            "version": "5.0.0",
+            "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+            "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+            "dev": true,
+            "dependencies": {
+                "p-limit": "^3.0.2"
+            },
+            "engines": {
+                "node": ">=10"
+            },
+            "funding": {
+                "url": "https://github.com/sponsors/sindresorhus"
+            }
+        },
         "node_modules/parent-module": {
             "version": "1.0.1",
             "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -2850,12 +2988,12 @@
             }
         },
         "node_modules/parse5": {
-            "version": "7.0.0",
-            "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz",
-            "integrity": "sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==",
+            "version": "7.1.2",
+            "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz",
+            "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==",
             "dev": true,
             "dependencies": {
-                "entities": "^4.3.0"
+                "entities": "^4.4.0"
             },
             "funding": {
                 "url": "https://github.com/inikulin/parse5?sponsor=1"
@@ -2874,6 +3012,15 @@
                 "url": "https://github.com/inikulin/parse5?sponsor=1"
             }
         },
+        "node_modules/path-exists": {
+            "version": "4.0.0",
+            "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+            "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+            "dev": true,
+            "engines": {
+                "node": ">=8"
+            }
+        },
         "node_modules/path-is-absolute": {
             "version": "1.0.1",
             "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
@@ -2955,9 +3102,9 @@
             }
         },
         "node_modules/prettier": {
-            "version": "2.7.1",
-            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz",
-            "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==",
+            "version": "2.8.3",
+            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.3.tgz",
+            "integrity": "sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==",
             "dev": true,
             "bin": {
                 "prettier": "bin-prettier.js"
@@ -2986,9 +3133,9 @@
             }
         },
         "node_modules/punycode": {
-            "version": "2.1.1",
-            "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
-            "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+            "version": "2.3.0",
+            "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
+            "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
             "dev": true,
             "engines": {
                 "node": ">=6"
@@ -3185,9 +3332,9 @@
             "dev": true
         },
         "node_modules/semver": {
-            "version": "7.3.7",
-            "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz",
-            "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==",
+            "version": "7.3.8",
+            "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
+            "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
             "dependencies": {
                 "lru-cache": "^6.0.0"
             },
@@ -3432,9 +3579,9 @@
             }
         },
         "node_modules/tslib": {
-            "version": "2.4.0",
-            "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
-            "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==",
+            "version": "2.4.1",
+            "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz",
+            "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==",
             "dev": true
         },
         "node_modules/tsutils": {
@@ -3515,9 +3662,9 @@
             }
         },
         "node_modules/typescript": {
-            "version": "4.7.4",
-            "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz",
-            "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==",
+            "version": "4.9.4",
+            "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz",
+            "integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==",
             "dev": true,
             "bin": {
                 "tsc": "bin/tsc",
@@ -3534,9 +3681,9 @@
             "dev": true
         },
         "node_modules/underscore": {
-            "version": "1.13.4",
-            "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.4.tgz",
-            "integrity": "sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==",
+            "version": "1.13.6",
+            "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz",
+            "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==",
             "dev": true
         },
         "node_modules/unzipper": {
@@ -3578,16 +3725,11 @@
             "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
             "dev": true
         },
-        "node_modules/v8-compile-cache": {
-            "version": "2.3.0",
-            "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
-            "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==",
-            "dev": true
-        },
         "node_modules/vsce": {
-            "version": "2.9.2",
-            "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.9.2.tgz",
-            "integrity": "sha512-xyLqL4U82BilUX1t6Ym2opQEa2tLGWYjbgB7+ETeNVXlIJz5sWBJjQJSYJVFOKJSpiOtQclolu88cj7oY6vvPQ==",
+            "version": "2.15.0",
+            "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.15.0.tgz",
+            "integrity": "sha512-P8E9LAZvBCQnoGoizw65JfGvyMqNGlHdlUXD1VAuxtvYAaHBKLBdKPnpy60XKVDAkQCfmMu53g+gq9FM+ydepw==",
+            "deprecated": "vsce has been renamed to @vscode/vsce. Install using @vscode/vsce instead.",
             "dev": true,
             "dependencies": {
                 "azure-devops-node-api": "^11.0.1",
@@ -3866,19 +4008,38 @@
             "dependencies": {
                 "buffer-crc32": "~0.2.3"
             }
+        },
+        "node_modules/yocto-queue": {
+            "version": "0.1.0",
+            "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+            "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+            "dev": true,
+            "engines": {
+                "node": ">=10"
+            },
+            "funding": {
+                "url": "https://github.com/sponsors/sindresorhus"
+            }
         }
     },
     "dependencies": {
+        "@esbuild/linux-loong64": {
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz",
+            "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==",
+            "dev": true,
+            "optional": true
+        },
         "@eslint/eslintrc": {
-            "version": "1.3.0",
-            "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.0.tgz",
-            "integrity": "sha512-UWW0TMTmk2d7hLcWD1/e2g5HDM/HQ3csaLSqXCfqwh4uNDuNqlaKWXmEsL4Cs41Z0KnILNvwbHAah3C2yt06kw==",
+            "version": "1.4.1",
+            "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz",
+            "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==",
             "dev": true,
             "requires": {
                 "ajv": "^6.12.4",
                 "debug": "^4.3.2",
-                "espree": "^9.3.2",
-                "globals": "^13.15.0",
+                "espree": "^9.4.0",
+                "globals": "^13.19.0",
                 "ignore": "^5.2.0",
                 "import-fresh": "^3.2.1",
                 "js-yaml": "^4.1.0",
@@ -3895,16 +4056,22 @@
             }
         },
         "@humanwhocodes/config-array": {
-            "version": "0.9.5",
-            "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.5.tgz",
-            "integrity": "sha512-ObyMyWxZiCu/yTisA7uzx81s40xR2fD5Cg/2Kq7G02ajkNubJf6BopgDTmDyc3U7sXpNKM8cYOw7s7Tyr+DnCw==",
+            "version": "0.11.8",
+            "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz",
+            "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==",
             "dev": true,
             "requires": {
                 "@humanwhocodes/object-schema": "^1.2.1",
                 "debug": "^4.1.1",
-                "minimatch": "^3.0.4"
+                "minimatch": "^3.0.5"
             }
         },
+        "@humanwhocodes/module-importer": {
+            "version": "1.0.1",
+            "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
+            "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
+            "dev": true
+        },
         "@humanwhocodes/object-schema": {
             "version": "1.2.1",
             "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz",
@@ -3950,9 +4117,15 @@
             "dev": true
         },
         "@types/node": {
-            "version": "16.11.43",
-            "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.43.tgz",
-            "integrity": "sha512-GqWykok+3uocgfAJM8imbozrqLnPyTrpFlrryURQlw1EesPUCx5XxTiucWDSFF9/NUEXDuD4bnvHm8xfVGWTpQ==",
+            "version": "16.11.68",
+            "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.68.tgz",
+            "integrity": "sha512-JkRpuVz3xCNCWaeQ5EHLR/6woMbHZz/jZ7Kmc63AkU+1HxnoUugzSWMck7dsR4DvNYX8jp9wTi9K7WvnxOIQZQ==",
+            "dev": true
+        },
+        "@types/semver": {
+            "version": "7.3.13",
+            "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz",
+            "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==",
             "dev": true
         },
         "@types/vscode": {
@@ -3962,69 +4135,70 @@
             "dev": true
         },
         "@typescript-eslint/eslint-plugin": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.30.5.tgz",
-            "integrity": "sha512-lftkqRoBvc28VFXEoRgyZuztyVUQ04JvUnATSPtIRFAccbXTWL6DEtXGYMcbg998kXw1NLUJm7rTQ9eUt+q6Ig==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.48.2.tgz",
+            "integrity": "sha512-sR0Gja9Ky1teIq4qJOl0nC+Tk64/uYdX+mi+5iB//MH8gwyx8e3SOyhEzeLZEFEEfCaLf8KJq+Bd/6je1t+CAg==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/type-utils": "5.30.5",
-                "@typescript-eslint/utils": "5.30.5",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/type-utils": "5.48.2",
+                "@typescript-eslint/utils": "5.48.2",
                 "debug": "^4.3.4",
-                "functional-red-black-tree": "^1.0.1",
                 "ignore": "^5.2.0",
+                "natural-compare-lite": "^1.4.0",
                 "regexpp": "^3.2.0",
                 "semver": "^7.3.7",
                 "tsutils": "^3.21.0"
             }
         },
         "@typescript-eslint/parser": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.30.5.tgz",
-            "integrity": "sha512-zj251pcPXI8GO9NDKWWmygP6+UjwWmrdf9qMW/L/uQJBM/0XbU2inxe5io/234y/RCvwpKEYjZ6c1YrXERkK4Q==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.48.2.tgz",
+            "integrity": "sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/typescript-estree": "5.30.5",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/typescript-estree": "5.48.2",
                 "debug": "^4.3.4"
             }
         },
         "@typescript-eslint/scope-manager": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.30.5.tgz",
-            "integrity": "sha512-NJ6F+YHHFT/30isRe2UTmIGGAiXKckCyMnIV58cE3JkHmaD6e5zyEYm5hBDv0Wbin+IC0T1FWJpD3YqHUG/Ydg==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.48.2.tgz",
+            "integrity": "sha512-zEUFfonQid5KRDKoI3O+uP1GnrFd4tIHlvs+sTJXiWuypUWMuDaottkJuR612wQfOkjYbsaskSIURV9xo4f+Fw==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/visitor-keys": "5.30.5"
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/visitor-keys": "5.48.2"
             }
         },
         "@typescript-eslint/type-utils": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.30.5.tgz",
-            "integrity": "sha512-k9+ejlv1GgwN1nN7XjVtyCgE0BTzhzT1YsQF0rv4Vfj2U9xnslBgMYYvcEYAFVdvhuEscELJsB7lDkN7WusErw==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.48.2.tgz",
+            "integrity": "sha512-QVWx7J5sPMRiOMJp5dYshPxABRoZV1xbRirqSk8yuIIsu0nvMTZesKErEA3Oix1k+uvsk8Cs8TGJ6kQ0ndAcew==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/utils": "5.30.5",
+                "@typescript-eslint/typescript-estree": "5.48.2",
+                "@typescript-eslint/utils": "5.48.2",
                 "debug": "^4.3.4",
                 "tsutils": "^3.21.0"
             }
         },
         "@typescript-eslint/types": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.30.5.tgz",
-            "integrity": "sha512-kZ80w/M2AvsbRvOr3PjaNh6qEW1LFqs2pLdo2s5R38B2HYXG8Z0PP48/4+j1QHJFL3ssHIbJ4odPRS8PlHrFfw==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.48.2.tgz",
+            "integrity": "sha512-hE7dA77xxu7ByBc6KCzikgfRyBCTst6dZQpwaTy25iMYOnbNljDT4hjhrGEJJ0QoMjrfqrx+j1l1B9/LtKeuqA==",
             "dev": true
         },
         "@typescript-eslint/typescript-estree": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.30.5.tgz",
-            "integrity": "sha512-qGTc7QZC801kbYjAr4AgdOfnokpwStqyhSbiQvqGBLixniAKyH+ib2qXIVo4P9NgGzwyfD9I0nlJN7D91E1VpQ==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.48.2.tgz",
+            "integrity": "sha512-bibvD3z6ilnoVxUBFEgkO0k0aFvUc4Cttt0dAreEr+nrAHhWzkO83PEVVuieK3DqcgL6VAK5dkzK8XUVja5Zcg==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/visitor-keys": "5.30.5",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/visitor-keys": "5.48.2",
                 "debug": "^4.3.4",
                 "globby": "^11.1.0",
                 "is-glob": "^4.0.3",
@@ -4033,33 +4207,35 @@
             }
         },
         "@typescript-eslint/utils": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.30.5.tgz",
-            "integrity": "sha512-o4SSUH9IkuA7AYIfAvatldovurqTAHrfzPApOZvdUq01hHojZojCFXx06D/aFpKCgWbMPRdJBWAC3sWp3itwTA==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.48.2.tgz",
+            "integrity": "sha512-2h18c0d7jgkw6tdKTlNaM7wyopbLRBiit8oAxoP89YnuBOzCZ8g8aBCaCqq7h208qUTroL7Whgzam7UY3HVLow==",
             "dev": true,
             "requires": {
                 "@types/json-schema": "^7.0.9",
-                "@typescript-eslint/scope-manager": "5.30.5",
-                "@typescript-eslint/types": "5.30.5",
-                "@typescript-eslint/typescript-estree": "5.30.5",
+                "@types/semver": "^7.3.12",
+                "@typescript-eslint/scope-manager": "5.48.2",
+                "@typescript-eslint/types": "5.48.2",
+                "@typescript-eslint/typescript-estree": "5.48.2",
                 "eslint-scope": "^5.1.1",
-                "eslint-utils": "^3.0.0"
+                "eslint-utils": "^3.0.0",
+                "semver": "^7.3.7"
             }
         },
         "@typescript-eslint/visitor-keys": {
-            "version": "5.30.5",
-            "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.30.5.tgz",
-            "integrity": "sha512-D+xtGo9HUMELzWIUqcQc0p2PO4NyvTrgIOK/VnSH083+8sq0tiLozNRKuLarwHYGRuA6TVBQSuuLwJUDWd3aaA==",
+            "version": "5.48.2",
+            "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.48.2.tgz",
+            "integrity": "sha512-z9njZLSkwmjFWUelGEwEbdf4NwKvfHxvGC0OcGN1Hp/XNDIcJ7D5DpPNPv6x6/mFvc1tQHsaWmpD/a4gOvvCJQ==",
             "dev": true,
             "requires": {
-                "@typescript-eslint/types": "5.30.5",
+                "@typescript-eslint/types": "5.48.2",
                 "eslint-visitor-keys": "^3.3.0"
             }
         },
         "@vscode/test-electron": {
-            "version": "2.1.5",
-            "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.1.5.tgz",
-            "integrity": "sha512-O/ioqFpV+RvKbRykX2ItYPnbcZ4Hk5V0rY4uhQjQTLhGL9WZUvS7exzuYQCCI+ilSqJpctvxq2llTfGXf9UnnA==",
+            "version": "2.2.2",
+            "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.2.2.tgz",
+            "integrity": "sha512-s5d2VtMySvff0UgqkJ0BMCr1es+qREE194EAodGIefq518W53ifvv69e80l9e2MrYJEqUUKwukE/w3H9o15YEw==",
             "dev": true,
             "requires": {
                 "http-proxy-agent": "^4.0.1",
@@ -4069,9 +4245,9 @@
             }
         },
         "acorn": {
-            "version": "8.7.1",
-            "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz",
-            "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==",
+            "version": "8.8.1",
+            "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz",
+            "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==",
             "dev": true
         },
         "acorn-jsx": {
@@ -4406,9 +4582,9 @@
             "dev": true
         },
         "d3": {
-            "version": "7.6.1",
-            "resolved": "https://registry.npmjs.org/d3/-/d3-7.6.1.tgz",
-            "integrity": "sha512-txMTdIHFbcpLx+8a0IFhZsbp+PfBBPt8yfbmukZTQFroKuFqIwqswF0qE5JXWefylaAVpSXFoKm3yP+jpNLFLw==",
+            "version": "7.8.2",
+            "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.2.tgz",
+            "integrity": "sha512-WXty7qOGSHb7HR7CfOzwN1Gw04MUOzN8qh9ZUsvwycIMb4DYMpY9xczZ6jUorGtO6bR9BPMPaueIKwiDxu9uiQ==",
             "requires": {
                 "d3-array": "3",
                 "d3-axis": "3",
@@ -4443,9 +4619,9 @@
             }
         },
         "d3-array": {
-            "version": "3.2.0",
-            "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.0.tgz",
-            "integrity": "sha512-3yXFQo0oG3QCxbF06rMPFyGRMGJNS7NvsV1+2joOjbBE+9xvWQ8+GcMJAjRCzw06zQ3/arXeJgbPYcjUCuC+3g==",
+            "version": "3.2.2",
+            "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.2.tgz",
+            "integrity": "sha512-yEEyEAbDrF8C6Ob2myOBLjwBLck1Z89jMGFee0oPsn95GqjerpaOA4ch+vc2l0FNFFwMD5N7OCSEN5eAlsUbgQ==",
             "requires": {
                 "internmap": "1 - 2"
             }
@@ -4481,9 +4657,9 @@
             "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA=="
         },
         "d3-contour": {
-            "version": "4.0.0",
-            "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.0.tgz",
-            "integrity": "sha512-7aQo0QHUTu/Ko3cP9YK9yUTxtoDEiDGwnBHyLxG5M4vqlBkO/uixMRele3nfsfj6UXOcuReVpVXzAboGraYIJw==",
+            "version": "4.0.2",
+            "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz",
+            "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==",
             "requires": {
                 "d3-array": "^3.2.0"
             }
@@ -4549,9 +4725,9 @@
             "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA=="
         },
         "d3-geo": {
-            "version": "3.0.1",
-            "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.0.1.tgz",
-            "integrity": "sha512-Wt23xBych5tSy9IYAM1FR2rWIBFWa52B/oF/GYe5zbdHrg08FU8+BuI6X4PvTwPDdqdAdq04fuWJpELtsaEjeA==",
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.0.tgz",
+            "integrity": "sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA==",
             "requires": {
                 "d3-array": "2.5.0 - 3"
             }
@@ -4631,17 +4807,17 @@
             "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ=="
         },
         "d3-shape": {
-            "version": "3.1.0",
-            "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.1.0.tgz",
-            "integrity": "sha512-tGDh1Muf8kWjEDT/LswZJ8WF85yDZLvVJpYU9Nq+8+yW1Z5enxrmXOhTArlkaElU+CTn0OTVNli+/i+HP45QEQ==",
+            "version": "3.2.0",
+            "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
+            "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
             "requires": {
-                "d3-path": "1 - 3"
+                "d3-path": "^3.1.0"
             }
         },
         "d3-time": {
-            "version": "3.0.0",
-            "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.0.0.tgz",
-            "integrity": "sha512-zmV3lRnlaLI08y9IMRXSDshQb5Nj77smnfpnd2LrBa/2K281Jijactokeak14QacHs/kKq0AQ121nidNYlarbQ==",
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
+            "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
             "requires": {
                 "d3-array": "2 - 3"
             }
@@ -4806,176 +4982,177 @@
             }
         },
         "entities": {
-            "version": "4.3.1",
-            "resolved": "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz",
-            "integrity": "sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg==",
+            "version": "4.4.0",
+            "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz",
+            "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==",
             "dev": true
         },
         "esbuild": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz",
-            "integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==",
-            "dev": true,
-            "requires": {
-                "esbuild-android-64": "0.14.48",
-                "esbuild-android-arm64": "0.14.48",
-                "esbuild-darwin-64": "0.14.48",
-                "esbuild-darwin-arm64": "0.14.48",
-                "esbuild-freebsd-64": "0.14.48",
-                "esbuild-freebsd-arm64": "0.14.48",
-                "esbuild-linux-32": "0.14.48",
-                "esbuild-linux-64": "0.14.48",
-                "esbuild-linux-arm": "0.14.48",
-                "esbuild-linux-arm64": "0.14.48",
-                "esbuild-linux-mips64le": "0.14.48",
-                "esbuild-linux-ppc64le": "0.14.48",
-                "esbuild-linux-riscv64": "0.14.48",
-                "esbuild-linux-s390x": "0.14.48",
-                "esbuild-netbsd-64": "0.14.48",
-                "esbuild-openbsd-64": "0.14.48",
-                "esbuild-sunos-64": "0.14.48",
-                "esbuild-windows-32": "0.14.48",
-                "esbuild-windows-64": "0.14.48",
-                "esbuild-windows-arm64": "0.14.48"
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz",
+            "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==",
+            "dev": true,
+            "requires": {
+                "@esbuild/linux-loong64": "0.14.54",
+                "esbuild-android-64": "0.14.54",
+                "esbuild-android-arm64": "0.14.54",
+                "esbuild-darwin-64": "0.14.54",
+                "esbuild-darwin-arm64": "0.14.54",
+                "esbuild-freebsd-64": "0.14.54",
+                "esbuild-freebsd-arm64": "0.14.54",
+                "esbuild-linux-32": "0.14.54",
+                "esbuild-linux-64": "0.14.54",
+                "esbuild-linux-arm": "0.14.54",
+                "esbuild-linux-arm64": "0.14.54",
+                "esbuild-linux-mips64le": "0.14.54",
+                "esbuild-linux-ppc64le": "0.14.54",
+                "esbuild-linux-riscv64": "0.14.54",
+                "esbuild-linux-s390x": "0.14.54",
+                "esbuild-netbsd-64": "0.14.54",
+                "esbuild-openbsd-64": "0.14.54",
+                "esbuild-sunos-64": "0.14.54",
+                "esbuild-windows-32": "0.14.54",
+                "esbuild-windows-64": "0.14.54",
+                "esbuild-windows-arm64": "0.14.54"
             }
         },
         "esbuild-android-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz",
-            "integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.54.tgz",
+            "integrity": "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==",
             "dev": true,
             "optional": true
         },
         "esbuild-android-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz",
-            "integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.54.tgz",
+            "integrity": "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==",
             "dev": true,
             "optional": true
         },
         "esbuild-darwin-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz",
-            "integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.54.tgz",
+            "integrity": "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==",
             "dev": true,
             "optional": true
         },
         "esbuild-darwin-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz",
-            "integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.54.tgz",
+            "integrity": "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==",
             "dev": true,
             "optional": true
         },
         "esbuild-freebsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz",
-            "integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.54.tgz",
+            "integrity": "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==",
             "dev": true,
             "optional": true
         },
         "esbuild-freebsd-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz",
-            "integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.54.tgz",
+            "integrity": "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-32": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz",
-            "integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.54.tgz",
+            "integrity": "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz",
-            "integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.54.tgz",
+            "integrity": "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-arm": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz",
-            "integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.54.tgz",
+            "integrity": "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz",
-            "integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.54.tgz",
+            "integrity": "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-mips64le": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz",
-            "integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.54.tgz",
+            "integrity": "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-ppc64le": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz",
-            "integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.54.tgz",
+            "integrity": "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-riscv64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz",
-            "integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.54.tgz",
+            "integrity": "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==",
             "dev": true,
             "optional": true
         },
         "esbuild-linux-s390x": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz",
-            "integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.54.tgz",
+            "integrity": "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==",
             "dev": true,
             "optional": true
         },
         "esbuild-netbsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz",
-            "integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.54.tgz",
+            "integrity": "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==",
             "dev": true,
             "optional": true
         },
         "esbuild-openbsd-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz",
-            "integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.54.tgz",
+            "integrity": "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==",
             "dev": true,
             "optional": true
         },
         "esbuild-sunos-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz",
-            "integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.54.tgz",
+            "integrity": "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==",
             "dev": true,
             "optional": true
         },
         "esbuild-windows-32": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz",
-            "integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.54.tgz",
+            "integrity": "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==",
             "dev": true,
             "optional": true
         },
         "esbuild-windows-64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz",
-            "integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.54.tgz",
+            "integrity": "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==",
             "dev": true,
             "optional": true
         },
         "esbuild-windows-arm64": {
-            "version": "0.14.48",
-            "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz",
-            "integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==",
+            "version": "0.14.54",
+            "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.54.tgz",
+            "integrity": "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==",
             "dev": true,
             "optional": true
         },
@@ -4991,13 +5168,15 @@
             "dev": true
         },
         "eslint": {
-            "version": "8.19.0",
-            "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.19.0.tgz",
-            "integrity": "sha512-SXOPj3x9VKvPe81TjjUJCYlV4oJjQw68Uek+AM0X4p+33dj2HY5bpTZOgnQHcG2eAm1mtCU9uNMnJi7exU/kYw==",
+            "version": "8.32.0",
+            "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.32.0.tgz",
+            "integrity": "sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ==",
             "dev": true,
             "requires": {
-                "@eslint/eslintrc": "^1.3.0",
-                "@humanwhocodes/config-array": "^0.9.2",
+                "@eslint/eslintrc": "^1.4.1",
+                "@humanwhocodes/config-array": "^0.11.8",
+                "@humanwhocodes/module-importer": "^1.0.1",
+                "@nodelib/fs.walk": "^1.2.8",
                 "ajv": "^6.10.0",
                 "chalk": "^4.0.0",
                 "cross-spawn": "^7.0.2",
@@ -5007,18 +5186,21 @@
                 "eslint-scope": "^7.1.1",
                 "eslint-utils": "^3.0.0",
                 "eslint-visitor-keys": "^3.3.0",
-                "espree": "^9.3.2",
+                "espree": "^9.4.0",
                 "esquery": "^1.4.0",
                 "esutils": "^2.0.2",
                 "fast-deep-equal": "^3.1.3",
                 "file-entry-cache": "^6.0.1",
-                "functional-red-black-tree": "^1.0.1",
-                "glob-parent": "^6.0.1",
-                "globals": "^13.15.0",
+                "find-up": "^5.0.0",
+                "glob-parent": "^6.0.2",
+                "globals": "^13.19.0",
+                "grapheme-splitter": "^1.0.4",
                 "ignore": "^5.2.0",
                 "import-fresh": "^3.0.0",
                 "imurmurhash": "^0.1.4",
                 "is-glob": "^4.0.0",
+                "is-path-inside": "^3.0.3",
+                "js-sdsl": "^4.1.4",
                 "js-yaml": "^4.1.0",
                 "json-stable-stringify-without-jsonify": "^1.0.1",
                 "levn": "^0.4.1",
@@ -5029,8 +5211,7 @@
                 "regexpp": "^3.2.0",
                 "strip-ansi": "^6.0.1",
                 "strip-json-comments": "^3.1.0",
-                "text-table": "^0.2.0",
-                "v8-compile-cache": "^2.0.3"
+                "text-table": "^0.2.0"
             },
             "dependencies": {
                 "eslint-scope": {
@@ -5052,9 +5233,9 @@
             }
         },
         "eslint-config-prettier": {
-            "version": "8.5.0",
-            "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.5.0.tgz",
-            "integrity": "sha512-obmWKLUNCnhtQRKc+tmnYuQl0pFU1ibYJQ5BGhTVB08bHe9wC8qUeG7c08dj9XX+AuPj1YSGSQIHl1pnDHZR0Q==",
+            "version": "8.6.0",
+            "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz",
+            "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==",
             "dev": true,
             "requires": {}
         },
@@ -5092,12 +5273,12 @@
             "dev": true
         },
         "espree": {
-            "version": "9.3.2",
-            "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.2.tgz",
-            "integrity": "sha512-D211tC7ZwouTIuY5x9XnS0E9sWNChB7IYKX/Xp5eQj3nFXhqmiUDB9q27y76oFl8jTg3pXcQx/bpxMfs3CIZbA==",
+            "version": "9.4.1",
+            "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz",
+            "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==",
             "dev": true,
             "requires": {
-                "acorn": "^8.7.1",
+                "acorn": "^8.8.0",
                 "acorn-jsx": "^5.3.2",
                 "eslint-visitor-keys": "^3.3.0"
             }
@@ -5161,9 +5342,9 @@
             "dev": true
         },
         "fast-glob": {
-            "version": "3.2.11",
-            "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz",
-            "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==",
+            "version": "3.2.12",
+            "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz",
+            "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==",
             "dev": true,
             "requires": {
                 "@nodelib/fs.stat": "^2.0.2",
@@ -5197,9 +5378,9 @@
             "dev": true
         },
         "fastq": {
-            "version": "1.13.0",
-            "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz",
-            "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==",
+            "version": "1.15.0",
+            "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz",
+            "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==",
             "dev": true,
             "requires": {
                 "reusify": "^1.0.4"
@@ -5232,6 +5413,16 @@
                 "to-regex-range": "^5.0.1"
             }
         },
+        "find-up": {
+            "version": "5.0.0",
+            "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+            "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
+            "dev": true,
+            "requires": {
+                "locate-path": "^6.0.0",
+                "path-exists": "^4.0.0"
+            }
+        },
         "flat-cache": {
             "version": "3.0.4",
             "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz",
@@ -5243,15 +5434,15 @@
             }
         },
         "flatted": {
-            "version": "3.2.6",
-            "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.6.tgz",
-            "integrity": "sha512-0sQoMh9s0BYsm+12Huy/rkKxVu4R1+r96YX5cG44rHV0pQ6iC3Q+mkoMFaGWObMFYQxCVT+ssG1ksneA2MI9KQ==",
+            "version": "3.2.7",
+            "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
+            "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
             "dev": true
         },
         "follow-redirects": {
-            "version": "1.15.1",
-            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
-            "integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA==",
+            "version": "1.15.2",
+            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
+            "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
             "dev": true
         },
         "fs-constants": {
@@ -5295,21 +5486,15 @@
             "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==",
             "dev": true
         },
-        "functional-red-black-tree": {
-            "version": "1.0.1",
-            "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
-            "integrity": "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==",
-            "dev": true
-        },
         "get-caller-file": {
             "version": "2.0.5",
             "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
             "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
         },
         "get-intrinsic": {
-            "version": "1.1.2",
-            "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.2.tgz",
-            "integrity": "sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA==",
+            "version": "1.2.0",
+            "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
+            "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
             "dev": true,
             "requires": {
                 "function-bind": "^1.1.1",
@@ -5347,9 +5532,9 @@
             }
         },
         "globals": {
-            "version": "13.16.0",
-            "resolved": "https://registry.npmjs.org/globals/-/globals-13.16.0.tgz",
-            "integrity": "sha512-A1lrQfpNF+McdPOnnFqY3kSN0AFTy485bTi1bkLk4mVPODIUEcSfhHgRqA+QdXPksrSTTztYXx37NFV+GpGk3Q==",
+            "version": "13.19.0",
+            "resolved": "https://registry.npmjs.org/globals/-/globals-13.19.0.tgz",
+            "integrity": "sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==",
             "dev": true,
             "requires": {
                 "type-fest": "^0.20.2"
@@ -5375,6 +5560,12 @@
             "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==",
             "dev": true
         },
+        "grapheme-splitter": {
+            "version": "1.0.4",
+            "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz",
+            "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==",
+            "dev": true
+        },
         "has": {
             "version": "1.0.3",
             "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
@@ -5453,9 +5644,9 @@
             "dev": true
         },
         "ignore": {
-            "version": "5.2.0",
-            "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz",
-            "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==",
+            "version": "5.2.4",
+            "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz",
+            "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==",
             "dev": true
         },
         "import-fresh": {
@@ -5536,6 +5727,12 @@
             "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
             "dev": true
         },
+        "is-path-inside": {
+            "version": "3.0.3",
+            "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
+            "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
+            "dev": true
+        },
         "isarray": {
             "version": "1.0.0",
             "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
@@ -5548,6 +5745,12 @@
             "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
             "dev": true
         },
+        "js-sdsl": {
+            "version": "4.3.0",
+            "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz",
+            "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==",
+            "dev": true
+        },
         "js-yaml": {
             "version": "4.1.0",
             "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
@@ -5610,6 +5813,15 @@
             "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==",
             "dev": true
         },
+        "locate-path": {
+            "version": "6.0.0",
+            "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+            "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
+            "dev": true,
+            "requires": {
+                "p-locate": "^5.0.0"
+            }
+        },
         "lodash.merge": {
             "version": "4.6.2",
             "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
@@ -5688,9 +5900,9 @@
             }
         },
         "minimist": {
-            "version": "1.2.6",
-            "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
-            "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==",
+            "version": "1.2.7",
+            "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz",
+            "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==",
             "dev": true
         },
         "mkdirp": {
@@ -5732,10 +5944,16 @@
             "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
             "dev": true
         },
+        "natural-compare-lite": {
+            "version": "1.4.0",
+            "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz",
+            "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==",
+            "dev": true
+        },
         "node-abi": {
-            "version": "3.22.0",
-            "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.22.0.tgz",
-            "integrity": "sha512-u4uAs/4Zzmp/jjsD9cyFYDXeISfUWaAVWshPmDZOFOv4Xl4SbzTXm53I04C2uRueYJ+0t5PEtLH/owbn2Npf/w==",
+            "version": "3.31.0",
+            "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz",
+            "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==",
             "dev": true,
             "requires": {
                 "semver": "^7.3.5"
@@ -5757,9 +5975,9 @@
             }
         },
         "object-inspect": {
-            "version": "1.12.2",
-            "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz",
-            "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==",
+            "version": "1.12.3",
+            "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
+            "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
             "dev": true
         },
         "once": {
@@ -5807,6 +6025,24 @@
                 }
             }
         },
+        "p-limit": {
+            "version": "3.1.0",
+            "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+            "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
+            "dev": true,
+            "requires": {
+                "yocto-queue": "^0.1.0"
+            }
+        },
+        "p-locate": {
+            "version": "5.0.0",
+            "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+            "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
+            "dev": true,
+            "requires": {
+                "p-limit": "^3.0.2"
+            }
+        },
         "parent-module": {
             "version": "1.0.1",
             "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -5834,12 +6070,12 @@
             }
         },
         "parse5": {
-            "version": "7.0.0",
-            "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.0.0.tgz",
-            "integrity": "sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g==",
+            "version": "7.1.2",
+            "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz",
+            "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==",
             "dev": true,
             "requires": {
-                "entities": "^4.3.0"
+                "entities": "^4.4.0"
             }
         },
         "parse5-htmlparser2-tree-adapter": {
@@ -5852,6 +6088,12 @@
                 "parse5": "^7.0.0"
             }
         },
+        "path-exists": {
+            "version": "4.0.0",
+            "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+            "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+            "dev": true
+        },
         "path-is-absolute": {
             "version": "1.0.1",
             "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
@@ -5909,9 +6151,9 @@
             "dev": true
         },
         "prettier": {
-            "version": "2.7.1",
-            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.7.1.tgz",
-            "integrity": "sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g==",
+            "version": "2.8.3",
+            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.3.tgz",
+            "integrity": "sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==",
             "dev": true
         },
         "process-nextick-args": {
@@ -5931,9 +6173,9 @@
             }
         },
         "punycode": {
-            "version": "2.1.1",
-            "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
-            "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+            "version": "2.3.0",
+            "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz",
+            "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==",
             "dev": true
         },
         "qs": {
@@ -6064,9 +6306,9 @@
             "dev": true
         },
         "semver": {
-            "version": "7.3.7",
-            "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz",
-            "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==",
+            "version": "7.3.8",
+            "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
+            "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
             "requires": {
                 "lru-cache": "^6.0.0"
             }
@@ -6237,9 +6479,9 @@
             "dev": true
         },
         "tslib": {
-            "version": "2.4.0",
-            "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
-            "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==",
+            "version": "2.4.1",
+            "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz",
+            "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==",
             "dev": true
         },
         "tsutils": {
@@ -6301,9 +6543,9 @@
             }
         },
         "typescript": {
-            "version": "4.7.4",
-            "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz",
-            "integrity": "sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ==",
+            "version": "4.9.4",
+            "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz",
+            "integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==",
             "dev": true
         },
         "uc.micro": {
@@ -6313,9 +6555,9 @@
             "dev": true
         },
         "underscore": {
-            "version": "1.13.4",
-            "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.4.tgz",
-            "integrity": "sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ==",
+            "version": "1.13.6",
+            "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz",
+            "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==",
             "dev": true
         },
         "unzipper": {
@@ -6357,16 +6599,10 @@
             "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
             "dev": true
         },
-        "v8-compile-cache": {
-            "version": "2.3.0",
-            "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
-            "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==",
-            "dev": true
-        },
         "vsce": {
-            "version": "2.9.2",
-            "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.9.2.tgz",
-            "integrity": "sha512-xyLqL4U82BilUX1t6Ym2opQEa2tLGWYjbgB7+ETeNVXlIJz5sWBJjQJSYJVFOKJSpiOtQclolu88cj7oY6vvPQ==",
+            "version": "2.15.0",
+            "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.15.0.tgz",
+            "integrity": "sha512-P8E9LAZvBCQnoGoizw65JfGvyMqNGlHdlUXD1VAuxtvYAaHBKLBdKPnpy60XKVDAkQCfmMu53g+gq9FM+ydepw==",
             "dev": true,
             "requires": {
                 "azure-devops-node-api": "^11.0.1",
@@ -6584,6 +6820,12 @@
             "requires": {
                 "buffer-crc32": "~0.2.3"
             }
+        },
+        "yocto-queue": {
+            "version": "0.1.0",
+            "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+            "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
+            "dev": true
         }
     }
 }
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index 930564bd7ca..3610e993f82 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -19,6 +19,12 @@
     "categories": [
         "Programming Languages"
     ],
+    "capabilities": {
+        "untrustedWorkspaces": {
+            "supported": false,
+            "description": "rust-analyzer invokes binaries set up by its configuration as well as the Rust toolchain's binaries. A malicious actor could exploit this to run arbitrary code on your machine."
+        }
+    },
     "engines": {
         "vscode": "^1.66.0"
     },
@@ -539,6 +545,14 @@
                         "string"
                     ]
                 },
+                "rust-analyzer.cargo.sysrootSrc": {
+                    "markdownDescription": "Relative path to the sysroot library sources. If left unset, this will default to\n`{cargo.sysroot}/lib/rustlib/src/rust/library`.\n\nThis option does not take effect until rust-analyzer is restarted.",
+                    "default": null,
+                    "type": [
+                        "null",
+                        "string"
+                    ]
+                },
                 "rust-analyzer.cargo.target": {
                     "markdownDescription": "Compilation target override (target triple).",
                     "default": null,
@@ -697,6 +711,15 @@
                         "Do no snippet completions for callables."
                     ]
                 },
+                "rust-analyzer.completion.limit": {
+                    "markdownDescription": "Maximum number of completions to return. If `None`, the limit is infinite.",
+                    "default": null,
+                    "type": [
+                        "null",
+                        "integer"
+                    ],
+                    "minimum": 0
+                },
                 "rust-analyzer.completion.postfix.enable": {
                     "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
                     "default": true,
@@ -1038,11 +1061,6 @@
                     "default": false,
                     "type": "boolean"
                 },
-                "rust-analyzer.inlayHints.locationLinks": {
-                    "markdownDescription": "Whether to use location links for parts of type mentioned in inlay hints.",
-                    "default": true,
-                    "type": "boolean"
-                },
                 "rust-analyzer.inlayHints.maxLength": {
                     "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
                     "default": 25,
diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
index 374c3b8144c..b38fa06a85c 100644
--- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
+++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
@@ -31,58 +31,12 @@ export async function bootstrap(
 
     return path;
 }
-
-async function patchelf(dest: vscode.Uri): Promise<void> {
-    await vscode.window.withProgress(
-        {
-            location: vscode.ProgressLocation.Notification,
-            title: "Patching rust-analyzer for NixOS",
-        },
-        async (progress, _) => {
-            const expression = `
-            {srcStr, pkgs ? import <nixpkgs> {}}:
-                pkgs.stdenv.mkDerivation {
-                    name = "rust-analyzer";
-                    src = /. + srcStr;
-                    phases = [ "installPhase" "fixupPhase" ];
-                    installPhase = "cp $src $out";
-                    fixupPhase = ''
-                    chmod 755 $out
-                    patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
-                    '';
-                }
-            `;
-            const origFile = vscode.Uri.file(dest.fsPath + "-orig");
-            await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
-            try {
-                progress.report({ message: "Patching executable", increment: 20 });
-                await new Promise((resolve, reject) => {
-                    const handle = exec(
-                        `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
-                        (err, stdout, stderr) => {
-                            if (err != null) {
-                                reject(Error(stderr));
-                            } else {
-                                resolve(stdout);
-                            }
-                        }
-                    );
-                    handle.stdin?.write(expression);
-                    handle.stdin?.end();
-                });
-            } finally {
-                await vscode.workspace.fs.delete(origFile);
-            }
-        }
-    );
-}
-
 async function getServer(
     context: vscode.ExtensionContext,
     config: Config,
     state: PersistentState
 ): Promise<string | undefined> {
-    const explicitPath = serverPath(config);
+    const explicitPath = process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath;
     if (explicitPath) {
         if (explicitPath.startsWith("~/")) {
             return os.homedir() + explicitPath.slice("~".length);
@@ -131,9 +85,6 @@ async function getServer(
     );
     return undefined;
 }
-function serverPath(config: Config): string | null {
-    return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath;
-}
 
 async function isNixOs(): Promise<boolean> {
     try {
@@ -146,3 +97,48 @@ async function isNixOs(): Promise<boolean> {
         return false;
     }
 }
+
+async function patchelf(dest: vscode.Uri): Promise<void> {
+    await vscode.window.withProgress(
+        {
+            location: vscode.ProgressLocation.Notification,
+            title: "Patching rust-analyzer for NixOS",
+        },
+        async (progress, _) => {
+            const expression = `
+            {srcStr, pkgs ? import <nixpkgs> {}}:
+                pkgs.stdenv.mkDerivation {
+                    name = "rust-analyzer";
+                    src = /. + srcStr;
+                    phases = [ "installPhase" "fixupPhase" ];
+                    installPhase = "cp $src $out";
+                    fixupPhase = ''
+                    chmod 755 $out
+                    patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
+                    '';
+                }
+            `;
+            const origFile = vscode.Uri.file(dest.fsPath + "-orig");
+            await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
+            try {
+                progress.report({ message: "Patching executable", increment: 20 });
+                await new Promise((resolve, reject) => {
+                    const handle = exec(
+                        `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
+                        (err, stdout, stderr) => {
+                            if (err != null) {
+                                reject(Error(stderr));
+                            } else {
+                                resolve(stdout);
+                            }
+                        }
+                    );
+                    handle.stdin?.write(expression);
+                    handle.stdin?.end();
+                });
+            } finally {
+                await vscode.workspace.fs.delete(origFile);
+            }
+        }
+    );
+}
diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts
index 82cdf0390ac..62980ca0464 100644
--- a/src/tools/rust-analyzer/editors/code/src/client.ts
+++ b/src/tools/rust-analyzer/editors/code/src/client.ts
@@ -102,24 +102,6 @@ export async function createClient(
                     }
                 },
             },
-            async provideInlayHints(document, viewPort, token, next) {
-                const inlays = await next(document, viewPort, token);
-                if (!inlays) {
-                    return inlays;
-                }
-                // U+200C is a zero-width non-joiner to prevent the editor from forming a ligature
-                // between code and hints
-                for (const inlay of inlays) {
-                    if (typeof inlay.label === "string") {
-                        inlay.label = `\u{200c}${inlay.label}\u{200c}`;
-                    } else if (Array.isArray(inlay.label)) {
-                        for (const it of inlay.label) {
-                            it.value = `\u{200c}${it.value}\u{200c}`;
-                        }
-                    }
-                }
-                return inlays;
-            },
             async handleDiagnostics(
                 uri: vscode.Uri,
                 diagnosticList: vscode.Diagnostic[],
@@ -190,12 +172,10 @@ export async function createClient(
                     )
                     .then(
                         (result) => {
+                            if (!result) return null;
                             const hover = client.protocol2CodeConverter.asHover(result);
-                            if (hover) {
-                                const actions = (<any>result).actions;
-                                if (actions) {
-                                    hover.contents.push(renderHoverActions(actions));
-                                }
+                            if (!!result.actions) {
+                                hover.contents.push(renderHoverActions(result.actions));
                             }
                             return hover;
                         },
@@ -328,25 +308,27 @@ class ExperimentalFeatures implements lc.StaticFeature {
         return { kind: "static" };
     }
     fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
-        const caps: any = capabilities.experimental ?? {};
-        caps.snippetTextEdit = true;
-        caps.codeActionGroup = true;
-        caps.hoverActions = true;
-        caps.serverStatusNotification = true;
-        caps.colorDiagnosticOutput = true;
-        caps.commands = {
-            commands: [
-                "rust-analyzer.runSingle",
-                "rust-analyzer.debugSingle",
-                "rust-analyzer.showReferences",
-                "rust-analyzer.gotoLocation",
-                "editor.action.triggerParameterHints",
-            ],
+        capabilities.experimental = {
+            snippetTextEdit: true,
+            codeActionGroup: true,
+            hoverActions: true,
+            serverStatusNotification: true,
+            colorDiagnosticOutput: true,
+            openServerLogs: true,
+            commands: {
+                commands: [
+                    "rust-analyzer.runSingle",
+                    "rust-analyzer.debugSingle",
+                    "rust-analyzer.showReferences",
+                    "rust-analyzer.gotoLocation",
+                    "editor.action.triggerParameterHints",
+                ],
+            },
+            ...capabilities.experimental,
         };
-        capabilities.experimental = caps;
     }
     initialize(
-        _capabilities: lc.ServerCapabilities<any>,
+        _capabilities: lc.ServerCapabilities,
         _documentSelector: lc.DocumentSelector | undefined
     ): void {}
     dispose(): void {}
diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts
index cb4e13e2c60..b5b64e33e07 100644
--- a/src/tools/rust-analyzer/editors/code/src/commands.ts
+++ b/src/tools/rust-analyzer/editors/code/src/commands.ts
@@ -130,11 +130,11 @@ export function joinLines(ctx: CtxInit): Cmd {
 }
 
 export function moveItemUp(ctx: CtxInit): Cmd {
-    return moveItem(ctx, ra.Direction.Up);
+    return moveItem(ctx, "Up");
 }
 
 export function moveItemDown(ctx: CtxInit): Cmd {
-    return moveItem(ctx, ra.Direction.Down);
+    return moveItem(ctx, "Down");
 }
 
 export function moveItem(ctx: CtxInit, direction: ra.Direction): Cmd {
diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts
index eb4f965291f..1faa0ad9106 100644
--- a/src/tools/rust-analyzer/editors/code/src/config.ts
+++ b/src/tools/rust-analyzer/editors/code/src/config.ts
@@ -1,5 +1,6 @@
-import * as path from "path";
+import * as Is from "vscode-languageclient/lib/common/utils/is";
 import * as os from "os";
+import * as path from "path";
 import * as vscode from "vscode";
 import { Env } from "./client";
 import { log } from "./util";
@@ -47,7 +48,7 @@ export class Config {
     }
 
     private refreshLogging() {
-        log.setEnabled(this.traceExtension);
+        log.setEnabled(this.traceExtension ?? false);
         log.info("Extension version:", this.package.version);
 
         const cfg = Object.entries(this.cfg).filter(([_, val]) => !(val instanceof Function));
@@ -86,58 +87,84 @@ export class Config {
      * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
      */
     private configureLanguage() {
-        if (this.typingContinueCommentsOnNewline && !this.configureLang) {
+        // Only need to dispose of the config if there's a change
+        if (this.configureLang) {
+            this.configureLang.dispose();
+            this.configureLang = undefined;
+        }
+
+        let onEnterRules: vscode.OnEnterRule[] = [
+            {
+                // Carry indentation from the previous line
+                beforeText: /^\s*$/,
+                action: { indentAction: vscode.IndentAction.None },
+            },
+            {
+                // After the end of a function/field chain,
+                // with the semicolon on the same line
+                beforeText: /^\s+\..*;/,
+                action: { indentAction: vscode.IndentAction.Outdent },
+            },
+            {
+                // After the end of a function/field chain,
+                // with semicolon detached from the rest
+                beforeText: /^\s+;/,
+                previousLineText: /^\s+\..*/,
+                action: { indentAction: vscode.IndentAction.Outdent },
+            },
+        ];
+
+        if (this.typingContinueCommentsOnNewline) {
             const indentAction = vscode.IndentAction.None;
 
-            this.configureLang = vscode.languages.setLanguageConfiguration("rust", {
-                onEnterRules: [
-                    {
-                        // Doc single-line comment
-                        // e.g. ///|
-                        beforeText: /^\s*\/{3}.*$/,
-                        action: { indentAction, appendText: "/// " },
-                    },
-                    {
-                        // Parent doc single-line comment
-                        // e.g. //!|
-                        beforeText: /^\s*\/{2}\!.*$/,
-                        action: { indentAction, appendText: "//! " },
-                    },
-                    {
-                        // Begins an auto-closed multi-line comment (standard or parent doc)
-                        // e.g. /** | */ or /*! | */
-                        beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
-                        afterText: /^\s*\*\/$/,
-                        action: {
-                            indentAction: vscode.IndentAction.IndentOutdent,
-                            appendText: " * ",
-                        },
-                    },
-                    {
-                        // Begins a multi-line comment (standard or parent doc)
-                        // e.g. /** ...| or /*! ...|
-                        beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
-                        action: { indentAction, appendText: " * " },
-                    },
-                    {
-                        // Continues a multi-line comment
-                        // e.g.  * ...|
-                        beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
-                        action: { indentAction, appendText: "* " },
+            onEnterRules = [
+                ...onEnterRules,
+                {
+                    // Doc single-line comment
+                    // e.g. ///|
+                    beforeText: /^\s*\/{3}.*$/,
+                    action: { indentAction, appendText: "/// " },
+                },
+                {
+                    // Parent doc single-line comment
+                    // e.g. //!|
+                    beforeText: /^\s*\/{2}\!.*$/,
+                    action: { indentAction, appendText: "//! " },
+                },
+                {
+                    // Begins an auto-closed multi-line comment (standard or parent doc)
+                    // e.g. /** | */ or /*! | */
+                    beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+                    afterText: /^\s*\*\/$/,
+                    action: {
+                        indentAction: vscode.IndentAction.IndentOutdent,
+                        appendText: " * ",
                     },
-                    {
-                        // Dedents after closing a multi-line comment
-                        // e.g.  */|
-                        beforeText: /^(\ \ )*\ \*\/\s*$/,
-                        action: { indentAction, removeText: 1 },
-                    },
-                ],
-            });
-        }
-        if (!this.typingContinueCommentsOnNewline && this.configureLang) {
-            this.configureLang.dispose();
-            this.configureLang = undefined;
+                },
+                {
+                    // Begins a multi-line comment (standard or parent doc)
+                    // e.g. /** ...| or /*! ...|
+                    beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+                    action: { indentAction, appendText: " * " },
+                },
+                {
+                    // Continues a multi-line comment
+                    // e.g.  * ...|
+                    beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
+                    action: { indentAction, appendText: "* " },
+                },
+                {
+                    // Dedents after closing a multi-line comment
+                    // e.g.  */|
+                    beforeText: /^(\ \ )*\ \*\/\s*$/,
+                    action: { indentAction, removeText: 1 },
+                },
+            ];
         }
+
+        this.configureLang = vscode.languages.setLanguageConfiguration("rust", {
+            onEnterRules,
+        });
     }
 
     // We don't do runtime config validation here for simplicity. More on stackoverflow:
@@ -163,18 +190,24 @@ export class Config {
      * ```
      * So this getter handles this quirk by not requiring the caller to use postfix `!`
      */
-    private get<T>(path: string): T {
-        return this.cfg.get<T>(path)!;
+    private get<T>(path: string): T | undefined {
+        return substituteVSCodeVariables(this.cfg.get<T>(path));
     }
 
     get serverPath() {
         return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
     }
+
     get serverExtraEnv(): Env {
         const extraEnv =
             this.get<{ [key: string]: string | number } | null>("server.extraEnv") ?? {};
-        return Object.fromEntries(
-            Object.entries(extraEnv).map(([k, v]) => [k, typeof v !== "string" ? v.toString() : v])
+        return substituteVariablesInEnv(
+            Object.fromEntries(
+                Object.entries(extraEnv).map(([k, v]) => [
+                    k,
+                    typeof v !== "string" ? v.toString() : v,
+                ])
+            )
         );
     }
     get traceExtension() {
@@ -216,13 +249,13 @@ export class Config {
         if (sourceFileMap !== "auto") {
             // "/rustc/<id>" used by suggestions only.
             const { ["/rustc/<id>"]: _, ...trimmed } =
-                this.get<Record<string, string>>("debug.sourceFileMap");
+                this.get<Record<string, string>>("debug.sourceFileMap") ?? {};
             sourceFileMap = trimmed;
         }
 
         return {
             engine: this.get<string>("debug.engine"),
-            engineSettings: this.get<object>("debug.engineSettings"),
+            engineSettings: this.get<object>("debug.engineSettings") ?? {},
             openDebugPane: this.get<boolean>("debug.openDebugPane"),
             sourceFileMap: sourceFileMap,
         };
@@ -247,37 +280,25 @@ export class Config {
     }
 }
 
-const VarRegex = new RegExp(/\$\{(.+?)\}/g);
-
-export function substituteVSCodeVariableInString(val: string): string {
-    return val.replace(VarRegex, (substring: string, varName) => {
-        if (typeof varName === "string") {
-            return computeVscodeVar(varName) || substring;
-        } else {
-            return substring;
-        }
-    });
-}
-
-export function substituteVSCodeVariables(resp: any): any {
-    if (typeof resp === "string") {
-        return substituteVSCodeVariableInString(resp);
-    } else if (resp && Array.isArray(resp)) {
+export function substituteVSCodeVariables<T>(resp: T): T {
+    if (Is.string(resp)) {
+        return substituteVSCodeVariableInString(resp) as T;
+    } else if (resp && Is.array<any>(resp)) {
         return resp.map((val) => {
             return substituteVSCodeVariables(val);
-        });
+        }) as T;
     } else if (resp && typeof resp === "object") {
         const res: { [key: string]: any } = {};
         for (const key in resp) {
             const val = resp[key];
             res[key] = substituteVSCodeVariables(val);
         }
-        return res;
-    } else if (typeof resp === "function") {
-        return null;
+        return res as T;
     }
     return resp;
 }
+
+// FIXME: Merge this with `substituteVSCodeVariables` above
 export function substituteVariablesInEnv(env: Env): Env {
     const missingDeps = new Set<string>();
     // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
@@ -355,6 +376,17 @@ export function substituteVariablesInEnv(env: Env): Env {
     return resolvedEnv;
 }
 
+const VarRegex = new RegExp(/\$\{(.+?)\}/g);
+function substituteVSCodeVariableInString(val: string): string {
+    return val.replace(VarRegex, (substring: string, varName) => {
+        if (Is.string(varName)) {
+            return computeVscodeVar(varName) || substring;
+        } else {
+            return substring;
+        }
+    });
+}
+
 function computeVscodeVar(varName: string): string | null {
     const workspaceFolder = () => {
         const folders = vscode.workspace.workspaceFolders ?? [];
diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts
index d6cee5c8fc6..e2a30e0cc45 100644
--- a/src/tools/rust-analyzer/editors/code/src/ctx.ts
+++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts
@@ -2,9 +2,9 @@ import * as vscode from "vscode";
 import * as lc from "vscode-languageclient/node";
 import * as ra from "./lsp_ext";
 
-import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config";
+import { Config, substituteVSCodeVariables } from "./config";
 import { createClient } from "./client";
-import { isRustDocument, isRustEditor, log, RustEditor } from "./util";
+import { isRustDocument, isRustEditor, LazyOutputChannel, log, RustEditor } from "./util";
 import { ServerStatusParams } from "./lsp_ext";
 import { PersistentState } from "./persistent_state";
 import { bootstrap } from "./bootstrap";
@@ -128,9 +128,7 @@ export class Ctx {
         }
 
         if (!this.traceOutputChannel) {
-            this.traceOutputChannel = vscode.window.createOutputChannel(
-                "Rust Analyzer Language Server Trace"
-            );
+            this.traceOutputChannel = new LazyOutputChannel("Rust Analyzer Language Server Trace");
             this.pushExtCleanup(this.traceOutputChannel);
         }
         if (!this.outputChannel) {
@@ -152,9 +150,7 @@ export class Ctx {
                     throw new Error(message);
                 }
             );
-            const newEnv = substituteVariablesInEnv(
-                Object.assign({}, process.env, this.config.serverExtraEnv)
-            );
+            const newEnv = Object.assign({}, process.env, this.config.serverExtraEnv);
             const run: lc.Executable = {
                 command: this._serverPath,
                 options: { env: newEnv },
@@ -187,6 +183,11 @@ export class Ctx {
                     this.setServerStatus(params)
                 )
             );
+            this.pushClientCleanup(
+                this._client.onNotification(ra.openServerLogs, () => {
+                    this.outputChannel!.show();
+                })
+            );
         }
         return this._client;
     }
diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts
index bd45599227e..268b70b4fbb 100644
--- a/src/tools/rust-analyzer/editors/code/src/debug.ts
+++ b/src/tools/rust-analyzer/editors/code/src/debug.ts
@@ -84,7 +84,7 @@ async function getDebugConfiguration(
             debugEngine = vscode.extensions.getExtension(engineId);
             if (debugEngine) break;
         }
-    } else {
+    } else if (debugOptions.engine) {
         debugEngine = vscode.extensions.getExtension(debugOptions.engine);
     }
 
diff --git a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
index 29349cc20f5..f6f5124dc41 100644
--- a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
+++ b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts
@@ -4,130 +4,134 @@
 
 import * as lc from "vscode-languageclient";
 
-export interface AnalyzerStatusParams {
-    textDocument?: lc.TextDocumentIdentifier;
-}
+// rust-analyzer overrides
+
+export const hover = new lc.RequestType<
+    HoverParams,
+    (lc.Hover & { actions: CommandLinkGroup[] }) | null,
+    void
+>("textDocument/hover");
+export type HoverParams = { position: lc.Position | lc.Range } & Omit<
+    lc.TextDocumentPositionParams,
+    "position"
+> &
+    lc.WorkDoneProgressParams;
+export type CommandLink = {
+    /**
+     * A tooltip for the command, when represented in the UI.
+     */
+    tooltip?: string;
+} & lc.Command;
+export type CommandLinkGroup = {
+    title?: string;
+    commands: CommandLink[];
+};
+
+// rust-analyzer extensions
+
 export const analyzerStatus = new lc.RequestType<AnalyzerStatusParams, string, void>(
     "rust-analyzer/analyzerStatus"
 );
+export const cancelFlycheck = new lc.NotificationType0("rust-analyzer/cancelFlycheck");
+export const clearFlycheck = new lc.NotificationType0("rust-analyzer/clearFlycheck");
+export const expandMacro = new lc.RequestType<ExpandMacroParams, ExpandedMacro | null, void>(
+    "rust-analyzer/expandMacro"
+);
 export const memoryUsage = new lc.RequestType0<string, void>("rust-analyzer/memoryUsage");
-export const shuffleCrateGraph = new lc.RequestType0<null, void>("rust-analyzer/shuffleCrateGraph");
-
-export interface ServerStatusParams {
-    health: "ok" | "warning" | "error";
-    quiescent: boolean;
-    message?: string;
-}
-export const serverStatus = new lc.NotificationType<ServerStatusParams>(
-    "experimental/serverStatus"
+export const openServerLogs = new lc.NotificationType0("rust-analyzer/openServerLogs");
+export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, TestInfo[], void>(
+    "rust-analyzer/relatedTests"
 );
-
 export const reloadWorkspace = new lc.RequestType0<null, void>("rust-analyzer/reloadWorkspace");
-
-export const hover = new lc.RequestType<HoverParams, lc.Hover | null, void>("textDocument/hover");
-
-export interface HoverParams extends lc.WorkDoneProgressParams {
-    textDocument: lc.TextDocumentIdentifier;
-    position: lc.Range | lc.Position;
-}
-
-export interface SyntaxTreeParams {
-    textDocument: lc.TextDocumentIdentifier;
-    range: lc.Range | null;
-}
+export const runFlycheck = new lc.NotificationType<{
+    textDocument: lc.TextDocumentIdentifier | null;
+}>("rust-analyzer/runFlycheck");
+export const shuffleCrateGraph = new lc.RequestType0<null, void>("rust-analyzer/shuffleCrateGraph");
 export const syntaxTree = new lc.RequestType<SyntaxTreeParams, string, void>(
     "rust-analyzer/syntaxTree"
 );
-
-export const viewHir = new lc.RequestType<lc.TextDocumentPositionParams, string, void>(
-    "rust-analyzer/viewHir"
+export const viewCrateGraph = new lc.RequestType<ViewCrateGraphParams, string, void>(
+    "rust-analyzer/viewCrateGraph"
 );
-
 export const viewFileText = new lc.RequestType<lc.TextDocumentIdentifier, string, void>(
     "rust-analyzer/viewFileText"
 );
-
-export interface ViewItemTreeParams {
-    textDocument: lc.TextDocumentIdentifier;
-}
-
+export const viewHir = new lc.RequestType<lc.TextDocumentPositionParams, string, void>(
+    "rust-analyzer/viewHir"
+);
 export const viewItemTree = new lc.RequestType<ViewItemTreeParams, string, void>(
     "rust-analyzer/viewItemTree"
 );
 
-export interface ViewCrateGraphParams {
-    full: boolean;
-}
+export type AnalyzerStatusParams = { textDocument?: lc.TextDocumentIdentifier };
 
-export const viewCrateGraph = new lc.RequestType<ViewCrateGraphParams, string, void>(
-    "rust-analyzer/viewCrateGraph"
-);
-
-export interface ExpandMacroParams {
+export type ExpandMacroParams = {
     textDocument: lc.TextDocumentIdentifier;
     position: lc.Position;
-}
-export interface ExpandedMacro {
+};
+export type ExpandedMacro = {
     name: string;
     expansion: string;
-}
-export const expandMacro = new lc.RequestType<ExpandMacroParams, ExpandedMacro | null, void>(
-    "rust-analyzer/expandMacro"
-);
-
-export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, TestInfo[], void>(
-    "rust-analyzer/relatedTests"
-);
-
-export const cancelFlycheck = new lc.NotificationType0("rust-analyzer/cancelFlycheck");
-export const clearFlycheck = new lc.NotificationType0("rust-analyzer/clearFlycheck");
-export const runFlycheck = new lc.NotificationType<{
-    textDocument: lc.TextDocumentIdentifier | null;
-}>("rust-analyzer/runFlycheck");
-
-// Experimental extensions
-
-export interface SsrParams {
-    query: string;
-    parseOnly: boolean;
+};
+export type TestInfo = { runnable: Runnable };
+export type SyntaxTreeParams = {
     textDocument: lc.TextDocumentIdentifier;
-    position: lc.Position;
-    selections: readonly lc.Range[];
-}
-export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>("experimental/ssr");
+    range: lc.Range | null;
+};
+export type ViewCrateGraphParams = { full: boolean };
+export type ViewItemTreeParams = { textDocument: lc.TextDocumentIdentifier };
 
-export interface MatchingBraceParams {
-    textDocument: lc.TextDocumentIdentifier;
-    positions: lc.Position[];
-}
+// experimental extensions
+
+export const joinLines = new lc.RequestType<JoinLinesParams, lc.TextEdit[], void>(
+    "experimental/joinLines"
+);
 export const matchingBrace = new lc.RequestType<MatchingBraceParams, lc.Position[], void>(
     "experimental/matchingBrace"
 );
-
+export const moveItem = new lc.RequestType<MoveItemParams, lc.TextEdit[], void>(
+    "experimental/moveItem"
+);
+export const onEnter = new lc.RequestType<lc.TextDocumentPositionParams, lc.TextEdit[], void>(
+    "experimental/onEnter"
+);
+export const openCargoToml = new lc.RequestType<OpenCargoTomlParams, lc.Location, void>(
+    "experimental/openCargoToml"
+);
+export const openDocs = new lc.RequestType<lc.TextDocumentPositionParams, string | void, void>(
+    "experimental/externalDocs"
+);
 export const parentModule = new lc.RequestType<
     lc.TextDocumentPositionParams,
     lc.LocationLink[] | null,
     void
 >("experimental/parentModule");
-
-export interface JoinLinesParams {
-    textDocument: lc.TextDocumentIdentifier;
-    ranges: lc.Range[];
-}
-export const joinLines = new lc.RequestType<JoinLinesParams, lc.TextEdit[], void>(
-    "experimental/joinLines"
+export const runnables = new lc.RequestType<RunnablesParams, Runnable[], void>(
+    "experimental/runnables"
 );
-
-export const onEnter = new lc.RequestType<lc.TextDocumentPositionParams, lc.TextEdit[], void>(
-    "experimental/onEnter"
+export const serverStatus = new lc.NotificationType<ServerStatusParams>(
+    "experimental/serverStatus"
 );
+export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>("experimental/ssr");
 
-export interface RunnablesParams {
+export type JoinLinesParams = {
     textDocument: lc.TextDocumentIdentifier;
-    position: lc.Position | null;
-}
-
-export interface Runnable {
+    ranges: lc.Range[];
+};
+export type MatchingBraceParams = {
+    textDocument: lc.TextDocumentIdentifier;
+    positions: lc.Position[];
+};
+export type MoveItemParams = {
+    textDocument: lc.TextDocumentIdentifier;
+    range: lc.Range;
+    direction: Direction;
+};
+export type Direction = "Up" | "Down";
+export type OpenCargoTomlParams = {
+    textDocument: lc.TextDocumentIdentifier;
+};
+export type Runnable = {
     label: string;
     location?: lc.LocationLink;
     kind: "cargo";
@@ -139,50 +143,20 @@ export interface Runnable {
         expectTest?: boolean;
         overrideCargo?: string;
     };
-}
-export const runnables = new lc.RequestType<RunnablesParams, Runnable[], void>(
-    "experimental/runnables"
-);
-
-export interface TestInfo {
-    runnable: Runnable;
-}
-
-export interface CommandLink extends lc.Command {
-    /**
-     * A tooltip for the command, when represented in the UI.
-     */
-    tooltip?: string;
-}
-
-export interface CommandLinkGroup {
-    title?: string;
-    commands: CommandLink[];
-}
-
-export const openDocs = new lc.RequestType<lc.TextDocumentPositionParams, string | void, void>(
-    "experimental/externalDocs"
-);
-
-export const openCargoToml = new lc.RequestType<OpenCargoTomlParams, lc.Location, void>(
-    "experimental/openCargoToml"
-);
-
-export interface OpenCargoTomlParams {
+};
+export type RunnablesParams = {
     textDocument: lc.TextDocumentIdentifier;
-}
-
-export const moveItem = new lc.RequestType<MoveItemParams, lc.TextEdit[], void>(
-    "experimental/moveItem"
-);
-
-export interface MoveItemParams {
+    position: lc.Position | null;
+};
+export type ServerStatusParams = {
+    health: "ok" | "warning" | "error";
+    quiescent: boolean;
+    message?: string;
+};
+export type SsrParams = {
+    query: string;
+    parseOnly: boolean;
     textDocument: lc.TextDocumentIdentifier;
-    range: lc.Range;
-    direction: Direction;
-}
-
-export const enum Direction {
-    Up = "Up",
-    Down = "Down",
-}
+    position: lc.Position;
+    selections: readonly lc.Range[];
+};
diff --git a/src/tools/rust-analyzer/editors/code/src/util.ts b/src/tools/rust-analyzer/editors/code/src/util.ts
index cd91932bb60..d93b9caeb16 100644
--- a/src/tools/rust-analyzer/editors/code/src/util.ts
+++ b/src/tools/rust-analyzer/editors/code/src/util.ts
@@ -117,7 +117,7 @@ export function isValidExecutable(path: string): boolean {
 
     const res = spawnSync(path, ["--version"], { encoding: "utf8" });
 
-    const printOutput = res.error && (res.error as any).code !== "ENOENT" ? log.warn : log.debug;
+    const printOutput = res.error ? log.warn : log.info;
     printOutput(path, "--version:", res);
 
     return res.status === 0;
@@ -166,3 +166,49 @@ export function execute(command: string, options: ExecOptions): Promise<string>
         });
     });
 }
+
+export class LazyOutputChannel implements vscode.OutputChannel {
+    constructor(name: string) {
+        this.name = name;
+    }
+
+    name: string;
+    _channel: vscode.OutputChannel | undefined;
+
+    get channel(): vscode.OutputChannel {
+        if (!this._channel) {
+            this._channel = vscode.window.createOutputChannel(this.name);
+        }
+        return this._channel;
+    }
+
+    append(value: string): void {
+        this.channel.append(value);
+    }
+    appendLine(value: string): void {
+        this.channel.appendLine(value);
+    }
+    replace(value: string): void {
+        this.channel.replace(value);
+    }
+    clear(): void {
+        if (this._channel) {
+            this._channel.clear();
+        }
+    }
+    show(preserveFocus?: boolean): void;
+    show(column?: vscode.ViewColumn, preserveFocus?: boolean): void;
+    show(column?: any, preserveFocus?: any): void {
+        this.channel.show(column, preserveFocus);
+    }
+    hide(): void {
+        if (this._channel) {
+            this._channel.hide();
+        }
+    }
+    dispose(): void {
+        if (this._channel) {
+            this._channel.dispose();
+        }
+    }
+}
diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
index cdee6432df8..49a825e579b 100644
--- a/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
+++ b/src/tools/rust-analyzer/lib/lsp-server/src/stdio.rs
@@ -21,10 +21,7 @@ pub(crate) fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThread
         let stdin = stdin();
         let mut stdin = stdin.lock();
         while let Some(msg) = Message::read(&mut stdin)? {
-            let is_exit = match &msg {
-                Message::Notification(n) => n.is_exit(),
-                _ => false,
-            };
+            let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit());
 
             reader_sender.send(msg).unwrap();
 
diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml
index 2dd01796c6e..b4b294c3099 100644
--- a/src/tools/rust-analyzer/xtask/Cargo.toml
+++ b/src/tools/rust-analyzer/xtask/Cargo.toml
@@ -12,5 +12,6 @@ flate2 = "1.0.24"
 write-json = "0.1.2"
 xshell = "0.2.2"
 xflags = "0.3.0"
+time = { version = "0.3", default-features = false }
 zip = { version = "0.6", default-features = false, features = ["deflate", "time"] }
 # Avoid adding more dependencies to this crate
diff --git a/src/tools/rust-analyzer/xtask/src/dist.rs b/src/tools/rust-analyzer/xtask/src/dist.rs
index 74715c53eaa..5a03c71b28a 100644
--- a/src/tools/rust-analyzer/xtask/src/dist.rs
+++ b/src/tools/rust-analyzer/xtask/src/dist.rs
@@ -6,6 +6,7 @@ use std::{
 };
 
 use flate2::{write::GzEncoder, Compression};
+use time::OffsetDateTime;
 use xshell::{cmd, Shell};
 use zip::{write::FileOptions, DateTime, ZipWriter};
 
@@ -112,7 +113,8 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any
         src_path.file_name().unwrap().to_str().unwrap(),
         FileOptions::default()
             .last_modified_time(
-                DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(),
+                DateTime::try_from(OffsetDateTime::from(std::fs::metadata(src_path)?.modified()?))
+                    .unwrap(),
             )
             .unix_permissions(0o755)
             .compression_method(zip::CompressionMethod::Deflated)
@@ -125,7 +127,10 @@ fn zip(src_path: &Path, symbols_path: Option<&PathBuf>, dest_path: &Path) -> any
             symbols_path.file_name().unwrap().to_str().unwrap(),
             FileOptions::default()
                 .last_modified_time(
-                    DateTime::from_time(std::fs::metadata(src_path)?.modified()?.into()).unwrap(),
+                    DateTime::try_from(OffsetDateTime::from(
+                        std::fs::metadata(src_path)?.modified()?,
+                    ))
+                    .unwrap(),
                 )
                 .compression_method(zip::CompressionMethod::Deflated)
                 .compression_level(Some(9)),
diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs
index 79b5f3d2f61..cdb7d8fac89 100644
--- a/src/tools/rust-analyzer/xtask/src/publish.rs
+++ b/src/tools/rust-analyzer/xtask/src/publish.rs
@@ -13,7 +13,7 @@ impl flags::PublishReleaseNotes {
         let tag_name = &file_name[0..10];
         let original_changelog_url = create_original_changelog_url(&file_name);
         let additional_paragraph =
-            format!("\nSee also [original changelog]({original_changelog_url}).");
+            format!("\nSee also the [changelog post]({original_changelog_url}).");
         markdown.push_str(&additional_paragraph);
         if self.dry_run {
             println!("{markdown}");
diff --git a/src/tools/rust-analyzer/xtask/src/release/changelog.rs b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
index d2a1483e387..67538414840 100644
--- a/src/tools/rust-analyzer/xtask/src/release/changelog.rs
+++ b/src/tools/rust-analyzer/xtask/src/release/changelog.rs
@@ -69,7 +69,7 @@ pub(crate) fn get_changelog(
 :page-layout: post
 
 Commit: commit:{commit}[] +
-Release: release:{today}[]
+Release: release:{today}[] (`TBD`)
 
 == New Features
 
@@ -156,7 +156,7 @@ fn parse_title_line(s: &str) -> PrInfo {
         ("minor: ", PrKind::Skip),
     ];
 
-    for &(prefix, kind) in &PREFIXES {
+    for (prefix, kind) in PREFIXES {
         if lower.starts_with(prefix) {
             let message = match &kind {
                 PrKind::Skip => None,