Diffstat (limited to 'src')
249 files changed, 5593 insertions, 3158 deletions
diff --git a/src/bootstrap/src/core/build_steps/gcc.rs b/src/bootstrap/src/core/build_steps/gcc.rs index 899e3fd9a45..d4cbbe60921 100644 --- a/src/bootstrap/src/core/build_steps/gcc.rs +++ b/src/bootstrap/src/core/build_steps/gcc.rs @@ -220,21 +220,18 @@ fn build_gcc(metadata: &Meta, builder: &Builder<'_>, target: TargetSelection) { t!(fs::create_dir_all(install_dir)); // GCC creates files (e.g. symlinks to the downloaded dependencies) - // in the source directory, which does not work with our CI setup, where we mount + // in the source directory, which does not work with our CI/Docker setup, where we mount // source directories as read-only on Linux. - // Therefore, as a part of the build in CI, we first copy the whole source directory - // to the build directory, and perform the build from there. - let src_dir = if builder.config.is_running_on_ci { - let src_dir = builder.gcc_out(target).join("src"); - if src_dir.exists() { - builder.remove_dir(&src_dir); - } - builder.create_dir(&src_dir); - builder.cp_link_r(root, &src_dir); - src_dir - } else { - root.clone() - }; + // And in general, we shouldn't be modifying the source directories if possible, even for local + // builds. + // Therefore, we first copy the whole source directory to the build directory, and perform the + // build from there. + let src_dir = builder.gcc_out(target).join("src"); + if src_dir.exists() { + builder.remove_dir(&src_dir); + } + builder.create_dir(&src_dir); + builder.cp_link_r(root, &src_dir); command(src_dir.join("contrib/download_prerequisites")).current_dir(&src_dir).run(builder); let mut configure_cmd = command(src_dir.join("configure")); diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 28958b60fc3..22a75183404 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -697,7 +697,7 @@ impl Config { config.change_id = toml.change_id.inner; let Build { - mut description, + description, build, host, target, @@ -749,7 +749,7 @@ impl Config { compiletest_diff_tool, compiletest_use_stage0_libtest, tidy_extra_checks, - mut ccache, + ccache, exclude, } = toml.build.unwrap_or_default(); @@ -942,7 +942,7 @@ impl Config { config.rust_profile_use = flags_rust_profile_use; config.rust_profile_generate = flags_rust_profile_generate; - config.apply_rust_config(toml.rust, flags_warnings, &mut description); + config.apply_rust_config(toml.rust, flags_warnings); config.reproducible_artifacts = flags_reproducible_artifact; config.description = description; @@ -963,7 +963,7 @@ impl Config { config.channel = channel; } - config.apply_llvm_config(toml.llvm, &mut ccache); + config.apply_llvm_config(toml.llvm); config.apply_gcc_config(toml.gcc); diff --git a/src/bootstrap/src/core/config/toml/llvm.rs b/src/bootstrap/src/core/config/toml/llvm.rs index 4774e202bd8..1f0cecd145c 100644 --- a/src/bootstrap/src/core/config/toml/llvm.rs +++ b/src/bootstrap/src/core/config/toml/llvm.rs @@ -17,8 +17,6 @@ define_config! 
{ tests: Option<bool> = "tests", enzyme: Option<bool> = "enzyme", plugins: Option<bool> = "plugins", - // FIXME: Remove this field at Q2 2025, it has been replaced by build.ccache - ccache: Option<StringOrBool> = "ccache", static_libstdcpp: Option<bool> = "static-libstdcpp", libzstd: Option<bool> = "libzstd", ninja: Option<bool> = "ninja", @@ -97,7 +95,6 @@ pub fn check_incompatible_options_for_ci_llvm( assertions: _, tests: _, plugins, - ccache: _, static_libstdcpp: _, libzstd, ninja: _, @@ -149,11 +146,7 @@ pub fn check_incompatible_options_for_ci_llvm( } impl Config { - pub fn apply_llvm_config( - &mut self, - toml_llvm: Option<Llvm>, - ccache: &mut Option<StringOrBool>, - ) { + pub fn apply_llvm_config(&mut self, toml_llvm: Option<Llvm>) { let mut llvm_tests = None; let mut llvm_enzyme = None; let mut llvm_offload = None; @@ -168,7 +161,6 @@ impl Config { tests, enzyme, plugins, - ccache: llvm_ccache, static_libstdcpp, libzstd, ninja, @@ -191,13 +183,7 @@ impl Config { download_ci_llvm, build_config, } = llvm; - if llvm_ccache.is_some() { - eprintln!("Warning: llvm.ccache is deprecated. Use build.ccache instead."); - } - if ccache.is_none() { - *ccache = llvm_ccache; - } set(&mut self.ninja_in_file, ninja); llvm_tests = tests; llvm_enzyme = enzyme; diff --git a/src/bootstrap/src/core/config/toml/rust.rs b/src/bootstrap/src/core/config/toml/rust.rs index 0fae235bb93..71fab0e6ae6 100644 --- a/src/bootstrap/src/core/config/toml/rust.rs +++ b/src/bootstrap/src/core/config/toml/rust.rs @@ -36,8 +36,6 @@ define_config! { incremental: Option<bool> = "incremental", default_linker: Option<String> = "default-linker", channel: Option<String> = "channel", - // FIXME: Remove this field at Q2 2025, it has been replaced by build.description - description: Option<String> = "description", musl_root: Option<String> = "musl-root", rpath: Option<bool> = "rpath", strip: Option<bool> = "strip", @@ -320,7 +318,6 @@ pub fn check_incompatible_options_for_ci_rustc( jemalloc, rpath, channel, - description, default_linker, std_features, @@ -388,7 +385,6 @@ pub fn check_incompatible_options_for_ci_rustc( err!(current_rust_config.std_features, std_features, "rust"); warn!(current_rust_config.channel, channel, "rust"); - warn!(current_rust_config.description, description, "rust"); Ok(()) } @@ -415,12 +411,7 @@ pub(crate) fn validate_codegen_backends(backends: Vec<String>, section: &str) -> } impl Config { - pub fn apply_rust_config( - &mut self, - toml_rust: Option<Rust>, - warnings: Warnings, - description: &mut Option<String>, - ) { + pub fn apply_rust_config(&mut self, toml_rust: Option<Rust>, warnings: Warnings) { let mut debug = None; let mut rustc_debug_assertions = None; let mut std_debug_assertions = None; @@ -459,7 +450,6 @@ impl Config { randomize_layout, default_linker, channel: _, // already handled above - description: rust_description, musl_root, rpath, verbose_tests, @@ -541,6 +531,14 @@ impl Config { lld_enabled = lld_enabled_toml; std_features = std_features_toml; + if optimize_toml.as_ref().is_some_and(|v| matches!(v, RustOptimize::Bool(false))) { + eprintln!( + "WARNING: setting `optimize` to `false` is known to cause errors and \ + should be considered unsupported. Refer to `bootstrap.example.toml` \ + for more details." 
+ ); + } + optimize = optimize_toml; self.rust_new_symbol_mangling = new_symbol_mangling; set(&mut self.rust_optimize_tests, optimize_tests); @@ -552,14 +550,6 @@ impl Config { set(&mut self.jemalloc, jemalloc); set(&mut self.test_compare_mode, test_compare_mode); set(&mut self.backtrace, backtrace); - if rust_description.is_some() { - eprintln!( - "Warning: rust.description is deprecated. Use build.description instead." - ); - } - if description.is_none() { - *description = rust_description; - } set(&mut self.rust_dist_src, dist_src); set(&mut self.verbose_tests, verbose_tests); // in the case "false" is set explicitly, do not overwrite the command line args diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index d888a7863bc..f802640a42d 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -481,4 +481,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "The current `./x suggest` implementation has been removed due to it being quite broken and a lack of maintenance bandwidth, with no prejudice against re-implementing it in a more maintainable form.", }, + ChangeInfo { + change_id: 143926, + severity: ChangeSeverity::Warning, + summary: "Removed `rust.description` and `llvm.ccache` as it was deprecated in #137723 and #136941 long time ago.", + }, ]; diff --git a/src/build_helper/src/lib.rs b/src/build_helper/src/lib.rs index 05de8fd2d42..266eedc6245 100644 --- a/src/build_helper/src/lib.rs +++ b/src/build_helper/src/lib.rs @@ -5,6 +5,7 @@ pub mod drop_bomb; pub mod fs; pub mod git; pub mod metrics; +pub mod npm; pub mod stage0_parser; pub mod targets; pub mod util; diff --git a/src/build_helper/src/npm.rs b/src/build_helper/src/npm.rs new file mode 100644 index 00000000000..dedef40978d --- /dev/null +++ b/src/build_helper/src/npm.rs @@ -0,0 +1,30 @@ +use std::error::Error; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::{fs, io}; + +/// Install an exact package version, and return the path of `node_modules`. 
+pub fn install_one( + out_dir: &Path, + npm_bin: &Path, + pkg_name: &str, + pkg_version: &str, +) -> Result<PathBuf, io::Error> { + let nm_path = out_dir.join("node_modules"); + let _ = fs::create_dir(&nm_path); + let mut child = Command::new(npm_bin) + .arg("install") + .arg("--audit=false") + .arg("--fund=false") + .arg(format!("{pkg_name}@{pkg_version}")) + .current_dir(out_dir) + .spawn()?; + let exit_status = child.wait()?; + if !exit_status.success() { + eprintln!("npm install did not exit successfully"); + return Err(io::Error::other(Box::<dyn Error + Send + Sync>::from(format!( + "npm install returned exit code {exit_status}" + )))); + } + Ok(nm_path) +} diff --git a/src/ci/docker/host-aarch64/aarch64-gnu-llvm-19/Dockerfile b/src/ci/docker/host-aarch64/aarch64-gnu-llvm-19/Dockerfile index 2f9d0010573..e73fbe506f7 100644 --- a/src/ci/docker/host-aarch64/aarch64-gnu-llvm-19/Dockerfile +++ b/src/ci/docker/host-aarch64/aarch64-gnu-llvm-19/Dockerfile @@ -50,9 +50,7 @@ ENV RUST_CONFIGURE_ARGS \ COPY scripts/shared.sh /scripts/ -ARG SCRIPT_ARG +COPY scripts/stage_2_test_set1.sh /scripts/ +COPY scripts/stage_2_test_set2.sh /scripts/ -COPY scripts/stage_2_test_set1.sh /tmp/ -COPY scripts/stage_2_test_set2.sh /tmp/ - -ENV SCRIPT "/tmp/${SCRIPT_ARG}" +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile index 44f6a8d2a15..01f19eac1d2 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile @@ -96,12 +96,10 @@ ENV RUST_CONFIGURE_ARGS \ --set rust.lto=thin \ --set rust.codegen-units=1 -ARG SCRIPT_ARG - COPY host-x86_64/dist-x86_64-linux/dist.sh /scripts/ COPY host-x86_64/dist-x86_64-linux/dist-alt.sh /scripts/ -ENV SCRIPT /scripts/${SCRIPT_ARG} +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_LINKER=clang diff --git a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile index 58e66fd637a..be3df9d4036 100644 --- a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile +++ b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile @@ -23,7 +23,7 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests -ARG SCRIPT_ARG COPY scripts/stage_2_test_set1.sh /scripts/ COPY scripts/stage_2_test_set2.sh /scripts/ -ENV SCRIPT ${SCRIPT_ARG} +COPY scripts/i686-gnu-nopt-2.sh /scripts/ +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" diff --git a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile index a715f7182d2..00cd24b89db 100644 --- a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile +++ b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile @@ -24,7 +24,6 @@ COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu -ARG SCRIPT_ARG COPY scripts/stage_2_test_set1.sh /scripts/ COPY scripts/stage_2_test_set2.sh /scripts/ -ENV SCRIPT /scripts/${SCRIPT_ARG} +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile index c09be047c6a..5cba7c564f1 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile @@ -57,12 +57,10 @@ ENV RUST_CONFIGURE_ARGS \ COPY 
scripts/shared.sh /scripts/ -ARG SCRIPT_ARG +COPY scripts/x86_64-gnu-llvm.sh /scripts/ +COPY scripts/x86_64-gnu-llvm2.sh /scripts/ +COPY scripts/x86_64-gnu-llvm3.sh /scripts/ +COPY scripts/stage_2_test_set1.sh /scripts/ +COPY scripts/stage_2_test_set2.sh /scripts/ -COPY scripts/x86_64-gnu-llvm.sh /tmp/ -COPY scripts/x86_64-gnu-llvm2.sh /tmp/ -COPY scripts/x86_64-gnu-llvm3.sh /tmp/ -COPY scripts/stage_2_test_set1.sh /tmp/ -COPY scripts/stage_2_test_set2.sh /tmp/ - -ENV SCRIPT "/tmp/${SCRIPT_ARG}" +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile index 83a3bfb37a5..92c2631000f 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-20/Dockerfile @@ -57,12 +57,10 @@ ENV RUST_CONFIGURE_ARGS \ COPY scripts/shared.sh /scripts/ -ARG SCRIPT_ARG +COPY scripts/x86_64-gnu-llvm.sh /scripts/ +COPY scripts/x86_64-gnu-llvm2.sh /scripts/ +COPY scripts/x86_64-gnu-llvm3.sh /scripts/ +COPY scripts/stage_2_test_set1.sh /scripts/ +COPY scripts/stage_2_test_set2.sh /scripts/ -COPY scripts/x86_64-gnu-llvm.sh /tmp/ -COPY scripts/x86_64-gnu-llvm2.sh /tmp/ -COPY scripts/x86_64-gnu-llvm3.sh /tmp/ -COPY scripts/stage_2_test_set1.sh /tmp/ -COPY scripts/stage_2_test_set2.sh /tmp/ - -ENV SCRIPT "/tmp/${SCRIPT_ARG}" +ENV SCRIPT "Must specify DOCKER_SCRIPT for this image" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-miri/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-miri/Dockerfile index b937bc3e678..ad2ee85c7bb 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-miri/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-miri/Dockerfile @@ -46,12 +46,4 @@ ENV HOST_TARGET x86_64-unknown-linux-gnu COPY scripts/shared.sh /scripts/ -# For now, we need to use `--unsafe-perm=true` to go around an issue when npm tries -# to create a new folder. For reference: -# https://github.com/puppeteer/puppeteer/issues/375 -# -# We also specify the version in case we need to update it to go around cache limitations. -# -# The `browser-ui-test.version` file is also used by bootstrap to emit warnings in case -# the local version of the package is different than the one used by the CI. ENV SCRIPT /tmp/check-miri.sh ../x.py diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile index e770c58bd9c..95357d22937 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile @@ -76,8 +76,6 @@ COPY scripts/nodejs.sh /scripts/ RUN sh /scripts/nodejs.sh /node ENV PATH="/node/bin:${PATH}" -COPY host-x86_64/x86_64-gnu-tools/browser-ui-test.version /tmp/ - ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ --save-toolstates=/tmp/toolstate/toolstates.json \ @@ -91,15 +89,6 @@ ENV HOST_TARGET x86_64-unknown-linux-gnu COPY scripts/shared.sh /scripts/ -# For now, we need to use `--unsafe-perm=true` to go around an issue when npm tries -# to create a new folder. For reference: -# https://github.com/puppeteer/puppeteer/issues/375 -# -# We also specify the version in case we need to update it to go around cache limitations. -# -# The `browser-ui-test.version` file is also used by bootstrap to emit warnings in case -# the local version of the package is different than the one used by the CI. 
ENV SCRIPT /tmp/checktools.sh ../x.py && \ - npm install browser-ui-test@$(head -n 1 /tmp/browser-ui-test.version) --unsafe-perm=true && \ python3 ../x.py check compiletest --set build.compiletest-use-stage0-libtest=true && \ python3 ../x.py test tests/rustdoc-gui --stage 2 --test-args "'--jobs 1'" diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version b/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version deleted file mode 100644 index b9f8e558df4..00000000000 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version +++ /dev/null @@ -1 +0,0 @@ -0.21.1 \ No newline at end of file diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index da7d084d48d..044f5a8fff3 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -114,14 +114,6 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then "$context" ) - # If the environment variable DOCKER_SCRIPT is defined, - # set the build argument SCRIPT_ARG to DOCKER_SCRIPT. - # In this way, we run the script defined in CI, - # instead of the one defined in the Dockerfile. - if [ -n "${DOCKER_SCRIPT+x}" ]; then - build_args+=("--build-arg" "SCRIPT_ARG=${DOCKER_SCRIPT}") - fi - GHCR_BUILDKIT_IMAGE="ghcr.io/rust-lang/buildkit:buildx-stable-1" # On non-CI jobs, we try to download a pre-built image from the rust-lang-ci # ghcr.io registry. If it is not possible, we fall back to building the image @@ -341,6 +333,10 @@ if [ "$ENABLE_GCC_CODEGEN" = "1" ]; then echo "Setting extra environment values for docker: $extra_env" fi +if [ -n "${DOCKER_SCRIPT}" ]; then + extra_env="$extra_env --env SCRIPT=\"/scripts/${DOCKER_SCRIPT}\"" +fi + docker \ run \ --workdir /checkout/obj \ diff --git a/src/ci/docker/scripts/i686-gnu-nopt-2.sh b/src/ci/docker/scripts/i686-gnu-nopt-2.sh new file mode 100755 index 00000000000..4c171739daf --- /dev/null +++ b/src/ci/docker/scripts/i686-gnu-nopt-2.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -ex + +python3 ../x.py test --stage 1 --set rust.optimize=false library/std && +/scripts/stage_2_test_set2.sh diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm2.sh b/src/ci/docker/scripts/x86_64-gnu-llvm2.sh index fe5382aaa48..0060b9bfd23 100755 --- a/src/ci/docker/scripts/x86_64-gnu-llvm2.sh +++ b/src/ci/docker/scripts/x86_64-gnu-llvm2.sh @@ -4,7 +4,7 @@ set -ex ##### Test stage 2 ##### -/tmp/stage_2_test_set1.sh +/scripts/stage_2_test_set1.sh # Run the `mir-opt` tests again but this time for a 32-bit target. 
# This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 445fc0dd018..6c5e37b51ef 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -315,16 +315,14 @@ auto: - name: i686-gnu-nopt-1 env: IMAGE: i686-gnu-nopt - DOCKER_SCRIPT: /scripts/stage_2_test_set1.sh + DOCKER_SCRIPT: stage_2_test_set1.sh <<: *job-linux-4c # Skip tests that run in i686-gnu-nopt-1 - name: i686-gnu-nopt-2 env: IMAGE: i686-gnu-nopt - DOCKER_SCRIPT: >- - python3 ../x.py test --stage 1 --set rust.optimize=false library/std && - /scripts/stage_2_test_set2.sh + DOCKER_SCRIPT: i686-gnu-nopt-2.sh <<: *job-linux-4c - name: pr-check-1 diff --git a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml index 1e430d8b4e6..ad570ee4595 100644 --- a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml +++ b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml @@ -9,106 +9,12 @@ on: jobs: pull: if: github.repository == 'rust-lang/rustc-dev-guide' - runs-on: ubuntu-latest - outputs: - pr_url: ${{ steps.update-pr.outputs.pr_url }} - permissions: - contents: write - pull-requests: write - steps: - - uses: actions/checkout@v4 - with: - # We need the full history for josh to work - fetch-depth: '0' - - name: Install stable Rust toolchain - run: rustup update stable - - uses: Swatinem/rust-cache@v2 - with: - workspaces: "josh-sync" - # Cache the josh directory with checked out rustc - cache-directories: "/home/runner/.cache/rustc-dev-guide-josh" - - name: Install josh - run: RUSTFLAGS="--cap-lints warn" cargo install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 - - name: Setup bot git name and email - run: | - git config --global user.name 'The rustc-dev-guide Cronjob Bot' - git config --global user.email 'github-actions@github.com' - - name: Perform rustc-pull - id: rustc-pull - # Turn off -e to disable early exit - shell: bash {0} - run: | - cargo run --manifest-path josh-sync/Cargo.toml -- rustc-pull - exitcode=$? - - # If no pull was performed, we want to mark this job as successful, - # but we do not want to perform the follow-up steps. 
- if [ $exitcode -eq 0 ]; then - echo "pull_result=pull-finished" >> $GITHUB_OUTPUT - elif [ $exitcode -eq 2 ]; then - echo "pull_result=skipped" >> $GITHUB_OUTPUT - exitcode=0 - fi - - exit ${exitcode} - - name: Push changes to a branch - if: ${{ steps.rustc-pull.outputs.pull_result == 'pull-finished' }} - run: | - # Update a sticky branch that is used only for rustc pulls - BRANCH="rustc-pull" - git switch -c $BRANCH - git push -u origin $BRANCH --force - - name: Create pull request - id: update-pr - if: ${{ steps.rustc-pull.outputs.pull_result == 'pull-finished' }} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - # Check if an open pull request for an rustc pull update already exists - # If it does, the previous push has just updated it - # If not, we create it now - RESULT=`gh pr list --author github-actions[bot] --state open -q 'map(select(.title=="Rustc pull update")) | length' --json title` - if [[ "$RESULT" -eq 0 ]]; then - echo "Creating new pull request" - PR_URL=`gh pr create -B master --title 'Rustc pull update' --body 'Latest update from rustc.'` - echo "pr_url=$PR_URL" >> $GITHUB_OUTPUT - else - PR_URL=`gh pr list --author github-actions[bot] --state open -q 'map(select(.title=="Rustc pull update")) | .[0].url' --json url,title` - echo "Updating pull request ${PR_URL}" - echo "pr_url=$PR_URL" >> $GITHUB_OUTPUT - fi - send-zulip-message: - needs: [pull] - if: ${{ !cancelled() }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Compute message - id: create-message - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - if [ "${{ needs.pull.result }}" == "failure" ]; then - WORKFLOW_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" - echo "message=Rustc pull sync failed. Check out the [workflow URL]($WORKFLOW_URL)." >> $GITHUB_OUTPUT - else - CREATED_AT=`gh pr list --author github-actions[bot] --state open -q 'map(select(.title=="Rustc pull update")) | .[0].createdAt' --json createdAt,title` - PR_URL=`gh pr list --author github-actions[bot] --state open -q 'map(select(.title=="Rustc pull update")) | .[0].url' --json url,title` - week_ago=$(date +%F -d '7 days ago') - - # If there is an open PR that is at least a week old, post a message about it - if [[ -n $DATE_GH && $DATE_GH < $week_ago ]]; then - echo "message=A PR with a Rustc pull has been opened for more a week. Check out the [PR](${PR_URL})." 
>> $GITHUB_OUTPUT - fi - fi - - name: Send a Zulip message about updated PR - if: ${{ steps.create-message.outputs.message != '' }} - uses: zulip/github-actions-zulip/send-message@e4c8f27c732ba9bd98ac6be0583096dea82feea5 - with: - api-key: ${{ secrets.ZULIP_API_TOKEN }} - email: "rustc-dev-guide-gha-notif-bot@rust-lang.zulipchat.com" - organization-url: "https://rust-lang.zulipchat.com" - to: 196385 - type: "stream" - topic: "Subtree sync automation" - content: ${{ steps.create-message.outputs.message }} + uses: rust-lang/josh-sync/.github/workflows/rustc-pull.yml@main + with: + zulip-stream-id: 196385 + zulip-bot-email: "rustc-dev-guide-gha-notif-bot@rust-lang.zulipchat.com" + pr-base-branch: master + branch-name: rustc-pull + secrets: + zulip-api-token: ${{ secrets.ZULIP_API_TOKEN }} + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/doc/rustc-dev-guide/README.md b/src/doc/rustc-dev-guide/README.md index 0425c15f83c..5932da467ab 100644 --- a/src/doc/rustc-dev-guide/README.md +++ b/src/doc/rustc-dev-guide/README.md @@ -72,49 +72,6 @@ including the `<!-- toc -->` marker at the place where you want the TOC. ## Synchronizing josh subtree with rustc -This repository is linked to `rust-lang/rust` as a [josh](https://josh-project.github.io/josh/intro.html) subtree. You can use the following commands to synchronize the subtree in both directions. +This repository is linked to `rust-lang/rust` as a [josh](https://josh-project.github.io/josh/intro.html) subtree. You can use the [rustc-josh-sync](https://github.com/rust-lang/josh-sync) tool to perform synchronization. -You'll need to install `josh-proxy` locally via - -``` -cargo install josh-proxy --git https://github.com/josh-project/josh --tag r24.10.04 -``` -Older versions of `josh-proxy` may not round trip commits losslessly so it is important to install this exact version. - -### Pull changes from `rust-lang/rust` into this repository - -1) Checkout a new branch that will be used to create a PR into `rust-lang/rustc-dev-guide` -2) Run the pull command - ``` - cargo run --manifest-path josh-sync/Cargo.toml rustc-pull - ``` -3) Push the branch to your fork and create a PR into `rustc-dev-guide` - -### Push changes from this repository into `rust-lang/rust` - -NOTE: If you use Git protocol to push to your fork of `rust-lang/rust`, -ensure that you have this entry in your Git config, -else the 2 steps that follow would prompt for a username and password: - -``` -[url "git@github.com:"] -insteadOf = "https://github.com/" -``` - -1) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account - ``` - cargo run --manifest-path josh-sync/Cargo.toml rustc-push <branch-name> <gh-username> - ``` -2) Create a PR from `<branch-name>` into `rust-lang/rust` - -#### Minimal git config - -For simplicity (ease of implementation purposes), the josh-sync script simply calls out to system git. This means that the git invocation may be influenced by global (or local) git configuration. - -You may observe "Nothing to pull" even if you *know* rustc-pull has something to pull if your global git config sets `fetch.prunetags = true` (and possibly other configurations may cause unexpected outcomes). - -To minimize the likelihood of this happening, you may wish to keep a separate *minimal* git config that *only* has `[user]` entries from global git config, then repoint system git to use the minimal git config instead. E.g. 
- -``` -GIT_CONFIG_GLOBAL=/path/to/minimal/gitconfig GIT_CONFIG_SYSTEM='' cargo run --manifest-path josh-sync/Cargo.toml -- rustc-pull -``` +You can find a guide on how to perform the synchronization [here](./src/external-repos.md#synchronizing-a-josh-subtree). diff --git a/src/doc/rustc-dev-guide/josh-sync/Cargo.lock b/src/doc/rustc-dev-guide/josh-sync/Cargo.lock deleted file mode 100644 index a8183a740db..00000000000 --- a/src/doc/rustc-dev-guide/josh-sync/Cargo.lock +++ /dev/null @@ -1,430 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "anstream" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" - -[[package]] -name = "anstyle-parse" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" -dependencies = [ - "anstyle", - "windows-sys 0.59.0", -] - -[[package]] -name = "anyhow" -version = "1.0.95" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" - -[[package]] -name = "bitflags" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "clap" -version = "4.5.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" - -[[package]] -name = "colorchoice" -version = "1.0.3" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" - -[[package]] -name = "directories" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - -[[package]] -name = "getrandom" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" - -[[package]] -name = "josh-sync" -version = "0.0.0" -dependencies = [ - "anyhow", - "clap", - "directories", - "xshell", -] - -[[package]] -name = "libc" -version = "0.2.169" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - -[[package]] -name = "libredox" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags", - "libc", -] - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "proc-macro2" -version = "1.0.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom", - "libredox", - "thiserror", -] - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "syn" -version = "2.0.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = 
"thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "unicode-ident" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "xshell" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e7290c623014758632efe00737145b6867b66292c42167f2ec381eb566a373d" -dependencies = [ - "xshell-macros", -] - -[[package]] -name = "xshell-macros" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ac00cd3f8ec9c1d33fb3e7958a82df6989c42d747bd326c822b1d625283547" diff --git a/src/doc/rustc-dev-guide/josh-sync/Cargo.toml b/src/doc/rustc-dev-guide/josh-sync/Cargo.toml deleted file mode 100644 index 1f8bf2a0093..00000000000 --- a/src/doc/rustc-dev-guide/josh-sync/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "josh-sync" -edition = "2024" - -[dependencies] -anyhow = "1.0.95" -clap = { version = "4.5.21", features = ["derive"] } -directories = "5" -xshell = "0.2.6" diff --git a/src/doc/rustc-dev-guide/josh-sync/README.md b/src/doc/rustc-dev-guide/josh-sync/README.md deleted file mode 100644 index a3dd876e8b8..00000000000 --- a/src/doc/rustc-dev-guide/josh-sync/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Git josh sync -This utility serves for syncing the josh git subtree to and from the rust-lang/rust repository. - -See CLI help for usage. 
diff --git a/src/doc/rustc-dev-guide/josh-sync/src/main.rs b/src/doc/rustc-dev-guide/josh-sync/src/main.rs deleted file mode 100644 index aeedee5be22..00000000000 --- a/src/doc/rustc-dev-guide/josh-sync/src/main.rs +++ /dev/null @@ -1,41 +0,0 @@ -use clap::Parser; - -use crate::sync::{GitSync, RustcPullError}; - -mod sync; - -#[derive(clap::Parser)] -enum Args { - /// Pull changes from the main `rustc` repository. - /// This creates new commits that should be then merged into `rustc-dev-guide`. - RustcPull, - /// Push changes from `rustc-dev-guide` to the given `branch` of a `rustc` fork under the given - /// GitHub `username`. - /// The pushed branch should then be merged into the `rustc` repository. - RustcPush { branch: String, github_username: String }, -} - -fn main() -> anyhow::Result<()> { - let args = Args::parse(); - let sync = GitSync::from_current_dir()?; - match args { - Args::RustcPull => { - if let Err(error) = sync.rustc_pull(None) { - match error { - RustcPullError::NothingToPull => { - eprintln!("Nothing to pull"); - std::process::exit(2); - } - RustcPullError::PullFailed(error) => { - eprintln!("Pull failure: {error:?}"); - std::process::exit(1); - } - } - } - } - Args::RustcPush { github_username, branch } => { - sync.rustc_push(github_username, branch)?; - } - } - Ok(()) -} diff --git a/src/doc/rustc-dev-guide/josh-sync/src/sync.rs b/src/doc/rustc-dev-guide/josh-sync/src/sync.rs deleted file mode 100644 index ed38d1403a0..00000000000 --- a/src/doc/rustc-dev-guide/josh-sync/src/sync.rs +++ /dev/null @@ -1,275 +0,0 @@ -use std::io::Write; -use std::ops::Not; -use std::path::PathBuf; -use std::time::Duration; -use std::{env, net, process}; - -use anyhow::{Context, anyhow, bail}; -use xshell::{Shell, cmd}; - -/// Used for rustc syncs. -const JOSH_FILTER: &str = ":/src/doc/rustc-dev-guide"; -const JOSH_PORT: u16 = 42042; -const UPSTREAM_REPO: &str = "rust-lang/rust"; - -pub enum RustcPullError { - /// No changes are available to be pulled. - NothingToPull, - /// A rustc-pull has failed, probably a git operation error has occurred. - PullFailed(anyhow::Error), -} - -impl<E> From<E> for RustcPullError -where - E: Into<anyhow::Error>, -{ - fn from(error: E) -> Self { - Self::PullFailed(error.into()) - } -} - -pub struct GitSync { - dir: PathBuf, -} - -/// This code was adapted from the miri repository -/// (https://github.com/rust-lang/miri/blob/6a68a79f38064c3bc30617cca4bdbfb2c336b140/miri-script/src/commands.rs#L236). -impl GitSync { - pub fn from_current_dir() -> anyhow::Result<Self> { - Ok(Self { dir: std::env::current_dir()? }) - } - - pub fn rustc_pull(&self, commit: Option<String>) -> Result<(), RustcPullError> { - let sh = Shell::new()?; - sh.change_dir(&self.dir); - let commit = commit.map(Ok).unwrap_or_else(|| { - let rust_repo_head = - cmd!(sh, "git ls-remote https://github.com/{UPSTREAM_REPO}/ HEAD").read()?; - rust_repo_head - .split_whitespace() - .next() - .map(|front| front.trim().to_owned()) - .ok_or_else(|| anyhow!("Could not obtain Rust repo HEAD from remote.")) - })?; - // Make sure the repo is clean. - if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - return Err(anyhow::anyhow!( - "working directory must be clean before performing rustc pull" - ) - .into()); - } - // Make sure josh is running. 
- let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/{UPSTREAM_REPO}.git@{commit}{JOSH_FILTER}.git"); - - let previous_base_commit = sh.read_file("rust-version")?.trim().to_string(); - if previous_base_commit == commit { - return Err(RustcPullError::NothingToPull); - } - - // Update rust-version file. As a separate commit, since making it part of - // the merge has confused the heck out of josh in the past. - // We pass `--no-verify` to avoid running git hooks. - // We do this before the merge so that if there are merge conflicts, we have - // the right rust-version file while resolving them. - sh.write_file("rust-version", format!("{commit}\n"))?; - const PREPARING_COMMIT_MESSAGE: &str = "Preparing for merge from rustc"; - cmd!(sh, "git commit rust-version --no-verify -m {PREPARING_COMMIT_MESSAGE}") - .run() - .context("FAILED to commit rust-version file, something went wrong")?; - - // Fetch given rustc commit. - cmd!(sh, "git fetch {josh_url}") - .run() - .inspect_err(|_| { - // Try to un-do the previous `git commit`, to leave the repo in the state we found it. - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up again after failed `git fetch`, sorry for that"); - }) - .context("FAILED to fetch new commits, something went wrong (committing the rust-version file has been undone)")?; - - // This should not add any new root commits. So count those before and after merging. - let num_roots = || -> anyhow::Result<u32> { - Ok(cmd!(sh, "git rev-list HEAD --max-parents=0 --count") - .read() - .context("failed to determine the number of root commits")? - .parse::<u32>()?) - }; - let num_roots_before = num_roots()?; - - let sha = - cmd!(sh, "git rev-parse HEAD").output().context("FAILED to get current commit")?.stdout; - - // Merge the fetched commit. - const MERGE_COMMIT_MESSAGE: &str = "Merge from rustc"; - cmd!(sh, "git merge FETCH_HEAD --no-verify --no-ff -m {MERGE_COMMIT_MESSAGE}") - .run() - .context("FAILED to merge new commits, something went wrong")?; - - let current_sha = - cmd!(sh, "git rev-parse HEAD").output().context("FAILED to get current commit")?.stdout; - if current_sha == sha { - cmd!(sh, "git reset --hard HEAD^") - .run() - .expect("FAILED to clean up after creating the preparation commit"); - eprintln!( - "No merge was performed, no changes to pull were found. Rolled back the preparation commit." - ); - return Err(RustcPullError::NothingToPull); - } - - // Check that the number of roots did not increase. - if num_roots()? != num_roots_before { - return Err(anyhow::anyhow!( - "Josh created a new root commit. This is probably not the history you want." - ) - .into()); - } - - drop(josh); - Ok(()) - } - - pub fn rustc_push(&self, github_user: String, branch: String) -> anyhow::Result<()> { - let sh = Shell::new()?; - sh.change_dir(&self.dir); - let base = sh.read_file("rust-version")?.trim().to_owned(); - // Make sure the repo is clean. - if cmd!(sh, "git status --untracked-files=no --porcelain").read()?.is_empty().not() { - bail!("working directory must be clean before running `rustc-push`"); - } - // Make sure josh is running. - let josh = Self::start_josh()?; - let josh_url = - format!("http://localhost:{JOSH_PORT}/{github_user}/rust.git{JOSH_FILTER}.git"); - - // Find a repo we can do our preparation in. - if let Ok(rustc_git) = env::var("RUSTC_GIT") { - // If rustc_git is `Some`, we'll use an existing fork for the branch updates. - sh.change_dir(rustc_git); - } else { - // Otherwise, do this in the local repo. 
- println!( - "This will pull a copy of the rust-lang/rust history into this checkout, growing it by about 1GB." - ); - print!( - "To avoid that, abort now and set the `RUSTC_GIT` environment variable to an existing rustc checkout. Proceed? [y/N] " - ); - std::io::stdout().flush()?; - let mut answer = String::new(); - std::io::stdin().read_line(&mut answer)?; - if answer.trim().to_lowercase() != "y" { - std::process::exit(1); - } - }; - // Prepare the branch. Pushing works much better if we use as base exactly - // the commit that we pulled from last time, so we use the `rust-version` - // file to find out which commit that would be. - println!("Preparing {github_user}/rust (base: {base})..."); - if cmd!(sh, "git fetch https://github.com/{github_user}/rust {branch}") - .ignore_stderr() - .read() - .is_ok() - { - println!( - "The branch '{branch}' seems to already exist in 'https://github.com/{github_user}/rust'. Please delete it and try again." - ); - std::process::exit(1); - } - cmd!(sh, "git fetch https://github.com/{UPSTREAM_REPO} {base}").run()?; - cmd!(sh, "git push https://github.com/{github_user}/rust {base}:refs/heads/{branch}") - .ignore_stdout() - .ignore_stderr() // silence the "create GitHub PR" message - .run()?; - println!(); - - // Do the actual push. - sh.change_dir(&self.dir); - println!("Pushing changes..."); - cmd!(sh, "git push {josh_url} HEAD:{branch}").run()?; - println!(); - - // Do a round-trip check to make sure the push worked as expected. - cmd!(sh, "git fetch {josh_url} {branch}").ignore_stderr().read()?; - let head = cmd!(sh, "git rev-parse HEAD").read()?; - let fetch_head = cmd!(sh, "git rev-parse FETCH_HEAD").read()?; - if head != fetch_head { - bail!( - "Josh created a non-roundtrip push! Do NOT merge this into rustc!\n\ - Expected {head}, got {fetch_head}." - ); - } - println!( - "Confirmed that the push round-trips back to rustc-dev-guide properly. Please create a rustc PR:" - ); - println!( - // Open PR with `subtree update` title to silence the `no-merges` triagebot check - " https://github.com/{UPSTREAM_REPO}/compare/{github_user}:{branch}?quick_pull=1&title=rustc-dev-guide+subtree+update&body=r?+@ghost" - ); - - drop(josh); - Ok(()) - } - - fn start_josh() -> anyhow::Result<impl Drop> { - // Determine cache directory. - let local_dir = { - let user_dirs = - directories::ProjectDirs::from("org", "rust-lang", "rustc-dev-guide-josh").unwrap(); - user_dirs.cache_dir().to_owned() - }; - - // Start josh, silencing its output. - let mut cmd = process::Command::new("josh-proxy"); - cmd.arg("--local").arg(local_dir); - cmd.arg("--remote").arg("https://github.com"); - cmd.arg("--port").arg(JOSH_PORT.to_string()); - cmd.arg("--no-background"); - cmd.stdout(process::Stdio::null()); - cmd.stderr(process::Stdio::null()); - let josh = cmd.spawn().context("failed to start josh-proxy, make sure it is installed")?; - - // Create a wrapper that stops it on drop. - struct Josh(process::Child); - impl Drop for Josh { - fn drop(&mut self) { - #[cfg(unix)] - { - // Try to gracefully shut it down. - process::Command::new("kill") - .args(["-s", "INT", &self.0.id().to_string()]) - .output() - .expect("failed to SIGINT josh-proxy"); - // Sadly there is no "wait with timeout"... so we just give it some time to finish. - std::thread::sleep(Duration::from_millis(100)); - // Now hopefully it is gone. - if self.0.try_wait().expect("failed to wait for josh-proxy").is_some() { - return; - } - } - // If that didn't work (or we're not on Unix), kill it hard. 
- eprintln!( - "I have to kill josh-proxy the hard way, let's hope this does not break anything." - ); - self.0.kill().expect("failed to SIGKILL josh-proxy"); - } - } - - // Wait until the port is open. We try every 10ms until 1s passed. - for _ in 0..100 { - // This will generally fail immediately when the port is still closed. - let josh_ready = net::TcpStream::connect_timeout( - &net::SocketAddr::from(([127, 0, 0, 1], JOSH_PORT)), - Duration::from_millis(1), - ); - if josh_ready.is_ok() { - return Ok(Josh(josh)); - } - // Not ready yet. - std::thread::sleep(Duration::from_millis(10)); - } - bail!("Even after waiting for 1s, josh-proxy is still not available.") - } -} diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version index e444613e631..3f10132b684 100644 --- a/src/doc/rustc-dev-guide/rust-version +++ b/src/doc/rustc-dev-guide/rust-version @@ -1 +1 @@ -c96a69059ecc618b519da385a6ccd03155aa0237 +fd2eb391d032181459773f3498c17b198513e0d0 diff --git a/src/doc/rustc-dev-guide/src/autodiff/installation.md b/src/doc/rustc-dev-guide/src/autodiff/installation.md index c9b28dc43a6..a550f6d233e 100644 --- a/src/doc/rustc-dev-guide/src/autodiff/installation.md +++ b/src/doc/rustc-dev-guide/src/autodiff/installation.md @@ -6,14 +6,14 @@ In the near future, `std::autodiff` should become available in nightly builds fo First you need to clone and configure the Rust repository: ```bash -git clone --depth=1 git@github.com:rust-lang/rust.git +git clone git@github.com:rust-lang/rust cd rust ./configure --enable-llvm-link-shared --enable-llvm-plugins --enable-llvm-enzyme --release-channel=nightly --enable-llvm-assertions --enable-clang --enable-lld --enable-option-checking --enable-ninja --disable-docs ``` Afterwards you can build rustc using: ```bash -./x.py build --stage 1 library +./x build --stage 1 library ``` Afterwards rustc toolchain link will allow you to use it through cargo: @@ -25,10 +25,10 @@ rustup toolchain install nightly # enables -Z unstable-options You can then run our test cases: ```bash -./x.py test --stage 1 tests/codegen/autodiff -./x.py test --stage 1 tests/pretty/autodiff -./x.py test --stage 1 tests/ui/autodiff -./x.py test --stage 1 tests/ui/feature-gates/feature-gate-autodiff.rs +./x test --stage 1 tests/codegen/autodiff +./x test --stage 1 tests/pretty/autodiff +./x test --stage 1 tests/ui/autodiff +./x test --stage 1 tests/ui/feature-gates/feature-gate-autodiff.rs ``` Autodiff is still experimental, so if you want to use it in your own projects, you will need to add `lto="fat"` to your Cargo.toml @@ -45,7 +45,7 @@ apt install wget vim python3 git curl libssl-dev pkg-config lld ninja-build cmak ``` Then build rustc in a slightly altered way: ```bash -git clone --depth=1 https://github.com/rust-lang/rust.git +git clone https://github.com/rust-lang/rust cd rust ./configure --enable-llvm-link-shared --enable-llvm-plugins --enable-llvm-enzyme --release-channel=nightly --enable-llvm-assertions --enable-clang --enable-lld --enable-option-checking --enable-ninja --disable-docs ./x dist @@ -66,7 +66,7 @@ We recommend that approach, if you just want to use any of them and have no expe However, if you prefer to just build Enzyme without Rust, then these instructions might help. ```bash -git clone --depth=1 git@github.com:llvm/llvm-project.git +git clone git@github.com:llvm/llvm-project cd llvm-project mkdir build cd build @@ -77,7 +77,7 @@ ninja install This gives you a working LLVM build, now we can continue with building Enzyme. 
Leave the `llvm-project` folder, and execute the following commands: ```bash -git clone git@github.com:EnzymeAD/Enzyme.git +git clone git@github.com:EnzymeAD/Enzyme cd Enzyme/enzyme mkdir build cd build diff --git a/src/doc/rustc-dev-guide/src/effects.md b/src/doc/rustc-dev-guide/src/effects.md index c7aa2714668..87b0103a7bc 100644 --- a/src/doc/rustc-dev-guide/src/effects.md +++ b/src/doc/rustc-dev-guide/src/effects.md @@ -67,10 +67,8 @@ in [`wfcheck::check_impl`]. Here's an example: ```rust -#[const_trait] -trait Bar {} -#[const_trait] -trait Foo: ~const Bar {} +const trait Bar {} +const trait Foo: ~const Bar {} // `const_conditions` contains `HostEffect(Self: Bar, maybe)` impl const Bar for () {} @@ -85,8 +83,7 @@ predicates of the trait method, and we attempt to prove the predicates of the impl method. We do the same for `const_conditions`: ```rust -#[const_trait] -trait Foo { +const trait Foo { fn hi<T: ~const Default>(); } diff --git a/src/doc/rustc-dev-guide/src/external-repos.md b/src/doc/rustc-dev-guide/src/external-repos.md index f3170c9222d..ecc65b26ab7 100644 --- a/src/doc/rustc-dev-guide/src/external-repos.md +++ b/src/doc/rustc-dev-guide/src/external-repos.md @@ -9,7 +9,7 @@ There are three main ways we use dependencies: As a general rule: - Use crates.io for libraries that could be useful for others in the ecosystem - Use subtrees for tools that depend on compiler internals and need to be updated if there are breaking -changes + changes - Use submodules for tools that are independent of the compiler ## External Dependencies (subtrees) @@ -23,6 +23,8 @@ The following external projects are managed using some form of a `subtree`: * [rust-analyzer](https://github.com/rust-lang/rust-analyzer) * [rustc_codegen_cranelift](https://github.com/rust-lang/rustc_codegen_cranelift) * [rustc-dev-guide](https://github.com/rust-lang/rustc-dev-guide) +* [compiler-builtins](https://github.com/rust-lang/compiler-builtins) +* [stdarch](https://github.com/rust-lang/stdarch) In contrast to `submodule` dependencies (see below for those), the `subtree` dependencies are just regular files and directories which can @@ -34,20 +36,61 @@ implement a new tool feature or test, that should happen in one collective rustc `subtree` dependencies are currently managed by two distinct approaches: * Using `git subtree` - * `clippy` ([sync guide](https://doc.rust-lang.org/nightly/clippy/development/infrastructure/sync.html#performing-the-sync-from-rust-langrust-to-clippy)) - * `portable-simd` ([sync script](https://github.com/rust-lang/portable-simd/blob/master/subtree-sync.sh)) - * `rustfmt` - * `rustc_codegen_cranelift` ([sync script](https://github.com/rust-lang/rustc_codegen_cranelift/blob/113af154d459e41b3dc2c5d7d878e3d3a8f33c69/scripts/rustup.sh#L7)) + * `clippy` ([sync guide](https://doc.rust-lang.org/nightly/clippy/development/infrastructure/sync.html#performing-the-sync-from-rust-langrust-to-clippy)) + * `portable-simd` ([sync script](https://github.com/rust-lang/portable-simd/blob/master/subtree-sync.sh)) + * `rustfmt` + * `rustc_codegen_cranelift` ([sync script](https://github.com/rust-lang/rustc_codegen_cranelift/blob/113af154d459e41b3dc2c5d7d878e3d3a8f33c69/scripts/rustup.sh#L7)) * Using the [josh] tool - * `miri` ([sync guide](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo)) - * `rust-analyzer` ([sync script](https://github.com/rust-lang/rust-analyzer/blob/2e13684be123eca7181aa48e043e185d8044a84a/xtask/src/release.rs#L147)) - * 
`rustc-dev-guide` ([sync guide](https://github.com/rust-lang/rustc-dev-guide#synchronizing-josh-subtree-with-rustc)) + * `miri` ([sync guide](https://github.com/rust-lang/miri/blob/master/CONTRIBUTING.md#advanced-topic-syncing-with-the-rustc-repo)) + * `rust-analyzer` ([sync script](https://github.com/rust-lang/rust-analyzer/blob/2e13684be123eca7181aa48e043e185d8044a84a/xtask/src/release.rs#L147)) + * `rustc-dev-guide` ([josh sync](#synchronizing-a-josh-subtree)) + * `compiler-builtins` ([josh sync](#synchronizing-a-josh-subtree)) + * `stdarch` ([josh sync](#synchronizing-a-josh-subtree)) -The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh, you can check out the `miri` or `rust-analyzer` scripts linked above for inspiration. If you want to migrate a repository dependency from `git subtree` or `git submodule` to josh, you can check out [this guide](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg). +### Josh subtrees -Below you can find a guide on how to perform push and pull synchronization with the main rustc repo using `git subtree`, although these instructions might differ repo from repo. +The [josh] tool is an alternative to git subtrees, which manages git history in a different way and scales better to larger repositories. Specific tooling is required to work with josh; you can check out the `miri` or `rust-analyzer` scripts linked above for inspiration. We provide a helper [`rustc-josh-sync`][josh-sync] tool to help with the synchronization, described [below](#synchronizing-a-josh-subtree). -### Synchronizing a subtree +### Synchronizing a Josh subtree + +We use a dedicated tool called [`rustc-josh-sync`][josh-sync] for performing Josh subtree updates. +Currently, we are migrating Josh repositories to it. So far, it is used in: + +- compiler-builtins +- rustc-dev-guide +- stdarch + +To install the tool: +``` +cargo install --locked --git https://github.com/rust-lang/josh-sync +``` + +Both pulls (synchronize changes from rust-lang/rust into the subtree) and pushes (synchronize +changes from the subtree to rust-lang/rust) are performed from the subtree repository (so first +switch to its repository checkout directory in your terminal). + +#### Performing pull +1) Checkout a new branch that will be used to create a PR into the subtree +2) Run the pull command + ``` + rustc-josh-sync pull + ``` +3) Push the branch to your fork and create a PR into the subtree repository + - If you have `gh` CLI installed, `rustc-josh-sync` can create the PR for you. + +#### Performing push + +1) Run the push command to create a branch named `<branch-name>` in a `rustc` fork under the `<gh-username>` account + ``` + rustc-josh-sync push <branch-name> <gh-username> + ``` +2) Create a PR from `<branch-name>` into `rust-lang/rust` + +### Creating a new Josh subtree dependency + +If you want to migrate a repository dependency from `git subtree` or `git submodule` to josh, you can check out [this guide](https://hackmd.io/7pOuxnkdQDaL1Y1FQr65xg). + +### Synchronizing a git subtree Periodically the changes made to subtree based dependencies need to be synchronized between this repository and the upstream tool repositories. @@ -129,3 +172,4 @@ the week leading up to the beta cut. 
[toolstate website]: https://rust-lang-nursery.github.io/rust-toolstate/ [Toolstate chapter]: https://forge.rust-lang.org/infra/toolstate.html [josh]: https://josh-project.github.io/josh/intro.html +[josh-sync]: https://github.com/rust-lang/josh-sync diff --git a/src/doc/rustc-dev-guide/src/offload/installation.md b/src/doc/rustc-dev-guide/src/offload/installation.md index 2536af09a23..1962314c70a 100644 --- a/src/doc/rustc-dev-guide/src/offload/installation.md +++ b/src/doc/rustc-dev-guide/src/offload/installation.md @@ -6,14 +6,14 @@ In the future, `std::offload` should become available in nightly builds for user First you need to clone and configure the Rust repository: ```bash -git clone --depth=1 git@github.com:rust-lang/rust.git +git clone git@github.com:rust-lang/rust cd rust ./configure --enable-llvm-link-shared --release-channel=nightly --enable-llvm-assertions --enable-offload --enable-enzyme --enable-clang --enable-lld --enable-option-checking --enable-ninja --disable-docs ``` Afterwards you can build rustc using: ```bash -./x.py build --stage 1 library +./x build --stage 1 library ``` Afterwards rustc toolchain link will allow you to use it through cargo: @@ -26,7 +26,7 @@ rustup toolchain install nightly # enables -Z unstable-options ## Build instruction for LLVM itself ```bash -git clone --depth=1 git@github.com:llvm/llvm-project.git +git clone git@github.com:llvm/llvm-project cd llvm-project mkdir build cd build @@ -40,7 +40,7 @@ This gives you a working LLVM build. ## Testing run ``` -./x.py test --stage 1 tests/codegen/gpu_offload +./x test --stage 1 tests/codegen/gpu_offload ``` ## Usage diff --git a/src/doc/rustc-dev-guide/src/solve/invariants.md b/src/doc/rustc-dev-guide/src/solve/invariants.md index fd12b195757..8ec15f339e5 100644 --- a/src/doc/rustc-dev-guide/src/solve/invariants.md +++ b/src/doc/rustc-dev-guide/src/solve/invariants.md @@ -4,17 +4,15 @@ FIXME: This file talks about invariants of the type system as a whole, not only There are a lot of invariants - things the type system guarantees to be true at all times - which are desirable or expected from other languages and type systems. Unfortunately, quite -a few of them do not hold in Rust right now. This is either a fundamental to its design or -caused by bugs and something that may change in the future. +a few of them do not hold in Rust right now. This is either fundamental to its design or +caused by bugs, and something that may change in the future. -It is important to know about the things you can assume while working on - and with - the +It is important to know about the things you can assume while working on, and with, the type system, so here's an incomplete and unofficial list of invariants of the core type system: -- ✅: this invariant mostly holds, with some weird exceptions, you can rely on it outside -of these cases -- ❌: this invariant does not hold, either due to bugs or by design, you must not rely on -it for soundness or have to be incredibly careful when doing so +- ✅: this invariant mostly holds, with some weird exceptions or current bugs +- ❌: this invariant does not hold, and is unlikely to do so in the future; do not rely on it for soundness or have to be incredibly careful when doing so ### `wf(X)` implies `wf(normalize(X))` ✅ @@ -23,20 +21,23 @@ well-formed after normalizing said aliases. We rely on this as otherwise we would have to re-check for well-formedness for these types. 
+This currently does not hold due to a type system unsoundness: [#84533](https://github.com/rust-lang/rust/issues/84533). + ### Structural equality modulo regions implies semantic equality ✅ If you have some type and equate it to itself after replacing any regions with unique inference variables in both the lhs and rhs, the now potentially structurally different types should still be equal to each other. -Needed to prevent goals from succeeding in HIR typeck and then failing in MIR borrowck. -If this invariant is broken MIR typeck ends up failing with an ICE. +This is needed to prevent goals from succeeding in HIR typeck and then failing in MIR borrowck. +If this invariant is broken, MIR typeck ends up failing with an ICE. ### Applying inference results from a goal does not change its result ❌ TODO: this invariant is formulated in a weird way and needs to be elaborated. Pretty much: I would like this check to only fail if there's a solver bug: -https://github.com/rust-lang/rust/blob/2ffeb4636b4ae376f716dc4378a7efb37632dc2d/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs#L391-L407 +<https://github.com/rust-lang/rust/blob/2ffeb4636b4ae376f716dc4378a7efb37632dc2d/compiler/rustc_trait_selection/src/solve/eval_ctxt.rs#L391-L407>. +We should re-add this check and see where it breaks :3 If we prove some goal/equate types/whatever, apply the resulting inference constraints, and then redo the original action, the result should be the same. @@ -73,82 +74,99 @@ Many of the currently known unsound issues end up relying on this invariant bein It is however very difficult to imagine a sound type system without this invariant, so the issue is that the invariant is broken, not that we incorrectly rely on it. -### Generic goals and their instantiations have the same result ✅ +### The type system is complete ❌ + +The type system is not complete. +It often adds unnecessary inference constraints, and errors even though the goal could hold. + +- method selection +- opaque type inference +- handling type outlives constraints +- preferring `ParamEnv` candidates over `Impl` candidates during candidate selection +in the trait solver + +### Goals keep their result from HIR typeck afterwards ✅ + +Having a goal which succeeds during HIR typeck but fails when being reevaluated during MIR borrowck causes an ICE, e.g. [#140211](https://github.com/rust-lang/rust/issues/140211). -Pretty much: If we successfully typecheck a generic function concrete instantiations -of that function should also typeck. We should not get errors post-monomorphization. -We can however get overflow errors at that point. +Having a goal which succeeds during HIR typeck but fails after being instantiated is unsound, e.g. [#140212](https://github.com/rust-lang/rust/issues/140212). -TODO: example for overflow error post-monomorphization +It is interesting that we allow some incompleteness in the trait solver while still maintaining this limitation. It would be nice if there was a clear way to separate the "allowed incompleteness" from behavior which would break this invariant. + +#### Normalization must not change results This invariant is relied on to allow the normalization of generic aliases. Breaking -it can easily result in unsoundness, e.g. [#57893](https://github.com/rust-lang/rust/issues/57893) +it can easily result in unsoundness, e.g. [#57893](https://github.com/rust-lang/rust/issues/57893). + +#### Goals may still overflow after instantiation + +This happens when they start to hit the recursion limit.
+We also have diverging aliases which are scuffed. +It's unclear how these should be handled :3 ### Trait goals in empty environments are proven by a unique impl ✅ If a trait goal holds with an empty environment, there should be a unique `impl`, either user-defined or builtin, which is used to prove that goal. This is -necessary to select a unique method. +necessary to select unique methods and associated items. -We do however break this invariant in few cases, some of which are due to bugs, -some by design: +We do however break this invariant in a few cases, some of which are due to bugs, some by design: - *marker traits* are allowed to overlap as they do not have associated items - *specialization* allows specializing impls to overlap with their parent - the builtin trait object trait implementation can overlap with a user-defined impl: -[#57893] +[#57893](https://github.com/rust-lang/rust/issues/57893) -### The type system is complete ❌ - -The type system is not complete, it often adds unnecessary inference constraints, and errors -even though the goal could hold. - -- method selection -- opaque type inference -- handling type outlives constraints -- preferring `ParamEnv` candidates over `Impl` candidates during candidate selection -in the trait solver #### The type system is complete during the implicit negative overlap check in coherence ✅ -For more on overlap checking: [coherence] +For more on overlap checking, see [Coherence chapter]. -During the implicit negative overlap check in coherence we must never return *error* for -goals which can be proven. This would allow for overlapping impls with potentially different -associated items, breaking a bunch of other invariants. +During the implicit negative overlap check in coherence, +we must never return *error* for goals which can be proven. +This would allow for overlapping impls with potentially different associated items, +breaking a bunch of other invariants. This invariant is currently broken in many different ways while actually something we rely on. We have to be careful as it is quite easy to break: - generalization of aliases - generalization during subtyping binders (luckily not exploitable in coherence) -### Trait solving must be (free) lifetime agnostic ✅ +### Trait solving must not depend on lifetimes being different ✅ + +If a goal holds with lifetimes being different, it must also hold with these lifetimes being the same. We otherwise get post-monomorphization errors during codegen or unsoundness due to invalid vtables. -Trait solving during codegen should have the same result as during typeck. As we erase -all free regions during codegen we must not rely on them during typeck. A noteworthy example -is special behavior for `'static`. +We could also just get inconsistent behavior when first proving a goal with different lifetimes which are later constrained to be equal. + +### Trait solving in bodies must not depend on lifetimes being equal ✅ We also have to be careful with relying on equality of regions in the trait solver. This is fine for codegen, as we treat all erased regions as equal. We can however lose equality information from HIR to MIR typeck. -The new solver "uniquifies regions" during canonicalization, canonicalizing `u32: Trait<'x, 'x>` -as `exists<'0, '1> u32: Trait<'0, '1>`, to make it harder to rely on this property. +This currently does not hold with the new solver: [trait-system-refactor-initiative#27](https://github.com/rust-lang/trait-system-refactor-initiative/issues/27). 
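As a rough illustration of the two lifetime invariants above, here is a minimal hand-written Rust sketch (the trait and function names are invented for this example; it is not code taken from the compiler): a goal that could only be proven by treating a free region as `'static` must be rejected during typeck, because codegen erases free regions and would otherwise disagree with typeck.

```rust
// Illustrative sketch only; `Example`, `assert_example`, `ok` and `rejected`
// are made-up names.
trait Example {}
impl Example for &'static str {}

fn assert_example<T: Example>(_: T) {}

fn ok(s: &'static str) {
    // `&'static str: Example` is proven by the impl above.
    assert_example(s);
}

fn rejected<'a>(_s: &'a str) {
    // Uncommenting this call is an error: `&'a str: Example` is not provable
    // for an arbitrary free region `'a`. Accepting it (for example by relying
    // on `'a` later being inferred to `'static`) would let typeck and
    // region-erased codegen disagree, which is what the invariants above forbid.
    // assert_example(_s);
}
```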
### Removing ambiguity makes strictly more things compile ❌ Ideally we *should* not rely on ambiguity for things to compile. Not doing that will cause future improvements to be breaking changes. -Due to *incompleteness* this is not the case and improving inference can result in inference -changes, breaking existing projects. +Due to *incompleteness* this is not the case, +and improving inference can result in inference changes, breaking existing projects. ### Semantic equality implies structural equality ✅ Two types being equal in the type system must mean that they have the same `TypeId` after instantiating their generic parameters with concrete -arguments. This currently does not hold: [#97156]. +arguments. We can otherwise use their different `TypeId`s to impact trait selection. + +We lookup types using structural equality during codegen, but this shouldn't necessarily be unsound +- may result in redundant method codegen or backend type check errors? +- we also rely on it in CTFE assertions + +### Semantically different types have different `TypeId`s ✅ + +Semantically different `'static` types need different `TypeId`s to avoid transmutes, +for example `for<'a> fn(&'a str)` vs `fn(&'static str)` must have a different `TypeId`. + -[#57893]: https://github.com/rust-lang/rust/issues/57893 -[#97156]: https://github.com/rust-lang/rust/issues/97156 -[#114936]: https://github.com/rust-lang/rust/issues/114936 -[coherence]: ../coherence.md +[coherence chapter]: ../coherence.md diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md index 839076b809d..63aa08c389c 100644 --- a/src/doc/rustc-dev-guide/src/tests/directives.md +++ b/src/doc/rustc-dev-guide/src/tests/directives.md @@ -59,7 +59,7 @@ not be exhaustive. Directives can generally be found by browsing the | `aux-crate` | Like `aux-build` but makes available as extern prelude | All except `run-make` | `<extern_prelude_name>=<path/to/aux/file.rs>` | | `aux-codegen-backend` | Similar to `aux-build` but pass the compiled dylib to `-Zcodegen-backend` when building the main file | `ui-fulldeps` | Path to codegen backend file | | `proc-macro` | Similar to `aux-build`, but for aux forces host and don't use `-Cprefer-dynamic`[^pm]. | All except `run-make` | Path to auxiliary proc-macro `.rs` file | -| `build-aux-docs` | Build docs for auxiliaries as well | All except `run-make` | N/A | +| `build-aux-docs` | Build docs for auxiliaries as well. Note that this only works with `aux-build`, not `aux-crate`. | All except `run-make` | N/A | [^pm]: please see the Auxiliary proc-macro section in the [compiletest](./compiletest.md) chapter for specifics. diff --git a/src/doc/rustc-dev-guide/src/tests/intro.md b/src/doc/rustc-dev-guide/src/tests/intro.md index c55d60f4a5c..79b96c450a8 100644 --- a/src/doc/rustc-dev-guide/src/tests/intro.md +++ b/src/doc/rustc-dev-guide/src/tests/intro.md @@ -111,12 +111,14 @@ and it can be invoked so: This requires building all of the documentation, which might take a while. -### Dist check +### `distcheck` `distcheck` verifies that the source distribution tarball created by the build system will unpack, build, and run all tests. 
-> Example: `./x test distcheck` +```console +./x test distcheck +``` ### Tool tests diff --git a/src/doc/rustc-dev-guide/src/tests/misc.md b/src/doc/rustc-dev-guide/src/tests/misc.md index c0288b3dd10..39f88174879 100644 --- a/src/doc/rustc-dev-guide/src/tests/misc.md +++ b/src/doc/rustc-dev-guide/src/tests/misc.md @@ -9,7 +9,7 @@ for testing: - `RUSTC_BOOTSTRAP=1` will "cheat" and bypass usual stability checking, allowing you to use unstable features and cli flags on a stable `rustc`. -- `RUSTC_BOOTSTRAP=-1` will force a given `rustc` to pretend that is a stable +- `RUSTC_BOOTSTRAP=-1` will force a given `rustc` to pretend it is a stable compiler, even if it's actually a nightly `rustc`. This is useful because some behaviors of the compiler (e.g. diagnostics) can differ depending on whether the compiler is nightly or not. diff --git a/src/doc/rustc/src/platform-support/netbsd.md b/src/doc/rustc/src/platform-support/netbsd.md index e80ff85edad..b173bf82b44 100644 --- a/src/doc/rustc/src/platform-support/netbsd.md +++ b/src/doc/rustc/src/platform-support/netbsd.md @@ -32,10 +32,11 @@ are built for NetBSD 8.x but also work on newer OS versions). ## Target Maintainers [@he32](https://github.com/he32) +[@0323pin](https://github.com/0323pin) Further contacts: -- [NetBSD/pkgsrc-wip's rust](https://github.com/NetBSD/pkgsrc-wip/blob/master/rust185/Makefile) maintainer (see MAINTAINER variable). This package is part of "pkgsrc work-in-progress" and is used for deployment and testing of new versions of rust +- [NetBSD/pkgsrc-wip's rust](https://github.com/NetBSD/pkgsrc-wip/blob/master/rust188/Makefile) maintainer (see MAINTAINER variable). This package is part of "pkgsrc work-in-progress" and is used for deployment and testing of new versions of rust. Note that we have the convention of having multiple rust versions active in pkgsrc-wip at any one time, so the version number is part of the directory name, and from time to time old versions are culled so this is not a fully "stable" link. - [NetBSD's pkgsrc lang/rust](https://github.com/NetBSD/pkgsrc/tree/trunk/lang/rust) for the "proper" package in pkgsrc. - [NetBSD's pkgsrc lang/rust-bin](https://github.com/NetBSD/pkgsrc/tree/trunk/lang/rust-bin) which re-uses the bootstrap kit as a binary distribution and therefore avoids the rather protracted native build time of rust itself diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index e7a1f4d8397..1265a39d27b 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -390,7 +390,8 @@ pub(crate) fn clean_predicate<'tcx>( ty::ClauseKind::ConstEvaluatable(..) | ty::ClauseKind::WellFormed(..) | ty::ClauseKind::ConstArgHasType(..) - // FIXME(const_trait_impl): We can probably use this `HostEffect` pred to render `[const]`. + | ty::ClauseKind::UnstableFeature(..) + // FIXME(const_trait_impl): We can probably use this `HostEffect` pred to render `~const`. | ty::ClauseKind::HostEffect(_) => None, } } @@ -2864,7 +2865,7 @@ fn clean_maybe_renamed_item<'tcx>( ItemKind::Fn { ref sig, generics, body: body_id, .. 
} => { clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx) } - ItemKind::Trait(_, _, _, generics, bounds, item_ids) => { + ItemKind::Trait(_, _, _, _, generics, bounds, item_ids) => { let items = item_ids .iter() .map(|&ti| clean_trait_item(cx.tcx.hir_trait_item(ti), cx)) diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 3ecd41db2dd..09647492d93 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -648,7 +648,7 @@ impl Item { let sig = tcx.fn_sig(def_id).skip_binder(); let constness = if tcx.is_const_fn(def_id) { // rustc's `is_const_fn` returns `true` for associated functions that have an `impl const` parent - // or that have a `#[const_trait]` parent. Do not display those as `const` in rustdoc because we + // or that have a `const trait` parent. Do not display those as `const` in rustdoc because we // won't be printing correct syntax plus the syntax is unstable. match tcx.opt_associated_item(def_id) { Some(ty::AssocItem { @@ -759,79 +759,48 @@ impl Item { Some(tcx.visibility(def_id)) } - fn attributes_without_repr(&self, tcx: TyCtxt<'_>, is_json: bool) -> Vec<String> { - const ALLOWED_ATTRIBUTES: &[Symbol] = - &[sym::export_name, sym::link_section, sym::no_mangle, sym::non_exhaustive]; + /// Get a list of attributes excluding `#[repr]` to display. + /// + /// Only used by the HTML output-format. + fn attributes_without_repr(&self) -> Vec<String> { self.attrs .other_attrs .iter() - .filter_map(|attr| { - if let hir::Attribute::Parsed(AttributeKind::LinkSection { name, .. }) = attr { + .filter_map(|attr| match attr { + hir::Attribute::Parsed(AttributeKind::LinkSection { name, .. }) => { Some(format!("#[link_section = \"{name}\"]")) } - // NoMangle is special cased, as it appears in HTML output, and we want to show it in source form, not HIR printing. - // It is also used by cargo-semver-checks. - else if let hir::Attribute::Parsed(AttributeKind::NoMangle(..)) = attr { + hir::Attribute::Parsed(AttributeKind::NoMangle(..)) => { Some("#[no_mangle]".to_string()) - } else if let hir::Attribute::Parsed(AttributeKind::ExportName { name, .. }) = attr - { + } + hir::Attribute::Parsed(AttributeKind::ExportName { name, .. }) => { Some(format!("#[export_name = \"{name}\"]")) - } else if let hir::Attribute::Parsed(AttributeKind::NonExhaustive(..)) = attr { + } + hir::Attribute::Parsed(AttributeKind::NonExhaustive(..)) => { Some("#[non_exhaustive]".to_string()) - } else if is_json { - match attr { - // rustdoc-json stores this in `Item::deprecation`, so we - // don't want it it `Item::attrs`. - hir::Attribute::Parsed(AttributeKind::Deprecation { .. }) => None, - // We have separate pretty-printing logic for `#[repr(..)]` attributes. - hir::Attribute::Parsed(AttributeKind::Repr { .. 
}) => None, - // target_feature is special-cased because cargo-semver-checks uses it - hir::Attribute::Parsed(AttributeKind::TargetFeature(features, _)) => { - let mut output = String::new(); - for (i, (feature, _)) in features.iter().enumerate() { - if i != 0 { - output.push_str(", "); - } - output.push_str(&format!("enable=\"{}\"", feature.as_str())); - } - Some(format!("#[target_feature({output})]")) - } - hir::Attribute::Parsed(AttributeKind::AutomaticallyDerived(..)) => { - Some("#[automatically_derived]".to_string()) - } - _ => Some({ - let mut s = rustc_hir_pretty::attribute_to_string(&tcx, attr); - assert_eq!(s.pop(), Some('\n')); - s - }), - } - } else { - if !attr.has_any_name(ALLOWED_ATTRIBUTES) { - return None; - } - Some( - rustc_hir_pretty::attribute_to_string(&tcx, attr) - .replace("\\\n", "") - .replace('\n', "") - .replace(" ", " "), - ) } + _ => None, }) .collect() } - pub(crate) fn attributes(&self, tcx: TyCtxt<'_>, cache: &Cache, is_json: bool) -> Vec<String> { - let mut attrs = self.attributes_without_repr(tcx, is_json); + /// Get a list of attributes to display on this item. + /// + /// Only used by the HTML output-format. + pub(crate) fn attributes(&self, tcx: TyCtxt<'_>, cache: &Cache) -> Vec<String> { + let mut attrs = self.attributes_without_repr(); - if let Some(repr_attr) = self.repr(tcx, cache, is_json) { + if let Some(repr_attr) = self.repr(tcx, cache) { attrs.push(repr_attr); } attrs } /// Returns a stringified `#[repr(...)]` attribute. - pub(crate) fn repr(&self, tcx: TyCtxt<'_>, cache: &Cache, is_json: bool) -> Option<String> { - repr_attributes(tcx, cache, self.def_id()?, self.type_(), is_json) + /// + /// Only used by the HTML output-format. + pub(crate) fn repr(&self, tcx: TyCtxt<'_>, cache: &Cache) -> Option<String> { + repr_attributes(tcx, cache, self.def_id()?, self.type_()) } pub fn is_doc_hidden(&self) -> bool { @@ -843,12 +812,14 @@ impl Item { } } +/// Return a string representing the `#[repr]` attribute if present. +/// +/// Only used by the HTML output-format. pub(crate) fn repr_attributes( tcx: TyCtxt<'_>, cache: &Cache, def_id: DefId, item_type: ItemType, - is_json: bool, ) -> Option<String> { use rustc_abi::IntegerType; @@ -865,7 +836,6 @@ pub(crate) fn repr_attributes( // Render `repr(transparent)` iff the non-1-ZST field is public or at least one // field is public in case all fields are 1-ZST fields. let render_transparent = cache.document_private - || is_json || adt .all_fields() .find(|field| { diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index bf3f7607274..fd1b17b6476 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -3,6 +3,7 @@ use std::fmt::{self, Display, Write as _}; use std::sync::LazyLock as Lazy; use std::{ascii, mem}; +use rustc_ast::join_path_idents; use rustc_ast::tokenstream::TokenTree; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{DefId, LOCAL_CRATE, LocalDefId}; @@ -24,7 +25,7 @@ use crate::clean::{ clean_middle_ty, inline, }; use crate::core::DocContext; -use crate::display::{Joined as _, MaybeDisplay as _}; +use crate::display::Joined as _; #[cfg(test)] mod tests; @@ -251,13 +252,7 @@ pub(crate) fn qpath_to_string(p: &hir::QPath<'_>) -> String { hir::QPath::LangItem(lang_item, ..) 
=> return lang_item.name().to_string(), }; - fmt::from_fn(|f| { - segments - .iter() - .map(|seg| (seg.ident.name != kw::PathRoot).then_some(seg.ident).maybe_display()) - .joined("::", f) - }) - .to_string() + join_path_idents(segments.iter().map(|seg| seg.ident)) } pub(crate) fn build_deref_target_impls( diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index 4989bd718c9..5191120ebdb 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -1,5 +1,6 @@ use std::mem; +use rustc_ast::join_path_syms; use rustc_attr_data_structures::StabilityLevel; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, DefIdSet}; @@ -13,7 +14,6 @@ use crate::core::DocContext; use crate::fold::DocFolder; use crate::formats::Impl; use crate::formats::item_type::ItemType; -use crate::html::format::join_with_double_colon; use crate::html::markdown::short_markdown_summary; use crate::html::render::IndexItem; use crate::html::render::search_index::get_function_type_for_search; @@ -558,7 +558,7 @@ fn add_item_to_search_index(tcx: TyCtxt<'_>, cache: &mut Cache, item: &clean::It clean::ItemKind::ImportItem(import) => import.source.did.unwrap_or(item_def_id), _ => item_def_id, }; - let path = join_with_double_colon(parent_path); + let path = join_path_syms(parent_path); let impl_id = if let Some(ParentStackItem::Impl { item_id, .. }) = cache.parent_stack.last() { item_id.as_def_id() } else { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index bcb3e57c844..b16485107a0 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -14,6 +14,7 @@ use std::slice; use itertools::{Either, Itertools}; use rustc_abi::ExternAbi; +use rustc_ast::join_path_syms; use rustc_attr_data_structures::{ConstStability, StabilityLevel, StableSince}; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; @@ -25,7 +26,7 @@ use rustc_span::symbol::kw; use rustc_span::{Symbol, sym}; use tracing::{debug, trace}; -use super::url_parts_builder::{UrlPartsBuilder, estimate_item_path_byte_length}; +use super::url_parts_builder::UrlPartsBuilder; use crate::clean::types::ExternalLocation; use crate::clean::utils::find_nearest_parent_module; use crate::clean::{self, ExternalCrate, PrimitiveType}; @@ -369,18 +370,6 @@ pub(crate) enum HrefError { NotInExternalCache, } -// Panics if `syms` is empty. -pub(crate) fn join_with_double_colon(syms: &[Symbol]) -> String { - let mut s = String::with_capacity(estimate_item_path_byte_length(syms.len())); - // NOTE: using `Joined::joined` here causes a noticeable perf regression - s.push_str(syms[0].as_str()); - for sym in &syms[1..] { - s.push_str("::"); - s.push_str(sym.as_str()); - } - s -} - /// This function is to get the external macro path because they are not in the cache used in /// `href_with_root_path`. 
fn generate_macro_def_id_path( @@ -672,7 +661,7 @@ pub(crate) fn link_tooltip( write!(f, "{}", cx.tcx().item_name(id))?; } else if !fqp.is_empty() { write!(f, "{shortty} ")?; - fqp.iter().joined("::", f)?; + write!(f, "{}", join_path_syms(fqp))?; } Ok(()) }) @@ -703,7 +692,7 @@ fn resolved_path( write!( f, "{path}::{anchor}", - path = join_with_double_colon(&fqp[..fqp.len() - 1]), + path = join_path_syms(&fqp[..fqp.len() - 1]), anchor = print_anchor(did, *fqp.last().unwrap(), cx) ) } else { @@ -835,7 +824,7 @@ pub(crate) fn print_anchor(did: DefId, text: Symbol, cx: &Context<'_>) -> impl D write!( f, r#"<a class="{short_ty}" href="{url}" title="{short_ty} {path}">{text}</a>"#, - path = join_with_double_colon(&fqp), + path = join_path_syms(fqp), text = EscapeBodyText(text.as_str()), ) } else { @@ -1095,7 +1084,7 @@ impl clean::QPathData { title=\"type {path}::{name}\">{name}</a>", shortty = ItemType::AssocType, name = assoc.name, - path = join_with_double_colon(&path), + path = join_path_syms(path), ) } else { write!(f, "{}", assoc.name) diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index 3b4dae841ee..7b814701a73 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -6,6 +6,7 @@ use std::path::{Path, PathBuf}; use std::sync::mpsc::{Receiver, channel}; use askama::Template; +use rustc_ast::join_path_syms; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE}; use rustc_middle::ty::TyCtxt; @@ -27,7 +28,6 @@ use crate::formats::FormatRenderer; use crate::formats::cache::Cache; use crate::formats::item_type::ItemType; use crate::html::escape::Escape; -use crate::html::format::join_with_double_colon; use crate::html::markdown::{self, ErrorCodes, IdMap, plain_text_summary}; use crate::html::render::write_shared::write_shared; use crate::html::url_parts_builder::UrlPartsBuilder; @@ -211,7 +211,7 @@ impl<'tcx> Context<'tcx> { title.push_str(" in "); } // No need to include the namespace for primitive types and keywords - title.push_str(&join_with_double_colon(&self.current)); + title.push_str(&join_path_syms(&self.current)); }; title.push_str(" - Rust"); let tyname = it.type_(); diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 70f3f54e4c0..d8c37137bbc 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -49,6 +49,7 @@ use std::{fs, str}; use askama::Template; use itertools::Either; +use rustc_ast::join_path_syms; use rustc_attr_data_structures::{ ConstStability, DeprecatedSince, Deprecation, RustcVersion, StabilityLevel, StableSince, }; @@ -74,9 +75,9 @@ use crate::formats::cache::Cache; use crate::formats::item_type::ItemType; use crate::html::escape::Escape; use crate::html::format::{ - Ending, HrefError, PrintWithSpace, href, join_with_double_colon, print_abi_with_space, - print_constness_with_space, print_default_space, print_generic_bounds, print_where_clause, - visibility_print_with_space, write_str, + Ending, HrefError, PrintWithSpace, href, print_abi_with_space, print_constness_with_space, + print_default_space, print_generic_bounds, print_where_clause, visibility_print_with_space, + write_str, }; use crate::html::markdown::{ HeadingOffset, IdMap, Markdown, MarkdownItemInfo, MarkdownSummaryLine, @@ -1191,7 +1192,7 @@ fn render_assoc_item( // a whitespace prefix and newline. 
fn render_attributes_in_pre(it: &clean::Item, prefix: &str, cx: &Context<'_>) -> impl fmt::Display { fmt::from_fn(move |f| { - for a in it.attributes(cx.tcx(), cx.cache(), false) { + for a in it.attributes(cx.tcx(), cx.cache()) { writeln!(f, "{prefix}{a}")?; } Ok(()) @@ -1207,7 +1208,7 @@ fn render_code_attribute(code_attr: CodeAttribute, w: &mut impl fmt::Write) { // When an attribute is rendered inside a <code> tag, it is formatted using // a div to produce a newline after it. fn render_attributes_in_code(w: &mut impl fmt::Write, it: &clean::Item, cx: &Context<'_>) { - for attr in it.attributes(cx.tcx(), cx.cache(), false) { + for attr in it.attributes(cx.tcx(), cx.cache()) { render_code_attribute(CodeAttribute(attr), w); } } @@ -1219,7 +1220,7 @@ fn render_repr_attributes_in_code( def_id: DefId, item_type: ItemType, ) { - if let Some(repr) = clean::repr_attributes(cx.tcx(), cx.cache(), def_id, item_type, false) { + if let Some(repr) = clean::repr_attributes(cx.tcx(), cx.cache(), def_id, item_type) { render_code_attribute(CodeAttribute(repr), w); } } @@ -2555,7 +2556,7 @@ fn collect_paths_for_type(first_ty: &clean::Type, cache: &Cache) -> Vec<String> let fqp = cache.exact_paths.get(&did).or_else(get_extern); if let Some(path) = fqp { - out.push(join_with_double_colon(path)); + out.push(join_path_syms(path)); } }; diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs index e33bdc0db32..5fbda4797cc 100644 --- a/src/librustdoc/html/render/print_item.rs +++ b/src/librustdoc/html/render/print_item.rs @@ -4,6 +4,7 @@ use std::iter; use askama::Template; use rustc_abi::VariantIdx; +use rustc_ast::join_path_syms; use rustc_data_structures::fx::{FxHashMap, FxIndexSet}; use rustc_hir as hir; use rustc_hir::def::CtorKind; @@ -30,8 +31,8 @@ use crate::formats::Impl; use crate::formats::item_type::ItemType; use crate::html::escape::{Escape, EscapeBodyTextWithWbr}; use crate::html::format::{ - Ending, PrintWithSpace, join_with_double_colon, print_abi_with_space, - print_constness_with_space, print_where_clause, visibility_print_with_space, + Ending, PrintWithSpace, print_abi_with_space, print_constness_with_space, print_where_clause, + visibility_print_with_space, }; use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine}; use crate::html::render::{document_full, document_item_info}; @@ -1424,7 +1425,7 @@ fn item_type_alias(cx: &Context<'_>, it: &clean::Item, t: &clean::TypeAlias) -> iter::repeat_n("..", cx.current.len()).chain(iter::once("type.impl")).collect(); js_src_path.extend(target_fqp[..target_fqp.len() - 1].iter().copied()); js_src_path.push_fmt(format_args!("{target_type}.{}.js", target_fqp.last().unwrap())); - let self_path = fmt::from_fn(|f| self_fqp.iter().joined("::", f)); + let self_path = join_path_syms(self_fqp); write!( w, "<script src=\"{src}\" data-self-path=\"{self_path}\" async></script>", @@ -1487,12 +1488,11 @@ impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> { self.cx.cache(), self.def_id, ItemType::Union, - false, ) { writeln!(f, "{repr}")?; }; } else { - for a in self.it.attributes(self.cx.tcx(), self.cx.cache(), false) { + for a in self.it.attributes(self.cx.tcx(), self.cx.cache()) { writeln!(f, "{a}")?; } } @@ -2257,7 +2257,7 @@ pub(crate) fn compare_names(left: &str, right: &str) -> Ordering { } pub(super) fn full_path(cx: &Context<'_>, item: &clean::Item) -> String { - let mut s = join_with_double_colon(&cx.current); + let mut s = join_path_syms(&cx.current); s.push_str("::"); s.push_str(item.name.unwrap().as_str()); s diff --git 
a/src/librustdoc/html/render/search_index.rs b/src/librustdoc/html/render/search_index.rs index aff8684ee3a..80a59fa218c 100644 --- a/src/librustdoc/html/render/search_index.rs +++ b/src/librustdoc/html/render/search_index.rs @@ -4,6 +4,7 @@ use std::collections::hash_map::Entry; use std::collections::{BTreeMap, VecDeque}; use encode::{bitmap_to_string, write_vlqhex_to_string}; +use rustc_ast::join_path_syms; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::DefId; @@ -17,7 +18,6 @@ use crate::clean::types::{Function, Generics, ItemId, Type, WherePredicate}; use crate::clean::{self, utils}; use crate::formats::cache::{Cache, OrphanImplItem}; use crate::formats::item_type::ItemType; -use crate::html::format::join_with_double_colon; use crate::html::markdown::short_markdown_summary; use crate::html::render::ordered_json::OrderedJson; use crate::html::render::{self, IndexItem, IndexItemFunctionType, RenderType, RenderTypeId}; @@ -78,7 +78,7 @@ pub(crate) fn build_index( ty: item.type_(), defid: item.item_id.as_def_id(), name: item.name.unwrap(), - path: join_with_double_colon(&fqp[..fqp.len() - 1]), + path: join_path_syms(&fqp[..fqp.len() - 1]), desc, parent: Some(parent), parent_idx: None, @@ -416,7 +416,7 @@ pub(crate) fn build_index( if fqp.len() < 2 { return None; } - join_with_double_colon(&fqp[..fqp.len() - 1]) + join_path_syms(&fqp[..fqp.len() - 1]) }; if path == item.path { return None; @@ -427,10 +427,10 @@ pub(crate) fn build_index( let i = <isize as TryInto<usize>>::try_into(parent_idx).unwrap(); item.path = { let p = &crate_paths[i].1; - join_with_double_colon(&p[..p.len() - 1]) + join_path_syms(&p[..p.len() - 1]) }; item.exact_path = - crate_paths[i].2.as_ref().map(|xp| join_with_double_colon(&xp[..xp.len() - 1])); + crate_paths[i].2.as_ref().map(|xp| join_path_syms(&xp[..xp.len() - 1])); } // Omit the parent path if it is same to that of the prior item. 
@@ -549,11 +549,11 @@ pub(crate) fn build_index( }); continue; } - let full_path = join_with_double_colon(&path[..path.len() - 1]); + let full_path = join_path_syms(&path[..path.len() - 1]); let full_exact_path = exact .as_ref() .filter(|exact| exact.last() == path.last() && exact.len() >= 2) - .map(|exact| join_with_double_colon(&exact[..exact.len() - 1])); + .map(|exact| join_path_syms(&exact[..exact.len() - 1])); let exact_path = extra_paths.len() + self.items.len(); let exact_path = full_exact_path.as_ref().map(|full_exact_path| match extra_paths .entry(full_exact_path.clone()) diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs index 0078671fcc5..1f691392b17 100644 --- a/src/librustdoc/html/render/write_shared.rs +++ b/src/librustdoc/html/render/write_shared.rs @@ -26,6 +26,7 @@ use std::{fmt, fs}; use indexmap::IndexMap; use regex::Regex; +use rustc_ast::join_path_syms; use rustc_data_structures::flock; use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet}; use rustc_middle::ty::TyCtxt; @@ -43,7 +44,6 @@ use crate::docfs::PathError; use crate::error::Error; use crate::formats::Impl; use crate::formats::item_type::ItemType; -use crate::html::format::join_with_double_colon; use crate::html::layout; use crate::html::render::ordered_json::{EscapedJson, OrderedJson}; use crate::html::render::search_index::{SerializedSearchIndex, build_index}; @@ -608,7 +608,7 @@ impl TypeAliasPart { for &(type_alias_fqp, type_alias_item) in type_aliases { cx.id_map.borrow_mut().clear(); cx.deref_id_map.borrow_mut().clear(); - let type_alias_fqp = join_with_double_colon(&type_alias_fqp); + let type_alias_fqp = join_path_syms(type_alias_fqp); if let Some(ret) = &mut ret { ret.aliases.push(type_alias_fqp); } else { diff --git a/src/librustdoc/html/url_parts_builder.rs b/src/librustdoc/html/url_parts_builder.rs index 9a533827441..705fa498e8d 100644 --- a/src/librustdoc/html/url_parts_builder.rs +++ b/src/librustdoc/html/url_parts_builder.rs @@ -117,7 +117,7 @@ impl UrlPartsBuilder { /// This is just a guess at the average length of a URL part, /// used for [`String::with_capacity`] calls in the [`FromIterator`] -/// and [`Extend`] impls, and for [estimating item path lengths]. +/// and [`Extend`] impls. /// /// The value `8` was chosen for two main reasons: /// @@ -125,18 +125,8 @@ impl UrlPartsBuilder { /// * jemalloc's size classes are all multiples of eight, /// which means that the amount of memory it allocates will often match /// the amount requested, avoiding wasted bytes. -/// -/// [estimating item path lengths]: estimate_item_path_byte_length const AVG_PART_LENGTH: usize = 8; -/// Estimate the number of bytes in an item's path, based on how many segments it has. -/// -/// **Note:** This is only to be used with, e.g., [`String::with_capacity()`]; -/// the return value is just a rough estimate. 
-pub(crate) const fn estimate_item_path_byte_length(segment_count: usize) -> usize { - AVG_PART_LENGTH * segment_count -} - impl<'a> FromIterator<&'a str> for UrlPartsBuilder { fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self { let iter = iter.into_iter(); diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs index e7163bead92..0a84d8caa30 100644 --- a/src/librustdoc/json/conversions.rs +++ b/src/librustdoc/json/conversions.rs @@ -5,10 +5,12 @@ use rustc_abi::ExternAbi; use rustc_ast::ast; use rustc_attr_data_structures::{self as attrs, DeprecatedSince}; +use rustc_hir as hir; use rustc_hir::def::CtorKind; use rustc_hir::def_id::DefId; use rustc_hir::{HeaderSafety, Safety}; use rustc_metadata::rendered_const; +use rustc_middle::ty::TyCtxt; use rustc_middle::{bug, ty}; use rustc_span::{Pos, kw, sym}; use rustdoc_json_types::*; @@ -39,7 +41,12 @@ impl JsonRenderer<'_> { }) .collect(); let docs = item.opt_doc_value(); - let attrs = item.attributes(self.tcx, &self.cache, true); + let attrs = item + .attrs + .other_attrs + .iter() + .filter_map(|a| maybe_from_hir_attr(a, item.item_id, self.tcx)) + .collect(); let span = item.span(self.tcx); let visibility = item.visibility(self.tcx); let clean::ItemInner { name, item_id, .. } = *item.inner; @@ -886,3 +893,93 @@ impl FromClean<ItemType> for ItemKind { } } } + +/// Maybe convert a attribute from hir to json. +/// +/// Returns `None` if the attribute shouldn't be in the output. +fn maybe_from_hir_attr( + attr: &hir::Attribute, + item_id: ItemId, + tcx: TyCtxt<'_>, +) -> Option<Attribute> { + use attrs::AttributeKind as AK; + + let kind = match attr { + hir::Attribute::Parsed(kind) => kind, + + hir::Attribute::Unparsed(_) => { + // FIXME: We should handle `#[doc(hidden)]`. + return Some(other_attr(tcx, attr)); + } + }; + + Some(match kind { + AK::Deprecation { .. } => return None, // Handled separately into Item::deprecation. + AK::DocComment { .. } => unreachable!("doc comments stripped out earlier"), + + AK::MustUse { reason, span: _ } => { + Attribute::MustUse { reason: reason.map(|s| s.to_string()) } + } + AK::Repr { .. 
} => repr_attr( + tcx, + item_id.as_def_id().expect("all items that could have #[repr] have a DefId"), + ), + AK::ExportName { name, span: _ } => Attribute::ExportName(name.to_string()), + AK::LinkSection { name, span: _ } => Attribute::LinkSection(name.to_string()), + AK::TargetFeature(features, _span) => Attribute::TargetFeature { + enable: features.iter().map(|(feat, _span)| feat.to_string()).collect(), + }, + + AK::NoMangle(_) => Attribute::NoMangle, + AK::NonExhaustive(_) => Attribute::NonExhaustive, + AK::AutomaticallyDerived(_) => Attribute::AutomaticallyDerived, + + _ => other_attr(tcx, attr), + }) +} + +fn other_attr(tcx: TyCtxt<'_>, attr: &hir::Attribute) -> Attribute { + let mut s = rustc_hir_pretty::attribute_to_string(&tcx, attr); + assert_eq!(s.pop(), Some('\n')); + Attribute::Other(s) +} + +fn repr_attr(tcx: TyCtxt<'_>, def_id: DefId) -> Attribute { + let repr = tcx.adt_def(def_id).repr(); + + let kind = if repr.c() { + ReprKind::C + } else if repr.transparent() { + ReprKind::Transparent + } else if repr.simd() { + ReprKind::Simd + } else { + ReprKind::Rust + }; + + let align = repr.align.map(|a| a.bytes()); + let packed = repr.pack.map(|p| p.bytes()); + let int = repr.int.map(format_integer_type); + + Attribute::Repr(AttributeRepr { kind, align, packed, int }) +} + +fn format_integer_type(it: rustc_abi::IntegerType) -> String { + use rustc_abi::Integer::*; + use rustc_abi::IntegerType::*; + match it { + Pointer(true) => "isize", + Pointer(false) => "usize", + Fixed(I8, true) => "i8", + Fixed(I8, false) => "u8", + Fixed(I16, true) => "i16", + Fixed(I16, false) => "u16", + Fixed(I32, true) => "i32", + Fixed(I32, false) => "u32", + Fixed(I64, true) => "i64", + Fixed(I64, false) => "u64", + Fixed(I128, true) => "i128", + Fixed(I128, false) => "u128", + } + .to_owned() +} diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs index 0e72ddd9db1..6235b0e8576 100644 --- a/src/rustdoc-json-types/lib.rs +++ b/src/rustdoc-json-types/lib.rs @@ -37,8 +37,8 @@ pub type FxHashMap<K, V> = HashMap<K, V>; // re-export for use in src/librustdoc // will instead cause conflicts. See #94591 for more. (This paragraph and the "Latest feature" line // are deliberately not in a doc comment, because they need not be in public docs.) // -// Latest feature: Pretty printing of no_mangle attributes changed -pub const FORMAT_VERSION: u32 = 53; +// Latest feature: Structured Attributes +pub const FORMAT_VERSION: u32 = 54; /// The root of the emitted JSON blob. /// @@ -195,13 +195,94 @@ pub struct Item { /// - `#[repr(C)]` and other reprs also appear as themselves, /// though potentially with a different order: e.g. `repr(i8, C)` may become `repr(C, i8)`. /// Multiple repr attributes on the same item may be combined into an equivalent single attr. - pub attrs: Vec<String>, + pub attrs: Vec<Attribute>, /// Information about the item’s deprecation, if present. pub deprecation: Option<Deprecation>, /// The type-specific fields describing this item. pub inner: ItemEnum, } +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +/// An attribute, e.g. `#[repr(C)]` +/// +/// This doesn't include: +/// - `#[doc = "Doc Comment"]` or `/// Doc comment`. These are in [`Item::docs`] instead. +/// - `#[deprecated]`. These are in [`Item::deprecation`] instead. 
+pub enum Attribute { + /// `#[non_exhaustive]` + NonExhaustive, + + /// `#[must_use]` + MustUse { reason: Option<String> }, + + /// `#[export_name = "name"]` + ExportName(String), + + /// `#[link_section = "name"]` + LinkSection(String), + + /// `#[automatically_derived]` + AutomaticallyDerived, + + /// `#[repr]` + Repr(AttributeRepr), + + /// `#[no_mangle]` + NoMangle, + + /// #[target_feature(enable = "feature1", enable = "feature2")] + TargetFeature { enable: Vec<String> }, + + /// Something else. + /// + /// Things here are explicitly *not* covered by the [`FORMAT_VERSION`] + /// constant, and may change without bumping the format version. + /// + /// As an implementation detail, this is currently either: + /// 1. A HIR debug printing, like `"#[attr = Optimize(Speed)]"` + /// 2. The attribute as it appears in source form, like + /// `"#[optimize(speed)]"`. + Other(String), +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +/// The contents of a `#[repr(...)]` attribute. +/// +/// Used in [`Attribute::Repr`]. +pub struct AttributeRepr { + /// The representation, e.g. `#[repr(C)]`, `#[repr(transparent)]` + pub kind: ReprKind, + + /// Alignment in bytes, if explicitly specified by `#[repr(align(...)]`. + pub align: Option<u64>, + /// Alignment in bytes, if explicitly specified by `#[repr(packed(...)]]`. + pub packed: Option<u64>, + + /// The integer type for an enum descriminant, if explicitly specified. + /// + /// e.g. `"i32"`, for `#[repr(C, i32)]` + pub int: Option<String>, +} + +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +/// The kind of `#[repr]`. +/// +/// See [AttributeRepr::kind]`. +pub enum ReprKind { + /// `#[repr(Rust)]` + /// + /// Also the default. + Rust, + /// `#[repr(C)]` + C, + /// `#[repr(transparent)] + Transparent, + /// `#[repr(simd)]` + Simd, +} + /// A range of source code. #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct Span { @@ -1343,7 +1424,7 @@ pub struct Static { /// Is the static `unsafe`? /// - /// This is only true if it's in an `extern` block, and not explicity marked + /// This is only true if it's in an `extern` block, and not explicitly marked /// as `safe`. /// /// ```rust diff --git a/src/tools/cargo b/src/tools/cargo -Subproject eabb4cd923deb73e714f7ad3f5234d68ca284db +Subproject 6833aa715d724437dc1247d0166afe314ab6854 diff --git a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs index 07df893ae3c..a9d3015ce5c 100644 --- a/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs +++ b/src/tools/clippy/clippy_lints/src/arbitrary_source_item_ordering.rs @@ -306,7 +306,7 @@ impl<'tcx> LateLintPass<'tcx> for ArbitrarySourceItemOrdering { cur_f = Some(field); } }, - ItemKind::Trait(is_auto, _safety, _ident, _generics, _generic_bounds, item_ref) + ItemKind::Trait(_constness, is_auto, _safety, _ident, _generics, _generic_bounds, item_ref) if self.enable_ordering_for_trait && *is_auto == IsAuto::No => { let mut cur_t: Option<(TraitItemId, Ident)> = None; diff --git a/src/tools/clippy/clippy_lints/src/doc/mod.rs b/src/tools/clippy/clippy_lints/src/doc/mod.rs index 2bf52216b83..22b781b8929 100644 --- a/src/tools/clippy/clippy_lints/src/doc/mod.rs +++ b/src/tools/clippy/clippy_lints/src/doc/mod.rs @@ -740,7 +740,7 @@ impl<'tcx> LateLintPass<'tcx> for Documentation { ); } }, - ItemKind::Trait(_, unsafety, ..) 
=> match (headers.safety, unsafety) { + ItemKind::Trait(_, _, unsafety, ..) => match (headers.safety, unsafety) { (false, Safety::Unsafe) => span_lint( cx, MISSING_SAFETY_DOC, diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs index d32017a8b41..1bf03480c82 100644 --- a/src/tools/clippy/clippy_lints/src/len_zero.rs +++ b/src/tools/clippy/clippy_lints/src/len_zero.rs @@ -125,7 +125,7 @@ declare_lint_pass!(LenZero => [LEN_ZERO, LEN_WITHOUT_IS_EMPTY, COMPARISON_TO_EMP impl<'tcx> LateLintPass<'tcx> for LenZero { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'_>) { - if let ItemKind::Trait(_, _, ident, _, _, trait_items) = item.kind + if let ItemKind::Trait(_, _, _, ident, _, _, trait_items) = item.kind && !item.span.from_expansion() { check_trait_items(cx, item, ident, trait_items); diff --git a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs index 045363058d1..d664eaaac70 100644 --- a/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/from_iter_instead_of_collect.rs @@ -4,6 +4,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::implements_trait; use clippy_utils::{is_path_diagnostic_item, sugg}; +use rustc_ast::join_path_idents; use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::{self as hir, Expr, ExprKind, GenericArg, QPath, TyKind}; @@ -47,7 +48,7 @@ fn build_full_type(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, app: &mut Applica && let QPath::Resolved(None, ty_path) = &ty_qpath && let Res::Def(_, ty_did) = ty_path.res { - let mut ty_str = itertools::join(ty_path.segments.iter().map(|s| s.ident), "::"); + let mut ty_str = join_path_idents(ty_path.segments.iter().map(|seg| seg.ident)); let mut first = true; let mut append = |arg: &str| { write!(&mut ty_str, "{}{arg}", [", ", "<"][usize::from(first)]).unwrap(); diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs index 329f7193437..c4a3d10299b 100644 --- a/src/tools/clippy/clippy_lints/src/missing_inline.rs +++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs @@ -101,7 +101,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { let attrs = cx.tcx.hir_attrs(it.hir_id()); check_missing_inline_attrs(cx, attrs, it.span, desc); }, - hir::ItemKind::Trait(ref _is_auto, ref _unsafe, _ident, _generics, _bounds, trait_items) => { + hir::ItemKind::Trait(ref _constness, ref _is_auto, ref _unsafe, _ident, _generics, _bounds, trait_items) => { // note: we need to check if the trait is exported so we can't use // `LateLintPass::check_trait_item` here. for &tit in trait_items { diff --git a/src/tools/clippy/clippy_lints/src/trait_bounds.rs b/src/tools/clippy/clippy_lints/src/trait_bounds.rs index 45e54302e32..9182a55081f 100644 --- a/src/tools/clippy/clippy_lints/src/trait_bounds.rs +++ b/src/tools/clippy/clippy_lints/src/trait_bounds.rs @@ -112,7 +112,7 @@ impl<'tcx> LateLintPass<'tcx> for TraitBounds { // special handling for self trait bounds as these are not considered generics // ie. trait Foo: Display {} if let Item { - kind: ItemKind::Trait(_, _, _, _, bounds, ..), + kind: ItemKind::Trait(_, _, _, _, _, bounds, ..), .. } = item { @@ -133,7 +133,7 @@ impl<'tcx> LateLintPass<'tcx> for TraitBounds { .. 
}) = segments.first() && let Some(Node::Item(Item { - kind: ItemKind::Trait(_, _, _, _, self_bounds, _), + kind: ItemKind::Trait(_, _, _, _, _, self_bounds, _), .. })) = cx.tcx.hir_get_if_local(*def_id) { diff --git a/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs index 02281b9e922..944cd91a7fd 100644 --- a/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs +++ b/src/tools/clippy/clippy_lints/src/upper_case_acronyms.rs @@ -131,7 +131,7 @@ impl LateLintPass<'_> for UpperCaseAcronyms { return; } match it.kind { - ItemKind::TyAlias(ident, ..) | ItemKind::Struct(ident, ..) | ItemKind::Trait(_, _, ident, ..) => { + ItemKind::TyAlias(ident, ..) | ItemKind::Struct(ident, ..) | ItemKind::Trait(_, _, _, ident, ..) => { check_ident(cx, &ident, it.hir_id(), self.upper_case_acronyms_aggressive); }, ItemKind::Enum(ident, _, ref enumdef) => { diff --git a/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs index 42254ec8e92..96f0273c439 100644 --- a/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ast_utils/mod.rs @@ -444,6 +444,7 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { }, ( Trait(box ast::Trait { + constness: lc, is_auto: la, safety: lu, ident: li, @@ -452,6 +453,7 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { items: lis, }), Trait(box ast::Trait { + constness: rc, is_auto: ra, safety: ru, ident: ri, @@ -460,7 +462,8 @@ pub fn eq_item_kind(l: &ItemKind, r: &ItemKind) -> bool { items: ris, }), ) => { - la == ra + matches!(lc, ast::Const::No) == matches!(rc, ast::Const::No) + && la == ra && matches!(lu, Safety::Default) == matches!(ru, Safety::Default) && eq_id(*li, *ri) && eq_generics(lg, rg) diff --git a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs index ce61fffe0de..dc31ed08fb7 100644 --- a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs +++ b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs @@ -252,11 +252,11 @@ fn item_search_pat(item: &Item<'_>) -> (Pat, Pat) { ItemKind::Struct(_, _, VariantData::Struct { .. }) => (Pat::Str("struct"), Pat::Str("}")), ItemKind::Struct(..) => (Pat::Str("struct"), Pat::Str(";")), ItemKind::Union(..) => (Pat::Str("union"), Pat::Str("}")), - ItemKind::Trait(_, Safety::Unsafe, ..) + ItemKind::Trait(_, _, Safety::Unsafe, ..) | ItemKind::Impl(Impl { safety: Safety::Unsafe, .. }) => (Pat::Str("unsafe"), Pat::Str("}")), - ItemKind::Trait(IsAuto::Yes, ..) => (Pat::Str("auto"), Pat::Str("}")), + ItemKind::Trait(_, IsAuto::Yes, ..) => (Pat::Str("auto"), Pat::Str("}")), ItemKind::Trait(..) 
=> (Pat::Str("trait"), Pat::Str("}")), ItemKind::Impl(_) => (Pat::Str("impl"), Pat::Str("}")), _ => return (Pat::Str(""), Pat::Str("")), diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index ad8c816e204..ff1ee663f9b 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -89,6 +89,7 @@ use std::sync::{Mutex, MutexGuard, OnceLock}; use itertools::Itertools; use rustc_abi::Integer; +use rustc_ast::join_path_syms; use rustc_ast::ast::{self, LitKind, RangeLimits}; use rustc_attr_data_structures::{AttributeKind, find_attr}; use rustc_data_structures::fx::FxHashMap; @@ -3245,8 +3246,8 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St // a::b::c ::d::sym refers to // e::f::sym:: :: // result should be super::super::super::super::e::f - if let DefPathData::TypeNs(s) = l { - path.push(s.to_string()); + if let DefPathData::TypeNs(sym) = l { + path.push(sym); } if let DefPathData::TypeNs(_) = r { go_up_by += 1; @@ -3256,7 +3257,7 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St // a::b::sym:: :: refers to // c::d::e ::f::sym // when looking at `f` - Left(DefPathData::TypeNs(sym)) => path.push(sym.to_string()), + Left(DefPathData::TypeNs(sym)) => path.push(sym), // consider: // a::b::c ::d::sym refers to // e::f::sym:: :: @@ -3268,17 +3269,17 @@ fn maybe_get_relative_path(from: &DefPath, to: &DefPath, max_super: usize) -> St if go_up_by > max_super { // `super` chain would be too long, just use the absolute path instead - once(String::from("crate")) - .chain(to.data.iter().filter_map(|el| { + join_path_syms( + once(kw::Crate).chain(to.data.iter().filter_map(|el| { if let DefPathData::TypeNs(sym) = el.data { - Some(sym.to_string()) + Some(sym) } else { None } })) - .join("::") + ) } else { - repeat_n(String::from("super"), go_up_by).chain(path).join("::") + join_path_syms(repeat_n(kw::Super, go_up_by).chain(path)) } } diff --git a/src/tools/clippy/tests/ui/assign_ops.fixed b/src/tools/clippy/tests/ui/assign_ops.fixed index 99beea850a2..eee61f949e7 100644 --- a/src/tools/clippy/tests/ui/assign_ops.fixed +++ b/src/tools/clippy/tests/ui/assign_ops.fixed @@ -84,8 +84,7 @@ mod issue14871 { const ONE: Self; } - #[const_trait] - pub trait NumberConstants { + pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/assign_ops.rs b/src/tools/clippy/tests/ui/assign_ops.rs index 900d5ad38e0..13ffcee0a3c 100644 --- a/src/tools/clippy/tests/ui/assign_ops.rs +++ b/src/tools/clippy/tests/ui/assign_ops.rs @@ -84,8 +84,7 @@ mod issue14871 { const ONE: Self; } - #[const_trait] - pub trait NumberConstants { + pub const trait NumberConstants { fn constant(value: usize) -> Self; } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.fixed b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.fixed index f1d5579a723..c113c1caaa6 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.fixed +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.fixed @@ -3,8 +3,7 @@ // Reduced test case from https://github.com/rust-lang/rust-clippy/issues/14658 -#[const_trait] -trait ConstTrait { +const trait ConstTrait { fn method(self); } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs index d495759526d..69248bc52d5 100644 --- 
a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.rs @@ -3,8 +3,7 @@ // Reduced test case from https://github.com/rust-lang/rust-clippy/issues/14658 -#[const_trait] -trait ConstTrait { +const trait ConstTrait { fn method(self); } diff --git a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.stderr b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.stderr index b994b88fac6..7ea009cfc9b 100644 --- a/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.stderr +++ b/src/tools/clippy/tests/ui/missing_const_for_fn/const_trait.stderr @@ -1,5 +1,5 @@ error: this could be a `const fn` - --> tests/ui/missing_const_for_fn/const_trait.rs:24:1 + --> tests/ui/missing_const_for_fn/const_trait.rs:23:1 | LL | / fn can_be_const() { LL | | 0u64.method(); diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs b/src/tools/clippy/tests/ui/ptr_arg.rs index 65f3f05d6cb..578641e910d 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.rs +++ b/src/tools/clippy/tests/ui/ptr_arg.rs @@ -312,7 +312,7 @@ mod issue_9218 { // Inferred to be `&'a str`, afaik. fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &str { - //~^ ERROR: lifetime flowing from input to output with different syntax + //~^ ERROR: eliding a lifetime that's named elsewhere is confusing todo!() } } diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr index 600343754e1..fd9ceddfe11 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.stderr +++ b/src/tools/clippy/tests/ui/ptr_arg.stderr @@ -231,18 +231,19 @@ error: writing `&String` instead of `&str` involves a new object where a slice w LL | fn good(v1: &String, v2: &String) { | ^^^^^^^ help: change this to: `&str` -error: lifetime flowing from input to output with different syntax can be confusing +error: eliding a lifetime that's named elsewhere is confusing --> tests/ui/ptr_arg.rs:314:36 | LL | fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &str { - | ^^ ^^ ---- the lifetime gets resolved as `'a` + | ^^ ^^ ---- the same lifetime is elided here | | | - | | these lifetimes flow to the output - | these lifetimes flow to the output + | | the lifetime is named here + | the lifetime is named here | + = help: the same lifetime is referred to in inconsistent ways, making the signature confusing = note: `-D mismatched-lifetime-syntaxes` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(mismatched_lifetime_syntaxes)]` -help: one option is to consistently use `'a` +help: consistently use `'a` | LL | fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &'a str { | ++ diff --git a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.fixed b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.fixed index cf52ecf2f03..88ba5f810b4 100644 --- a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.fixed +++ b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.fixed @@ -167,8 +167,7 @@ where } // #13476 -#[const_trait] -trait ConstTrait {} +const trait ConstTrait {} const fn const_trait_bounds_good<T: ConstTrait + [const] ConstTrait>() {} const fn const_trait_bounds_bad<T: [const] ConstTrait>() {} diff --git a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs index 955562f08dc..19a4e70e294 100644 --- a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs +++ b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.rs @@ -167,8 +167,7 @@ where } // #13476 -#[const_trait] -trait ConstTrait {} 
+const trait ConstTrait {} const fn const_trait_bounds_good<T: ConstTrait + [const] ConstTrait>() {} const fn const_trait_bounds_bad<T: [const] ConstTrait + [const] ConstTrait>() {} diff --git a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr index ab31721ef51..a56a683de97 100644 --- a/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr +++ b/src/tools/clippy/tests/ui/trait_duplication_in_bounds.stderr @@ -59,19 +59,19 @@ LL | fn bad_trait_object(arg0: &(dyn Any + Send + Send)) { | ^^^^^^^^^^^^^^^^^ help: try: `Any + Send` error: these bounds contain repeated elements - --> tests/ui/trait_duplication_in_bounds.rs:174:36 + --> tests/ui/trait_duplication_in_bounds.rs:173:36 | LL | const fn const_trait_bounds_bad<T: [const] ConstTrait + [const] ConstTrait>() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `[const] ConstTrait` error: these where clauses contain repeated elements - --> tests/ui/trait_duplication_in_bounds.rs:181:8 + --> tests/ui/trait_duplication_in_bounds.rs:180:8 | LL | T: IntoIterator<Item = U::Owned> + IntoIterator<Item = U::Owned>, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `IntoIterator<Item = U::Owned>` error: these where clauses contain repeated elements - --> tests/ui/trait_duplication_in_bounds.rs:203:8 + --> tests/ui/trait_duplication_in_bounds.rs:202:8 | LL | T: AssocConstTrait<ASSOC = 0> + AssocConstTrait<ASSOC = 0>, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `AssocConstTrait<ASSOC = 0>` diff --git a/src/tools/lint-docs/Cargo.toml b/src/tools/lint-docs/Cargo.toml index e914a2df2ba..6e1ab84ed18 100644 --- a/src/tools/lint-docs/Cargo.toml +++ b/src/tools/lint-docs/Cargo.toml @@ -7,7 +7,7 @@ description = "A script to extract the lint documentation for the rustc book." # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -rustc-literal-escaper = "0.0.4" +rustc-literal-escaper = "0.0.5" serde_json = "1.0.57" tempfile = "3.1.0" walkdir = "2.3.1" diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index b05acff72b5..7816ce1ac56 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -342,9 +342,9 @@ environment variable. We first document the most relevant and most commonly used is enabled (the default), this is also used to emulate system entropy. The default seed is 0. You can increase test coverage by running Miri multiple times with different seeds. * `-Zmiri-strict-provenance` enables [strict - provenance](https://github.com/rust-lang/rust/issues/95228) checking in Miri. This means that - casting an integer to a pointer will stop execution because the provenance of the pointer - cannot be determined. + provenance](https://doc.rust-lang.org/nightly/std/ptr/index.html#strict-provenance) checking in + Miri. This means that casting an integer to a pointer will stop execution because the provenance + of the pointer cannot be determined. * `-Zmiri-symbolic-alignment-check` makes the alignment check more strict. By default, alignment is checked by casting the pointer to an integer, and making sure that is a multiple of the alignment. 
This can lead to cases where a program passes the alignment check by pure chance, because things diff --git a/src/tools/miri/miri-script/Cargo.toml b/src/tools/miri/miri-script/Cargo.toml index 462a9cc62bf..9240788d6bc 100644 --- a/src/tools/miri/miri-script/Cargo.toml +++ b/src/tools/miri/miri-script/Cargo.toml @@ -7,6 +7,7 @@ repository = "https://github.com/rust-lang/miri" version = "0.1.0" default-run = "miri-script" edition = "2024" +rust-version = "1.85" [workspace] # We make this a workspace root so that cargo does not go looking in ../Cargo.toml for the workspace root. diff --git a/src/tools/miri/miri-script/src/commands.rs b/src/tools/miri/miri-script/src/commands.rs index e948beef004..e6ebdf54e38 100644 --- a/src/tools/miri/miri-script/src/commands.rs +++ b/src/tools/miri/miri-script/src/commands.rs @@ -702,7 +702,6 @@ impl Command { let mut early_flags = Vec::<OsString>::new(); // In `dep` mode, the target is already passed via `MIRI_TEST_TARGET` - #[expect(clippy::collapsible_if)] // we need to wait until this is stable if !dep { if let Some(target) = &target { early_flags.push("--target".into()); @@ -735,7 +734,6 @@ impl Command { // Add Miri flags let mut cmd = cmd.args(&miri_flags).args(&early_flags).args(&flags); // For `--dep` we also need to set the target in the env var. - #[expect(clippy::collapsible_if)] // we need to wait until this is stable if dep { if let Some(target) = &target { cmd = cmd.env("MIRI_TEST_TARGET", target); diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 0130cf13a45..67f27e7aa2c 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -733b47ea4b1b86216f14ef56e49440c33933f230 +7f2065a4bae1faed5bab928c670964eafbf43b55 diff --git a/src/tools/miri/src/alloc/isolated_alloc.rs b/src/tools/miri/src/alloc/isolated_alloc.rs index a7bb9b4da75..7b2f1a3eebf 100644 --- a/src/tools/miri/src/alloc/isolated_alloc.rs +++ b/src/tools/miri/src/alloc/isolated_alloc.rs @@ -302,23 +302,20 @@ impl IsolatedAlloc { } } - /// Returns a vector of page addresses managed by the allocator. - pub fn pages(&self) -> Vec<usize> { - let mut pages: Vec<usize> = - self.page_ptrs.iter().map(|p| p.expose_provenance().get()).collect(); - for (ptr, size) in self.huge_ptrs.iter() { - for i in 0..size / self.page_size { - pages.push(ptr.expose_provenance().get().strict_add(i * self.page_size)); - } - } - pages + /// Returns a list of page addresses managed by the allocator. + pub fn pages(&self) -> impl Iterator<Item = usize> { + let pages = self.page_ptrs.iter().map(|p| p.expose_provenance().get()); + pages.chain(self.huge_ptrs.iter().flat_map(|(ptr, size)| { + (0..size / self.page_size) + .map(|i| ptr.expose_provenance().get().strict_add(i * self.page_size)) + })) } /// Protects all owned memory as `PROT_NONE`, preventing accesses. /// /// SAFETY: Accessing memory after this point will result in a segfault /// unless it is first unprotected. - pub unsafe fn prepare_ffi(&mut self) -> Result<(), nix::errno::Errno> { + pub unsafe fn start_ffi(&mut self) -> Result<(), nix::errno::Errno> { let prot = mman::ProtFlags::PROT_NONE; unsafe { self.mprotect(prot) } } @@ -326,7 +323,7 @@ impl IsolatedAlloc { /// Deprotects all owned memory by setting it to RW. Erroring here is very /// likely unrecoverable, so it may panic if applying those permissions /// fails. 
- pub fn unprep_ffi(&mut self) { + pub fn end_ffi(&mut self) { let prot = mman::ProtFlags::PROT_READ | mman::ProtFlags::PROT_WRITE; unsafe { self.mprotect(prot).unwrap(); diff --git a/src/tools/miri/src/bin/log/tracing_chrome.rs b/src/tools/miri/src/bin/log/tracing_chrome.rs index 459acea6f0b..5a96633c99e 100644 --- a/src/tools/miri/src/bin/log/tracing_chrome.rs +++ b/src/tools/miri/src/bin/log/tracing_chrome.rs @@ -1,8 +1,18 @@ // SPDX-License-Identifier: MIT // SPDX-FileCopyrightText: Copyright (c) 2020 Thoren Paulson -//! This file is taken unmodified from the following link, except for file attributes and -//! `extern crate` at the top. -//! https://github.com/thoren-d/tracing-chrome/blob/7e2625ab4aeeef2f0ef9bde9d6258dd181c04472/src/lib.rs +//! This file was initially taken from the following link: +//! <https://github.com/thoren-d/tracing-chrome/blob/7e2625ab4aeeef2f0ef9bde9d6258dd181c04472/src/lib.rs> +//! +//! The precise changes that were made to the original file can be found in git history +//! (`git log -- path/to/tracing_chrome.rs`), but in summary: +//! - the file attributes were changed and `extern crate` was added at the top +//! - if a tracing span has a field called "tracing_separate_thread", it will be given a separate +//! span ID even in [TraceStyle::Threaded] mode, to make it appear on a separate line when viewing +//! the trace in <https://ui.perfetto.dev>. This is the syntax to trigger this behavior: +//! ```rust +//! tracing::info_span!("my_span", tracing_separate_thread = tracing::field::Empty, /* ... */) +//! ``` +//! //! Depending on the tracing-chrome crate from crates.io is unfortunately not possible, since it //! depends on `tracing_core` which conflicts with rustc_private's `tracing_core` (meaning it would //! not be possible to use the [ChromeLayer] in a context that expects a [Layer] from @@ -79,14 +89,26 @@ where } /// Decides how traces will be recorded. +/// Also see <https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#heading=h.jh64i9l3vwa1> #[derive(Default)] pub enum TraceStyle { - /// Traces will be recorded as a group of threads. + /// Traces will be recorded as a group of threads, and all spans on the same thread will appear + /// on a single trace line in <https://ui.perfetto.dev>. /// In this style, spans should be entered and exited on the same thread. + /// + /// If a tracing span has a field called "tracing_separate_thread", it will be given a separate + /// span ID even in this mode, to make it appear on a separate line when viewing the trace in + /// <https://ui.perfetto.dev>. This is the syntax to trigger this behavior: + /// ```rust + /// tracing::info_span!("my_span", tracing_separate_thread = tracing::field::Empty, /* ... */) + /// ``` + /// [tracing::field::Empty] is used so that other tracing layers (e.g. the logger) will ignore + /// the "tracing_separate_thread" argument and not print out anything for it. #[default] Threaded, - /// Traces will recorded as a group of asynchronous operations. + /// Traces will be recorded as a group of asynchronous operations. All spans will be given separate + /// span IDs and will appear on separate trace lines in <https://ui.perfetto.dev>.
Async, } } @@ -497,31 +519,39 @@ where } } - fn get_root_id(span: SpanRef<S>) -> u64 { - span.scope() - .from_root() - .take(1) - .next() - .unwrap_or(span) - .id() - .into_u64() + fn get_root_id(&self, span: SpanRef<S>) -> Option<u64> { + match self.trace_style { + TraceStyle::Threaded => { + if span.fields().field("tracing_separate_thread").is_some() { + // assign an independent "id" to spans with argument "tracing_separate_thread", + // so they appear on a separate trace line in trace visualization tools, see + // https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview#heading=h.jh64i9l3vwa1 + Some(span.id().into_u64()) + } else { + None + } + }, + TraceStyle::Async => Some( + span.scope() + .from_root() + .take(1) + .next() + .unwrap_or(span) + .id() + .into_u64() + ), + } } fn enter_span(&self, span: SpanRef<S>, ts: f64) { let callsite = self.get_callsite(EventOrSpan::Span(&span)); - let root_id = match self.trace_style { - TraceStyle::Async => Some(ChromeLayer::get_root_id(span)), - _ => None, - }; + let root_id = self.get_root_id(span); self.send_message(Message::Enter(ts, callsite, root_id)); } fn exit_span(&self, span: SpanRef<S>, ts: f64) { let callsite = self.get_callsite(EventOrSpan::Span(&span)); - let root_id = match self.trace_style { - TraceStyle::Async => Some(ChromeLayer::get_root_id(span)), - _ => None, - }; + let root_id = self.get_root_id(span); self.send_message(Message::Exit(ts, callsite, root_id)); } diff --git a/src/tools/miri/src/bin/miri.rs b/src/tools/miri/src/bin/miri.rs index e3b579a4ac6..61cebedf081 100644 --- a/src/tools/miri/src/bin/miri.rs +++ b/src/tools/miri/src/bin/miri.rs @@ -233,8 +233,6 @@ impl rustc_driver::Callbacks for MiriCompilerCalls { } else { let return_code = miri::eval_entry(tcx, entry_def_id, entry_type, &config, None) .unwrap_or_else(|| { - //#[cfg(target_os = "linux")] - //miri::native_lib::register_retcode_sv(rustc_driver::EXIT_FAILURE); tcx.dcx().abort_if_errors(); rustc_driver::EXIT_FAILURE }); @@ -337,6 +335,9 @@ impl rustc_driver::Callbacks for MiriBeRustCompilerCalls { fn exit(exit_code: i32) -> ! { // Drop the tracing guard before exiting, so tracing calls are flushed correctly. deinit_loggers(); + // Make sure the supervisor knows about the exit code. + #[cfg(target_os = "linux")] + miri::native_lib::register_retcode_sv(exit_code); std::process::exit(exit_code); } @@ -355,6 +356,11 @@ fn run_compiler_and_exit( args: &[String], callbacks: &mut (dyn rustc_driver::Callbacks + Send), ) -> ! { + // Install the ctrlc handler that sets `rustc_const_eval::CTRL_C_RECEIVED`, even if + // MIRI_BE_RUSTC is set. We do this late so that when `native_lib::init_sv` is called, + // there are no other threads. + rustc_driver::install_ctrlc_handler(); + // Invoke compiler, catch any unwinding panics and handle return code. let exit_code = rustc_driver::catch_with_exit_code(move || rustc_driver::run_compiler(args, callbacks)); @@ -439,10 +445,6 @@ fn main() { let args = rustc_driver::catch_fatal_errors(|| rustc_driver::args::raw_args(&early_dcx)) .unwrap_or_else(|_| std::process::exit(rustc_driver::EXIT_FAILURE)); - // Install the ctrlc handler that sets `rustc_const_eval::CTRL_C_RECEIVED`, even if - // MIRI_BE_RUSTC is set. - rustc_driver::install_ctrlc_handler(); - // If the environment asks us to actually be rustc, then do that. if let Some(crate_kind) = env::var_os("MIRI_BE_RUSTC") { // Earliest rustc setup.
@@ -750,15 +752,15 @@ fn main() { debug!("rustc arguments: {:?}", rustc_args); debug!("crate arguments: {:?}", miri_config.args); - #[cfg(target_os = "linux")] if !miri_config.native_lib.is_empty() && miri_config.native_lib_enable_tracing { - // FIXME: This should display a diagnostic / warning on error - // SAFETY: If any other threads exist at this point (namely for the ctrlc - // handler), they will not interact with anything on the main rustc/Miri - // thread in an async-signal-unsafe way such as by accessing shared - // semaphores, etc.; the handler only calls `sleep()` and `exit()`, which - // are async-signal-safe, as is accessing atomics - //let _ = unsafe { miri::native_lib::init_sv() }; + // SAFETY: No other threads are running + #[cfg(target_os = "linux")] + if unsafe { miri::native_lib::init_sv() }.is_err() { + eprintln!( + "warning: The native-lib tracer could not be started. Is this an x86 Linux system, and does Miri have permissions to ptrace?\n\ + Falling back to non-tracing native-lib mode." + ); + } } run_compiler_and_exit( &rustc_args, diff --git a/src/tools/miri/src/borrow_tracker/mod.rs b/src/tools/miri/src/borrow_tracker/mod.rs index 36c61053a32..ec6c2c60ca9 100644 --- a/src/tools/miri/src/borrow_tracker/mod.rs +++ b/src/tools/miri/src/borrow_tracker/mod.rs @@ -260,6 +260,7 @@ impl GlobalStateInner { kind: MemoryKind, machine: &MiriMachine<'_>, ) -> AllocState { + let _span = enter_trace_span!(borrow_tracker::new_allocation, ?id, ?alloc_size, ?kind); match self.borrow_tracker_method { BorrowTrackerMethod::StackedBorrows => AllocState::StackedBorrows(Box::new(RefCell::new(Stacks::new_allocation( @@ -280,6 +281,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { kind: RetagKind, val: &ImmTy<'tcx>, ) -> InterpResult<'tcx, ImmTy<'tcx>> { + let _span = enter_trace_span!(borrow_tracker::retag_ptr_value, ?kind, ?val.layout); let this = self.eval_context_mut(); let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method; match method { @@ -293,6 +295,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { kind: RetagKind, place: &PlaceTy<'tcx>, ) -> InterpResult<'tcx> { + let _span = enter_trace_span!(borrow_tracker::retag_place_contents, ?kind, ?place); let this = self.eval_context_mut(); let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method; match method { @@ -302,6 +305,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } fn protect_place(&mut self, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx, MPlaceTy<'tcx>> { + let _span = enter_trace_span!(borrow_tracker::protect_place, ?place); let this = self.eval_context_mut(); let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method; match method { @@ -311,6 +315,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } fn expose_tag(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> { + let _span = + enter_trace_span!(borrow_tracker::expose_tag, alloc_id = alloc_id.0, tag = tag.0); let this = self.eval_context_ref(); let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method; match method { @@ -354,6 +360,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &self, frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>, ) -> InterpResult<'tcx> { + let _span = enter_trace_span!(borrow_tracker::on_stack_pop); let this = self.eval_context_ref(); let borrow_tracker = this.machine.borrow_tracker.as_ref().unwrap(); // The body of this 
loop needs `borrow_tracker` immutably @@ -431,6 +438,7 @@ impl AllocState { range: AllocRange, machine: &MiriMachine<'tcx>, ) -> InterpResult<'tcx> { + let _span = enter_trace_span!(borrow_tracker::before_memory_read, alloc_id = alloc_id.0); match self { AllocState::StackedBorrows(sb) => sb.borrow_mut().before_memory_read(alloc_id, prov_extra, range, machine), @@ -452,6 +460,7 @@ impl AllocState { range: AllocRange, machine: &MiriMachine<'tcx>, ) -> InterpResult<'tcx> { + let _span = enter_trace_span!(borrow_tracker::before_memory_write, alloc_id = alloc_id.0); match self { AllocState::StackedBorrows(sb) => sb.get_mut().before_memory_write(alloc_id, prov_extra, range, machine), @@ -473,6 +482,8 @@ impl AllocState { size: Size, machine: &MiriMachine<'tcx>, ) -> InterpResult<'tcx> { + let _span = + enter_trace_span!(borrow_tracker::before_memory_deallocation, alloc_id = alloc_id.0); match self { AllocState::StackedBorrows(sb) => sb.get_mut().before_memory_deallocation(alloc_id, prov_extra, size, machine), @@ -482,6 +493,7 @@ impl AllocState { } pub fn remove_unreachable_tags(&self, tags: &FxHashSet<BorTag>) { + let _span = enter_trace_span!(borrow_tracker::remove_unreachable_tags); match self { AllocState::StackedBorrows(sb) => sb.borrow_mut().remove_unreachable_tags(tags), AllocState::TreeBorrows(tb) => tb.borrow_mut().remove_unreachable_tags(tags), @@ -496,6 +508,11 @@ impl AllocState { tag: BorTag, alloc_id: AllocId, // diagnostics ) -> InterpResult<'tcx> { + let _span = enter_trace_span!( + borrow_tracker::release_protector, + alloc_id = alloc_id.0, + tag = tag.0 + ); match self { AllocState::StackedBorrows(_sb) => interp_ok(()), AllocState::TreeBorrows(tb) => @@ -506,6 +523,7 @@ impl AllocState { impl VisitProvenance for AllocState { fn visit_provenance(&self, visit: &mut VisitWith<'_>) { + let _span = enter_trace_span!(borrow_tracker::visit_provenance); match self { AllocState::StackedBorrows(sb) => sb.visit_provenance(visit), AllocState::TreeBorrows(tb) => tb.visit_provenance(visit), diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs index b8bcacf7c99..834a4b41f22 100644 --- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs @@ -30,7 +30,7 @@ pub type AllocState = Stacks; #[derive(Clone, Debug)] pub struct Stacks { // Even reading memory can have effects on the stack, so we need a `RefCell` here. - stacks: RangeMap<Stack>, + stacks: DedupRangeMap<Stack>, /// Stores past operations on this allocation history: AllocHistory, /// The set of tags that have been exposed inside this allocation. @@ -468,7 +468,7 @@ impl<'tcx> Stacks { let stack = Stack::new(item); Stacks { - stacks: RangeMap::new(size, stack), + stacks: DedupRangeMap::new(size, stack), history: AllocHistory::new(id, item, machine), exposed_tags: FxHashSet::default(), } diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs index a0761cb07a1..c157c69d7c8 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs @@ -314,7 +314,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let span = this.machine.current_span(); // Store initial permissions and their corresponding range. 
- let mut perms_map: RangeMap<LocationState> = RangeMap::new( + let mut perms_map: DedupRangeMap<LocationState> = DedupRangeMap::new( ptr_size, LocationState::new_accessed(Permission::new_disabled(), IdempotentForeignAccess::None), // this will be overwritten ); diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 48e4a19e263..1f29bcfc2b0 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -247,7 +247,7 @@ pub struct Tree { /// `unwrap` any `perm.get(key)`. /// /// We do uphold the fact that `keys(perms)` is a subset of `keys(nodes)` - pub(super) rperms: RangeMap<UniValMap<LocationState>>, + pub(super) rperms: DedupRangeMap<UniValMap<LocationState>>, /// The index of the root node. pub(super) root: UniIndex, } @@ -609,7 +609,7 @@ impl Tree { IdempotentForeignAccess::None, ), ); - RangeMap::new(size, perms) + DedupRangeMap::new(size, perms) }; Self { root: root_idx, nodes, rperms, tag_mapping } } @@ -631,7 +631,7 @@ impl<'tcx> Tree { base_offset: Size, parent_tag: BorTag, new_tag: BorTag, - initial_perms: RangeMap<LocationState>, + initial_perms: DedupRangeMap<LocationState>, default_perm: Permission, protected: bool, span: Span, diff --git a/src/tools/miri/src/concurrency/data_race.rs b/src/tools/miri/src/concurrency/data_race.rs index b5e7e9d0ac0..38d76f5cf73 100644 --- a/src/tools/miri/src/concurrency/data_race.rs +++ b/src/tools/miri/src/concurrency/data_race.rs @@ -997,7 +997,7 @@ pub trait EvalContextExt<'tcx>: MiriInterpCxExt<'tcx> { #[derive(Debug, Clone)] pub struct VClockAlloc { /// Assigning each byte a MemoryCellClocks. - alloc_ranges: RefCell<RangeMap<MemoryCellClocks>>, + alloc_ranges: RefCell<DedupRangeMap<MemoryCellClocks>>, } impl VisitProvenance for VClockAlloc { @@ -1045,7 +1045,7 @@ impl VClockAlloc { (VTimestamp::ZERO, global.thread_index(ThreadId::MAIN_THREAD)), }; VClockAlloc { - alloc_ranges: RefCell::new(RangeMap::new( + alloc_ranges: RefCell::new(DedupRangeMap::new( len, MemoryCellClocks::new(alloc_timestamp, alloc_index), )), diff --git a/src/tools/miri/src/concurrency/mod.rs b/src/tools/miri/src/concurrency/mod.rs index 49bcc0d30b5..c2ea8a00dec 100644 --- a/src/tools/miri/src/concurrency/mod.rs +++ b/src/tools/miri/src/concurrency/mod.rs @@ -2,7 +2,6 @@ pub mod cpu_affinity; pub mod data_race; mod data_race_handler; pub mod init_once; -mod range_object_map; pub mod sync; pub mod thread; mod vector_clock; diff --git a/src/tools/miri/src/concurrency/thread.rs b/src/tools/miri/src/concurrency/thread.rs index a4e93f9222c..878afdf2517 100644 --- a/src/tools/miri/src/concurrency/thread.rs +++ b/src/tools/miri/src/concurrency/thread.rs @@ -186,15 +186,15 @@ pub struct Thread<'tcx> { /// The join status. join_status: ThreadJoinStatus, - /// Stack of active panic payloads for the current thread. Used for storing - /// the argument of the call to `miri_start_unwind` (the panic payload) when unwinding. + /// Stack of active unwind payloads for the current thread. Used for storing + /// the argument of the call to `miri_start_unwind` (the payload) when unwinding. /// This is pointer-sized, and matches the `Payload` type in `src/libpanic_unwind/miri.rs`. /// /// In real unwinding, the payload gets passed as an argument to the landing pad, /// which then forwards it to 'Resume'. However this argument is implicit in MIR, /// so we have to store it out-of-band. 
When there are multiple active unwinds, /// the innermost one is always caught first, so we can store them as a stack. - pub(crate) panic_payloads: Vec<ImmTy<'tcx>>, + pub(crate) unwind_payloads: Vec<ImmTy<'tcx>>, /// Last OS error location in memory. It is a 32-bit integer. pub(crate) last_error: Option<MPlaceTy<'tcx>>, @@ -282,7 +282,7 @@ impl<'tcx> Thread<'tcx> { stack: Vec::new(), top_user_relevant_frame: None, join_status: ThreadJoinStatus::Joinable, - panic_payloads: Vec::new(), + unwind_payloads: Vec::new(), last_error: None, on_stack_empty, } @@ -292,7 +292,7 @@ impl<'tcx> Thread<'tcx> { impl VisitProvenance for Thread<'_> { fn visit_provenance(&self, visit: &mut VisitWith<'_>) { let Thread { - panic_payloads: panic_payload, + unwind_payloads: panic_payload, last_error, stack, top_user_relevant_frame: _, @@ -677,6 +677,8 @@ trait EvalContextPrivExt<'tcx>: MiriInterpCxExt<'tcx> { fn run_on_stack_empty(&mut self) -> InterpResult<'tcx, Poll<()>> { let this = self.eval_context_mut(); // Inform GenMC that a thread has finished all user code. GenMC needs to know this for scheduling. + // FIXME(GenMC): Thread-local destructors *are* user code, so this is odd. Also now that we + // support pre-main constructors, it can get called there as well. if let Some(genmc_ctx) = this.machine.data_race.as_genmc_ref() { let thread_id = this.active_thread(); genmc_ctx.handle_thread_stack_empty(thread_id); diff --git a/src/tools/miri/src/concurrency/weak_memory.rs b/src/tools/miri/src/concurrency/weak_memory.rs index 95c010be2fd..a752ef7e454 100644 --- a/src/tools/miri/src/concurrency/weak_memory.rs +++ b/src/tools/miri/src/concurrency/weak_memory.rs @@ -90,9 +90,9 @@ use rustc_data_structures::fx::FxHashMap; use super::AllocDataRaceHandler; use super::data_race::{GlobalState as DataRaceState, ThreadClockSet}; -use super::range_object_map::{AccessType, RangeObjectMap}; use super::vector_clock::{VClock, VTimestamp, VectorIdx}; use crate::concurrency::GlobalDataRaceHandler; +use crate::data_structures::range_object_map::{AccessType, RangeObjectMap}; use crate::*; pub type AllocState = StoreBufferAlloc; diff --git a/src/tools/miri/src/range_map.rs b/src/tools/miri/src/data_structures/dedup_range_map.rs index 29a5a8537a4..56e9883b1ca 100644 --- a/src/tools/miri/src/range_map.rs +++ b/src/tools/miri/src/data_structures/dedup_range_map.rs @@ -17,18 +17,18 @@ struct Elem<T> { data: T, } #[derive(Clone, Debug)] -pub struct RangeMap<T> { +pub struct DedupRangeMap<T> { v: Vec<Elem<T>>, } -impl<T> RangeMap<T> { +impl<T> DedupRangeMap<T> { /// Creates a new `RangeMap` for the given size, and with the given initial value used for /// the entire range. #[inline(always)] - pub fn new(size: Size, init: T) -> RangeMap<T> { + pub fn new(size: Size, init: T) -> DedupRangeMap<T> { let size = size.bytes(); let v = if size > 0 { vec![Elem { range: 0..size, data: init }] } else { Vec::new() }; - RangeMap { v } + DedupRangeMap { v } } pub fn size(&self) -> Size { @@ -246,7 +246,7 @@ mod tests { use super::*; /// Query the map at every offset in the range and collect the results. 
- fn to_vec<T: Copy>(map: &RangeMap<T>, offset: u64, len: u64) -> Vec<T> { + fn to_vec<T: Copy>(map: &DedupRangeMap<T>, offset: u64, len: u64) -> Vec<T> { (offset..offset + len) .map(|i| { map.iter(Size::from_bytes(i), Size::from_bytes(1)).next().map(|(_, &t)| t).unwrap() @@ -256,7 +256,7 @@ mod tests { #[test] fn basic_insert() { - let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1); + let mut map = DedupRangeMap::<i32>::new(Size::from_bytes(20), -1); // Insert. for (_, x) in map.iter_mut(Size::from_bytes(10), Size::from_bytes(1)) { *x = 42; @@ -278,7 +278,7 @@ mod tests { #[test] fn gaps() { - let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1); + let mut map = DedupRangeMap::<i32>::new(Size::from_bytes(20), -1); for (_, x) in map.iter_mut(Size::from_bytes(11), Size::from_bytes(1)) { *x = 42; } @@ -319,26 +319,26 @@ mod tests { #[test] #[should_panic] fn out_of_range_iter_mut() { - let mut map = RangeMap::<i32>::new(Size::from_bytes(20), -1); + let mut map = DedupRangeMap::<i32>::new(Size::from_bytes(20), -1); let _ = map.iter_mut(Size::from_bytes(11), Size::from_bytes(11)); } #[test] #[should_panic] fn out_of_range_iter() { - let map = RangeMap::<i32>::new(Size::from_bytes(20), -1); + let map = DedupRangeMap::<i32>::new(Size::from_bytes(20), -1); let _ = map.iter(Size::from_bytes(11), Size::from_bytes(11)); } #[test] fn empty_map_iter() { - let map = RangeMap::<i32>::new(Size::from_bytes(0), -1); + let map = DedupRangeMap::<i32>::new(Size::from_bytes(0), -1); let _ = map.iter(Size::from_bytes(0), Size::from_bytes(0)); } #[test] fn empty_map_iter_mut() { - let mut map = RangeMap::<i32>::new(Size::from_bytes(0), -1); + let mut map = DedupRangeMap::<i32>::new(Size::from_bytes(0), -1); let _ = map.iter_mut(Size::from_bytes(0), Size::from_bytes(0)); } } diff --git a/src/tools/miri/src/data_structures/mod.rs b/src/tools/miri/src/data_structures/mod.rs new file mode 100644 index 00000000000..d4468bc57ea --- /dev/null +++ b/src/tools/miri/src/data_structures/mod.rs @@ -0,0 +1,3 @@ +pub mod dedup_range_map; +pub mod mono_hash_map; +pub mod range_object_map; diff --git a/src/tools/miri/src/mono_hash_map.rs b/src/tools/miri/src/data_structures/mono_hash_map.rs index 220233f8ff5..220233f8ff5 100644 --- a/src/tools/miri/src/mono_hash_map.rs +++ b/src/tools/miri/src/data_structures/mono_hash_map.rs diff --git a/src/tools/miri/src/concurrency/range_object_map.rs b/src/tools/miri/src/data_structures/range_object_map.rs index 4c9cf3dc635..4c9cf3dc635 100644 --- a/src/tools/miri/src/concurrency/range_object_map.rs +++ b/src/tools/miri/src/data_structures/range_object_map.rs diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index 425a136dfa5..be6404f64e8 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -11,14 +11,14 @@ use rustc_abi::ExternAbi; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_hir::def::Namespace; use rustc_hir::def_id::DefId; -use rustc_middle::ty::layout::LayoutCx; +use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutCx}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::config::EntryFnType; use crate::concurrency::GenmcCtx; use crate::concurrency::thread::TlsAllocAction; use crate::diagnostics::report_leaks; -use crate::shims::tls; +use crate::shims::{global_ctor, tls}; use crate::*; #[derive(Copy, Clone, Debug)] @@ -216,9 +216,17 @@ impl Default for MiriConfig { } /// The state of the main thread. Implementation detail of `on_main_stack_empty`. 
-#[derive(Default, Debug)] +#[derive(Debug)] enum MainThreadState<'tcx> { - #[default] + GlobalCtors { + ctor_state: global_ctor::GlobalCtorState<'tcx>, + /// The main function to call. + entry_id: DefId, + entry_type: MiriEntryFnType, + /// Arguments passed to `main`. + argc: ImmTy<'tcx>, + argv: ImmTy<'tcx>, + }, Running, TlsDtors(tls::TlsDtorsState<'tcx>), Yield { @@ -234,6 +242,15 @@ impl<'tcx> MainThreadState<'tcx> { ) -> InterpResult<'tcx, Poll<()>> { use MainThreadState::*; match self { + GlobalCtors { ctor_state, entry_id, entry_type, argc, argv } => { + match ctor_state.on_stack_empty(this)? { + Poll::Pending => {} // just keep going + Poll::Ready(()) => { + call_main(this, *entry_id, *entry_type, argc.clone(), argv.clone())?; + *self = Running; + } + } + } Running => { *self = TlsDtors(Default::default()); } @@ -309,13 +326,6 @@ pub fn create_ecx<'tcx>( MiriMachine::new(config, layout_cx, genmc_ctx), ); - // Some parts of initialization require a full `InterpCx`. - MiriMachine::late_init(&mut ecx, config, { - let mut state = MainThreadState::default(); - // Cannot capture anything GC-relevant here. - Box::new(move |m| state.on_main_stack_empty(m)) - })?; - // Make sure we have MIR. We check MIR for some stable monomorphic function in libcore. let sentinel = helpers::try_resolve_path(tcx, &["core", "ascii", "escape_default"], Namespace::ValueNS); @@ -326,15 +336,9 @@ pub fn create_ecx<'tcx>( ); } - // Setup first stack frame. - let entry_instance = ty::Instance::mono(tcx, entry_id); - - // First argument is constructed later, because it's skipped for `miri_start.` - - // Second argument (argc): length of `config.args`. + // Compute argc and argv from `config.args`. let argc = ImmTy::from_int(i64::try_from(config.args.len()).unwrap(), ecx.machine.layouts.isize); - // Third argument (`argv`): created from `config.args`. let argv = { // Put each argument in memory, collect pointers. let mut argvs = Vec::<Immediate<Provenance>>::with_capacity(config.args.len()); @@ -359,7 +363,7 @@ pub fn create_ecx<'tcx>( ecx.write_immediate(arg, &place)?; } ecx.mark_immutable(&argvs_place); - // Store `argc` and `argv` for macOS `_NSGetArg{c,v}`. + // Store `argc` and `argv` for macOS `_NSGetArg{c,v}`, and for the GC to see them. { let argc_place = ecx.allocate(ecx.machine.layouts.isize, MiriMemoryKind::Machine.into())?; @@ -374,7 +378,7 @@ pub fn create_ecx<'tcx>( ecx.machine.argv = Some(argv_place.ptr()); } // Store command line as UTF-16 for Windows `GetCommandLineW`. - { + if tcx.sess.target.os == "windows" { // Construct a command string with all the arguments. let cmd_utf16: Vec<u16> = args_to_utf16_command_string(config.args.iter()); @@ -395,11 +399,43 @@ pub fn create_ecx<'tcx>( ImmTy::from_immediate(imm, layout) }; + // Some parts of initialization require a full `InterpCx`. + MiriMachine::late_init(&mut ecx, config, { + let mut main_thread_state = MainThreadState::GlobalCtors { + entry_id, + entry_type, + argc, + argv, + ctor_state: global_ctor::GlobalCtorState::default(), + }; + + // Cannot capture anything GC-relevant here. + // `argc` and `argv` *are* GC_relevant, but they also get stored in `machine.argc` and + // `machine.argv` so we are good. + Box::new(move |m| main_thread_state.on_main_stack_empty(m)) + })?; + + interp_ok(ecx) +} + +// Call the entry function. 
+fn call_main<'tcx>( + ecx: &mut MiriInterpCx<'tcx>, + entry_id: DefId, + entry_type: MiriEntryFnType, + argc: ImmTy<'tcx>, + argv: ImmTy<'tcx>, +) -> InterpResult<'tcx, ()> { + let tcx = ecx.tcx(); + + // Setup first stack frame. + let entry_instance = ty::Instance::mono(tcx, entry_id); + // Return place (in static memory so that it does not count as leak). let ret_place = ecx.allocate(ecx.machine.layouts.isize, MiriMemoryKind::Machine.into())?; ecx.machine.main_fn_ret_place = Some(ret_place.clone()); - // Call start function. + // Call start function. match entry_type { MiriEntryFnType::Rustc(EntryFnType::Main { .. }) => { let start_id = tcx.lang_items().start_fn().unwrap_or_else(|| { @@ -409,7 +445,7 @@ pub fn create_ecx<'tcx>( let main_ret_ty = main_ret_ty.no_bound_vars().unwrap(); let start_instance = ty::Instance::try_resolve( tcx, - typing_env, + ecx.typing_env(), start_id, tcx.mk_args(&[ty::GenericArg::from(main_ret_ty)]), ) @@ -427,7 +463,7 @@ pub fn create_ecx<'tcx>( ExternAbi::Rust, &[ ImmTy::from_scalar( - Scalar::from_pointer(main_ptr, &ecx), + Scalar::from_pointer(main_ptr, ecx), // FIXME use a proper fn ptr type ecx.machine.layouts.const_raw_ptr, ), @@ -450,7 +486,7 @@ pub fn create_ecx<'tcx>( } } - interp_ok(ecx) + interp_ok(()) } /// Evaluates the entry function specified by `entry_id`. diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index c150dc16b07..ccfff7fa94b 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -13,7 +13,7 @@ use rustc_index::IndexVec; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::dependency_format::Linkage; use rustc_middle::middle::exported_symbols::ExportedSymbol; -use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, MaybeResult, TyAndLayout}; +use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout}; use rustc_middle::ty::{self, Binder, FloatTy, FnSig, IntTy, Ty, TyCtxt, UintTy}; use rustc_session::config::CrateType; use rustc_span::{Span, Symbol}; @@ -1235,8 +1235,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { interp_ok(()) } - /// Lookup an array of immediates stored as a linker section of name `name`. - fn lookup_link_section(&mut self, name: &str) -> InterpResult<'tcx, Vec<ImmTy<'tcx>>> { + /// Lookup an array of immediates from any linker sections matching the provided predicate. + fn lookup_link_section( + &mut self, + include_name: impl Fn(&str) -> bool, + ) -> InterpResult<'tcx, Vec<ImmTy<'tcx>>> { let this = self.eval_context_mut(); let tcx = this.tcx.tcx; @@ -1247,7 +1250,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let Some(link_section) = attrs.link_section else { return interp_ok(()); }; - if link_section.as_str() == name { + if include_name(link_section.as_str()) { let instance = ty::Instance::mono(tcx, def_id); let const_val = this.eval_global(instance).unwrap_or_else(|err| { panic!( @@ -1431,3 +1434,44 @@ impl ToU64 for usize { self.try_into().unwrap() } } + +/// This struct is needed to enforce `#[must_use]` on values produced by [enter_trace_span] even +/// when the "tracing" feature is not enabled. +#[must_use] +pub struct MaybeEnteredTraceSpan { + #[cfg(feature = "tracing")] + pub _entered_span: tracing::span::EnteredSpan, +} + +/// Enters a [tracing::info_span] only if the "tracing" feature is enabled, otherwise does nothing. 
+/// This is like [rustc_const_eval::enter_trace_span] except that it does not depend on the +/// [Machine] trait to check if tracing is enabled, because from the Miri codebase we can directly +/// check whether the "tracing" feature is enabled, unlike from the rustc_const_eval codebase. +/// +/// In addition to the syntax accepted by [tracing::span!], this macro optionally allows passing +/// the span name (i.e. the first macro argument) in the form `NAME::SUBNAME` (without quotes) to +/// indicate that the span has name "NAME" (usually the name of the component) and has an additional +/// more specific name "SUBNAME" (usually the function name). The latter is passed to the [tracing] +/// infrastructure as a span field with the name "NAME". This allows not being distracted by +/// subnames when looking at the trace in <https://ui.perfetto.dev>, but when deeper introspection +/// is needed within a component, it's still possible to view the subnames directly in the UI by +/// selecting a span, clicking on the "NAME" argument on the right, and clicking on "Visualize +/// argument values". +/// ```rust +/// // for example, the first will expand to the second +/// enter_trace_span!(borrow_tracker::on_stack_pop, /* ... */) +/// enter_trace_span!("borrow_tracker", borrow_tracker = "on_stack_pop", /* ... */) +/// ``` +#[macro_export] +macro_rules! enter_trace_span { + ($name:ident :: $subname:ident $($tt:tt)*) => {{ + enter_trace_span!(stringify!($name), $name = %stringify!($subname) $($tt)*) + }}; + + ($($tt:tt)*) => { + $crate::MaybeEnteredTraceSpan { + #[cfg(feature = "tracing")] + _entered_span: tracing::info_span!($($tt)*).entered() + } + }; +} diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index c86e33e5185..a591d21071d 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -15,6 +15,7 @@ #![feature(unqualified_local_imports)] #![feature(derive_coerce_pointee)] #![feature(arbitrary_self_types)] +#![feature(iter_advance_by)] // Configure clippy and other lints #![allow( clippy::collapsible_else_if, @@ -75,16 +76,15 @@ mod alloc_addresses; mod borrow_tracker; mod clock; mod concurrency; +mod data_structures; mod diagnostics; mod eval; mod helpers; mod intrinsics; mod machine; mod math; -mod mono_hash_map; mod operator; mod provenance_gc; -mod range_map; mod shims; // Establish a "crate-wide prelude": we often import `crate::*`. @@ -97,10 +97,10 @@ pub use rustc_const_eval::interpret::{self, AllocMap, Provenance as _}; use rustc_middle::{bug, span_bug}; use tracing::{info, trace}; -//#[cfg(target_os = "linux")] -//pub mod native_lib { -// pub use crate::shims::{init_sv, register_retcode_sv}; -//} +#[cfg(target_os = "linux")] +pub mod native_lib { + pub use crate::shims::{init_sv, register_retcode_sv}; +} // Type aliases that set the provenance parameter.
pub type Pointer = interpret::Pointer<Option<machine::Provenance>>; @@ -132,6 +132,8 @@ pub use crate::concurrency::thread::{ ThreadManager, TimeoutAnchor, TimeoutClock, UnblockKind, }; pub use crate::concurrency::{GenmcConfig, GenmcCtx}; +pub use crate::data_structures::dedup_range_map::DedupRangeMap; +pub use crate::data_structures::mono_hash_map::MonoHashMap; pub use crate::diagnostics::{ EvalContextExt as _, NonHaltingDiagnostic, TerminationInfo, report_error, }; @@ -139,24 +141,25 @@ pub use crate::eval::{ AlignmentCheck, BacktraceStyle, IsolatedOp, MiriConfig, MiriEntryFnType, RejectOpWith, ValidationMode, create_ecx, eval_entry, }; -pub use crate::helpers::{AccessKind, EvalContextExt as _, ToU64 as _, ToUsize as _}; +pub use crate::helpers::{ + AccessKind, EvalContextExt as _, MaybeEnteredTraceSpan, ToU64 as _, ToUsize as _, +}; pub use crate::intrinsics::EvalContextExt as _; pub use crate::machine::{ AllocExtra, DynMachineCallback, FrameExtra, MachineCallback, MemoryKind, MiriInterpCx, MiriInterpCxExt, MiriMachine, MiriMemoryKind, PrimitiveLayouts, Provenance, ProvenanceExtra, }; -pub use crate::mono_hash_map::MonoHashMap; pub use crate::operator::EvalContextExt as _; pub use crate::provenance_gc::{EvalContextExt as _, LiveAllocs, VisitProvenance, VisitWith}; -pub use crate::range_map::RangeMap; pub use crate::shims::EmulateItemResult; pub use crate::shims::env::{EnvVars, EvalContextExt as _}; pub use crate::shims::foreign_items::{DynSym, EvalContextExt as _}; pub use crate::shims::io_error::{EvalContextExt as _, IoError, LibcError}; pub use crate::shims::os_str::EvalContextExt as _; -pub use crate::shims::panic::{CatchUnwindData, EvalContextExt as _}; +pub use crate::shims::panic::EvalContextExt as _; pub use crate::shims::time::EvalContextExt as _; pub use crate::shims::tls::TlsData; +pub use crate::shims::unwind::{CatchUnwindData, EvalContextExt as _}; /// Insert rustc arguments at the beginning of the argument list that Miri wants to be /// set per default, for maximal validation power. diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index 35399dbf4cb..f309e34c75b 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -1086,7 +1086,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { ecx: &MiriInterpCx<'tcx>, instance: ty::Instance<'tcx>, ) -> InterpResult<'tcx> { - let attrs = ecx.tcx.codegen_fn_attrs(instance.def_id()); + let attrs = ecx.tcx.codegen_instance_attrs(instance.def); if attrs .target_features .iter() @@ -1790,7 +1790,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold, InliningThreshold::Always ) || !matches!( - ecx.tcx.codegen_fn_attrs(instance.def_id()).inline, + ecx.tcx.codegen_instance_attrs(instance.def).inline, InlineAttr::Never ); !is_generic && !can_be_inlined @@ -1828,8 +1828,11 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan { #[cfg(feature = "tracing")] - { span().entered() } + { + span().entered() + } #[cfg(not(feature = "tracing"))] + #[expect(clippy::unused_unit)] { let _ = span; // so we avoid the "unused variable" warning () diff --git a/src/tools/miri/src/shims/global_ctor.rs b/src/tools/miri/src/shims/global_ctor.rs new file mode 100644 index 00000000000..c56251bbe63 --- /dev/null +++ b/src/tools/miri/src/shims/global_ctor.rs @@ -0,0 +1,98 @@ +//! Implement global constructors. 
+ +use std::task::Poll; + +use rustc_abi::ExternAbi; +use rustc_target::spec::BinaryFormat; + +use crate::*; + +#[derive(Debug, Default)] +pub struct GlobalCtorState<'tcx>(GlobalCtorStatePriv<'tcx>); + +#[derive(Debug, Default)] +enum GlobalCtorStatePriv<'tcx> { + #[default] + Init, + /// The list of constructor functions that we still have to call. + Ctors(Vec<ImmTy<'tcx>>), + Done, +} + +impl<'tcx> GlobalCtorState<'tcx> { + pub fn on_stack_empty( + &mut self, + this: &mut MiriInterpCx<'tcx>, + ) -> InterpResult<'tcx, Poll<()>> { + use GlobalCtorStatePriv::*; + let new_state = 'new_state: { + match &mut self.0 { + Init => { + let this = this.eval_context_mut(); + + // Lookup constructors from the relevant magic link section. + let ctors = match this.tcx.sess.target.binary_format { + // Read the CRT library section on Windows. + BinaryFormat::Coff => + this.lookup_link_section(|section| section == ".CRT$XCU")?, + + // Read the `__mod_init_func` section on macOS. + BinaryFormat::MachO => + this.lookup_link_section(|section| { + let mut parts = section.splitn(3, ','); + let (segment_name, section_name, section_type) = + (parts.next(), parts.next(), parts.next()); + + segment_name == Some("__DATA") + && section_name == Some("__mod_init_func") + // The `mod_init_funcs` directive ensures that the + // `S_MOD_INIT_FUNC_POINTERS` flag is set on the section. LLVM + // adds this automatically so we currently do not require it. + // FIXME: is this guaranteed LLVM behavior? If not, we shouldn't + // implicitly add it here. Also see + // <https://github.com/rust-lang/miri/pull/4459#discussion_r2200115629>. + && matches!(section_type, None | Some("mod_init_funcs")) + })?, + + // Read the standard `.init_array` section on platforms that use ELF, or WASM, + // which supports the same linker directive. + // FIXME: Add support for `.init_array.N` and `.ctors`? + BinaryFormat::Elf | BinaryFormat::Wasm => + this.lookup_link_section(|section| section == ".init_array")?, + + // Other platforms have no global ctor support. + _ => break 'new_state Done, + }; + + break 'new_state Ctors(ctors); + } + Ctors(ctors) => { + if let Some(ctor) = ctors.pop() { + let this = this.eval_context_mut(); + + let ctor = ctor.to_scalar().to_pointer(this)?; + let thread_callback = this.get_ptr_fn(ctor)?.as_instance()?; + + // The signature of this function is `unsafe extern "C" fn()`. + this.call_function( + thread_callback, + ExternAbi::C { unwind: false }, + &[], + None, + ReturnContinuation::Stop { cleanup: true }, + )?; + + return interp_ok(Poll::Pending); // we stay in this state (but `ctors` got shorter) + } + + // No more constructors to run. + break 'new_state Done; + } + Done => return interp_ok(Poll::Ready(())), + } + }; + + self.0 = new_state; + interp_ok(Poll::Pending) + } +} diff --git a/src/tools/miri/src/shims/mod.rs b/src/tools/miri/src/shims/mod.rs index f09fc546b3e..75540f6f150 100644 --- a/src/tools/miri/src/shims/mod.rs +++ b/src/tools/miri/src/shims/mod.rs @@ -14,15 +14,17 @@ mod x86; pub mod env; pub mod extern_static; pub mod foreign_items; +pub mod global_ctor; pub mod io_error; pub mod os_str; pub mod panic; pub mod time; pub mod tls; +pub mod unwind; pub use self::files::FdTable; -//#[cfg(target_os = "linux")] -//pub use self::native_lib::trace::{init_sv, register_retcode_sv}; +#[cfg(target_os = "linux")] +pub use self::native_lib::trace::{init_sv, register_retcode_sv}; pub use self::unix::{DirTable, EpollInterestTable}; /// What needs to be done after emulating an item (a shim or an intrinsic) is done. 
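The new `global_ctor` shim above discovers pre-`main` constructors by scanning the platform's constructor section (`.CRT$XCU` on Windows, `__DATA,__mod_init_func` on macOS, `.init_array` on ELF and WASM). As a rough, hypothetical sketch (not part of this patch), the snippet below shows the kind of user code whose constructor would land in such a section and now gets run by Miri before `main`; it uses the common `#[used]`/`#[link_section]` pattern on an ELF target, and on edition 2024 the attribute would have to be written as `#[unsafe(link_section = ...)]`:

```rust
// Hypothetical example (not from this patch): on an ELF target, a crate can register a
// global constructor by placing a function pointer in `.init_array`; the global_ctor
// shim reads this section and calls each entry before `main`.
use std::sync::atomic::{AtomicBool, Ordering};

static CTOR_RAN: AtomicBool = AtomicBool::new(false);

unsafe extern "C" fn before_main() {
    // Runs before `main` is entered.
    CTOR_RAN.store(true, Ordering::Relaxed);
}

// `#[used]` keeps the static from being optimized away so the section entry is emitted.
#[used]
#[link_section = ".init_array"]
static BEFORE_MAIN: unsafe extern "C" fn() = before_main;

fn main() {
    // With the new shim, Miri has already executed `before_main` by this point.
    assert!(CTOR_RAN.load(Ordering::Relaxed));
}
```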
diff --git a/src/tools/miri/src/shims/native_lib/mod.rs b/src/tools/miri/src/shims/native_lib/mod.rs index 02c3bde036d..fb7b1df41a4 100644 --- a/src/tools/miri/src/shims/native_lib/mod.rs +++ b/src/tools/miri/src/shims/native_lib/mod.rs @@ -1,9 +1,5 @@ //! Implements calling functions from a native library. -// FIXME: disabled since it fails to build on many targets. -//#[cfg(target_os = "linux")] -//pub mod trace; - use std::ops::Deref; use libffi::high::call as ffi; @@ -13,14 +9,68 @@ use rustc_middle::mir::interpret::Pointer; use rustc_middle::ty::{self as ty, IntTy, UintTy}; use rustc_span::Symbol; -//#[cfg(target_os = "linux")] -//use self::trace::Supervisor; +#[cfg_attr( + not(all( + target_os = "linux", + target_env = "gnu", + any(target_arch = "x86", target_arch = "x86_64") + )), + path = "trace/stub.rs" +)] +pub mod trace; + use crate::*; -//#[cfg(target_os = "linux")] -//type CallResult<'tcx> = InterpResult<'tcx, (ImmTy<'tcx>, Option<self::trace::messages::MemEvents>)>; -//#[cfg(not(target_os = "linux"))] -type CallResult<'tcx> = InterpResult<'tcx, (ImmTy<'tcx>, Option<!>)>; +/// The final results of an FFI trace, containing every relevant event detected +/// by the tracer. +#[allow(dead_code)] +#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] +#[derive(Debug)] +pub struct MemEvents { + /// A list of memory accesses that occurred, in the order they occurred in. + pub acc_events: Vec<AccessEvent>, +} + +/// A single memory access. +#[allow(dead_code)] +#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] +#[derive(Clone, Debug)] +pub enum AccessEvent { + /// A read occurred on this memory range. + Read(AccessRange), + /// A write may have occurred on this memory range. + /// Some instructions *may* write memory without *always* doing that, + /// so this can be an over-approximation. + /// The range info, however, is reliable if the access did happen. + /// If the second field is true, the access definitely happened. + Write(AccessRange, bool), +} + +impl AccessEvent { + fn get_range(&self) -> AccessRange { + match self { + AccessEvent::Read(access_range) => access_range.clone(), + AccessEvent::Write(access_range, _) => access_range.clone(), + } + } +} + +/// The memory touched by a given access. +#[allow(dead_code)] +#[cfg_attr(target_os = "linux", derive(serde::Serialize, serde::Deserialize))] +#[derive(Clone, Debug)] +pub struct AccessRange { + /// The base address in memory where an access occurred. + pub addr: usize, + /// The number of bytes affected from the base. + pub size: usize, +} + +impl AccessRange { + fn end(&self) -> usize { + self.addr.strict_add(self.size) + } +} impl<'tcx> EvalContextExtPriv<'tcx> for crate::MiriInterpCx<'tcx> {} trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { @@ -31,18 +81,17 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { dest: &MPlaceTy<'tcx>, ptr: CodePtr, libffi_args: Vec<libffi::high::Arg<'a>>, - ) -> CallResult<'tcx> { + ) -> InterpResult<'tcx, (crate::ImmTy<'tcx>, Option<MemEvents>)> { let this = self.eval_context_mut(); - //#[cfg(target_os = "linux")] - //let alloc = this.machine.allocator.as_ref().unwrap(); - - // SAFETY: We don't touch the machine memory past this point. - //#[cfg(target_os = "linux")] - //let (guard, stack_ptr) = unsafe { Supervisor::start_ffi(alloc) }; + #[cfg(target_os = "linux")] + let alloc = this.machine.allocator.as_ref().unwrap(); + #[cfg(not(target_os = "linux"))] + // Placeholder value.
+ let alloc = (); - // Call the function (`ptr`) with arguments `libffi_args`, and obtain the return value - // as the specified primitive integer type - let res = 'res: { + trace::Supervisor::do_ffi(alloc, || { + // Call the function (`ptr`) with arguments `libffi_args`, and obtain the return value + // as the specified primitive integer type let scalar = match dest.layout.ty.kind() { // ints ty::Int(IntTy::I8) => { @@ -93,7 +142,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { // have the output_type `Tuple([])`. ty::Tuple(t_list) if (*t_list).deref().is_empty() => { unsafe { ffi::call::<()>(ptr, libffi_args.as_slice()) }; - break 'res interp_ok(ImmTy::uninit(dest.layout)); + return interp_ok(ImmTy::uninit(dest.layout)); } ty::RawPtr(..) => { let x = unsafe { ffi::call::<*const ()>(ptr, libffi_args.as_slice()) }; @@ -101,23 +150,14 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { Scalar::from_pointer(ptr, this) } _ => - break 'res Err(err_unsup_format!( + return Err(err_unsup_format!( "unsupported return type for native call: {:?}", link_name )) .into(), }; interp_ok(ImmTy::from_scalar(scalar, dest.layout)) - }; - - // SAFETY: We got the guard and stack pointer from start_ffi, and - // the allocator is the same - //#[cfg(target_os = "linux")] - //let events = unsafe { Supervisor::end_ffi(alloc, guard, stack_ptr) }; - //#[cfg(not(target_os = "linux"))] - let events = None; - - interp_ok((res?, events)) + }) } /// Get the pointer to the function of the specified name in the shared object file, @@ -169,6 +209,73 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { } None } + + /// Applies the `events` to Miri's internal state. The event vector must be + /// ordered sequentially by when the accesses happened, and the sizes are + /// assumed to be exact. + fn tracing_apply_accesses(&mut self, events: MemEvents) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + for evt in events.acc_events { + let evt_rg = evt.get_range(); + // LLVM at least permits vectorising accesses to adjacent allocations, + // so we cannot assume 1 access = 1 allocation. :( + let mut rg = evt_rg.addr..evt_rg.end(); + while let Some(curr) = rg.next() { + let Some(alloc_id) = this.alloc_id_from_addr( + curr.to_u64(), + rg.len().try_into().unwrap(), + /* only_exposed_allocations */ true, + ) else { + throw_ub_format!("Foreign code did an out-of-bounds access!") + }; + let alloc = this.get_alloc_raw(alloc_id)?; + // The logical and physical address of the allocation coincide, so we can use + // this instead of `addr_from_alloc_id`. + let alloc_addr = alloc.get_bytes_unchecked_raw().addr(); + + // Determine the range inside the allocation that this access covers. This range is + // in terms of offsets from the start of `alloc`. The start of the overlap range + // will be `curr`; the end will be the minimum of the end of the allocation and the + // end of the access' range. + let overlap = curr.strict_sub(alloc_addr) + ..std::cmp::min(alloc.len(), rg.end.strict_sub(alloc_addr)); + // Skip forward however many bytes of the access are contained in the current + // allocation, subtracting 1 since the overlap range includes the current addr + // that was already popped off of the range. + rg.advance_by(overlap.len().strict_sub(1)).unwrap(); + + match evt { + AccessEvent::Read(_) => { + // FIXME: ProvenanceMap should have something like get_range(). + let p_map = alloc.provenance(); + for idx in overlap { + // If a provenance was read by the foreign code, expose it. 
+ if let Some(prov) = p_map.get(Size::from_bytes(idx), this) { + this.expose_provenance(prov)?; + } + } + } + AccessEvent::Write(_, certain) => { + // Sometimes we aren't certain if a write happened, in which case we + // only initialise that data if the allocation is mutable. + if certain || alloc.mutability.is_mut() { + let (alloc, cx) = this.get_alloc_raw_mut(alloc_id)?; + alloc.process_native_write( + &cx.tcx, + Some(AllocRange { + start: Size::from_bytes(overlap.start), + size: Size::from_bytes(overlap.len()), + }), + ) + } + } + } + } + } + + interp_ok(()) + } } impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} @@ -194,6 +301,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { } }; + // Do we have ptrace? + let tracing = trace::Supervisor::is_enabled(); + // Get the function arguments, and convert them to `libffi`-compatible form. let mut libffi_args = Vec::<CArg>::with_capacity(args.len()); for arg in args.iter() { @@ -213,12 +323,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // The first time this happens, print a warning. if !this.machine.native_call_mem_warned.replace(true) { // Newly set, so first time we get here. - this.emit_diagnostic(NonHaltingDiagnostic::NativeCallSharedMem { - //#[cfg(target_os = "linux")] - //tracing: self::trace::Supervisor::is_enabled(), - //#[cfg(not(target_os = "linux"))] - tracing: false, - }); + this.emit_diagnostic(NonHaltingDiagnostic::NativeCallSharedMem { tracing }); } this.expose_provenance(prov)?; @@ -245,15 +350,23 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // be read by FFI. The `black_box` is defensive programming as LLVM likes // to (incorrectly) optimize away ptr2int casts whose result is unused. std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance()); - // Expose all provenances in this allocation, since the native code can do $whatever. - for prov in alloc.provenance().provenances() { - this.expose_provenance(prov)?; + + if !tracing { + // Expose all provenances in this allocation, since the native code can do $whatever. + // Can be skipped when tracing; in that case we'll expose just the actually-read parts later. + for prov in alloc.provenance().provenances() { + this.expose_provenance(prov)?; + } } // Prepare for possible write from native code if mutable. if info.mutbl.is_mut() { let (alloc, cx) = this.get_alloc_raw_mut(alloc_id)?; - alloc.process_native_write(&cx.tcx, None); + // These writes could initialize everything and wreak havoc with the pointers. + // We can skip that when tracing; in that case we'll later do that only for the memory that actually got written. + if !tracing { + alloc.process_native_write(&cx.tcx, None); + } // Also expose *mutable* provenance for the interpreter-level allocation.
std::hint::black_box(alloc.get_bytes_unchecked_raw_mut().expose_provenance()); } @@ -265,10 +378,8 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let (ret, maybe_memevents) = this.call_native_with_args(link_name, dest, code_ptr, libffi_args)?; - if cfg!(target_os = "linux") - && let Some(events) = maybe_memevents - { - trace!("Registered FFI events:\n{events:#0x?}"); + if tracing { + this.tracing_apply_accesses(maybe_memevents.unwrap())?; } this.write_immediate(*ret, dest)?; diff --git a/src/tools/miri/src/shims/native_lib/trace/child.rs b/src/tools/miri/src/shims/native_lib/trace/child.rs index 4961e875c77..de26cb0fe55 100644 --- a/src/tools/miri/src/shims/native_lib/trace/child.rs +++ b/src/tools/miri/src/shims/native_lib/trace/child.rs @@ -4,12 +4,22 @@ use std::rc::Rc; use ipc_channel::ipc; use nix::sys::{ptrace, signal}; use nix::unistd; +use rustc_const_eval::interpret::InterpResult; use super::CALLBACK_STACK_SIZE; -use super::messages::{Confirmation, MemEvents, StartFfiInfo, TraceRequest}; +use super::messages::{Confirmation, StartFfiInfo, TraceRequest}; use super::parent::{ChildListener, sv_loop}; use crate::alloc::isolated_alloc::IsolatedAlloc; +use crate::shims::native_lib::MemEvents; +/// A handle to the single, shared supervisor process across all `MiriMachine`s. +/// Since it would be very difficult to trace multiple FFI calls in parallel, we +/// need to ensure that either (a) only one `MiriMachine` is performing an FFI call +/// at any given time, or (b) there are distinct supervisor and child processes for +/// each machine. The former was chosen here. +/// +/// This should only contain a `None` if the supervisor has not (yet) been initialised; +/// otherwise, if `init_sv` was called and did not error, this will always be nonempty. static SUPERVISOR: std::sync::Mutex<Option<Supervisor>> = std::sync::Mutex::new(None); /// The main means of communication between the child and parent process, @@ -34,32 +44,23 @@ impl Supervisor { SUPERVISOR.lock().unwrap().is_some() } - /// Begins preparations for doing an FFI call. This should be called at - /// the last possible moment before entering said call. `alloc` points to - /// the allocator which handed out the memory used for this machine. - /// + /// Performs an arbitrary FFI call, enabling tracing from the supervisor. /// As this locks the supervisor via a mutex, no other threads may enter FFI - /// until this one returns and its guard is dropped via `end_ffi`. The - /// pointer returned should be passed to `end_ffi` to avoid a memory leak. - /// - /// SAFETY: The resulting guard must be dropped *via `end_ffi`* immediately - /// after the desired call has concluded. - pub unsafe fn start_ffi( + /// until this function returns. + pub fn do_ffi<'tcx>( alloc: &Rc<RefCell<IsolatedAlloc>>, - ) -> (std::sync::MutexGuard<'static, Option<Supervisor>>, Option<*mut [u8; CALLBACK_STACK_SIZE]>) - { + f: impl FnOnce() -> InterpResult<'tcx, crate::ImmTy<'tcx>>, + ) -> InterpResult<'tcx, (crate::ImmTy<'tcx>, Option<MemEvents>)> { let mut sv_guard = SUPERVISOR.lock().unwrap(); - // If the supervisor is not initialised for whatever reason, fast-fail. - // This might be desired behaviour, as even on platforms where ptracing - // is not implemented it enables us to enforce that only one FFI call + // If the supervisor is not initialised for whatever reason, fast-return. + // As a side-effect, even on platforms where ptracing + // is not implemented, we enforce that only one FFI call // happens at a time. 
- let Some(sv) = sv_guard.take() else { - return (sv_guard, None); - }; + let Some(sv) = sv_guard.as_mut() else { return f().map(|v| (v, None)) }; // Get pointers to all the pages the supervisor must allow accesses in // and prepare the callback stack. - let page_ptrs = alloc.borrow().pages(); + let page_ptrs = alloc.borrow().pages().collect(); let raw_stack_ptr: *mut [u8; CALLBACK_STACK_SIZE] = Box::leak(Box::new([0u8; CALLBACK_STACK_SIZE])).as_mut_ptr().cast(); let stack_ptr = raw_stack_ptr.expose_provenance(); @@ -68,9 +69,9 @@ impl Supervisor { // SAFETY: We do not access machine memory past this point until the // supervisor is ready to allow it. unsafe { - if alloc.borrow_mut().prepare_ffi().is_err() { + if alloc.borrow_mut().start_ffi().is_err() { // Don't mess up unwinding by maybe leaving the memory partly protected - alloc.borrow_mut().unprep_ffi(); + alloc.borrow_mut().end_ffi(); panic!("Cannot protect memory for FFI call!"); } } @@ -82,27 +83,13 @@ impl Supervisor { // enforce an ordering for these events. sv.message_tx.send(TraceRequest::StartFfi(start_info)).unwrap(); sv.confirm_rx.recv().unwrap(); - *sv_guard = Some(sv); // We need to be stopped for the supervisor to be able to make certain // modifications to our memory - simply waiting on the recv() doesn't // count. signal::raise(signal::SIGSTOP).unwrap(); - (sv_guard, Some(raw_stack_ptr)) - } - /// Undoes FFI-related preparations, allowing Miri to continue as normal, then - /// gets the memory accesses and changes performed during the FFI call. Note - /// that this may include some spurious accesses done by `libffi` itself in - /// the process of executing the function call. - /// - /// SAFETY: The `sv_guard` and `raw_stack_ptr` passed must be the same ones - /// received by a prior call to `start_ffi`, and the allocator must be the - /// one passed to it also. - pub unsafe fn end_ffi( - alloc: &Rc<RefCell<IsolatedAlloc>>, - mut sv_guard: std::sync::MutexGuard<'static, Option<Supervisor>>, - raw_stack_ptr: Option<*mut [u8; CALLBACK_STACK_SIZE]>, - ) -> Option<MemEvents> { + let res = f(); + // We can't use IPC channels here to signal that FFI mode has ended, // since they might allocate memory which could get us stuck in a SIGTRAP // with no easy way out! While this could be worked around, it is much @@ -113,42 +100,40 @@ impl Supervisor { signal::raise(signal::SIGUSR1).unwrap(); // This is safe! It just sets memory to normal expected permissions. - alloc.borrow_mut().unprep_ffi(); + alloc.borrow_mut().end_ffi(); - // If this is `None`, then `raw_stack_ptr` is None and does not need to - // be deallocated (and there's no need to worry about the guard, since - // it contains nothing). - let sv = sv_guard.take()?; // SAFETY: Caller upholds that this pointer was allocated as a box with // this type. unsafe { - drop(Box::from_raw(raw_stack_ptr.unwrap())); + drop(Box::from_raw(raw_stack_ptr)); } // On the off-chance something really weird happens, don't block forever. - let ret = sv + let events = sv .event_rx .try_recv_timeout(std::time::Duration::from_secs(5)) .map_err(|e| { match e { ipc::TryRecvError::IpcError(_) => (), ipc::TryRecvError::Empty => - eprintln!("Waiting for accesses from supervisor timed out!"), + panic!("Waiting for accesses from supervisor timed out!"), } }) .ok(); - // Do *not* leave the supervisor empty, or else we might get another fork... - *sv_guard = Some(sv); - ret + + res.map(|v| (v, events)) } } /// Initialises the supervisor process. 
If this function errors, then the /// supervisor process could not be created successfully; else, the caller -/// is now the child process and can communicate via `start_ffi`/`end_ffi`, -/// receiving back events through `get_events`. +/// is now the child process and can communicate via `do_ffi`, receiving back +/// events at the end. /// /// # Safety -/// The invariants for `fork()` must be upheld by the caller. +/// The invariants for `fork()` must be upheld by the caller, namely either: +/// - Other threads do not exist, or; +/// - If they do exist, either those threads or the resulting child process +/// only ever act in [async-signal-safe](https://www.man7.org/linux/man-pages/man7/signal-safety.7.html) ways. pub unsafe fn init_sv() -> Result<(), SvInitError> { // FIXME: Much of this could be reimplemented via the mitosis crate if we upstream the // relevant missing bits. @@ -191,8 +176,7 @@ pub unsafe fn init_sv() -> Result<(), SvInitError> { // The child process is free to unwind, so we won't to avoid doubly freeing // system resources. let init = std::panic::catch_unwind(|| { - let listener = - ChildListener { message_rx, attached: false, override_retcode: None }; + let listener = ChildListener::new(message_rx, confirm_tx.clone()); // Trace as many things as possible, to be able to handle them as needed. let options = ptrace::Options::PTRACE_O_TRACESYSGOOD | ptrace::Options::PTRACE_O_TRACECLONE @@ -218,7 +202,9 @@ pub unsafe fn init_sv() -> Result<(), SvInitError> { // The "Ok" case means that we couldn't ptrace. Ok(e) => return Err(e), Err(p) => { - eprintln!("Supervisor process panicked!\n{p:?}"); + eprintln!( + "Supervisor process panicked!\n{p:?}\n\nTry running again without using the native-lib tracer." + ); std::process::exit(1); } } @@ -239,13 +225,11 @@ pub unsafe fn init_sv() -> Result<(), SvInitError> { } /// Instruct the supervisor process to return a particular code. Useful if for -/// whatever reason this code fails to be intercepted normally. In the case of -/// `abort_if_errors()` used in `bin/miri.rs`, the return code is erroneously -/// given as a 0 if this is not used. +/// whatever reason this code fails to be intercepted normally. pub fn register_retcode_sv(code: i32) { let mut sv_guard = SUPERVISOR.lock().unwrap(); - if let Some(sv) = sv_guard.take() { + if let Some(sv) = sv_guard.as_mut() { sv.message_tx.send(TraceRequest::OverrideRetcode(code)).unwrap(); - *sv_guard = Some(sv); + sv.confirm_rx.recv().unwrap(); } } diff --git a/src/tools/miri/src/shims/native_lib/trace/messages.rs b/src/tools/miri/src/shims/native_lib/trace/messages.rs index 8a83dab5c09..1f9df556b57 100644 --- a/src/tools/miri/src/shims/native_lib/trace/messages.rs +++ b/src/tools/miri/src/shims/native_lib/trace/messages.rs @@ -1,25 +1,28 @@ //! Houses the types that are directly sent across the IPC channels. //! -//! The overall structure of a traced FFI call, from the child process's POV, is -//! as follows: +//! When forking to initialise the supervisor during `init_sv`, the child raises +//! a `SIGSTOP`; if the parent successfully ptraces the child, it will allow it +//! to resume. Else, the child will be killed by the parent. +//! +//! After initialisation is done, the overall structure of a traced FFI call from +//! the child process's POV is as follows: //! ``` //! message_tx.send(TraceRequest::StartFfi); -//! confirm_rx.recv(); +//! confirm_rx.recv(); // receives a `Confirmation` //! raise(SIGSTOP); //! /* do ffi call */ //! 
raise(SIGUSR1); // morally equivalent to some kind of "TraceRequest::EndFfi" -//! let events = event_rx.recv(); +//! let events = event_rx.recv(); // receives a `MemEvents` //! ``` //! `TraceRequest::OverrideRetcode` can be sent at any point in the above, including -//! before or after all of them. +//! before or after all of them. `confirm_rx.recv()` is to be called after, to ensure +//! that the child does not exit before the supervisor has registered the return code. //! //! NB: sending these events out of order, skipping steps, etc. will result in //! unspecified behaviour from the supervisor process, so use the abstractions -//! in `super::child` (namely `start_ffi()` and `end_ffi()`) to handle this. It is +//! in `super::child` (namely `do_ffi()`) to handle this. It is //! trivially easy to cause a deadlock or crash by messing this up! -use std::ops::Range; - /// An IPC request sent by the child process to the parent. /// /// The sender for this channel should live on the child process. @@ -34,6 +37,8 @@ pub enum TraceRequest { StartFfi(StartFfiInfo), /// Manually overrides the code that the supervisor will return upon exiting. /// Once set, it is permanent. This can be called again to change the value. + /// + /// After sending this, the child must wait to receive a `Confirmation`. OverrideRetcode(i32), } @@ -41,7 +46,7 @@ pub enum TraceRequest { #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] pub struct StartFfiInfo { /// A vector of page addresses. These should have been automatically obtained - /// with `IsolatedAlloc::pages` and prepared with `IsolatedAlloc::prepare_ffi`. + /// with `IsolatedAlloc::pages` and prepared with `IsolatedAlloc::start_ffi`. pub page_ptrs: Vec<usize>, /// The address of an allocation that can serve as a temporary stack. /// This should be a leaked `Box<[u8; CALLBACK_STACK_SIZE]>` cast to an int. @@ -54,27 +59,3 @@ pub struct StartFfiInfo { /// The sender for this channel should live on the parent process. #[derive(serde::Serialize, serde::Deserialize, Debug)] pub struct Confirmation; - -/// The final results of an FFI trace, containing every relevant event detected -/// by the tracer. Sent by the supervisor after receiving a `SIGUSR1` signal. -/// -/// The sender for this channel should live on the parent process. -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub struct MemEvents { - /// An ordered list of memory accesses that occurred. These should be assumed - /// to be overcautious; that is, if the size of an access is uncertain it is - /// pessimistically rounded up, and if the type (read/write/both) is uncertain - /// it is reported as whatever would be safest to assume; i.e. a read + maybe-write - /// becomes a read + write, etc. - pub acc_events: Vec<AccessEvent>, -} - -/// A single memory access, conservatively overestimated -/// in case of ambiguity. -#[derive(serde::Serialize, serde::Deserialize, Debug)] -pub enum AccessEvent { - /// A read may have occurred on no more than the specified address range. - Read(Range<usize>), - /// A write may have occurred on no more than the specified address range. 
- Write(Range<usize>), -} diff --git a/src/tools/miri/src/shims/native_lib/trace/mod.rs b/src/tools/miri/src/shims/native_lib/trace/mod.rs index 174b06b3ac5..c8abacfb5e2 100644 --- a/src/tools/miri/src/shims/native_lib/trace/mod.rs +++ b/src/tools/miri/src/shims/native_lib/trace/mod.rs @@ -5,4 +5,6 @@ mod parent; pub use self::child::{Supervisor, init_sv, register_retcode_sv}; /// The size of the temporary stack we use for callbacks that the server executes in the client. +/// This should be big enough that `mempr_on` and `mempr_off` can safely be jumped into with the +/// stack pointer pointing to a "stack" of this size without overflowing it. const CALLBACK_STACK_SIZE: usize = 1024; diff --git a/src/tools/miri/src/shims/native_lib/trace/parent.rs b/src/tools/miri/src/shims/native_lib/trace/parent.rs index dfb0b35da69..83f6c7a13fc 100644 --- a/src/tools/miri/src/shims/native_lib/trace/parent.rs +++ b/src/tools/miri/src/shims/native_lib/trace/parent.rs @@ -5,26 +5,17 @@ use nix::sys::{ptrace, signal, wait}; use nix::unistd; use super::CALLBACK_STACK_SIZE; -use super::messages::{AccessEvent, Confirmation, MemEvents, StartFfiInfo, TraceRequest}; +use super::messages::{Confirmation, StartFfiInfo, TraceRequest}; +use crate::shims::native_lib::{AccessEvent, AccessRange, MemEvents}; /// The flags to use when calling `waitid()`. -/// Since bitwise or on the nix version of these flags is implemented as a trait, -/// this cannot be const directly so we do it this way. const WAIT_FLAGS: wait::WaitPidFlag = - wait::WaitPidFlag::from_bits_truncate(libc::WUNTRACED | libc::WEXITED); - -/// Arch-specific maximum size a single access might perform. x86 value is set -/// assuming nothing bigger than AVX-512 is available. -#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] -const ARCH_MAX_ACCESS_SIZE: usize = 64; -/// The largest arm64 simd instruction operates on 16 bytes. -#[cfg(any(target_arch = "arm", target_arch = "aarch64"))] -const ARCH_MAX_ACCESS_SIZE: usize = 16; + wait::WaitPidFlag::WUNTRACED.union(wait::WaitPidFlag::WEXITED); /// The default word size on a given platform, in bytes. -#[cfg(any(target_arch = "x86", target_arch = "arm"))] +#[cfg(target_arch = "x86")] const ARCH_WORD_SIZE: usize = 4; -#[cfg(any(target_arch = "x86_64", target_arch = "aarch64"))] +#[cfg(target_arch = "x86_64")] const ARCH_WORD_SIZE: usize = 8; /// The address of the page set to be edited, initialised to a sentinel null @@ -53,39 +44,25 @@ trait ArchIndependentRegs { // It's fine / desirable behaviour for values to wrap here, we care about just // preserving the bit pattern. 
#[cfg(target_arch = "x86_64")] -#[expect(clippy::as_conversions)] #[rustfmt::skip] impl ArchIndependentRegs for libc::user_regs_struct { #[inline] - fn ip(&self) -> usize { self.rip as _ } + fn ip(&self) -> usize { self.rip.try_into().unwrap() } #[inline] - fn set_ip(&mut self, ip: usize) { self.rip = ip as _ } + fn set_ip(&mut self, ip: usize) { self.rip = ip.try_into().unwrap() } #[inline] - fn set_sp(&mut self, sp: usize) { self.rsp = sp as _ } + fn set_sp(&mut self, sp: usize) { self.rsp = sp.try_into().unwrap() } } #[cfg(target_arch = "x86")] -#[expect(clippy::as_conversions)] #[rustfmt::skip] impl ArchIndependentRegs for libc::user_regs_struct { #[inline] - fn ip(&self) -> usize { self.eip as _ } + fn ip(&self) -> usize { self.eip.cast_unsigned().try_into().unwrap() } #[inline] - fn set_ip(&mut self, ip: usize) { self.eip = ip as _ } + fn set_ip(&mut self, ip: usize) { self.eip = ip.cast_signed().try_into().unwrap() } #[inline] - fn set_sp(&mut self, sp: usize) { self.esp = sp as _ } -} - -#[cfg(target_arch = "aarch64")] -#[expect(clippy::as_conversions)] -#[rustfmt::skip] -impl ArchIndependentRegs for libc::user_regs_struct { - #[inline] - fn ip(&self) -> usize { self.pc as _ } - #[inline] - fn set_ip(&mut self, ip: usize) { self.pc = ip as _ } - #[inline] - fn set_sp(&mut self, sp: usize) { self.sp = sp as _ } + fn set_sp(&mut self, sp: usize) { self.esp = sp.cast_signed().try_into().unwrap() } } /// A unified event representing something happening on the child process. Wraps @@ -109,11 +86,24 @@ pub enum ExecEvent { /// A listener for the FFI start info channel along with relevant state. pub struct ChildListener { /// The matching channel for the child's `Supervisor` struct. - pub message_rx: ipc::IpcReceiver<TraceRequest>, + message_rx: ipc::IpcReceiver<TraceRequest>, + /// ... + confirm_tx: ipc::IpcSender<Confirmation>, /// Whether an FFI call is currently ongoing. - pub attached: bool, + attached: bool, /// If `Some`, overrides the return code with the given value. - pub override_retcode: Option<i32>, + override_retcode: Option<i32>, + /// Last code obtained from a child exiting. + last_code: Option<i32>, +} + +impl ChildListener { + pub fn new( + message_rx: ipc::IpcReceiver<TraceRequest>, + confirm_tx: ipc::IpcSender<Confirmation>, + ) -> Self { + Self { message_rx, confirm_tx, attached: false, override_retcode: None, last_code: None } + } } impl Iterator for ChildListener { @@ -133,16 +123,10 @@ impl Iterator for ChildListener { Ok(stat) => match stat { // Child exited normally with a specific code set. - wait::WaitStatus::Exited(_, code) => { - let code = self.override_retcode.unwrap_or(code); - return Some(ExecEvent::Died(Some(code))); - } + wait::WaitStatus::Exited(_, code) => self.last_code = Some(code), // Child was killed by a signal, without giving a code. - wait::WaitStatus::Signaled(_, _, _) => - return Some(ExecEvent::Died(self.override_retcode)), - // Child entered a syscall. Since we're always technically - // tracing, only pass this along if we're actively - // monitoring the child. + wait::WaitStatus::Signaled(_, _, _) => self.last_code = None, + // Child entered or exited a syscall. wait::WaitStatus::PtraceSyscall(pid) => if self.attached { return Some(ExecEvent::Syscall(pid)); @@ -179,10 +163,8 @@ impl Iterator for ChildListener { }, _ => (), }, - // This case should only trigger if all children died and we - // somehow missed that, but it's best we not allow any room - // for deadlocks. 
- Err(_) => return Some(ExecEvent::Died(None)), + // This case should only trigger when all children died. + Err(_) => return Some(ExecEvent::Died(self.override_retcode.or(self.last_code))), } // Similarly, do a non-blocking poll of the IPC channel. @@ -196,7 +178,10 @@ impl Iterator for ChildListener { self.attached = true; return Some(ExecEvent::Start(info)); }, - TraceRequest::OverrideRetcode(code) => self.override_retcode = Some(code), + TraceRequest::OverrideRetcode(code) => { + self.override_retcode = Some(code); + self.confirm_tx.send(Confirmation).unwrap(); + } } } @@ -211,6 +196,12 @@ impl Iterator for ChildListener { #[derive(Debug)] pub struct ExecEnd(pub Option<i32>); +/// Whether to call `ptrace::cont()` immediately. Used exclusively by `wait_for_signal`. +enum InitialCont { + Yes, + No, +} + /// This is the main loop of the supervisor process. It runs in a separate /// process from the rest of Miri (but because we fork, addresses for anything /// created before the fork - like statics - are the same). @@ -239,12 +230,12 @@ pub fn sv_loop( let mut curr_pid = init_pid; // There's an initial sigstop we need to deal with. - wait_for_signal(Some(curr_pid), signal::SIGSTOP, false)?; + wait_for_signal(Some(curr_pid), signal::SIGSTOP, InitialCont::No)?; ptrace::cont(curr_pid, None).unwrap(); for evt in listener { match evt { - // start_ffi was called by the child, so prep memory. + // Child started ffi, so prep memory. ExecEvent::Start(ch_info) => { // All the pages that the child process is "allowed to" access. ch_pages = ch_info.page_ptrs; @@ -252,17 +243,17 @@ pub fn sv_loop( ch_stack = Some(ch_info.stack_ptr); // We received the signal and are no longer in the main listener loop, - // so we can let the child move on to the end of start_ffi where it will + // so we can let the child move on to the end of the ffi prep where it will // raise a SIGSTOP. We need it to be signal-stopped *and waited for* in // order to do most ptrace operations! confirm_tx.send(Confirmation).unwrap(); // We can't trust simply calling `Pid::this()` in the child process to give the right // PID for us, so we get it this way. - curr_pid = wait_for_signal(None, signal::SIGSTOP, false).unwrap(); + curr_pid = wait_for_signal(None, signal::SIGSTOP, InitialCont::No).unwrap(); ptrace::syscall(curr_pid, None).unwrap(); } - // end_ffi was called by the child. + // Child wants to end tracing. ExecEvent::End => { // Hand over the access info we traced. event_tx.send(MemEvents { acc_events }).unwrap(); @@ -322,10 +313,6 @@ fn get_disasm() -> capstone::Capstone { {cs_pre.x86().mode(arch::x86::ArchMode::Mode64)} #[cfg(target_arch = "x86")] {cs_pre.x86().mode(arch::x86::ArchMode::Mode32)} - #[cfg(target_arch = "aarch64")] - {cs_pre.arm64().mode(arch::arm64::ArchMode::Arm)} - #[cfg(target_arch = "arm")] - {cs_pre.arm().mode(arch::arm::ArchMode::Arm)} } .detail(true) .build() @@ -339,9 +326,9 @@ fn get_disasm() -> capstone::Capstone { fn wait_for_signal( pid: Option<unistd::Pid>, wait_signal: signal::Signal, - init_cont: bool, + init_cont: InitialCont, ) -> Result<unistd::Pid, ExecEnd> { - if init_cont { + if matches!(init_cont, InitialCont::Yes) { ptrace::cont(pid.unwrap(), None).unwrap(); } // Repeatedly call `waitid` until we get the signal we want, or the process dies. @@ -374,6 +361,84 @@ fn wait_for_signal( } } +/// Add the memory events from `op` being executed while there is a memory access at `addr` to +/// `acc_events`. Return whether this was a memory operand. 
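+/// Currently only x86 and x86_64 memory operands are handled; other architectures are not yet implemented.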
+fn capstone_find_events( + addr: usize, + op: &capstone::arch::ArchOperand, + acc_events: &mut Vec<AccessEvent>, +) -> bool { + use capstone::prelude::*; + match op { + #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] + arch::ArchOperand::X86Operand(x86_operand) => { + match x86_operand.op_type { + // We only care about memory accesses + arch::x86::X86OperandType::Mem(_) => { + let push = AccessRange { addr, size: x86_operand.size.into() }; + // It's called a "RegAccessType" but it also applies to memory + let acc_ty = x86_operand.access.unwrap(); + // The same instruction might do both reads and writes, so potentially add both. + // We do not know the order in which they happened, but writing and then reading + // makes little sense so we put the read first. That is also the more + // conservative choice. + if acc_ty.is_readable() { + acc_events.push(AccessEvent::Read(push.clone())); + } + if acc_ty.is_writable() { + // FIXME: This could be made certain; either determine all cases where + // only reads happen, or have an intermediate mempr_* function to first + // map the page(s) as readonly and check if a segfault occurred. + + // Per https://docs.rs/iced-x86/latest/iced_x86/enum.OpAccess.html, + // we know that the possible access types are Read, CondRead, Write, + // CondWrite, ReadWrite, and ReadCondWrite. Since we got a segfault + // we know some kind of access happened so Cond{Read, Write}s are + // certain reads and writes; the only uncertainty is with an RW op + // as it might be a ReadCondWrite with the write condition unmet. + acc_events.push(AccessEvent::Write(push, !acc_ty.is_readable())); + } + + return true; + } + _ => (), + } + } + // FIXME: arm64 + _ => unimplemented!(), + } + + false +} + +/// Extract the events from the given instruction. +fn capstone_disassemble( + instr: &[u8], + addr: usize, + cs: &capstone::Capstone, + acc_events: &mut Vec<AccessEvent>, +) -> capstone::CsResult<()> { + // The arch_detail is what we care about, but it relies on these temporaries + // that we can't drop. 0x1000 is the default base address for Captsone, and + // we're expecting 1 instruction. + let insns = cs.disasm_count(instr, 0x1000, 1)?; + let ins_detail = cs.insn_detail(&insns[0])?; + let arch_detail = ins_detail.arch_detail(); + + let mut found_mem_op = false; + + for op in arch_detail.operands() { + if capstone_find_events(addr, &op, acc_events) { + if found_mem_op { + panic!("more than one memory operand found; we don't know which one accessed what"); + } + found_mem_op = true; + } + } + + Ok(()) +} + /// Grabs the access that caused a segfault and logs it down if it's to our memory, /// or kills the child and returns the appropriate error otherwise. fn handle_segfault( @@ -384,116 +449,10 @@ fn handle_segfault( cs: &capstone::Capstone, acc_events: &mut Vec<AccessEvent>, ) -> Result<(), ExecEnd> { - /// This is just here to not pollute the main namespace with `capstone::prelude::*`. - #[inline] - fn capstone_disassemble( - instr: &[u8], - addr: usize, - cs: &capstone::Capstone, - acc_events: &mut Vec<AccessEvent>, - ) -> capstone::CsResult<()> { - use capstone::prelude::*; - - // The arch_detail is what we care about, but it relies on these temporaries - // that we can't drop. 0x1000 is the default base address for Captsone, and - // we're expecting 1 instruction. 
- let insns = cs.disasm_count(instr, 0x1000, 1)?; - let ins_detail = cs.insn_detail(&insns[0])?; - let arch_detail = ins_detail.arch_detail(); - - for op in arch_detail.operands() { - match op { - #[cfg(any(target_arch = "x86", target_arch = "x86_64"))] - arch::ArchOperand::X86Operand(x86_operand) => { - match x86_operand.op_type { - // We only care about memory accesses - arch::x86::X86OperandType::Mem(_) => { - let push = addr..addr.strict_add(usize::from(x86_operand.size)); - // It's called a "RegAccessType" but it also applies to memory - let acc_ty = x86_operand.access.unwrap(); - if acc_ty.is_readable() { - acc_events.push(AccessEvent::Read(push.clone())); - } - if acc_ty.is_writable() { - acc_events.push(AccessEvent::Write(push)); - } - } - _ => (), - } - } - #[cfg(target_arch = "aarch64")] - arch::ArchOperand::Arm64Operand(arm64_operand) => { - // Annoyingly, we don't always get the size here, so just be pessimistic for now. - match arm64_operand.op_type { - arch::arm64::Arm64OperandType::Mem(_) => { - // B = 1 byte, H = 2 bytes, S = 4 bytes, D = 8 bytes, Q = 16 bytes. - let size = match arm64_operand.vas { - // Not an fp/simd instruction. - arch::arm64::Arm64Vas::ARM64_VAS_INVALID => ARCH_WORD_SIZE, - // 1 byte. - arch::arm64::Arm64Vas::ARM64_VAS_1B => 1, - // 2 bytes. - arch::arm64::Arm64Vas::ARM64_VAS_1H => 2, - // 4 bytes. - arch::arm64::Arm64Vas::ARM64_VAS_4B - | arch::arm64::Arm64Vas::ARM64_VAS_2H - | arch::arm64::Arm64Vas::ARM64_VAS_1S => 4, - // 8 bytes. - arch::arm64::Arm64Vas::ARM64_VAS_8B - | arch::arm64::Arm64Vas::ARM64_VAS_4H - | arch::arm64::Arm64Vas::ARM64_VAS_2S - | arch::arm64::Arm64Vas::ARM64_VAS_1D => 8, - // 16 bytes. - arch::arm64::Arm64Vas::ARM64_VAS_16B - | arch::arm64::Arm64Vas::ARM64_VAS_8H - | arch::arm64::Arm64Vas::ARM64_VAS_4S - | arch::arm64::Arm64Vas::ARM64_VAS_2D - | arch::arm64::Arm64Vas::ARM64_VAS_1Q => 16, - }; - let push = addr..addr.strict_add(size); - // FIXME: This now has access type info in the latest - // git version of capstone because this pissed me off - // and I added it. Change this when it updates. - acc_events.push(AccessEvent::Read(push.clone())); - acc_events.push(AccessEvent::Write(push)); - } - _ => (), - } - } - #[cfg(target_arch = "arm")] - arch::ArchOperand::ArmOperand(arm_operand) => - match arm_operand.op_type { - arch::arm::ArmOperandType::Mem(_) => { - // We don't get info on the size of the access, but - // we're at least told if it's a vector instruction. - let size = if arm_operand.vector_index.is_some() { - ARCH_MAX_ACCESS_SIZE - } else { - ARCH_WORD_SIZE - }; - let push = addr..addr.strict_add(size); - let acc_ty = arm_operand.access.unwrap(); - if acc_ty.is_readable() { - acc_events.push(AccessEvent::Read(push.clone())); - } - if acc_ty.is_writable() { - acc_events.push(AccessEvent::Write(push)); - } - } - _ => (), - }, - _ => unimplemented!(), - } - } - - Ok(()) - } - // Get information on what caused the segfault. This contains the address // that triggered it. let siginfo = ptrace::getsiginfo(pid).unwrap(); - // All x86, ARM, etc. instructions only have at most one memory operand - // (thankfully!) + // All x86 instructions only have at most one memory operand (thankfully!) // SAFETY: si_addr is safe to call. let addr = unsafe { siginfo.si_addr().addr() }; let page_addr = addr.strict_sub(addr.strict_rem(page_size)); @@ -515,7 +474,7 @@ fn handle_segfault( // global atomic variables. This is what we use the temporary callback stack for. 
// - Step 1 instruction // - Parse executed code to estimate size & type of access - // - Reprotect the memory by executing `mempr_on` in the child. + // - Reprotect the memory by executing `mempr_on` in the child, using the callback stack again. // - Continue // Ensure the stack is properly zeroed out! @@ -540,7 +499,7 @@ fn handle_segfault( ptrace::write( pid, (&raw const PAGE_ADDR).cast_mut().cast(), - libc::c_long::try_from(page_addr).unwrap(), + libc::c_long::try_from(page_addr.cast_signed()).unwrap(), ) .unwrap(); @@ -552,7 +511,7 @@ fn handle_segfault( ptrace::setregs(pid, new_regs).unwrap(); // Our mempr_* functions end with a raise(SIGSTOP). - wait_for_signal(Some(pid), signal::SIGSTOP, true)?; + wait_for_signal(Some(pid), signal::SIGSTOP, InitialCont::Yes)?; // Step 1 instruction. ptrace::setregs(pid, regs_bak).unwrap(); @@ -573,6 +532,12 @@ fn handle_segfault( let regs_bak = ptrace::getregs(pid).unwrap(); new_regs = regs_bak; let ip_poststep = regs_bak.ip(); + + // Ensure that we've actually gone forwards. + assert!(ip_poststep > ip_prestep); + // But not by too much. 64 bytes should be "big enough" on ~any architecture. + assert!(ip_prestep.strict_add(64) > ip_poststep); + // We need to do reads/writes in word-sized chunks. let diff = (ip_poststep.strict_sub(ip_prestep)).div_ceil(ARCH_WORD_SIZE); let instr = (ip_prestep..ip_prestep.strict_add(diff)).fold(vec![], |mut ret, ip| { @@ -587,20 +552,14 @@ fn handle_segfault( }); // Now figure out the size + type of access and log it down. - // This will mark down e.g. the same area being read multiple times, - // since it's more efficient to compress the accesses at the end. - if capstone_disassemble(&instr, addr, cs, acc_events).is_err() { - // Read goes first because we need to be pessimistic. - acc_events.push(AccessEvent::Read(addr..addr.strict_add(ARCH_MAX_ACCESS_SIZE))); - acc_events.push(AccessEvent::Write(addr..addr.strict_add(ARCH_MAX_ACCESS_SIZE))); - } + capstone_disassemble(&instr, addr, cs, acc_events).expect("Failed to disassemble instruction"); // Reprotect everything and continue. #[expect(clippy::as_conversions)] new_regs.set_ip(mempr_on as usize); new_regs.set_sp(stack_ptr); ptrace::setregs(pid, new_regs).unwrap(); - wait_for_signal(Some(pid), signal::SIGSTOP, true)?; + wait_for_signal(Some(pid), signal::SIGSTOP, InitialCont::Yes)?; ptrace::setregs(pid, regs_bak).unwrap(); ptrace::syscall(pid, None).unwrap(); diff --git a/src/tools/miri/src/shims/native_lib/trace/stub.rs b/src/tools/miri/src/shims/native_lib/trace/stub.rs new file mode 100644 index 00000000000..22787a6f6fa --- /dev/null +++ b/src/tools/miri/src/shims/native_lib/trace/stub.rs @@ -0,0 +1,34 @@ +use rustc_const_eval::interpret::InterpResult; + +static SUPERVISOR: std::sync::Mutex<()> = std::sync::Mutex::new(()); + +pub struct Supervisor; + +#[derive(Debug)] +pub struct SvInitError; + +impl Supervisor { + #[inline(always)] + pub fn is_enabled() -> bool { + false + } + + pub fn do_ffi<'tcx, T>( + _: T, + f: impl FnOnce() -> InterpResult<'tcx, crate::ImmTy<'tcx>>, + ) -> InterpResult<'tcx, (crate::ImmTy<'tcx>, Option<super::MemEvents>)> { + // We acquire the lock to ensure that no two FFI calls run concurrently. 
+ let _g = SUPERVISOR.lock().unwrap(); + f().map(|v| (v, None)) + } +} + +#[inline(always)] +#[allow(dead_code, clippy::missing_safety_doc)] +pub unsafe fn init_sv() -> Result<!, SvInitError> { + Err(SvInitError) +} + +#[inline(always)] +#[allow(dead_code)] +pub fn register_retcode_sv<T>(_: T) {} diff --git a/src/tools/miri/src/shims/panic.rs b/src/tools/miri/src/shims/panic.rs index 9490761d0c9..85564e47685 100644 --- a/src/tools/miri/src/shims/panic.rs +++ b/src/tools/miri/src/shims/panic.rs @@ -1,162 +1,12 @@ -//! Panic runtime for Miri. -//! -//! The core pieces of the runtime are: -//! - An implementation of `__rust_maybe_catch_panic` that pushes the invoked stack frame with -//! some extra metadata derived from the panic-catching arguments of `__rust_maybe_catch_panic`. -//! - A hack in `libpanic_unwind` that calls the `miri_start_unwind` intrinsic instead of the -//! target-native panic runtime. (This lives in the rustc repo.) -//! - An implementation of `miri_start_unwind` that stores its argument (the panic payload), and then -//! immediately returns, but on the *unwind* edge (not the normal return edge), thus initiating unwinding. -//! - A hook executed each time a frame is popped, such that if the frame pushed by `__rust_maybe_catch_panic` -//! gets popped *during unwinding*, we take the panic payload and store it according to the extra -//! metadata we remembered when pushing said frame. +//! Helper functions for causing panics. use rustc_abi::ExternAbi; use rustc_middle::{mir, ty}; -use rustc_target::spec::PanicStrategy; -use self::helpers::check_intrinsic_arg_count; use crate::*; -/// Holds all of the relevant data for when unwinding hits a `try` frame. -#[derive(Debug)] -pub struct CatchUnwindData<'tcx> { - /// The `catch_fn` callback to call in case of a panic. - catch_fn: Pointer, - /// The `data` argument for that callback. - data: ImmTy<'tcx>, - /// The return place from the original call to `try`. - dest: MPlaceTy<'tcx>, - /// The return block from the original call to `try`. - ret: Option<mir::BasicBlock>, -} - -impl VisitProvenance for CatchUnwindData<'_> { - fn visit_provenance(&self, visit: &mut VisitWith<'_>) { - let CatchUnwindData { catch_fn, data, dest, ret: _ } = self; - catch_fn.visit_provenance(visit); - data.visit_provenance(visit); - dest.visit_provenance(visit); - } -} - impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { - /// Handles the special `miri_start_unwind` intrinsic, which is called - /// by libpanic_unwind to delegate the actual unwinding process to Miri. - fn handle_miri_start_unwind(&mut self, payload: &OpTy<'tcx>) -> InterpResult<'tcx> { - let this = self.eval_context_mut(); - - trace!("miri_start_unwind: {:?}", this.frame().instance()); - - let payload = this.read_immediate(payload)?; - let thread = this.active_thread_mut(); - thread.panic_payloads.push(payload); - - interp_ok(()) - } - - /// Handles the `catch_unwind` intrinsic. - fn handle_catch_unwind( - &mut self, - args: &[OpTy<'tcx>], - dest: &MPlaceTy<'tcx>, - ret: Option<mir::BasicBlock>, - ) -> InterpResult<'tcx> { - let this = self.eval_context_mut(); - - // Signature: - // fn catch_unwind(try_fn: fn(*mut u8), data: *mut u8, catch_fn: fn(*mut u8, *mut u8)) -> i32 - // Calls `try_fn` with `data` as argument. If that executes normally, returns 0. - // If that unwinds, calls `catch_fn` with the first argument being `data` and - // then second argument being a target-dependent `payload` (i.e. 
it is up to us to define - // what that is), and returns 1. - // The `payload` is passed (by libstd) to `__rust_panic_cleanup`, which is then expected to - // return a `Box<dyn Any + Send + 'static>`. - // In Miri, `miri_start_unwind` is passed exactly that type, so we make the `payload` simply - // a pointer to `Box<dyn Any + Send + 'static>`. - - // Get all the arguments. - let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; - let try_fn = this.read_pointer(try_fn)?; - let data = this.read_immediate(data)?; - let catch_fn = this.read_pointer(catch_fn)?; - - // Now we make a function call, and pass `data` as first and only argument. - let f_instance = this.get_ptr_fn(try_fn)?.as_instance()?; - trace!("try_fn: {:?}", f_instance); - #[allow(clippy::cloned_ref_to_slice_refs)] // the code is clearer as-is - this.call_function( - f_instance, - ExternAbi::Rust, - &[data.clone()], - None, - // Directly return to caller. - ReturnContinuation::Goto { ret, unwind: mir::UnwindAction::Continue }, - )?; - - // We ourselves will return `0`, eventually (will be overwritten if we catch a panic). - this.write_null(dest)?; - - // In unwind mode, we tag this frame with the extra data needed to catch unwinding. - // This lets `handle_stack_pop` (below) know that we should stop unwinding - // when we pop this frame. - if this.tcx.sess.panic_strategy() == PanicStrategy::Unwind { - this.frame_mut().extra.catch_unwind = - Some(CatchUnwindData { catch_fn, data, dest: dest.clone(), ret }); - } - - interp_ok(()) - } - - fn handle_stack_pop_unwind( - &mut self, - mut extra: FrameExtra<'tcx>, - unwinding: bool, - ) -> InterpResult<'tcx, ReturnAction> { - let this = self.eval_context_mut(); - trace!("handle_stack_pop_unwind(extra = {:?}, unwinding = {})", extra, unwinding); - - // We only care about `catch_panic` if we're unwinding - if we're doing a normal - // return, then we don't need to do anything special. - if let (true, Some(catch_unwind)) = (unwinding, extra.catch_unwind.take()) { - // We've just popped a frame that was pushed by `catch_unwind`, - // and we are unwinding, so we should catch that. - trace!( - "unwinding: found catch_panic frame during unwinding: {:?}", - this.frame().instance() - ); - - // We set the return value of `catch_unwind` to 1, since there was a panic. - this.write_scalar(Scalar::from_i32(1), &catch_unwind.dest)?; - - // The Thread's `panic_payload` holds what was passed to `miri_start_unwind`. - // This is exactly the second argument we need to pass to `catch_fn`. - let payload = this.active_thread_mut().panic_payloads.pop().unwrap(); - - // Push the `catch_fn` stackframe. - let f_instance = this.get_ptr_fn(catch_unwind.catch_fn)?.as_instance()?; - trace!("catch_fn: {:?}", f_instance); - this.call_function( - f_instance, - ExternAbi::Rust, - &[catch_unwind.data, payload], - None, - // Directly return to caller of `catch_unwind`. - ReturnContinuation::Goto { - ret: catch_unwind.ret, - // `catch_fn` must not unwind. - unwind: mir::UnwindAction::Unreachable, - }, - )?; - - // We pushed a new stack frame, the engine should not do any jumping now! - interp_ok(ReturnAction::NoJump) - } else { - interp_ok(ReturnAction::Normal) - } - } - /// Start a panic in the interpreter with the given message as payload. 
fn start_panic(&mut self, msg: &str, unwind: mir::UnwindAction) -> InterpResult<'tcx> { let this = self.eval_context_mut(); diff --git a/src/tools/miri/src/shims/tls.rs b/src/tools/miri/src/shims/tls.rs index 7182637437a..1200029692d 100644 --- a/src/tools/miri/src/shims/tls.rs +++ b/src/tools/miri/src/shims/tls.rs @@ -302,7 +302,7 @@ trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Windows has a special magic linker section that is run on certain events. // We don't support most of that, but just enough to make thread-local dtors in `std` work. - interp_ok(this.lookup_link_section(".CRT$XLB")?) + interp_ok(this.lookup_link_section(|section| section == ".CRT$XLB")?) } fn schedule_windows_tls_dtor(&mut self, dtor: ImmTy<'tcx>) -> InterpResult<'tcx> { diff --git a/src/tools/miri/src/shims/unix/fs.rs b/src/tools/miri/src/shims/unix/fs.rs index 0f7d453b296..0f2878ad26c 100644 --- a/src/tools/miri/src/shims/unix/fs.rs +++ b/src/tools/miri/src/shims/unix/fs.rs @@ -132,12 +132,12 @@ trait EvalContextExtPrivate<'tcx>: crate::MiriInterpCxExt<'tcx> { let buf = this.deref_pointer_as(buf_op, this.libc_ty_layout("stat"))?; this.write_int_fields_named( &[ - ("st_dev", 0), + ("st_dev", metadata.dev.into()), ("st_mode", mode.try_into().unwrap()), ("st_nlink", 0), ("st_ino", 0), - ("st_uid", 0), - ("st_gid", 0), + ("st_uid", metadata.uid.into()), + ("st_gid", metadata.gid.into()), ("st_rdev", 0), ("st_atime", access_sec.into()), ("st_mtime", modified_sec.into()), @@ -1544,6 +1544,9 @@ struct FileMetadata { created: Option<(u64, u32)>, accessed: Option<(u64, u32)>, modified: Option<(u64, u32)>, + dev: u64, + uid: u32, + gid: u32, } impl FileMetadata { @@ -1601,6 +1604,21 @@ impl FileMetadata { let modified = extract_sec_and_nsec(metadata.modified())?; // FIXME: Provide more fields using platform specific methods. - interp_ok(Ok(FileMetadata { mode, size, created, accessed, modified })) + + cfg_select! { + unix => { + use std::os::unix::fs::MetadataExt; + let dev = metadata.dev(); + let uid = metadata.uid(); + let gid = metadata.gid(); + } + _ => { + let dev = 0; + let uid = 0; + let gid = 0; + } + } + + interp_ok(Ok(FileMetadata { mode, size, created, accessed, modified, dev, uid, gid })) } } diff --git a/src/tools/miri/src/shims/unwind.rs b/src/tools/miri/src/shims/unwind.rs new file mode 100644 index 00000000000..ba0c50b54b4 --- /dev/null +++ b/src/tools/miri/src/shims/unwind.rs @@ -0,0 +1,160 @@ +//! Unwinding runtime for Miri. +//! +//! The core pieces of the runtime are: +//! - An implementation of `catch_unwind` that pushes the invoked stack frame with +//! some extra metadata derived from the panic-catching arguments of `catch_unwind`. +//! - A hack in `libpanic_unwind` that calls the `miri_start_unwind` intrinsic instead of the +//! target-native panic runtime. (This lives in the rustc repo.) +//! - An implementation of `miri_start_unwind` that stores its argument (the panic payload), and +//! then immediately returns, but on the *unwind* edge (not the normal return edge), thus +//! initiating unwinding. +//! - A hook executed each time a frame is popped, such that if the frame pushed by `catch_unwind` +//! gets popped *during unwinding*, we take the panic payload and store it according to the extra +//! metadata we remembered when pushing said frame. + +use rustc_abi::ExternAbi; +use rustc_middle::mir; +use rustc_target::spec::PanicStrategy; + +use self::helpers::check_intrinsic_arg_count; +use crate::*; + +/// Holds all of the relevant data for when unwinding hits a `try` frame. 
+#[derive(Debug)] +pub struct CatchUnwindData<'tcx> { + /// The `catch_fn` callback to call in case of a panic. + catch_fn: Pointer, + /// The `data` argument for that callback. + data: ImmTy<'tcx>, + /// The return place from the original call to `try`. + dest: MPlaceTy<'tcx>, + /// The return block from the original call to `try`. + ret: Option<mir::BasicBlock>, +} + +impl VisitProvenance for CatchUnwindData<'_> { + fn visit_provenance(&self, visit: &mut VisitWith<'_>) { + let CatchUnwindData { catch_fn, data, dest, ret: _ } = self; + catch_fn.visit_provenance(visit); + data.visit_provenance(visit); + dest.visit_provenance(visit); + } +} + +impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {} +pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { + /// Handles the special `miri_start_unwind` intrinsic, which is called + /// by libpanic_unwind to delegate the actual unwinding process to Miri. + fn handle_miri_start_unwind(&mut self, payload: &OpTy<'tcx>) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + trace!("miri_start_unwind: {:?}", this.frame().instance()); + + let payload = this.read_immediate(payload)?; + let thread = this.active_thread_mut(); + thread.unwind_payloads.push(payload); + + interp_ok(()) + } + + /// Handles the `catch_unwind` intrinsic. + fn handle_catch_unwind( + &mut self, + args: &[OpTy<'tcx>], + dest: &MPlaceTy<'tcx>, + ret: Option<mir::BasicBlock>, + ) -> InterpResult<'tcx> { + let this = self.eval_context_mut(); + + // Signature: + // fn catch_unwind(try_fn: fn(*mut u8), data: *mut u8, catch_fn: fn(*mut u8, *mut u8)) -> i32 + // Calls `try_fn` with `data` as argument. If that executes normally, returns 0. + // If that unwinds, calls `catch_fn` with the first argument being `data` and + // then second argument being a target-dependent `payload` (i.e. it is up to us to define + // what that is), and returns 1. + // The `payload` is passed (by libstd) to `__rust_panic_cleanup`, which is then expected to + // return a `Box<dyn Any + Send + 'static>`. + // In Miri, `miri_start_unwind` is passed exactly that type, so we make the `payload` simply + // a pointer to `Box<dyn Any + Send + 'static>`. + + // Get all the arguments. + let [try_fn, data, catch_fn] = check_intrinsic_arg_count(args)?; + let try_fn = this.read_pointer(try_fn)?; + let data = this.read_immediate(data)?; + let catch_fn = this.read_pointer(catch_fn)?; + + // Now we make a function call, and pass `data` as first and only argument. + let f_instance = this.get_ptr_fn(try_fn)?.as_instance()?; + trace!("try_fn: {:?}", f_instance); + #[allow(clippy::cloned_ref_to_slice_refs)] // the code is clearer as-is + this.call_function( + f_instance, + ExternAbi::Rust, + &[data.clone()], + None, + // Directly return to caller. + ReturnContinuation::Goto { ret, unwind: mir::UnwindAction::Continue }, + )?; + + // We ourselves will return `0`, eventually (will be overwritten if we catch a panic). + this.write_null(dest)?; + + // In unwind mode, we tag this frame with the extra data needed to catch unwinding. + // This lets `handle_stack_pop` (below) know that we should stop unwinding + // when we pop this frame. 
+ if this.tcx.sess.panic_strategy() == PanicStrategy::Unwind { + this.frame_mut().extra.catch_unwind = + Some(CatchUnwindData { catch_fn, data, dest: dest.clone(), ret }); + } + + interp_ok(()) + } + + fn handle_stack_pop_unwind( + &mut self, + mut extra: FrameExtra<'tcx>, + unwinding: bool, + ) -> InterpResult<'tcx, ReturnAction> { + let this = self.eval_context_mut(); + trace!("handle_stack_pop_unwind(extra = {:?}, unwinding = {})", extra, unwinding); + + // We only care about `catch_panic` if we're unwinding - if we're doing a normal + // return, then we don't need to do anything special. + if let (true, Some(catch_unwind)) = (unwinding, extra.catch_unwind.take()) { + // We've just popped a frame that was pushed by `catch_unwind`, + // and we are unwinding, so we should catch that. + trace!( + "unwinding: found catch_panic frame during unwinding: {:?}", + this.frame().instance() + ); + + // We set the return value of `catch_unwind` to 1, since there was a panic. + this.write_scalar(Scalar::from_i32(1), &catch_unwind.dest)?; + + // The Thread's `panic_payload` holds what was passed to `miri_start_unwind`. + // This is exactly the second argument we need to pass to `catch_fn`. + let payload = this.active_thread_mut().unwind_payloads.pop().unwrap(); + + // Push the `catch_fn` stackframe. + let f_instance = this.get_ptr_fn(catch_unwind.catch_fn)?.as_instance()?; + trace!("catch_fn: {:?}", f_instance); + this.call_function( + f_instance, + ExternAbi::Rust, + &[catch_unwind.data, payload], + None, + // Directly return to caller of `catch_unwind`. + ReturnContinuation::Goto { + ret: catch_unwind.ret, + // `catch_fn` must not unwind. + unwind: mir::UnwindAction::Unreachable, + }, + )?; + + // We pushed a new stack frame, the engine should not do any jumping now! + interp_ok(ReturnAction::NoJump) + } else { + interp_ok(ReturnAction::Normal) + } + } +} diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs new file mode 100644 index 00000000000..107a3db91d8 --- /dev/null +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs @@ -0,0 +1,6 @@ +fn main() { + let v: Vec<u16> = vec![1, 2]; + // This read is also misaligned. We make sure that the OOB message has priority. 
+ let x = unsafe { *v.as_ptr().wrapping_byte_sub(5) }; //~ ERROR: before the beginning of the allocation + panic!("this should never print: {}", x); +} diff --git a/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.stderr b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.stderr new file mode 100644 index 00000000000..5c37caa1ebf --- /dev/null +++ b/src/tools/miri/tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.stderr @@ -0,0 +1,21 @@ +error: Undefined Behavior: memory access failed: attempting to access 2 bytes, but got ALLOC-0x5 which points to before the beginning of the allocation + --> tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs:LL:CC + | +LL | let x = unsafe { *v.as_ptr().wrapping_byte_sub(5) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information +help: ALLOC was allocated here: + --> tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs:LL:CC + | +LL | let v: Vec<u16> = vec![1, 2]; + | ^^^^^^^^^^ + = note: BACKTRACE (of the first span): + = note: inside `main` at tests/fail/dangling_pointers/out_of_bounds_read_neg_offset.rs:LL:CC + = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs new file mode 100644 index 00000000000..0085e2af367 --- /dev/null +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs @@ -0,0 +1,6 @@ +fn main() { + let v = [0i8; 4]; + let x = &v as *const i8; + let x = unsafe { x.wrapping_offset(-1).offset(-1) }; //~ERROR: before the beginning of the allocation + panic!("this should never print: {:?}", x); +} diff --git a/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.stderr b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.stderr new file mode 100644 index 00000000000..495aaf8d40e --- /dev/null +++ b/src/tools/miri/tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.stderr @@ -0,0 +1,20 @@ +error: Undefined Behavior: in-bounds pointer arithmetic failed: attempting to offset pointer by -1 bytes, but got ALLOC-0x1 which points to before the beginning of the allocation + --> tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs:LL:CC + | +LL | let x = unsafe { x.wrapping_offset(-1).offset(-1) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information +help: ALLOC was allocated here: + --> tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs:LL:CC + | +LL | let v = [0i8; 4]; + | ^ + = note: BACKTRACE (of the first span): + = note: inside `main` at tests/fail/intrinsics/ptr_offset_out_of_bounds_neg2.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git 
a/src/tools/miri/tests/native-lib/fail/tracing/partial_init.rs b/src/tools/miri/tests/native-lib/fail/tracing/partial_init.rs new file mode 100644 index 00000000000..e267f82e215 --- /dev/null +++ b/src/tools/miri/tests/native-lib/fail/tracing/partial_init.rs @@ -0,0 +1,25 @@ +//@only-target: x86_64-unknown-linux-gnu i686-unknown-linux-gnu +//@compile-flags: -Zmiri-native-lib-enable-tracing + +extern "C" { + fn init_n(n: i32, ptr: *mut u8); +} + +fn main() { + partial_init(); +} + +// Initialise the first 2 elements of the slice from native code, and check +// that the 3rd is correctly deemed uninit. +fn partial_init() { + let mut slice = std::mem::MaybeUninit::<[u8; 3]>::uninit(); + let slice_ptr = slice.as_mut_ptr().cast::<u8>(); + unsafe { + // Initialize the first two elements. + init_n(2, slice_ptr); + assert!(*slice_ptr == 0); + assert!(*slice_ptr.offset(1) == 0); + // Reading the third is UB! + let _val = *slice_ptr.offset(2); //~ ERROR: Undefined Behavior: using uninitialized data + } +} diff --git a/src/tools/miri/tests/native-lib/fail/tracing/partial_init.stderr b/src/tools/miri/tests/native-lib/fail/tracing/partial_init.stderr new file mode 100644 index 00000000000..84fd913b5e5 --- /dev/null +++ b/src/tools/miri/tests/native-lib/fail/tracing/partial_init.stderr @@ -0,0 +1,39 @@ +warning: sharing memory with a native function called via FFI + --> tests/native-lib/fail/tracing/partial_init.rs:LL:CC + | +LL | init_n(2, slice_ptr); + | ^^^^^^^^^^^^^^^^^^^^ sharing memory with a native function + | + = help: when memory is shared with a native function call, Miri can only track initialisation and provenance on a best-effort basis + = help: in particular, Miri assumes that the native call initializes all memory it has written to + = help: Miri also assumes that any part of this memory may be a pointer that is permitted to point to arbitrary exposed memory + = help: what this means is that Miri will easily miss Undefined Behavior related to incorrect usage of this shared memory, so you should not take a clean Miri run as a signal that your FFI code is UB-free + = help: tracing memory accesses in native code is not yet fully implemented, so there can be further imprecisions beyond what is documented here + = note: BACKTRACE: + = note: inside `partial_init` at tests/native-lib/fail/tracing/partial_init.rs:LL:CC +note: inside `main` + --> tests/native-lib/fail/tracing/partial_init.rs:LL:CC + | +LL | partial_init(); + | ^^^^^^^^^^^^^^ + +error: Undefined Behavior: using uninitialized data, but this operation requires initialized memory + --> tests/native-lib/fail/tracing/partial_init.rs:LL:CC + | +LL | let _val = *slice_ptr.offset(2); + | ^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = note: BACKTRACE: + = note: inside `partial_init` at tests/native-lib/fail/tracing/partial_init.rs:LL:CC +note: inside `main` + --> tests/native-lib/fail/tracing/partial_init.rs:LL:CC + | +LL | partial_init(); + | ^^^^^^^^^^^^^^ + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error; 1 warning emitted + diff --git a/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs b/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs new file mode 100644 
index 00000000000..b78c29d98db --- /dev/null +++ b/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs @@ -0,0 +1,29 @@ +//@only-target: x86_64-unknown-linux-gnu i686-unknown-linux-gnu +//@compile-flags: -Zmiri-permissive-provenance -Zmiri-native-lib-enable-tracing + +extern "C" { + fn do_one_deref(ptr: *const *const *const i32) -> usize; +} + +fn main() { + unexposed_reachable_alloc(); +} + +// Expose 2 pointers by virtue of doing a native read and assert that the 3rd in +// the chain remains properly unexposed. +fn unexposed_reachable_alloc() { + let inner = 42; + let intermediate_a = &raw const inner; + let intermediate_b = &raw const intermediate_a; + let exposed = &raw const intermediate_b; + // Discard the return value; it's just there so the access in C doesn't get optimised away. + unsafe { do_one_deref(exposed) }; + // Native read should have exposed the address of intermediate_b... + let valid: *const i32 = std::ptr::with_exposed_provenance(intermediate_b.addr()); + // but not of intermediate_a. + let invalid: *const i32 = std::ptr::with_exposed_provenance(intermediate_a.addr()); + unsafe { + let _ok = *valid; + let _not_ok = *invalid; //~ ERROR: Undefined Behavior: memory access failed: attempting to access + } +} diff --git a/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.stderr b/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.stderr new file mode 100644 index 00000000000..2d34dac1b3f --- /dev/null +++ b/src/tools/miri/tests/native-lib/fail/tracing/unexposed_reachable_alloc.stderr @@ -0,0 +1,39 @@ +warning: sharing memory with a native function called via FFI + --> tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC + | +LL | unsafe { do_one_deref(exposed) }; + | ^^^^^^^^^^^^^^^^^^^^^ sharing memory with a native function + | + = help: when memory is shared with a native function call, Miri can only track initialisation and provenance on a best-effort basis + = help: in particular, Miri assumes that the native call initializes all memory it has written to + = help: Miri also assumes that any part of this memory may be a pointer that is permitted to point to arbitrary exposed memory + = help: what this means is that Miri will easily miss Undefined Behavior related to incorrect usage of this shared memory, so you should not take a clean Miri run as a signal that your FFI code is UB-free + = help: tracing memory accesses in native code is not yet fully implemented, so there can be further imprecisions beyond what is documented here + = note: BACKTRACE: + = note: inside `unexposed_reachable_alloc` at tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC +note: inside `main` + --> tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC + | +LL | unexposed_reachable_alloc(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: Undefined Behavior: memory access failed: attempting to access 4 bytes, but got $HEX[noalloc] which is a dangling pointer (it has no provenance) + --> tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC + | +LL | let _not_ok = *invalid; + | ^^^^^^^^ Undefined Behavior occurred here + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = note: BACKTRACE: + = note: inside `unexposed_reachable_alloc` at tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC +note: inside 
`main` + --> tests/native-lib/fail/tracing/unexposed_reachable_alloc.rs:LL:CC + | +LL | unexposed_reachable_alloc(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error; 1 warning emitted + diff --git a/src/tools/miri/tests/native-lib/pass/ptr_read_access.stderr b/src/tools/miri/tests/native-lib/pass/ptr_read_access.notrace.stderr index 04a3025baef..04a3025baef 100644 --- a/src/tools/miri/tests/native-lib/pass/ptr_read_access.stderr +++ b/src/tools/miri/tests/native-lib/pass/ptr_read_access.notrace.stderr diff --git a/src/tools/miri/tests/native-lib/pass/ptr_read_access.stdout b/src/tools/miri/tests/native-lib/pass/ptr_read_access.notrace.stdout index 1a8799abfc9..1a8799abfc9 100644 --- a/src/tools/miri/tests/native-lib/pass/ptr_read_access.stdout +++ b/src/tools/miri/tests/native-lib/pass/ptr_read_access.notrace.stdout diff --git a/src/tools/miri/tests/native-lib/pass/ptr_read_access.rs b/src/tools/miri/tests/native-lib/pass/ptr_read_access.rs index 4c852135367..4f3c37f00c1 100644 --- a/src/tools/miri/tests/native-lib/pass/ptr_read_access.rs +++ b/src/tools/miri/tests/native-lib/pass/ptr_read_access.rs @@ -1,3 +1,7 @@ +//@revisions: trace notrace +//@[trace] only-target: x86_64-unknown-linux-gnu i686-unknown-linux-gnu +//@[trace] compile-flags: -Zmiri-native-lib-enable-tracing + fn main() { test_access_pointer(); test_access_simple(); diff --git a/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stderr b/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stderr new file mode 100644 index 00000000000..c2a4508b7fc --- /dev/null +++ b/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stderr @@ -0,0 +1,19 @@ +warning: sharing memory with a native function called via FFI + --> tests/native-lib/pass/ptr_read_access.rs:LL:CC + | +LL | unsafe { print_pointer(&x) }; + | ^^^^^^^^^^^^^^^^^ sharing memory with a native function + | + = help: when memory is shared with a native function call, Miri can only track initialisation and provenance on a best-effort basis + = help: in particular, Miri assumes that the native call initializes all memory it has written to + = help: Miri also assumes that any part of this memory may be a pointer that is permitted to point to arbitrary exposed memory + = help: what this means is that Miri will easily miss Undefined Behavior related to incorrect usage of this shared memory, so you should not take a clean Miri run as a signal that your FFI code is UB-free + = help: tracing memory accesses in native code is not yet fully implemented, so there can be further imprecisions beyond what is documented here + = note: BACKTRACE: + = note: inside `test_access_pointer` at tests/native-lib/pass/ptr_read_access.rs:LL:CC +note: inside `main` + --> tests/native-lib/pass/ptr_read_access.rs:LL:CC + | +LL | test_access_pointer(); + | ^^^^^^^^^^^^^^^^^^^^^ + diff --git a/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stdout b/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stdout new file mode 100644 index 00000000000..1a8799abfc9 --- /dev/null +++ b/src/tools/miri/tests/native-lib/pass/ptr_read_access.trace.stdout @@ -0,0 +1 @@ +printing pointer dereference from C: 42 diff --git a/src/tools/miri/tests/native-lib/pass/ptr_write_access.stderr b/src/tools/miri/tests/native-lib/pass/ptr_write_access.notrace.stderr index c893b0d9f65..c893b0d9f65 100644 --- 
a/src/tools/miri/tests/native-lib/pass/ptr_write_access.stderr +++ b/src/tools/miri/tests/native-lib/pass/ptr_write_access.notrace.stderr diff --git a/src/tools/miri/tests/native-lib/pass/ptr_write_access.rs b/src/tools/miri/tests/native-lib/pass/ptr_write_access.rs index 86a9c97f4ce..57def78b0ab 100644 --- a/src/tools/miri/tests/native-lib/pass/ptr_write_access.rs +++ b/src/tools/miri/tests/native-lib/pass/ptr_write_access.rs @@ -1,3 +1,6 @@ +//@revisions: trace notrace +//@[trace] only-target: x86_64-unknown-linux-gnu i686-unknown-linux-gnu +//@[trace] compile-flags: -Zmiri-native-lib-enable-tracing //@compile-flags: -Zmiri-permissive-provenance #![feature(box_as_ptr)] diff --git a/src/tools/miri/tests/native-lib/pass/ptr_write_access.trace.stderr b/src/tools/miri/tests/native-lib/pass/ptr_write_access.trace.stderr new file mode 100644 index 00000000000..dbf021b15be --- /dev/null +++ b/src/tools/miri/tests/native-lib/pass/ptr_write_access.trace.stderr @@ -0,0 +1,19 @@ +warning: sharing memory with a native function called via FFI + --> tests/native-lib/pass/ptr_write_access.rs:LL:CC + | +LL | unsafe { increment_int(&mut x) }; + | ^^^^^^^^^^^^^^^^^^^^^ sharing memory with a native function + | + = help: when memory is shared with a native function call, Miri can only track initialisation and provenance on a best-effort basis + = help: in particular, Miri assumes that the native call initializes all memory it has written to + = help: Miri also assumes that any part of this memory may be a pointer that is permitted to point to arbitrary exposed memory + = help: what this means is that Miri will easily miss Undefined Behavior related to incorrect usage of this shared memory, so you should not take a clean Miri run as a signal that your FFI code is UB-free + = help: tracing memory accesses in native code is not yet fully implemented, so there can be further imprecisions beyond what is documented here + = note: BACKTRACE: + = note: inside `test_increment_int` at tests/native-lib/pass/ptr_write_access.rs:LL:CC +note: inside `main` + --> tests/native-lib/pass/ptr_write_access.rs:LL:CC + | +LL | test_increment_int(); + | ^^^^^^^^^^^^^^^^^^^^ + diff --git a/src/tools/miri/tests/native-lib/ptr_read_access.c b/src/tools/miri/tests/native-lib/ptr_read_access.c index b89126d3d7c..021eb6adca4 100644 --- a/src/tools/miri/tests/native-lib/ptr_read_access.c +++ b/src/tools/miri/tests/native-lib/ptr_read_access.c @@ -49,3 +49,9 @@ typedef struct Static { EXPORT int32_t access_static(const Static *s_ptr) { return s_ptr->recurse->recurse->value; } + +/* Test: unexposed_reachable_alloc */ + +EXPORT uintptr_t do_one_deref(const int32_t ***ptr) { + return (uintptr_t)*ptr; +} diff --git a/src/tools/miri/tests/native-lib/ptr_write_access.c b/src/tools/miri/tests/native-lib/ptr_write_access.c index fd8b005499c..5260d0b3651 100644 --- a/src/tools/miri/tests/native-lib/ptr_write_access.c +++ b/src/tools/miri/tests/native-lib/ptr_write_access.c @@ -107,3 +107,11 @@ EXPORT void set_shared_mem(int32_t** ptr) { EXPORT void init_ptr_stored_in_shared_mem(int32_t val) { **shared_place = val; } + +/* Test: partial_init */ + +EXPORT void init_n(int32_t n, char* ptr) { + for (int i=0; i<n; i++) { + *(ptr+i) = 0; + } +} diff --git a/src/tools/miri/tests/pass/0weak_memory_consistency.rs b/src/tools/miri/tests/pass/0weak_memory_consistency.rs index b33aefaf1d5..e4ed9675de8 100644 --- a/src/tools/miri/tests/pass/0weak_memory_consistency.rs +++ b/src/tools/miri/tests/pass/0weak_memory_consistency.rs @@ -41,7 +41,15 @@ fn 
static_atomic_bool(val: bool) -> &'static AtomicBool { } /// Spins until it acquires a pre-determined value. -fn loads_value(loc: &AtomicI32, ord: Ordering, val: i32) -> i32 { +fn spin_until_i32(loc: &AtomicI32, ord: Ordering, val: i32) -> i32 { + while loc.load(ord) != val { + std::hint::spin_loop(); + } + val +} + +/// Spins until it acquires a pre-determined boolean. +fn spin_until_bool(loc: &AtomicBool, ord: Ordering, val: bool) -> bool { while loc.load(ord) != val { std::hint::spin_loop(); } @@ -65,7 +73,7 @@ fn test_corr() { }); // | | #[rustfmt::skip] // |synchronizes-with |happens-before let j3 = spawn(move || { // | | - loads_value(&y, Acquire, 1); // <------------+ | + spin_until_i32(&y, Acquire, 1); // <---------+ | x.load(Relaxed) // <----------------------------------------------+ // The two reads on x are ordered by hb, so they cannot observe values // differently from the modification order. If the first read observed @@ -90,12 +98,12 @@ fn test_wrc() { }); // | | #[rustfmt::skip] // |synchronizes-with | let j2 = spawn(move || { // | | - loads_value(&x, Acquire, 1); // <------------+ | + spin_until_i32(&x, Acquire, 1); // <---------+ | y.store(1, Release); // ---------------------+ |happens-before }); // | | #[rustfmt::skip] // |synchronizes-with | let j3 = spawn(move || { // | | - loads_value(&y, Acquire, 1); // <------------+ | + spin_until_i32(&y, Acquire, 1); // <---------+ | x.load(Relaxed) // <-----------------------------------------------+ }); @@ -121,7 +129,7 @@ fn test_message_passing() { #[rustfmt::skip] // |synchronizes-with | happens-before let j2 = spawn(move || { // | | let x = x; // avoid field capturing | | - loads_value(&y, Acquire, 1); // <------------+ | + spin_until_i32(&y, Acquire, 1); // <---------+ | unsafe { *x.0 } // <---------------------------------------------+ }); @@ -216,12 +224,12 @@ fn test_sync_through_rmw_and_fences() { let go = static_atomic_bool(false); let t1 = spawn(move || { - while !go.load(Relaxed) {} + spin_until_bool(go, Relaxed, true); rdmw(y, x, z) }); let t2 = spawn(move || { - while !go.load(Relaxed) {} + spin_until_bool(go, Relaxed, true); rdmw(z, x, y) }); diff --git a/src/tools/miri/tests/pass/0weak_memory_consistency_sc.rs b/src/tools/miri/tests/pass/0weak_memory_consistency_sc.rs index 45cc5e6e722..937c2a8cf28 100644 --- a/src/tools/miri/tests/pass/0weak_memory_consistency_sc.rs +++ b/src/tools/miri/tests/pass/0weak_memory_consistency_sc.rs @@ -20,7 +20,15 @@ fn static_atomic_bool(val: bool) -> &'static AtomicBool { } /// Spins until it acquires a pre-determined value. -fn loads_value(loc: &AtomicI32, ord: Ordering, val: i32) -> i32 { +fn spin_until_i32(loc: &AtomicI32, ord: Ordering, val: i32) -> i32 { + while loc.load(ord) != val { + std::hint::spin_loop(); + } + val +} + +/// Spins until it acquires a pre-determined boolean. 
+fn spin_until_bool(loc: &AtomicBool, ord: Ordering, val: bool) -> bool { while loc.load(ord) != val { std::hint::spin_loop(); } @@ -60,11 +68,11 @@ fn test_iriw_sc_rlx() { let a = spawn(move || x.store(true, Relaxed)); let b = spawn(move || y.store(true, Relaxed)); let c = spawn(move || { - while !x.load(SeqCst) {} + spin_until_bool(x, SeqCst, true); y.load(SeqCst) }); let d = spawn(move || { - while !y.load(SeqCst) {} + spin_until_bool(y, SeqCst, true); x.load(SeqCst) }); @@ -136,7 +144,7 @@ fn test_cpp20_rwc_syncs() { }); let j2 = spawn(move || { - loads_value(&x, Relaxed, 1); + spin_until_i32(&x, Relaxed, 1); fence(SeqCst); y.load(Relaxed) }); diff --git a/src/tools/miri/tests/pass/alloc-access-tracking.rs b/src/tools/miri/tests/pass/alloc-access-tracking.rs index 0e88951dc43..9eba0ca171b 100644 --- a/src/tools/miri/tests/pass/alloc-access-tracking.rs +++ b/src/tools/miri/tests/pass/alloc-access-tracking.rs @@ -1,7 +1,7 @@ #![no_std] #![no_main] -//@compile-flags: -Zmiri-track-alloc-id=20 -Zmiri-track-alloc-accesses -Cpanic=abort -//@normalize-stderr-test: "id 20" -> "id $$ALLOC" +//@compile-flags: -Zmiri-track-alloc-id=19 -Zmiri-track-alloc-accesses -Cpanic=abort +//@normalize-stderr-test: "id 19" -> "id $$ALLOC" //@only-target: linux # alloc IDs differ between OSes (due to extern static allocations) extern "Rust" { diff --git a/src/tools/miri/tests/pass/shims/ctor.rs b/src/tools/miri/tests/pass/shims/ctor.rs new file mode 100644 index 00000000000..b997d2386b8 --- /dev/null +++ b/src/tools/miri/tests/pass/shims/ctor.rs @@ -0,0 +1,46 @@ +use std::sync::atomic::{AtomicUsize, Ordering}; + +static COUNT: AtomicUsize = AtomicUsize::new(0); + +unsafe extern "C" fn ctor() { + COUNT.fetch_add(1, Ordering::Relaxed); +} + +#[rustfmt::skip] +macro_rules! ctor { + ($ident:ident = $ctor:ident) => { + #[cfg_attr( + all(any( + target_os = "linux", + target_os = "android", + target_os = "dragonfly", + target_os = "freebsd", + target_os = "haiku", + target_os = "illumos", + target_os = "netbsd", + target_os = "openbsd", + target_os = "solaris", + target_os = "none", + target_family = "wasm", + )), + link_section = ".init_array" + )] + #[cfg_attr(windows, link_section = ".CRT$XCU")] + #[cfg_attr( + any(target_os = "macos", target_os = "ios"), + // We do not set the `mod_init_funcs` flag here since ctor/inventory also do not do + // that. See <https://github.com/rust-lang/miri/pull/4459#discussion_r2200115629>. + link_section = "__DATA,__mod_init_func" + )] + #[used] + static $ident: unsafe extern "C" fn() = $ctor; + }; +} + +ctor! { CTOR1 = ctor } +ctor! { CTOR2 = ctor } +ctor! 
{ CTOR3 = ctor } + +fn main() { + assert_eq!(COUNT.load(Ordering::Relaxed), 3, "ctors did not run"); +} diff --git a/src/tools/miri/tests/pass/weak_memory/weak.rs b/src/tools/miri/tests/pass/weak_memory/weak.rs index eeab4ebf129..199f83f0528 100644 --- a/src/tools/miri/tests/pass/weak_memory/weak.rs +++ b/src/tools/miri/tests/pass/weak_memory/weak.rs @@ -24,7 +24,7 @@ fn static_atomic(val: usize) -> &'static AtomicUsize { } // Spins until it reads the given value -fn reads_value(loc: &AtomicUsize, val: usize) -> usize { +fn spin_until(loc: &AtomicUsize, val: usize) -> usize { while loc.load(Relaxed) != val { std::hint::spin_loop(); } @@ -85,7 +85,7 @@ fn initialization_write(add_fence: bool) -> bool { }); let j2 = spawn(move || { - reads_value(wait, 1); + spin_until(wait, 1); if add_fence { fence(AcqRel); } @@ -119,12 +119,12 @@ fn faa_replaced_by_load() -> bool { let go = static_atomic(0); let t1 = spawn(move || { - while go.load(Relaxed) == 0 {} + spin_until(go, 1); rdmw(y, x, z) }); let t2 = spawn(move || { - while go.load(Relaxed) == 0 {} + spin_until(go, 1); rdmw(z, x, y) }); diff --git a/src/tools/remote-test-client/src/main.rs b/src/tools/remote-test-client/src/main.rs index bda08423487..b9741431b50 100644 --- a/src/tools/remote-test-client/src/main.rs +++ b/src/tools/remote-test-client/src/main.rs @@ -335,7 +335,9 @@ fn run(support_lib_count: usize, exe: String, all_args: Vec<String>) { std::process::exit(code); } else { println!("died due to signal {}", code); - std::process::exit(3); + // Behave like bash and other tools and exit with 128 + the signal + // number. That way we can avoid special case code in other places. + std::process::exit(128 + code); } } diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 7432a82080d..e55cd80943d 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -154,6 +154,22 @@ dependencies = [ ] [[package]] +name = "cargo-util-schemas" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dc1a6f7b5651af85774ae5a34b4e8be397d9cf4bc063b7e6dbd99a841837830" +dependencies = [ + "semver", + "serde", + "serde-untagged", + "serde-value", + "thiserror 2.0.12", + "toml", + "unicode-xid", + "url", +] + +[[package]] name = "cargo_metadata" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -161,7 +177,22 @@ checksum = "4f7835cfc6135093070e95eb2b53e5d9b5c403dc3a6be6040ee026270aa82502" dependencies = [ "camino", "cargo-platform", - "cargo-util-schemas", + "cargo-util-schemas 0.2.0", + "semver", + "serde", + "serde_json", + "thiserror 2.0.12", +] + +[[package]] +name = "cargo_metadata" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cfca2aaa699835ba88faf58a06342a314a950d2b9686165e038286c30316868" +dependencies = [ + "camino", + "cargo-platform", + "cargo-util-schemas 0.8.2", "semver", "serde", "serde_json", @@ -1190,13 +1221,16 @@ checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" name = "lsp-server" version = "0.7.8" dependencies = [ + "anyhow", "crossbeam-channel", "ctrlc", "log", "lsp-types", + "rustc-hash 2.1.1", "serde", "serde_derive", "serde_json", + "toolchain", ] [[package]] @@ -1471,7 +1505,7 @@ dependencies = [ "edition", "expect-test", "ra-ap-rustc_lexer", - "rustc-literal-escaper 0.0.4", + "rustc-literal-escaper", "stdx", "tracing", ] @@ -1599,7 +1633,7 @@ dependencies = [ name = "proc-macro-test" version = "0.0.0" 
dependencies = [ - "cargo_metadata", + "cargo_metadata 0.20.0", ] [[package]] @@ -1640,7 +1674,7 @@ version = "0.0.0" dependencies = [ "anyhow", "base-db", - "cargo_metadata", + "cargo_metadata 0.21.0", "cfg", "expect-test", "intern", @@ -1722,9 +1756,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_abi" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a967e3a9cd3e38b543f503978e0eccee461e3aea3f7b10e944959bff41dbe612" +checksum = "3ee51482d1c9d3e538acda8cce723db8eea1a81540544bf362bf4c3d841b2329" dependencies = [ "bitflags 2.9.1", "ra-ap-rustc_hashes", @@ -1734,18 +1768,18 @@ dependencies = [ [[package]] name = "ra-ap-rustc_hashes" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ea4c755ecbbffa5743c251344f484ebe571ec7bc5b36d80b2a8ae775d1a7a40" +checksum = "19c8f1e0c28e24e1b4c55dc08058c6c9829df2204497d4034259f491d348c204" dependencies = [ "rustc-stable-hash", ] [[package]] name = "ra-ap-rustc_index" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aca7ad7cf911538c619caa2162339fe98637e9e46f11bb0484ef96735df4d64a" +checksum = "5f33f429cec6b92fa2c7243883279fb29dd233fdc3e94099aff32aa91aa87f50" dependencies = [ "ra-ap-rustc_index_macros", "smallvec", @@ -1753,9 +1787,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8767ba551c9355bc3031be072cc4bb0381106e5e7cd275e72b7a8c76051c4070" +checksum = "b9b55910dbe1fe7ef34bdc1d1bcb41e99b377eb680ea58a1218d95d6b4152257" dependencies = [ "proc-macro2", "quote", @@ -1764,9 +1798,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_lexer" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6101374afb267e6c27e4e2eb0b1352e9f3504c1a8f716f619cd39244e2ed92ab" +checksum = "22944e31fb91e9b3e75bcbc91e37d958b8c0825a6160927f2856831d2ce83b36" dependencies = [ "memchr", "unicode-properties", @@ -1775,19 +1809,19 @@ dependencies = [ [[package]] name = "ra-ap-rustc_parse_format" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd88a19f00da4f43e6727d5013444cbc399804b5046dfa2bbcd28ebed3970ce" +checksum = "81057891bc2063ad9e353f29462fbc47a0f5072560af34428ae9313aaa5e9d97" dependencies = [ "ra-ap-rustc_lexer", - "rustc-literal-escaper 0.0.2", + "rustc-literal-escaper", ] [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.116.0" +version = "0.121.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb332dd32d7850a799862533b1c021e6062558861a4ad57817bf522499fbb892" +checksum = "fe21a3542980d56d2435e96c2720773cac1c63fd4db666417e414729da192eb3" dependencies = [ "ra-ap-rustc_index", "rustc-hash 2.1.1", @@ -1855,7 +1889,7 @@ version = "0.0.0" dependencies = [ "anyhow", "base64", - "cargo_metadata", + "cargo_metadata 0.21.0", "cfg", "crossbeam-channel", "dirs", @@ -1934,12 +1968,6 @@ checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc-literal-escaper" -version = "0.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0041b6238913c41fe704213a4a9329e2f685a156d1781998128b4149c230ad04" - -[[package]] -name = "rustc-literal-escaper" version = "0.0.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ab03008eb631b703dd16978282ae36c73282e7922fe101a4bd072a40ecea7b8b" @@ -2231,7 +2259,7 @@ dependencies = [ "rayon", "rowan", "rustc-hash 2.1.1", - "rustc-literal-escaper 0.0.4", + "rustc-literal-escaper", "rustc_apfloat", "smol_str", "stdx", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index d268ce5b0bb..41fa06a76a7 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.86" +rust-version = "1.88" edition = "2024" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] @@ -89,11 +89,11 @@ vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } edition = { path = "./crates/edition", version = "0.0.0" } -ra-ap-rustc_lexer = { version = "0.116", default-features = false } -ra-ap-rustc_parse_format = { version = "0.116", default-features = false } -ra-ap-rustc_index = { version = "0.116", default-features = false } -ra-ap-rustc_abi = { version = "0.116", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.116", default-features = false } +ra-ap-rustc_lexer = { version = "0.121", default-features = false } +ra-ap-rustc_parse_format = { version = "0.121", default-features = false } +ra-ap-rustc_index = { version = "0.121", default-features = false } +ra-ap-rustc_abi = { version = "0.121", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.121", default-features = false } # local crates that aren't published to crates.io. These should not have versions. @@ -106,7 +106,7 @@ lsp-server = { version = "0.7.8" } anyhow = "1.0.98" arrayvec = "0.7.6" bitflags = "2.9.1" -cargo_metadata = "0.20.0" +cargo_metadata = "0.21.0" camino = "1.1.10" chalk-solve = { version = "0.103.0", default-features = false } chalk-ir = "0.103.0" @@ -138,7 +138,11 @@ rayon = "1.10.0" rowan = "=0.15.15" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it -salsa = { version = "0.23.0", default-features = true, features = ["rayon","salsa_unstable", "macros"] } +salsa = { version = "0.23.0", default-features = true, features = [ + "rayon", + "salsa_unstable", + "macros", +] } salsa-macros = "0.23.0" semver = "1.0.26" serde = { version = "1.0.219" } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs index 51612f341a1..d3dfc05eb29 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store.rs @@ -22,6 +22,7 @@ use rustc_hash::FxHashMap; use smallvec::SmallVec; use span::{Edition, SyntaxContext}; use syntax::{AstPtr, SyntaxNodePtr, ast}; +use thin_vec::ThinVec; use triomphe::Arc; use tt::TextRange; @@ -93,17 +94,17 @@ pub type TypeSource = InFile<TypePtr>; pub type LifetimePtr = AstPtr<ast::Lifetime>; pub type LifetimeSource = InFile<LifetimePtr>; +// We split the store into types-only and expressions, because most stores (e.g. generics) +// don't store any expressions and this saves memory. Same thing for the source map. 
#[derive(Debug, PartialEq, Eq)] -pub struct ExpressionStore { - pub exprs: Arena<Expr>, - pub pats: Arena<Pat>, - pub bindings: Arena<Binding>, - pub labels: Arena<Label>, - pub types: Arena<TypeRef>, - pub lifetimes: Arena<LifetimeRef>, +struct ExpressionOnlyStore { + exprs: Arena<Expr>, + pats: Arena<Pat>, + bindings: Arena<Binding>, + labels: Arena<Label>, /// Id of the closure/coroutine that owns the corresponding binding. If a binding is owned by the /// top level expression, it will not be listed in here. - pub binding_owners: FxHashMap<BindingId, ExprId>, + binding_owners: FxHashMap<BindingId, ExprId>, /// Block expressions in this store that may contain inner items. block_scopes: Box<[BlockId]>, @@ -114,8 +115,15 @@ pub struct ExpressionStore { ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>, } +#[derive(Debug, PartialEq, Eq)] +pub struct ExpressionStore { + expr_only: Option<Box<ExpressionOnlyStore>>, + pub types: Arena<TypeRef>, + pub lifetimes: Arena<LifetimeRef>, +} + #[derive(Debug, Eq, Default)] -pub struct ExpressionStoreSourceMap { +struct ExpressionOnlySourceMap { // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected). expr_map: FxHashMap<ExprSource, ExprOrPatId>, @@ -127,12 +135,6 @@ pub struct ExpressionStoreSourceMap { label_map: FxHashMap<LabelSource, LabelId>, label_map_back: ArenaMap<LabelId, LabelSource>, - types_map_back: ArenaMap<TypeRefId, TypeSource>, - types_map: FxHashMap<TypeSource, TypeRefId>, - - lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>, - lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>, - binding_definitions: ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>, @@ -143,14 +145,17 @@ pub struct ExpressionStoreSourceMap { template_map: Option<Box<FormatTemplate>>, - pub expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>, + expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>, /// Diagnostics accumulated during lowering. These contain `AstPtr`s and so are stored in /// the source map (since they're just as volatile). - pub diagnostics: Vec<ExpressionStoreDiagnostics>, + // + // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except + // maybe for cfgs, and they are also not common in type places). 
+ diagnostics: ThinVec<ExpressionStoreDiagnostics>, } -impl PartialEq for ExpressionStoreSourceMap { +impl PartialEq for ExpressionOnlySourceMap { fn eq(&self, other: &Self) -> bool { // we only need to compare one of the two mappings // as the other is a reverse mapping and thus will compare @@ -162,10 +167,6 @@ impl PartialEq for ExpressionStoreSourceMap { pat_map_back, label_map: _, label_map_back, - types_map_back, - types_map: _, - lifetime_map_back, - lifetime_map: _, // If this changed, our pattern data must have changed binding_definitions: _, // If this changed, our expression data must have changed @@ -179,14 +180,40 @@ impl PartialEq for ExpressionStoreSourceMap { *expr_map_back == other.expr_map_back && *pat_map_back == other.pat_map_back && *label_map_back == other.label_map_back - && *types_map_back == other.types_map_back - && *lifetime_map_back == other.lifetime_map_back && *template_map == other.template_map && *expansions == other.expansions && *diagnostics == other.diagnostics } } +#[derive(Debug, Eq, Default)] +pub struct ExpressionStoreSourceMap { + expr_only: Option<Box<ExpressionOnlySourceMap>>, + + types_map_back: ArenaMap<TypeRefId, TypeSource>, + types_map: FxHashMap<TypeSource, TypeRefId>, + + lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>, + #[expect( + unused, + reason = "this is here for completeness, and maybe we'll need it in the future" + )] + lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>, +} + +impl PartialEq for ExpressionStoreSourceMap { + fn eq(&self, other: &Self) -> bool { + // we only need to compare one of the two mappings + // as the other is a reverse mapping and thus will compare + // the same as normal mapping + let Self { expr_only, types_map_back, types_map: _, lifetime_map_back, lifetime_map: _ } = + self; + *expr_only == other.expr_only + && *types_map_back == other.types_map_back + && *lifetime_map_back == other.lifetime_map_back + } +} + /// The body of an item (function, const etc.). #[derive(Debug, Eq, PartialEq, Default)] pub struct ExpressionStoreBuilder { @@ -199,6 +226,42 @@ pub struct ExpressionStoreBuilder { pub types: Arena<TypeRef>, block_scopes: Vec<BlockId>, ident_hygiene: FxHashMap<ExprOrPatId, HygieneId>, + + // AST expressions can create patterns in destructuring assignments. Therefore, `ExprSource` can also map + // to `PatId`, and `PatId` can also map to `ExprSource` (the other way around is unaffected). + expr_map: FxHashMap<ExprSource, ExprOrPatId>, + expr_map_back: ArenaMap<ExprId, ExprOrPatSource>, + + pat_map: FxHashMap<PatSource, ExprOrPatId>, + pat_map_back: ArenaMap<PatId, ExprOrPatSource>, + + label_map: FxHashMap<LabelSource, LabelId>, + label_map_back: ArenaMap<LabelId, LabelSource>, + + types_map_back: ArenaMap<TypeRefId, TypeSource>, + types_map: FxHashMap<TypeSource, TypeRefId>, + + lifetime_map_back: ArenaMap<LifetimeRefId, LifetimeSource>, + lifetime_map: FxHashMap<LifetimeSource, LifetimeRefId>, + + binding_definitions: + ArenaMap<BindingId, SmallVec<[PatId; 2 * size_of::<usize>() / size_of::<PatId>()]>>, + + /// We don't create explicit nodes for record fields (`S { record_field: 92 }`). + /// Instead, we use id of expression (`92`) to identify the field. + field_map_back: FxHashMap<ExprId, FieldSource>, + pat_field_map_back: FxHashMap<PatId, PatFieldSource>, + + template_map: Option<Box<FormatTemplate>>, + + expansions: FxHashMap<InFile<MacroCallPtr>, MacroCallId>, + + /// Diagnostics accumulated during lowering. 
These contain `AstPtr`s and so are stored in + /// the source map (since they're just as volatile). + // + // We store diagnostics on the `ExpressionOnlySourceMap` because diagnostics are rare (except + // maybe for cfgs, and they are also not common in type places). + pub(crate) diagnostics: Vec<ExpressionStoreDiagnostics>, } #[derive(Default, Debug, Eq, PartialEq)] @@ -226,7 +289,7 @@ pub enum ExpressionStoreDiagnostics { } impl ExpressionStoreBuilder { - pub fn finish(self) -> ExpressionStore { + pub fn finish(self) -> (ExpressionStore, ExpressionStoreSourceMap) { let Self { block_scopes, mut exprs, @@ -237,6 +300,23 @@ impl ExpressionStoreBuilder { mut ident_hygiene, mut types, mut lifetimes, + + mut expr_map, + mut expr_map_back, + mut pat_map, + mut pat_map_back, + mut label_map, + mut label_map_back, + mut types_map_back, + mut types_map, + mut lifetime_map_back, + mut lifetime_map, + mut binding_definitions, + mut field_map_back, + mut pat_field_map_back, + mut template_map, + mut expansions, + diagnostics, } = self; exprs.shrink_to_fit(); labels.shrink_to_fit(); @@ -247,24 +327,90 @@ impl ExpressionStoreBuilder { types.shrink_to_fit(); lifetimes.shrink_to_fit(); - ExpressionStore { - exprs, - pats, - bindings, - labels, - binding_owners, - types, - lifetimes, - block_scopes: block_scopes.into_boxed_slice(), - ident_hygiene, + expr_map.shrink_to_fit(); + expr_map_back.shrink_to_fit(); + pat_map.shrink_to_fit(); + pat_map_back.shrink_to_fit(); + label_map.shrink_to_fit(); + label_map_back.shrink_to_fit(); + types_map_back.shrink_to_fit(); + types_map.shrink_to_fit(); + lifetime_map_back.shrink_to_fit(); + lifetime_map.shrink_to_fit(); + binding_definitions.shrink_to_fit(); + field_map_back.shrink_to_fit(); + pat_field_map_back.shrink_to_fit(); + if let Some(template_map) = &mut template_map { + let FormatTemplate { + format_args_to_captures, + asm_to_captures, + implicit_capture_to_source, + } = &mut **template_map; + format_args_to_captures.shrink_to_fit(); + asm_to_captures.shrink_to_fit(); + implicit_capture_to_source.shrink_to_fit(); } + expansions.shrink_to_fit(); + + let has_exprs = + !exprs.is_empty() || !labels.is_empty() || !pats.is_empty() || !bindings.is_empty(); + + let store = { + let expr_only = if has_exprs { + Some(Box::new(ExpressionOnlyStore { + exprs, + pats, + bindings, + labels, + binding_owners, + block_scopes: block_scopes.into_boxed_slice(), + ident_hygiene, + })) + } else { + None + }; + ExpressionStore { expr_only, types, lifetimes } + }; + + let source_map = { + let expr_only = if has_exprs || !expansions.is_empty() || !diagnostics.is_empty() { + Some(Box::new(ExpressionOnlySourceMap { + expr_map, + expr_map_back, + pat_map, + pat_map_back, + label_map, + label_map_back, + binding_definitions, + field_map_back, + pat_field_map_back, + template_map, + expansions, + diagnostics: ThinVec::from_iter(diagnostics), + })) + } else { + None + }; + ExpressionStoreSourceMap { + expr_only, + types_map_back, + types_map, + lifetime_map_back, + lifetime_map, + } + }; + + (store, source_map) } } impl ExpressionStore { - pub fn empty_singleton() -> Arc<Self> { - static EMPTY: LazyLock<Arc<ExpressionStore>> = - LazyLock::new(|| Arc::new(ExpressionStoreBuilder::default().finish())); + pub fn empty_singleton() -> (Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>) { + static EMPTY: LazyLock<(Arc<ExpressionStore>, Arc<ExpressionStoreSourceMap>)> = + LazyLock::new(|| { + let (store, source_map) = ExpressionStoreBuilder::default().finish(); + (Arc::new(store), 
Arc::new(source_map)) + }); EMPTY.clone() } @@ -273,7 +419,12 @@ impl ExpressionStore { &'a self, db: &'a dyn DefDatabase, ) -> impl Iterator<Item = (BlockId, &'a DefMap)> + 'a { - self.block_scopes.iter().map(move |&block| (block, block_def_map(db, block))) + self.expr_only + .as_ref() + .map(|it| &*it.block_scopes) + .unwrap_or_default() + .iter() + .map(move |&block| (block, block_def_map(db, block))) } pub fn walk_bindings_in_pat(&self, pat_id: PatId, mut f: impl FnMut(BindingId)) { @@ -320,7 +471,8 @@ impl ExpressionStore { } pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool { - match self.binding_owners.get(&binding) { + let Some(expr_only) = &self.expr_only else { return false }; + match expr_only.binding_owners.get(&binding) { Some(it) => { // We assign expression ids in a way that outer closures will receive // a lower id @@ -330,6 +482,11 @@ impl ExpressionStore { } } + #[inline] + pub fn binding_owner(&self, id: BindingId) -> Option<ExprId> { + self.expr_only.as_ref()?.binding_owners.get(&id).copied() + } + /// Walks the immediate children expressions and calls `f` for each child expression. /// /// Note that this does not walk const blocks. @@ -601,16 +758,22 @@ impl ExpressionStore { }); } + #[inline] + #[track_caller] + fn assert_expr_only(&self) -> &ExpressionOnlyStore { + self.expr_only.as_ref().expect("should have `ExpressionStore::expr_only`") + } + fn binding_hygiene(&self, binding: BindingId) -> HygieneId { - self.bindings[binding].hygiene + self.assert_expr_only().bindings[binding].hygiene } pub fn expr_path_hygiene(&self, expr: ExprId) -> HygieneId { - self.ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT) + self.assert_expr_only().ident_hygiene.get(&expr.into()).copied().unwrap_or(HygieneId::ROOT) } pub fn pat_path_hygiene(&self, pat: PatId) -> HygieneId { - self.ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT) + self.assert_expr_only().ident_hygiene.get(&pat.into()).copied().unwrap_or(HygieneId::ROOT) } pub fn expr_or_pat_path_hygiene(&self, id: ExprOrPatId) -> HygieneId { @@ -619,43 +782,72 @@ impl ExpressionStore { ExprOrPatId::PatId(id) => self.pat_path_hygiene(id), } } + + #[inline] + pub fn exprs(&self) -> impl Iterator<Item = (ExprId, &Expr)> { + match &self.expr_only { + Some(it) => it.exprs.iter(), + None => const { &Arena::new() }.iter(), + } + } + + #[inline] + pub fn pats(&self) -> impl Iterator<Item = (PatId, &Pat)> { + match &self.expr_only { + Some(it) => it.pats.iter(), + None => const { &Arena::new() }.iter(), + } + } + + #[inline] + pub fn bindings(&self) -> impl Iterator<Item = (BindingId, &Binding)> { + match &self.expr_only { + Some(it) => it.bindings.iter(), + None => const { &Arena::new() }.iter(), + } + } } impl Index<ExprId> for ExpressionStore { type Output = Expr; + #[inline] fn index(&self, expr: ExprId) -> &Expr { - &self.exprs[expr] + &self.assert_expr_only().exprs[expr] } } impl Index<PatId> for ExpressionStore { type Output = Pat; + #[inline] fn index(&self, pat: PatId) -> &Pat { - &self.pats[pat] + &self.assert_expr_only().pats[pat] } } impl Index<LabelId> for ExpressionStore { type Output = Label; + #[inline] fn index(&self, label: LabelId) -> &Label { - &self.labels[label] + &self.assert_expr_only().labels[label] } } impl Index<BindingId> for ExpressionStore { type Output = Binding; + #[inline] fn index(&self, b: BindingId) -> &Binding { - &self.bindings[b] + &self.assert_expr_only().bindings[b] } } impl Index<TypeRefId> for ExpressionStore { type Output = TypeRef; + 
#[inline] fn index(&self, b: TypeRefId) -> &TypeRef { &self.types[b] } @@ -664,6 +856,7 @@ impl Index<TypeRefId> for ExpressionStore { impl Index<LifetimeRefId> for ExpressionStore { type Output = LifetimeRef; + #[inline] fn index(&self, b: LifetimeRefId) -> &LifetimeRef { &self.lifetimes[b] } @@ -684,12 +877,6 @@ impl Index<PathId> for ExpressionStore { // FIXME: Change `node_` prefix to something more reasonable. // Perhaps `expr_syntax` and `expr_id`? impl ExpressionStoreSourceMap { - pub fn empty_singleton() -> Arc<Self> { - static EMPTY: LazyLock<Arc<ExpressionStoreSourceMap>> = - LazyLock::new(|| Arc::new(ExpressionStoreSourceMap::default())); - EMPTY.clone() - } - pub fn expr_or_pat_syntax(&self, id: ExprOrPatId) -> Result<ExprOrPatSource, SyntheticSyntax> { match id { ExprOrPatId::ExprId(id) => self.expr_syntax(id), @@ -697,30 +884,46 @@ impl ExpressionStoreSourceMap { } } + #[inline] + fn expr_or_synthetic(&self) -> Result<&ExpressionOnlySourceMap, SyntheticSyntax> { + self.expr_only.as_deref().ok_or(SyntheticSyntax) + } + + #[inline] + fn expr_only(&self) -> Option<&ExpressionOnlySourceMap> { + self.expr_only.as_deref() + } + + #[inline] + #[track_caller] + fn assert_expr_only(&self) -> &ExpressionOnlySourceMap { + self.expr_only.as_ref().expect("should have `ExpressionStoreSourceMap::expr_only`") + } + pub fn expr_syntax(&self, expr: ExprId) -> Result<ExprOrPatSource, SyntheticSyntax> { - self.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax) + self.expr_or_synthetic()?.expr_map_back.get(expr).cloned().ok_or(SyntheticSyntax) } pub fn node_expr(&self, node: InFile<&ast::Expr>) -> Option<ExprOrPatId> { let src = node.map(AstPtr::new); - self.expr_map.get(&src).cloned() + self.expr_only()?.expr_map.get(&src).cloned() } pub fn node_macro_file(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> { let src = node.map(AstPtr::new); - self.expansions.get(&src).cloned() + self.expr_only()?.expansions.get(&src).cloned() } pub fn macro_calls(&self) -> impl Iterator<Item = (InFile<MacroCallPtr>, MacroCallId)> + '_ { - self.expansions.iter().map(|(&a, &b)| (a, b)) + self.expr_only().into_iter().flat_map(|it| it.expansions.iter().map(|(&a, &b)| (a, b))) } pub fn pat_syntax(&self, pat: PatId) -> Result<ExprOrPatSource, SyntheticSyntax> { - self.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax) + self.expr_or_synthetic()?.pat_map_back.get(pat).cloned().ok_or(SyntheticSyntax) } pub fn node_pat(&self, node: InFile<&ast::Pat>) -> Option<ExprOrPatId> { - self.pat_map.get(&node.map(AstPtr::new)).cloned() + self.expr_only()?.pat_map.get(&node.map(AstPtr::new)).cloned() } pub fn type_syntax(&self, id: TypeRefId) -> Result<TypeSource, SyntheticSyntax> { @@ -732,49 +935,50 @@ impl ExpressionStoreSourceMap { } pub fn label_syntax(&self, label: LabelId) -> LabelSource { - self.label_map_back[label] + self.assert_expr_only().label_map_back[label] } pub fn patterns_for_binding(&self, binding: BindingId) -> &[PatId] { - self.binding_definitions.get(binding).map_or(&[], Deref::deref) + self.assert_expr_only().binding_definitions.get(binding).map_or(&[], Deref::deref) } pub fn node_label(&self, node: InFile<&ast::Label>) -> Option<LabelId> { let src = node.map(AstPtr::new); - self.label_map.get(&src).cloned() + self.expr_only()?.label_map.get(&src).cloned() } pub fn field_syntax(&self, expr: ExprId) -> FieldSource { - self.field_map_back[&expr] + self.assert_expr_only().field_map_back[&expr] } pub fn pat_field_syntax(&self, pat: PatId) -> PatFieldSource { - self.pat_field_map_back[&pat] + 
self.assert_expr_only().pat_field_map_back[&pat] } pub fn macro_expansion_expr(&self, node: InFile<&ast::MacroExpr>) -> Option<ExprOrPatId> { let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::MacroExpr>).map(AstPtr::upcast); - self.expr_map.get(&src).copied() + self.expr_only()?.expr_map.get(&src).copied() } pub fn expansions(&self) -> impl Iterator<Item = (&InFile<MacroCallPtr>, &MacroCallId)> { - self.expansions.iter() + self.expr_only().into_iter().flat_map(|it| it.expansions.iter()) } pub fn expansion(&self, node: InFile<&ast::MacroCall>) -> Option<MacroCallId> { - self.expansions.get(&node.map(AstPtr::new)).copied() + self.expr_only()?.expansions.get(&node.map(AstPtr::new)).copied() } pub fn implicit_format_args( &self, node: InFile<&ast::FormatArgsExpr>, ) -> Option<(HygieneId, &[(syntax::TextRange, Name)])> { + let expr_only = self.expr_only()?; let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>); - let (hygiene, names) = self + let (hygiene, names) = expr_only .template_map .as_ref()? .format_args_to_captures - .get(&self.expr_map.get(&src)?.as_expr()?)?; + .get(&expr_only.expr_map.get(&src)?.as_expr()?)?; Some((*hygiene, &**names)) } @@ -782,67 +986,28 @@ impl ExpressionStoreSourceMap { &self, capture_expr: ExprId, ) -> Option<InFile<(ExprPtr, TextRange)>> { - self.template_map.as_ref()?.implicit_capture_to_source.get(&capture_expr).copied() + self.expr_only()? + .template_map + .as_ref()? + .implicit_capture_to_source + .get(&capture_expr) + .copied() } pub fn asm_template_args( &self, node: InFile<&ast::AsmExpr>, ) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> { + let expr_only = self.expr_only()?; let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>); - let expr = self.expr_map.get(&src)?.as_expr()?; - Some(expr) - .zip(self.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref)) + let expr = expr_only.expr_map.get(&src)?.as_expr()?; + Some(expr).zip( + expr_only.template_map.as_ref()?.asm_to_captures.get(&expr).map(std::ops::Deref::deref), + ) } /// Get a reference to the source map's diagnostics. 
pub fn diagnostics(&self) -> &[ExpressionStoreDiagnostics] { - &self.diagnostics - } - - fn shrink_to_fit(&mut self) { - let Self { - expr_map, - expr_map_back, - pat_map, - pat_map_back, - label_map, - label_map_back, - field_map_back, - pat_field_map_back, - expansions, - template_map, - diagnostics, - binding_definitions, - types_map, - types_map_back, - lifetime_map_back, - lifetime_map, - } = self; - if let Some(template_map) = template_map { - let FormatTemplate { - format_args_to_captures, - asm_to_captures, - implicit_capture_to_source, - } = &mut **template_map; - format_args_to_captures.shrink_to_fit(); - asm_to_captures.shrink_to_fit(); - implicit_capture_to_source.shrink_to_fit(); - } - expr_map.shrink_to_fit(); - expr_map_back.shrink_to_fit(); - pat_map.shrink_to_fit(); - pat_map_back.shrink_to_fit(); - label_map.shrink_to_fit(); - label_map_back.shrink_to_fit(); - field_map_back.shrink_to_fit(); - pat_field_map_back.shrink_to_fit(); - expansions.shrink_to_fit(); - diagnostics.shrink_to_fit(); - binding_definitions.shrink_to_fit(); - types_map.shrink_to_fit(); - types_map_back.shrink_to_fit(); - lifetime_map.shrink_to_fit(); - lifetime_map_back.shrink_to_fit(); + self.expr_only().map(|it| &*it.diagnostics).unwrap_or_default() } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs index fb6d931e0e4..c955393b9cf 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/body.rs @@ -36,6 +36,7 @@ pub struct Body { impl ops::Deref for Body { type Target = ExpressionStore; + #[inline] fn deref(&self) -> &Self::Target { &self.store } @@ -61,6 +62,7 @@ pub struct BodySourceMap { impl ops::Deref for BodySourceMap { type Target = ExpressionStoreSourceMap; + #[inline] fn deref(&self) -> &Self::Target { &self.store } @@ -102,9 +104,7 @@ impl Body { } }; let module = def.module(db); - let (body, mut source_map) = - lower_body(db, def, file_id, module, params, body, is_async_fn); - source_map.store.shrink_to_fit(); + let (body, source_map) = lower_body(db, def, file_id, module, params, body, is_async_fn); (Arc::new(body), Arc::new(source_map)) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index c0e51b338b4..4e877748ca2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -121,14 +121,10 @@ pub(super) fn lower_body( params = (0..count).map(|_| collector.missing_pat()).collect(); }; let body_expr = collector.missing_expr(); + let (store, source_map) = collector.store.finish(); return ( - Body { - store: collector.store.finish(), - params: params.into_boxed_slice(), - self_param, - body_expr, - }, - BodySourceMap { self_param: source_map_self_param, store: collector.source_map }, + Body { store, params: params.into_boxed_slice(), self_param, body_expr }, + BodySourceMap { self_param: source_map_self_param, store: source_map }, ); } @@ -171,14 +167,10 @@ pub(super) fn lower_body( }, ); + let (store, source_map) = collector.store.finish(); ( - Body { - store: collector.store.finish(), - params: params.into_boxed_slice(), - self_param, - body_expr, - }, - BodySourceMap { self_param: source_map_self_param, store: collector.source_map }, + Body { store, params: params.into_boxed_slice(), self_param, body_expr }, + BodySourceMap { self_param: 
source_map_self_param, store: source_map }, ) } @@ -190,7 +182,8 @@ pub(crate) fn lower_type_ref( let mut expr_collector = ExprCollector::new(db, module, type_ref.file_id); let type_ref = expr_collector.lower_type_ref_opt(type_ref.value, &mut ExprCollector::impl_trait_allocator); - (expr_collector.store.finish(), expr_collector.source_map, type_ref) + let (store, source_map) = expr_collector.store.finish(); + (store, source_map, type_ref) } pub(crate) fn lower_generic_params( @@ -205,7 +198,8 @@ pub(crate) fn lower_generic_params( let mut collector = generics::GenericParamsCollector::new(def); collector.lower(&mut expr_collector, param_list, where_clause); let params = collector.finish(); - (Arc::new(expr_collector.store.finish()), params, expr_collector.source_map) + let (store, source_map) = expr_collector.store.finish(); + (Arc::new(store), params, source_map) } pub(crate) fn lower_impl( @@ -232,7 +226,8 @@ pub(crate) fn lower_impl( impl_syntax.value.where_clause(), ); let params = collector.finish(); - (expr_collector.store.finish(), expr_collector.source_map, self_ty, trait_, params) + let (store, source_map) = expr_collector.store.finish(); + (store, source_map, self_ty, trait_, params) } pub(crate) fn lower_trait( @@ -253,7 +248,8 @@ pub(crate) fn lower_trait( trait_syntax.value.where_clause(), ); let params = collector.finish(); - (expr_collector.store.finish(), expr_collector.source_map, params) + let (store, source_map) = expr_collector.store.finish(); + (store, source_map, params) } pub(crate) fn lower_trait_alias( @@ -274,7 +270,8 @@ pub(crate) fn lower_trait_alias( trait_syntax.value.where_clause(), ); let params = collector.finish(); - (expr_collector.store.finish(), expr_collector.source_map, params) + let (store, source_map) = expr_collector.store.finish(); + (store, source_map, params) } pub(crate) fn lower_type_alias( @@ -313,7 +310,8 @@ pub(crate) fn lower_type_alias( .value .ty() .map(|ty| expr_collector.lower_type_ref(ty, &mut ExprCollector::impl_trait_allocator)); - (expr_collector.store.finish(), expr_collector.source_map, params, bounds, type_ref) + let (store, source_map) = expr_collector.store.finish(); + (store, source_map, params, bounds, type_ref) } pub(crate) fn lower_function( @@ -421,9 +419,10 @@ pub(crate) fn lower_function( } else { return_type }; + let (store, source_map) = expr_collector.store.finish(); ( - expr_collector.store.finish(), - expr_collector.source_map, + store, + source_map, generics, params.into_boxed_slice(), return_type, @@ -440,7 +439,6 @@ pub struct ExprCollector<'db> { local_def_map: &'db LocalDefMap, module: ModuleId, pub store: ExpressionStoreBuilder, - pub(crate) source_map: ExpressionStoreSourceMap, // state stuff // Prevent nested impl traits like `impl Foo<impl Bar>`. 
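[Editor's sketch, not part of the patch.] The lower.rs hunks above all follow the same shape: the collector no longer carries a separate `source_map` field, and `ExpressionStoreBuilder::finish()` hands back the store and its source map as a pair. A rough sketch of that builder shape, with simplified, hypothetical names rather than the real API:

    // Minimal sketch: the builder accumulates both halves and `finish()` yields them
    // together, so callers cannot keep a stale builder or forget to shrink/take the
    // source map separately.
    #[derive(Default)]
    struct StoreBuilder {
        exprs: Vec<String>,
        expr_origins: Vec<usize>, // source-map data now lives in the same builder
    }

    struct Store { exprs: Vec<String> }
    struct SourceMap { expr_origins: Vec<usize> }

    impl StoreBuilder {
        fn alloc(&mut self, expr: &str, origin: usize) {
            self.exprs.push(expr.to_owned());
            self.expr_origins.push(origin);
        }

        fn finish(mut self) -> (Store, SourceMap) {
            // Shrink everything exactly once, at the point where both halves are frozen.
            self.exprs.shrink_to_fit();
            self.expr_origins.shrink_to_fit();
            (Store { exprs: self.exprs }, SourceMap { expr_origins: self.expr_origins })
        }
    }

    fn main() {
        let mut builder = StoreBuilder::default();
        builder.alloc("1 + 1", 0);
        let (store, source_map) = builder.finish();
        assert_eq!(store.exprs.len(), source_map.expr_origins.len());
    }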
@@ -551,7 +549,6 @@ impl ExprCollector<'_> { module, def_map, local_def_map, - source_map: ExpressionStoreSourceMap::default(), store: ExpressionStoreBuilder::default(), expander, current_try_block_label: None, @@ -698,7 +695,7 @@ impl ExprCollector<'_> { let id = self.collect_macro_call(mcall, macro_ptr, true, |this, expansion| { this.lower_type_ref_opt(expansion, impl_trait_lower_fn) }); - self.source_map.types_map.insert(src, id); + self.store.types_map.insert(src, id); return id; } None => TypeRef::Error, @@ -732,8 +729,8 @@ impl ExprCollector<'_> { fn alloc_type_ref(&mut self, type_ref: TypeRef, node: TypePtr) -> TypeRefId { let id = self.store.types.alloc(type_ref); let ptr = self.expander.in_file(node); - self.source_map.types_map_back.insert(id, ptr); - self.source_map.types_map.insert(ptr, id); + self.store.types_map_back.insert(id, ptr); + self.store.types_map.insert(ptr, id); id } @@ -744,8 +741,8 @@ impl ExprCollector<'_> { ) -> LifetimeRefId { let id = self.store.lifetimes.alloc(lifetime_ref); let ptr = self.expander.in_file(node); - self.source_map.lifetime_map_back.insert(id, ptr); - self.source_map.lifetime_map.insert(ptr, id); + self.store.lifetime_map_back.insert(id, ptr); + self.store.lifetime_map.insert(ptr, id); id } @@ -1190,14 +1187,14 @@ impl ExprCollector<'_> { } ast::Expr::ContinueExpr(e) => { let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| { - self.source_map.diagnostics.push(e); + self.store.diagnostics.push(e); None }); self.alloc_expr(Expr::Continue { label }, syntax_ptr) } ast::Expr::BreakExpr(e) => { let label = self.resolve_label(e.lifetime()).unwrap_or_else(|e| { - self.source_map.diagnostics.push(e); + self.store.diagnostics.push(e); None }); let expr = e.expr().map(|e| self.collect_expr(e)); @@ -1207,7 +1204,7 @@ impl ExprCollector<'_> { let inner = self.collect_expr_opt(e.expr()); // make the paren expr point to the inner expression as well for IDE resolution let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, inner.into()); + self.store.expr_map.insert(src, inner.into()); inner } ast::Expr::ReturnExpr(e) => { @@ -1248,7 +1245,7 @@ impl ExprCollector<'_> { None => self.missing_expr(), }; let src = self.expander.in_file(AstPtr::new(&field)); - self.source_map.field_map_back.insert(expr, src); + self.store.field_map_back.insert(expr, src); Some(RecordLitField { name, expr }) }) .collect(); @@ -1271,12 +1268,10 @@ impl ExprCollector<'_> { ast::Expr::AwaitExpr(e) => { let expr = self.collect_expr_opt(e.expr()); if let Awaitable::No(location) = self.is_lowering_awaitable_block() { - self.source_map.diagnostics.push( - ExpressionStoreDiagnostics::AwaitOutsideOfAsync { - node: self.expander.in_file(AstPtr::new(&e)), - location: location.to_string(), - }, - ); + self.store.diagnostics.push(ExpressionStoreDiagnostics::AwaitOutsideOfAsync { + node: self.expander.in_file(AstPtr::new(&e)), + location: location.to_string(), + }); } self.alloc_expr(Expr::Await { expr }, syntax_ptr) } @@ -1442,7 +1437,7 @@ impl ExprCollector<'_> { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, id.into()); + self.store.expr_map.insert(src, id.into()); id } None => self.alloc_expr(Expr::Missing, syntax_ptr), @@ -1486,7 +1481,7 @@ impl ExprCollector<'_> { let expr = self.collect_expr(expr); // Do not use `alloc_pat_from_expr()` here, it will override the entry in `expr_map`. 
let id = self.store.pats.alloc(Pat::Expr(expr)); - self.source_map.pat_map_back.insert(id, src); + self.store.pat_map_back.insert(id, src); id }) } @@ -1555,7 +1550,7 @@ impl ExprCollector<'_> { let id = self.collect_macro_call(e, macro_ptr, true, |this, expansion| { this.collect_expr_as_pat_opt(expansion) }); - self.source_map.expr_map.insert(src, id.into()); + self.store.expr_map.insert(src, id.into()); id } ast::Expr::RecordExpr(e) => { @@ -1576,7 +1571,7 @@ impl ExprCollector<'_> { let pat = self.collect_expr_as_pat(field_expr); let name = f.field_name()?.as_name(); let src = self.expander.in_file(AstPtr::new(&f).wrap_left()); - self.source_map.pat_field_map_back.insert(pat, src); + self.store.pat_field_map_back.insert(pat, src); Some(RecordFieldPat { name, pat }) }) .collect(); @@ -1622,7 +1617,7 @@ impl ExprCollector<'_> { ); if let Either::Left(pat) = pat { let src = this.expander.in_file(AstPtr::new(&expr).wrap_left()); - this.source_map.pat_map_back.insert(pat, src); + this.store.pat_map_back.insert(pat, src); } pat } @@ -1968,7 +1963,7 @@ impl ExprCollector<'_> { self.module.krate(), resolver, &mut |ptr, call| { - _ = self.source_map.expansions.insert(ptr.map(|(it, _)| it), call); + _ = self.store.expansions.insert(ptr.map(|(it, _)| it), call); }, ) } @@ -1978,19 +1973,17 @@ impl ExprCollector<'_> { Ok(res) => res, Err(UnresolvedMacro { path }) => { if record_diagnostics { - self.source_map.diagnostics.push( - ExpressionStoreDiagnostics::UnresolvedMacroCall { - node: self.expander.in_file(syntax_ptr), - path, - }, - ); + self.store.diagnostics.push(ExpressionStoreDiagnostics::UnresolvedMacroCall { + node: self.expander.in_file(syntax_ptr), + path, + }); } return collector(self, None); } }; if record_diagnostics { if let Some(err) = res.err { - self.source_map + self.store .diagnostics .push(ExpressionStoreDiagnostics::MacroError { node: macro_call_ptr, err }); } @@ -2001,7 +1994,7 @@ impl ExprCollector<'_> { // Keep collecting even with expansion errors so we can provide completions and // other services in incomplete macro expressions. 
if let Some(macro_file) = self.expander.current_file_id().macro_file() { - self.source_map.expansions.insert(macro_call_ptr, macro_file); + self.store.expansions.insert(macro_call_ptr, macro_file); } if record_diagnostics { @@ -2050,7 +2043,7 @@ impl ExprCollector<'_> { // Make the macro-call point to its expanded expression so we can query // semantics on syntax pointers to the macro let src = self.expander.in_file(syntax_ptr); - self.source_map.expr_map.insert(src, tail.into()); + self.store.expr_map.insert(src, tail.into()); }) } @@ -2361,7 +2354,7 @@ impl ExprCollector<'_> { let pat = self.collect_pat(ast_pat, binding_list); let name = f.field_name()?.as_name(); let src = self.expander.in_file(AstPtr::new(&f).wrap_right()); - self.source_map.pat_field_map_back.insert(pat, src); + self.store.pat_field_map_back.insert(pat, src); Some(RecordFieldPat { name, pat }) }) .collect(); @@ -2424,7 +2417,7 @@ impl ExprCollector<'_> { self.collect_macro_call(call, macro_ptr, true, |this, expanded_pat| { this.collect_pat_opt(expanded_pat, binding_list) }); - self.source_map.pat_map.insert(src, pat.into()); + self.store.pat_map.insert(src, pat.into()); return pat; } None => Pat::Missing, @@ -2515,7 +2508,7 @@ impl ExprCollector<'_> { } }); if let Some(pat) = pat.left() { - self.source_map.pat_map.insert(src, pat.into()); + self.store.pat_map.insert(src, pat.into()); } pat } @@ -2537,7 +2530,7 @@ impl ExprCollector<'_> { match enabled { Ok(()) => true, Err(cfg) => { - self.source_map.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode { + self.store.diagnostics.push(ExpressionStoreDiagnostics::InactiveCode { node: self.expander.in_file(SyntaxNodePtr::new(owner.syntax())), cfg, opts: self.cfg_options.clone(), @@ -2548,7 +2541,7 @@ impl ExprCollector<'_> { } fn add_definition_to_binding(&mut self, binding_id: BindingId, pat_id: PatId) { - self.source_map.binding_definitions.entry(binding_id).or_default().push(pat_id); + self.store.binding_definitions.entry(binding_id).or_default().push(pat_id); } // region: labels @@ -2724,7 +2717,7 @@ impl ExprCollector<'_> { |name, range| { let expr_id = self.alloc_expr_desugared(Expr::Path(Path::from(name))); if let Some(range) = range { - self.source_map + self.store .template_map .get_or_insert_with(Default::default) .implicit_capture_to_source @@ -2836,7 +2829,7 @@ impl ExprCollector<'_> { ) }; - self.source_map + self.store .template_map .get_or_insert_with(Default::default) .format_args_to_captures @@ -3386,8 +3379,8 @@ impl ExprCollector<'_> { fn alloc_expr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { let src = self.expander.in_file(ptr); let id = self.store.exprs.alloc(expr); - self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); - self.source_map.expr_map.insert(src, id.into()); + self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); + self.store.expr_map.insert(src, id.into()); id } // FIXME: desugared exprs don't have ptr, that's wrong and should be fixed. 
@@ -3398,9 +3391,9 @@ impl ExprCollector<'_> { fn alloc_expr_desugared_with_ptr(&mut self, expr: Expr, ptr: ExprPtr) -> ExprId { let src = self.expander.in_file(ptr); let id = self.store.exprs.alloc(expr); - self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); + self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_left)); // We intentionally don't fill this as it could overwrite a non-desugared entry - // self.source_map.expr_map.insert(src, id); + // self.store.expr_map.insert(src, id); id } fn missing_expr(&mut self) -> ExprId { @@ -3423,24 +3416,24 @@ impl ExprCollector<'_> { fn alloc_pat_from_expr(&mut self, pat: Pat, ptr: ExprPtr) -> PatId { let src = self.expander.in_file(ptr); let id = self.store.pats.alloc(pat); - self.source_map.expr_map.insert(src, id.into()); - self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_left)); + self.store.expr_map.insert(src, id.into()); + self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_left)); id } fn alloc_expr_from_pat(&mut self, expr: Expr, ptr: PatPtr) -> ExprId { let src = self.expander.in_file(ptr); let id = self.store.exprs.alloc(expr); - self.source_map.pat_map.insert(src, id.into()); - self.source_map.expr_map_back.insert(id, src.map(AstPtr::wrap_right)); + self.store.pat_map.insert(src, id.into()); + self.store.expr_map_back.insert(id, src.map(AstPtr::wrap_right)); id } fn alloc_pat(&mut self, pat: Pat, ptr: PatPtr) -> PatId { let src = self.expander.in_file(ptr); let id = self.store.pats.alloc(pat); - self.source_map.pat_map_back.insert(id, src.map(AstPtr::wrap_right)); - self.source_map.pat_map.insert(src, id.into()); + self.store.pat_map_back.insert(id, src.map(AstPtr::wrap_right)); + self.store.pat_map.insert(src, id.into()); id } // FIXME: desugared pats don't have ptr, that's wrong and should be fixed somehow. @@ -3454,8 +3447,8 @@ impl ExprCollector<'_> { fn alloc_label(&mut self, label: Label, ptr: LabelPtr) -> LabelId { let src = self.expander.in_file(ptr); let id = self.store.labels.alloc(label); - self.source_map.label_map_back.insert(id, src); - self.source_map.label_map.insert(src, id); + self.store.label_map_back.insert(id, src); + self.store.label_map.insert(src, id); id } // FIXME: desugared labels don't have ptr, that's wrong and should be fixed somehow. 
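The hunks above fold the separate `source_map` side tables (expr_map, pat_map, diagnostics, template_map, ...) into the `store` builder itself, so each `alloc_*` helper now records the pointer-to-id and id-to-pointer mappings in one place, and `finish()` hands back the store together with its source map (see the `let (store, source_map) = col.store.finish();` calls below). As a rough illustration of that allocate-and-record-both-directions pattern only — `StoreBuilder`, `ExprId`, and `ExprPtr` are made-up stand-ins for this sketch, not rust-analyzer's actual `ExpressionStoreBuilder` API:

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExprId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ExprPtr(u32); // stand-in for a syntax pointer into the source tree

#[derive(Default)]
struct StoreBuilder {
    exprs: Vec<String>,                      // arena of lowered expressions
    expr_map: HashMap<ExprPtr, ExprId>,      // source pointer -> lowered id
    expr_map_back: HashMap<ExprId, ExprPtr>, // lowered id -> source pointer
}

impl StoreBuilder {
    // Allocate a lowered expression and record both directions of the
    // source mapping, mirroring what `alloc_expr` does after this refactor.
    fn alloc_expr(&mut self, expr: String, ptr: ExprPtr) -> ExprId {
        let id = ExprId(self.exprs.len() as u32);
        self.exprs.push(expr);
        self.expr_map_back.insert(id, ptr);
        self.expr_map.insert(ptr, id);
        id
    }

    // Analogous to `finish()` in the diff: the lowered store and its source
    // map are produced together instead of being kept in separate structs.
    fn finish(self) -> (Vec<String>, (HashMap<ExprPtr, ExprId>, HashMap<ExprId, ExprPtr>)) {
        (self.exprs, (self.expr_map, self.expr_map_back))
    }
}

fn main() {
    let mut builder = StoreBuilder::default();
    let id = builder.alloc_expr("1 + 1".to_owned(), ExprPtr(42));
    assert_eq!(builder.expr_map[&ExprPtr(42)], id);
    assert_eq!(builder.expr_map_back[&id], ExprPtr(42));
    let (_store, _source_map) = builder.finish();
}

The sketch only shows why both maps have to be written at allocation time: lookups go from syntax to lowered ids for IDE queries and from ids back to syntax for diagnostics, which is the invariant the `alloc_expr`/`alloc_pat`/`alloc_label` hunks above preserve while moving the maps into the store.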
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs index d36e5205c73..3bc4afb5c8a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/asm.rs @@ -10,7 +10,7 @@ use tt::TextRange; use crate::{ expr_store::lower::{ExprCollector, FxIndexSet}, - hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass}, + hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmKind, InlineAsmRegOrRegClass}, }; impl ExprCollector<'_> { @@ -269,11 +269,20 @@ impl ExprCollector<'_> { } }) }; + + let kind = if asm.global_asm_token().is_some() { + InlineAsmKind::GlobalAsm + } else if asm.naked_asm_token().is_some() { + InlineAsmKind::NakedAsm + } else { + InlineAsmKind::Asm + }; + let idx = self.alloc_expr( - Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }), + Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options, kind }), syntax_ptr, ); - self.source_map + self.store .template_map .get_or_insert_with(Default::default) .asm_to_captures diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs index 8fd81c7b3df..f507841a91b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower/path/tests.rs @@ -23,7 +23,7 @@ fn lower_path(path: ast::Path) -> (TestDB, ExpressionStore, Option<Path>) { let mut ctx = ExprCollector::new(&db, crate_def_map(&db, krate).root_module_id(), file_id.into()); let lowered_path = ctx.lower_path(path, &mut ExprCollector::impl_trait_allocator); - let store = ctx.store.finish(); + let (store, _) = ctx.store.finish(); (db, store, lowered_path) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs index 87bcd33ed7b..f1b011333d9 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/pretty.rs @@ -902,7 +902,7 @@ impl Printer<'_> { let mut same_name = false; if let Pat::Bind { id, subpat: None } = &self.store[arg.pat] { if let Binding { name, mode: BindingAnnotation::Unannotated, .. } = - &self.store.bindings[*id] + &self.store.assert_expr_only().bindings[*id] { if name.as_str() == field_name { same_name = true; @@ -1063,7 +1063,7 @@ impl Printer<'_> { } fn print_binding(&mut self, id: BindingId) { - let Binding { name, mode, .. } = &self.store.bindings[id]; + let Binding { name, mode, .. 
} = &self.store.assert_expr_only().bindings[id]; let mode = match mode { BindingAnnotation::Unannotated => "", BindingAnnotation::Mutable => "mut ", diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs index 2dd0b9bdb86..1952dae9d71 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/scope.rs @@ -106,7 +106,9 @@ impl ExprScopes { let mut scopes = ExprScopes { scopes: Arena::default(), scope_entries: Arena::default(), - scope_by_expr: ArenaMap::with_capacity(body.exprs.len()), + scope_by_expr: ArenaMap::with_capacity( + body.expr_only.as_ref().map_or(0, |it| it.exprs.len()), + ), }; let mut root = scopes.root_scope(); if let Some(self_param) = body.self_param { @@ -179,7 +181,7 @@ impl ExprScopes { binding: BindingId, hygiene: HygieneId, ) { - let Binding { name, .. } = &store.bindings[binding]; + let Binding { name, .. } = &store[binding]; let entry = self.scope_entries.alloc(ScopeEntry { name: name.clone(), binding, hygiene }); self.scopes[scope].entries = IdxRange::new_inclusive(self.scopes[scope].entries.start()..=entry); @@ -251,7 +253,7 @@ fn compute_expr_scopes( scope: &mut ScopeId, ) { let make_label = - |label: &Option<LabelId>| label.map(|label| (label, store.labels[label].name.clone())); + |label: &Option<LabelId>| label.map(|label| (label, store[label].name.clone())); let compute_expr_scopes = |scopes: &mut ExprScopes, expr: ExprId, scope: &mut ScopeId| { compute_expr_scopes(expr, store, scopes, scope) @@ -534,9 +536,8 @@ fn foo() { }; let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap(); - let pat_src = source_map - .pat_syntax(*source_map.binding_definitions[resolved.binding()].first().unwrap()) - .unwrap(); + let pat_src = + source_map.pat_syntax(source_map.patterns_for_binding(resolved.binding())[0]).unwrap(); let local_name = pat_src.value.syntax_node_ptr().to_node(file.syntax()); assert_eq!(local_name.text_range(), expected_name.syntax().text_range()); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs index 927e280d739..c31428be28f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/tests/body.rs @@ -508,9 +508,9 @@ fn f() { } "#, ); - assert_eq!(body.bindings.len(), 1, "should have a binding for `B`"); + assert_eq!(body.assert_expr_only().bindings.len(), 1, "should have a binding for `B`"); assert_eq!( - body.bindings[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(), + body[BindingId::from_raw(RawIdx::from_u32(0))].name.as_str(), "B", "should have a binding for `B`", ); @@ -566,6 +566,7 @@ const fn f(x: i32) -> i32 { ); let mtch_arms = body + .assert_expr_only() .exprs .iter() .find_map(|(_, expr)| { @@ -578,10 +579,10 @@ const fn f(x: i32) -> i32 { .unwrap(); let MatchArm { pat, .. } = mtch_arms[1]; - match body.pats[pat] { + match body[pat] { Pat::Range { start, end } => { - let hir_start = &body.exprs[start.unwrap()]; - let hir_end = &body.exprs[end.unwrap()]; + let hir_start = &body[start.unwrap()]; + let hir_end = &body[end.unwrap()]; assert!(matches!(hir_start, Expr::Path { .. })); assert!(matches!(hir_end, Expr::Path { .. 
})); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs index 0fc7857d978..e70cd2cd6c5 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir.rs @@ -332,6 +332,17 @@ pub struct OffsetOf { pub struct InlineAsm { pub operands: Box<[(Option<Name>, AsmOperand)]>, pub options: AsmOptions, + pub kind: InlineAsmKind, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum InlineAsmKind { + /// `asm!()`. + Asm, + /// `global_asm!()`. + GlobalAsm, + /// `naked_asm!()`. + NakedAsm, } #[derive(Clone, Copy, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs index f3273667158..5ab61c89394 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/lower.rs @@ -143,6 +143,8 @@ impl<'a> Ctx<'a> { ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(), ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), + // FIXME: Handle `global_asm!()`. + ast::Item::AsmExpr(_) => return None, }; let attrs = RawAttrs::new(self.db, item, self.span_map()); self.add_attrs(mod_item.ast_id(), attrs); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs index 5923b3ea491..91b42bef8f7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/tests.rs @@ -35,10 +35,10 @@ use a::{c, d::{e}}; #![no_std] #![doc = " another file comment"] - // AstId: ExternCrate[5A82, 0] + // AstId: ExternCrate[070B, 0] pub(self) extern crate self as renamed; - // AstId: ExternCrate[7E1C, 0] + // AstId: ExternCrate[1EA5, 0] pub(in super) extern crate bli; // AstId: Use[0000, 0] @@ -78,15 +78,15 @@ extern "C" { // AstId: ExternBlock[0000, 0] extern { #[on_extern_type] - // AstId: TypeAlias[9FDF, 0] + // AstId: TypeAlias[A09C, 0] pub(self) type ExType; #[on_extern_static] - // AstId: Static[43C1, 0] + // AstId: Static[D85E, 0] pub(self) static EX_STATIC = _; #[on_extern_fn] - // AstId: Fn[452D, 0] + // AstId: Fn[B240, 0] pub(self) fn ex_fn; } "#]], @@ -124,20 +124,20 @@ enum E { } "#, expect![[r#" - // AstId: Struct[DFF3, 0] + // AstId: Struct[ED35, 0] pub(self) struct Unit; #[derive(Debug)] - // AstId: Struct[C7A1, 0] + // AstId: Struct[A47C, 0] pub(self) struct Struct { ... } - // AstId: Struct[DAC2, 0] + // AstId: Struct[C8C9, 0] pub(self) struct Tuple(...); - // AstId: Union[2DBB, 0] + // AstId: Union[2797, 0] pub(self) union Ize { ... } - // AstId: Enum[7FF8, 0] + // AstId: Enum[7D23, 0] pub(self) enum E { ... } "#]], ); @@ -162,18 +162,18 @@ trait Tr: SuperTrait + 'lifetime { } "#, expect![[r#" - // AstId: Static[B393, 0] + // AstId: Static[F7C1, 0] pub static ST = _; - // AstId: Const[B309, 0] + // AstId: Const[84BB, 0] pub(self) const _ = _; #[attr] #[inner_attr_in_fn] - // AstId: Fn[75E3, 0] + // AstId: Fn[BE8F, 0] pub(self) fn f; - // AstId: Trait[2998, 0] + // AstId: Trait[9320, 0] pub(self) trait Tr { ... 
} "#]], ); @@ -197,16 +197,16 @@ mod outline; expect![[r##" #[doc = " outer"] #[doc = " inner"] - // AstId: Module[CF93, 0] + // AstId: Module[03AE, 0] pub(self) mod inline { // AstId: Use[0000, 0] pub(self) use super::*; - // AstId: Fn[1B26, 0] + // AstId: Fn[2A78, 0] pub(self) fn fn_in_module; } - // AstId: Module[8994, 0] + // AstId: Module[C08B, 0] pub(self) mod outline; "##]], ); @@ -225,13 +225,13 @@ pub macro m2() {} m!(); "#, expect![[r#" - // AstId: MacroRules[88CE, 0] + // AstId: MacroRules[7E68, 0] macro_rules! m { ... } - // AstId: MacroDef[DC34, 0] + // AstId: MacroDef[1C1E, 0] pub macro m2 { ... } - // AstId: MacroCall[612F, 0], SyntaxContextId: ROOT2024, ExpandTo: Items + // AstId: MacroCall[7E68, 0], SyntaxContextId: ROOT2024, ExpandTo: Items m!(...); "#]], ); @@ -244,7 +244,7 @@ fn pub_self() { pub(self) struct S; "#, expect![[r#" - // AstId: Struct[42E2, 0] + // AstId: Struct[5024, 0] pub(self) struct S; "#]], ) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 293868df613..1c3af47d522 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -28,6 +28,19 @@ fn test_asm_expand() { r#" #[rustc_builtin_macro] macro_rules! asm {() => {}} +#[rustc_builtin_macro] +macro_rules! global_asm {() => {}} +#[rustc_builtin_macro] +macro_rules! naked_asm {() => {}} + +global_asm! { + "" +} + +#[unsafe(naked)] +extern "C" fn foo() { + naked_asm!(""); +} fn main() { let i: u64 = 3; @@ -45,6 +58,17 @@ fn main() { expect![[r##" #[rustc_builtin_macro] macro_rules! asm {() => {}} +#[rustc_builtin_macro] +macro_rules! global_asm {() => {}} +#[rustc_builtin_macro] +macro_rules! naked_asm {() => {}} + +builtin #global_asm ("") + +#[unsafe(naked)] +extern "C" fn foo() { + builtin #naked_asm (""); +} fn main() { let i: u64 = 3; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index c6d901ec93b..c489c1f7c1d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -35,9 +35,9 @@ macro_rules! 
f { }; } -struct#0:MacroRules[8C8E, 0]@58..64#14336# MyTraitMap2#0:MacroCall[D499, 0]@31..42#ROOT2024# {#0:MacroRules[8C8E, 0]@72..73#14336# - map#0:MacroRules[8C8E, 0]@86..89#14336#:#0:MacroRules[8C8E, 0]@89..90#14336# #0:MacroRules[8C8E, 0]@89..90#14336#::#0:MacroRules[8C8E, 0]@91..93#14336#std#0:MacroRules[8C8E, 0]@93..96#14336#::#0:MacroRules[8C8E, 0]@96..98#14336#collections#0:MacroRules[8C8E, 0]@98..109#14336#::#0:MacroRules[8C8E, 0]@109..111#14336#HashSet#0:MacroRules[8C8E, 0]@111..118#14336#<#0:MacroRules[8C8E, 0]@118..119#14336#(#0:MacroRules[8C8E, 0]@119..120#14336#)#0:MacroRules[8C8E, 0]@120..121#14336#>#0:MacroRules[8C8E, 0]@121..122#14336#,#0:MacroRules[8C8E, 0]@122..123#14336# -}#0:MacroRules[8C8E, 0]@132..133#14336# +struct#0:MacroRules[BE8F, 0]@58..64#14336# MyTraitMap2#0:MacroCall[BE8F, 0]@31..42#ROOT2024# {#0:MacroRules[BE8F, 0]@72..73#14336# + map#0:MacroRules[BE8F, 0]@86..89#14336#:#0:MacroRules[BE8F, 0]@89..90#14336# #0:MacroRules[BE8F, 0]@89..90#14336#::#0:MacroRules[BE8F, 0]@91..93#14336#std#0:MacroRules[BE8F, 0]@93..96#14336#::#0:MacroRules[BE8F, 0]@96..98#14336#collections#0:MacroRules[BE8F, 0]@98..109#14336#::#0:MacroRules[BE8F, 0]@109..111#14336#HashSet#0:MacroRules[BE8F, 0]@111..118#14336#<#0:MacroRules[BE8F, 0]@118..119#14336#(#0:MacroRules[BE8F, 0]@119..120#14336#)#0:MacroRules[BE8F, 0]@120..121#14336#>#0:MacroRules[BE8F, 0]@121..122#14336#,#0:MacroRules[BE8F, 0]@122..123#14336# +}#0:MacroRules[BE8F, 0]@132..133#14336# "#]], ); } @@ -75,12 +75,12 @@ macro_rules! f { }; } -fn#0:MacroCall[D499, 0]@30..32#ROOT2024# main#0:MacroCall[D499, 0]@33..37#ROOT2024#(#0:MacroCall[D499, 0]@37..38#ROOT2024#)#0:MacroCall[D499, 0]@38..39#ROOT2024# {#0:MacroCall[D499, 0]@40..41#ROOT2024# - 1#0:MacroCall[D499, 0]@50..51#ROOT2024#;#0:MacroCall[D499, 0]@51..52#ROOT2024# - 1.0#0:MacroCall[D499, 0]@61..64#ROOT2024#;#0:MacroCall[D499, 0]@64..65#ROOT2024# - (#0:MacroCall[D499, 0]@74..75#ROOT2024#(#0:MacroCall[D499, 0]@75..76#ROOT2024#1#0:MacroCall[D499, 0]@76..77#ROOT2024#,#0:MacroCall[D499, 0]@77..78#ROOT2024# )#0:MacroCall[D499, 0]@78..79#ROOT2024#,#0:MacroCall[D499, 0]@79..80#ROOT2024# )#0:MacroCall[D499, 0]@80..81#ROOT2024#.#0:MacroCall[D499, 0]@81..82#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#.#0:MacroCall[D499, 0]@82..85#ROOT2024#0#0:MacroCall[D499, 0]@82..85#ROOT2024#;#0:MacroCall[D499, 0]@85..86#ROOT2024# - let#0:MacroCall[D499, 0]@95..98#ROOT2024# x#0:MacroCall[D499, 0]@99..100#ROOT2024# =#0:MacroCall[D499, 0]@101..102#ROOT2024# 1#0:MacroCall[D499, 0]@103..104#ROOT2024#;#0:MacroCall[D499, 0]@104..105#ROOT2024# -}#0:MacroCall[D499, 0]@110..111#ROOT2024# +fn#0:MacroCall[BE8F, 0]@30..32#ROOT2024# main#0:MacroCall[BE8F, 0]@33..37#ROOT2024#(#0:MacroCall[BE8F, 0]@37..38#ROOT2024#)#0:MacroCall[BE8F, 0]@38..39#ROOT2024# {#0:MacroCall[BE8F, 0]@40..41#ROOT2024# + 1#0:MacroCall[BE8F, 0]@50..51#ROOT2024#;#0:MacroCall[BE8F, 0]@51..52#ROOT2024# + 1.0#0:MacroCall[BE8F, 0]@61..64#ROOT2024#;#0:MacroCall[BE8F, 0]@64..65#ROOT2024# + (#0:MacroCall[BE8F, 0]@74..75#ROOT2024#(#0:MacroCall[BE8F, 0]@75..76#ROOT2024#1#0:MacroCall[BE8F, 0]@76..77#ROOT2024#,#0:MacroCall[BE8F, 0]@77..78#ROOT2024# )#0:MacroCall[BE8F, 0]@78..79#ROOT2024#,#0:MacroCall[BE8F, 0]@79..80#ROOT2024# )#0:MacroCall[BE8F, 0]@80..81#ROOT2024#.#0:MacroCall[BE8F, 0]@81..82#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#.#0:MacroCall[BE8F, 0]@82..85#ROOT2024#0#0:MacroCall[BE8F, 0]@82..85#ROOT2024#;#0:MacroCall[BE8F, 0]@85..86#ROOT2024# + let#0:MacroCall[BE8F, 0]@95..98#ROOT2024# x#0:MacroCall[BE8F, 0]@99..100#ROOT2024# 
=#0:MacroCall[BE8F, 0]@101..102#ROOT2024# 1#0:MacroCall[BE8F, 0]@103..104#ROOT2024#;#0:MacroCall[BE8F, 0]@104..105#ROOT2024# +}#0:MacroCall[BE8F, 0]@110..111#ROOT2024# "#]], @@ -171,7 +171,7 @@ fn main(foo: ()) { } fn main(foo: ()) { - /* error: unresolved macro unresolved */"helloworld!"#0:Fn[B9C7, 0]@236..321#ROOT2024#; + /* error: unresolved macro unresolved */"helloworld!"#0:Fn[15AE, 0]@236..321#ROOT2024#; } } @@ -197,7 +197,7 @@ macro_rules! mk_struct { #[macro_use] mod foo; -struct#1:MacroRules[E572, 0]@59..65#14336# Foo#0:MacroCall[BDD3, 0]@32..35#ROOT2024#(#1:MacroRules[E572, 0]@70..71#14336#u32#0:MacroCall[BDD3, 0]@41..44#ROOT2024#)#1:MacroRules[E572, 0]@74..75#14336#;#1:MacroRules[E572, 0]@75..76#14336# +struct#1:MacroRules[DB0C, 0]@59..65#14336# Foo#0:MacroCall[DB0C, 0]@32..35#ROOT2024#(#1:MacroRules[DB0C, 0]@70..71#14336#u32#0:MacroCall[DB0C, 0]@41..44#ROOT2024#)#1:MacroRules[DB0C, 0]@74..75#14336#;#1:MacroRules[DB0C, 0]@75..76#14336# "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 1c69b37f164..5e95b061399 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -20,13 +20,14 @@ use base_db::RootQueryDb; use expect_test::Expect; use hir_expand::{ AstId, InFile, MacroCallId, MacroCallKind, MacroKind, + builtin::quote::quote, db::ExpandDatabase, proc_macro::{ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind}, span_map::SpanMapRef, }; -use intern::Symbol; +use intern::{Symbol, sym}; use itertools::Itertools; -use span::{Edition, Span}; +use span::{Edition, ROOT_ERASED_FILE_AST_ID, Span, SpanAnchor, SyntaxContext}; use stdx::{format_to, format_to_acc}; use syntax::{ AstNode, AstPtr, @@ -34,7 +35,9 @@ use syntax::{ SyntaxNode, T, ast::{self, edit::IndentLevel}, }; +use syntax_bridge::token_tree_to_syntax_node; use test_fixture::WithFixture; +use tt::{TextRange, TextSize}; use crate::{ AdtId, Lookup, ModuleDefId, @@ -386,3 +389,38 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander { other.type_id() == TypeId::of::<Self>() } } + +#[test] +fn regression_20171() { + // This really isn't the appropriate place to put this test, but it's convenient with access to `quote!`. + let span = Span { + range: TextRange::empty(TextSize::new(0)), + anchor: SpanAnchor { + file_id: span::EditionedFileId::current_edition(span::FileId::from_raw(0)), + ast_id: ROOT_ERASED_FILE_AST_ID, + }, + ctx: SyntaxContext::root(Edition::CURRENT), + }; + let close_brace = tt::Punct { char: '}', spacing: tt::Spacing::Alone, span }; + let dotdot1 = tt::Punct { char: '.', spacing: tt::Spacing::Joint, span }; + let dotdot2 = tt::Punct { char: '.', spacing: tt::Spacing::Alone, span }; + let dollar_crate = sym::dollar_crate; + let tt = quote! 
{ + span => { + if !((matches!( + drive_parser(&mut parser, data, false), + Err(TarParserError::CorruptField { + field: CorruptFieldContext::PaxKvLength, + error: GeneralParseError::ParseInt(ParseIntError { #dotdot1 #dotdot2 }) + }) + #close_brace ))) { + #dollar_crate::panic::panic_2021!(); + }} + }; + token_tree_to_syntax_node( + &tt, + syntax_bridge::TopEntryPoint::MacroStmts, + &mut |_| Edition::CURRENT, + Edition::CURRENT, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs index d5ae6f8d885..6952a9da101 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/proc_macros.rs @@ -181,9 +181,9 @@ fn foo(&self) { self.0. 1; } -fn#0:Fn[4D85, 0]@45..47#ROOT2024# foo#0:Fn[4D85, 0]@48..51#ROOT2024#(#0:Fn[4D85, 0]@51..52#ROOT2024#�:Fn[4D85, 0]@52..53#ROOT2024#self#0:Fn[4D85, 0]@53..57#ROOT2024# )#0:Fn[4D85, 0]@57..58#ROOT2024# {#0:Fn[4D85, 0]@59..60#ROOT2024# - self#0:Fn[4D85, 0]@65..69#ROOT2024# .#0:Fn[4D85, 0]@69..70#ROOT2024#0#0:Fn[4D85, 0]@70..71#ROOT2024#.#0:Fn[4D85, 0]@71..72#ROOT2024#1#0:Fn[4D85, 0]@73..74#ROOT2024#;#0:Fn[4D85, 0]@74..75#ROOT2024# -}#0:Fn[4D85, 0]@76..77#ROOT2024#"#]], +fn#0:Fn[8A31, 0]@45..47#ROOT2024# foo#0:Fn[8A31, 0]@48..51#ROOT2024#(#0:Fn[8A31, 0]@51..52#ROOT2024#�:Fn[8A31, 0]@52..53#ROOT2024#self#0:Fn[8A31, 0]@53..57#ROOT2024# )#0:Fn[8A31, 0]@57..58#ROOT2024# {#0:Fn[8A31, 0]@59..60#ROOT2024# + self#0:Fn[8A31, 0]@65..69#ROOT2024# .#0:Fn[8A31, 0]@69..70#ROOT2024#0#0:Fn[8A31, 0]@70..71#ROOT2024#.#0:Fn[8A31, 0]@71..72#ROOT2024#1#0:Fn[8A31, 0]@73..74#ROOT2024#;#0:Fn[8A31, 0]@74..75#ROOT2024# +}#0:Fn[8A31, 0]@76..77#ROOT2024#"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 0837308d5b6..5030585147d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -373,19 +373,14 @@ pub fn crate_def_map(db: &dyn DefDatabase, crate_id: Crate) -> &DefMap { crate_local_def_map(db, crate_id).def_map(db) } -#[allow(unused_lifetimes)] -mod __ { - use super::*; - #[salsa_macros::tracked] - pub(crate) struct DefMapPair<'db> { - #[tracked] - #[returns(ref)] - pub(crate) def_map: DefMap, - #[returns(ref)] - pub(crate) local: LocalDefMap, - } +#[salsa_macros::tracked] +pub(crate) struct DefMapPair<'db> { + #[tracked] + #[returns(ref)] + pub(crate) def_map: DefMap, + #[returns(ref)] + pub(crate) local: LocalDefMap, } -pub(crate) use __::DefMapPair; #[salsa_macros::tracked(returns(ref))] pub(crate) fn crate_local_def_map(db: &dyn DefDatabase, crate_id: Crate) -> DefMapPair<'_> { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 6f321980af4..316ad5dae69 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -1052,17 +1052,6 @@ impl<'db> Scope<'db> { } } -pub fn resolver_for_expr( - db: &dyn DefDatabase, - owner: DefWithBodyId, - expr_id: ExprId, -) -> Resolver<'_> { - let r = owner.resolver(db); - let scopes = db.expr_scopes(owner); - let scope_id = scopes.scope_for(expr_id); - resolver_for_scope_(db, scopes, scope_id, r, owner) -} - pub fn resolver_for_scope( db: &dyn DefDatabase, owner: DefWithBodyId, diff --git 
a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs index 1958eb6c6a1..92e610b36ac 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/signatures.rs @@ -779,14 +779,10 @@ impl VariantFields { Arc::new(VariantFields { fields, store: Arc::new(store), shape }), Arc::new(source_map), ), - None => ( - Arc::new(VariantFields { - fields: Arena::default(), - store: ExpressionStore::empty_singleton(), - shape, - }), - ExpressionStoreSourceMap::empty_singleton(), - ), + None => { + let (store, source_map) = ExpressionStore::empty_singleton(); + (Arc::new(VariantFields { fields: Arena::default(), store, shape }), source_map) + } } } @@ -878,7 +874,7 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>( idx += 1; } Err(cfg) => { - col.source_map.diagnostics.push( + col.store.diagnostics.push( crate::expr_store::ExpressionStoreDiagnostics::InactiveCode { node: InFile::new(fields.file_id, SyntaxNodePtr::new(field.syntax())), cfg, @@ -891,9 +887,9 @@ fn lower_fields<Field: ast::HasAttrs + ast::HasVisibility>( if !has_fields { return None; } - let store = col.store.finish(); + let (store, source_map) = col.store.finish(); arena.shrink_to_fit(); - Some((arena, store, col.source_map)) + Some((arena, store, source_map)) } #[derive(Debug, PartialEq, Eq)] @@ -980,7 +976,7 @@ impl EnumVariants { if !matches!(variant.shape, FieldsShape::Unit) { let body = db.body(v.into()); // A variant with explicit discriminant - if body.exprs[body.body_expr] != crate::hir::Expr::Missing { + if !matches!(body[body.body_expr], crate::hir::Expr::Missing) { return false; } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs index 800b40a9e7e..60fbc660652 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/fn_macro.rs @@ -125,8 +125,8 @@ register_builtin! { (assert, Assert) => assert_expand, (stringify, Stringify) => stringify_expand, (asm, Asm) => asm_expand, - (global_asm, GlobalAsm) => asm_expand, - (naked_asm, NakedAsm) => asm_expand, + (global_asm, GlobalAsm) => global_asm_expand, + (naked_asm, NakedAsm) => naked_asm_expand, (cfg, Cfg) => cfg_expand, (core_panic, CorePanic) => panic_expand, (std_panic, StdPanic) => panic_expand, @@ -325,6 +325,36 @@ fn asm_expand( ExpandResult::ok(expanded) } +fn global_asm_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + tt: &tt::TopSubtree, + span: Span, +) -> ExpandResult<tt::TopSubtree> { + let mut tt = tt.clone(); + tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis; + let pound = mk_pound(span); + let expanded = quote! {span => + builtin #pound global_asm #tt + }; + ExpandResult::ok(expanded) +} + +fn naked_asm_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + tt: &tt::TopSubtree, + span: Span, +) -> ExpandResult<tt::TopSubtree> { + let mut tt = tt.clone(); + tt.top_subtree_delimiter_mut().kind = tt::DelimiterKind::Parenthesis; + let pound = mk_pound(span); + let expanded = quote! 
{span => + builtin #pound naked_asm #tt + }; + ExpandResult::ok(expanded) +} + fn cfg_expand( db: &dyn ExpandDatabase, id: MacroCallId, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs index d5874f829ba..70c38d4d7c7 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/quote.rs @@ -129,7 +129,7 @@ macro_rules! quote { } } } -pub(super) use quote; +pub use quote; pub trait ToTokenTree { fn to_tokens(self, span: Span, builder: &mut TopSubtreeBuilder); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index 679f61112ad..217d991d110 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -179,10 +179,9 @@ impl Name { self.symbol.as_str() } - #[inline] pub fn display<'a>( &'a self, - db: &dyn salsa::Database, + db: &dyn crate::db::ExpandDatabase, edition: Edition, ) -> impl fmt::Display + 'a { _ = db; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 24530a5f67c..14b9cd203f6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -281,7 +281,7 @@ pub(crate) fn const_eval_discriminant_variant( let def = variant_id.into(); let body = db.body(def); let loc = variant_id.lookup(db); - if body.exprs[body.body_expr] == Expr::Missing { + if matches!(body[body.body_expr], Expr::Missing) { let prev_idx = loc.index.checked_sub(1); let value = match prev_idx { Some(prev_idx) => { @@ -334,7 +334,7 @@ pub(crate) fn eval_to_const( // Type checking clousres need an isolated body (See the above FIXME). Bail out early to prevent panic. return unknown_const(infer[expr].clone()); } - if let Expr::Path(p) = &ctx.body.exprs[expr] { + if let Expr::Path(p) = &ctx.body[expr] { let resolver = &ctx.resolver; if let Some(c) = path_to_const(db, resolver, p, mode, || ctx.generics(), debruijn, infer[expr].clone()) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 5d3be07f3db..b3d46845c44 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -273,8 +273,9 @@ pub trait HirDatabase: DefDatabase + std::fmt::Debug { #[salsa::invoke(crate::variance::variances_of)] #[salsa::cycle( - cycle_fn = crate::variance::variances_of_cycle_fn, - cycle_initial = crate::variance::variances_of_cycle_initial, + // cycle_fn = crate::variance::variances_of_cycle_fn, + // cycle_initial = crate::variance::variances_of_cycle_initial, + cycle_result = crate::variance::variances_of_cycle_initial, )] fn variances_of(&self, def: GenericDefId) -> Option<Arc<[crate::variance::Variance]>>; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 9c0f8f40080..40fe3073cf2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -226,11 +226,10 @@ impl<'a> DeclValidator<'a> { let body = self.db.body(func.into()); let edition = self.edition(func); let mut pats_replacements = body - .pats - .iter() + .pats() .filter_map(|(pat_id, pat)| match pat { Pat::Bind { id, .. 
} => { - let bind_name = &body.bindings[*id].name; + let bind_name = &body[*id].name; let mut suggested_text = to_lower_snake_case(bind_name.as_str())?; if is_raw_identifier(&suggested_text, edition) { suggested_text.insert_str(0, "r#"); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs index 5d56957be6d..5ae6bf6dffd 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/expr.rs @@ -101,7 +101,7 @@ impl ExprValidator { self.check_for_trailing_return(body.body_expr, &body); } - for (id, expr) in body.exprs.iter() { + for (id, expr) in body.exprs() { if let Some((variant, missed_fields, true)) = record_literal_missing_fields(db, &self.infer, id, expr) { @@ -132,7 +132,7 @@ impl ExprValidator { } } - for (id, pat) in body.pats.iter() { + for (id, pat) in body.pats() { if let Some((variant, missed_fields, true)) = record_pattern_missing_fields(db, &self.infer, id, pat) { @@ -389,7 +389,7 @@ impl ExprValidator { if !self.validate_lints { return; } - match &body.exprs[body_expr] { + match &body[body_expr] { Expr::Block { statements, tail, .. } => { let last_stmt = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), @@ -428,7 +428,7 @@ impl ExprValidator { if else_branch.is_none() { return; } - if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] { + if let Expr::Block { statements, tail, .. } = &self.body[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), _ => None, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs index c3ab5aff3db..ca132fbdc45 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/match_check.rs @@ -150,7 +150,7 @@ impl<'a> PatCtxt<'a> { hir_def::hir::Pat::Bind { id, subpat, .. 
} => { let bm = self.infer.binding_modes[pat]; ty = &self.infer[id]; - let name = &self.body.bindings[id].name; + let name = &self.body[id].name; match (bm, ty.kind(Interner)) { (BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty, (BindingMode::Ref(_), _) => { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 20cf3c78115..f6ad3c7aae2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -7,7 +7,7 @@ use either::Either; use hir_def::{ AdtId, DefWithBodyId, FieldId, FunctionId, VariantId, expr_store::{Body, path::Path}, - hir::{AsmOperand, Expr, ExprId, ExprOrPatId, Pat, PatId, Statement, UnaryOp}, + hir::{AsmOperand, Expr, ExprId, ExprOrPatId, InlineAsmKind, Pat, PatId, Statement, UnaryOp}, resolver::{HasResolver, ResolveValueResult, Resolver, ValueNs}, signatures::StaticFlags, type_ref::Rawness, @@ -217,7 +217,7 @@ impl<'db> UnsafeVisitor<'db> { } fn walk_pat(&mut self, current: PatId) { - let pat = &self.body.pats[current]; + let pat = &self.body[current]; if self.inside_union_destructure { match pat { @@ -264,7 +264,7 @@ impl<'db> UnsafeVisitor<'db> { } fn walk_expr(&mut self, current: ExprId) { - let expr = &self.body.exprs[current]; + let expr = &self.body[current]; let inside_assignment = mem::replace(&mut self.inside_assignment, false); match expr { &Expr::Call { callee, .. } => { @@ -284,7 +284,7 @@ impl<'db> UnsafeVisitor<'db> { self.resolver.reset_to_guard(guard); } Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => { - match self.body.exprs[*expr] { + match self.body[*expr] { // Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`, // see https://github.com/rust-lang/rust/pull/125834. Expr::Path(_) => return, @@ -315,7 +315,12 @@ impl<'db> UnsafeVisitor<'db> { self.inside_assignment = old_inside_assignment; } Expr::InlineAsm(asm) => { - self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm); + if asm.kind == InlineAsmKind::Asm { + // `naked_asm!()` requires `unsafe` on the attribute (`#[unsafe(naked)]`), + // and `global_asm!()` doesn't require it at all. + self.on_unsafe_op(current.into(), UnsafetyReason::InlineAsm); + } + asm.operands.iter().for_each(|(_, op)| match op { AsmOperand::In { expr, .. } | AsmOperand::Out { expr: Some(expr), .. } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index 810fe76f231..b3760e3a382 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -795,6 +795,14 @@ fn render_const_scalar( let Some(bytes) = memory_map.get(addr, size_one * count) else { return f.write_str("<ref-data-not-available>"); }; + let expected_len = count * size_one; + if bytes.len() < expected_len { + never!( + "Memory map size is too small. 
Expected {expected_len}, got {}", + bytes.len(), + ); + return f.write_str("<layout-error>"); + } f.write_str("&[")?; let mut first = true; for i in 0..count { @@ -2328,6 +2336,7 @@ impl HirDisplayWithExpressionStore for TypeBound { store[*path].hir_fmt(f, store) } TypeBound::Use(args) => { + write!(f, "use<")?; let edition = f.edition(); let last = args.len().saturating_sub(1); for (idx, arg) in args.iter().enumerate() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index d2eaf212365..3f7eba9dd18 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -273,7 +273,7 @@ impl InferenceContext<'_> { fn pat_bound_mutability(&self, pat: PatId) -> Mutability { let mut r = Mutability::Not; self.body.walk_bindings_in_pat(pat, |b| { - if self.body.bindings[b].mode == BindingAnnotation::RefMut { + if self.body[b].mode == BindingAnnotation::RefMut { r = Mutability::Mut; } }); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 99d3b5c7a84..18288b718f7 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -459,7 +459,7 @@ impl InferenceContext<'_> { expected: &Ty, decl: Option<DeclContext>, ) -> Ty { - let Binding { mode, .. } = self.body.bindings[binding]; + let Binding { mode, .. } = self.body[binding]; let mode = if mode == BindingAnnotation::Unannotated { default_bm } else { @@ -639,7 +639,7 @@ impl InferenceContext<'_> { pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool { let mut res = false; body.walk_pats(pat_id, &mut |pat| { - res |= matches!(body[pat], Pat::Bind { id, .. } if body.bindings[id].mode == BindingAnnotation::Ref); + res |= matches!(body[pat], Pat::Bind { id, .. 
} if body[id].mode == BindingAnnotation::Ref); }); res } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs index 88c33ecccad..82d0ed4f194 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/target.rs @@ -2,7 +2,7 @@ use base_db::Crate; use hir_def::layout::TargetDataLayout; -use rustc_abi::{AlignFromBytesError, TargetDataLayoutErrors, AddressSpace}; +use rustc_abi::{AddressSpace, AlignFromBytesError, TargetDataLayoutErrors}; use triomphe::Arc; use crate::db::HirDatabase; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index cc7d74f4fb0..b3bc226ec93 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -119,8 +119,7 @@ fn eval_expr( .unwrap(); let hir_body = db.body(function_id.into()); let b = hir_body - .bindings - .iter() + .bindings() .find(|x| x.1.name.display_no_db(file_id.edition(&db)).to_smolstr() == "goal") .unwrap() .0; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index 06686b6a164..5c06234fa07 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -1018,8 +1018,12 @@ fn check_generic_args_len( } let lifetime_args_len = def_generics.len_lifetimes_self(); - if provided_lifetimes_count == 0 && lifetime_args_len > 0 && !lowering_assoc_type_generics { - // In generic associated types, we never allow inferring the lifetimes. + if provided_lifetimes_count == 0 + && lifetime_args_len > 0 + && (!lowering_assoc_type_generics || infer_args) + { + // In generic associated types, we never allow inferring the lifetimes, but only in type context, that is + // when `infer_args == false`. In expression/pattern context we always allow inferring them, even for GATs. match lifetime_elision { &LifetimeElisionKind::AnonymousCreateParameter { report_in_path } => { ctx.report_elided_lifetimes_in_path(def, lifetime_args_len as u32, report_in_path); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index bf80ed7967a..482b420279c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -1212,10 +1212,9 @@ impl MirSpan { match *self { MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }), // FIXME: Figure out if this is correct wrt. match ergonomics. 
- MirSpan::BindingId(binding) => matches!( - body.bindings[binding].mode, - BindingAnnotation::Ref | BindingAnnotation::RefMut - ), + MirSpan::BindingId(binding) => { + matches!(body[binding].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut) + } MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 55fada14363..9a97bd6dbe2 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -31,8 +31,8 @@ use syntax::{SyntaxNodePtr, TextRange}; use triomphe::Arc; use crate::{ - CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, Interner, - MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, + AliasTy, CallableDefId, ClosureId, ComplexMemoryMap, Const, ConstData, ConstScalar, FnDefId, + Interner, MemoryMap, Substitution, ToChalk, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, consteval::{ConstEvalError, intern_const_scalar, try_const_usize}, db::{HirDatabase, InternedClosure}, display::{ClosureStyle, DisplayTarget, HirDisplay}, @@ -2195,7 +2195,7 @@ impl Evaluator<'_> { } } } - chalk_ir::TyKind::Array(inner, len) => { + TyKind::Array(inner, len) => { let len = match try_const_usize(this.db, len) { Some(it) => it as usize, None => not_supported!("non evaluatable array len in patching addresses"), @@ -2213,7 +2213,7 @@ impl Evaluator<'_> { )?; } } - chalk_ir::TyKind::Tuple(_, subst) => { + TyKind::Tuple(_, subst) => { let layout = this.layout(ty)?; for (id, ty) in subst.iter(Interner).enumerate() { let ty = ty.assert_ty_ref(Interner); // Tuple only has type argument @@ -2229,7 +2229,7 @@ impl Evaluator<'_> { )?; } } - chalk_ir::TyKind::Adt(adt, subst) => match adt.0 { + TyKind::Adt(adt, subst) => match adt.0 { AdtId::StructId(s) => { let data = s.fields(this.db); let layout = this.layout(ty)?; @@ -2280,6 +2280,10 @@ impl Evaluator<'_> { } AdtId::UnionId(_) => (), }, + TyKind::Alias(AliasTy::Projection(proj)) => { + let ty = this.db.normalize_projection(proj.clone(), this.trait_env.clone()); + rec(this, bytes, &ty, locals, mm, stack_depth_limit - 1)?; + } _ => (), } Ok(()) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 845d6b8eae1..07d81472729 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -321,7 +321,7 @@ impl<'ctx> MirLowerCtx<'ctx> { current: BasicBlockId, ) -> Result<Option<(Operand, BasicBlockId)>> { if !self.has_adjustments(expr_id) { - if let Expr::Literal(l) = &self.body.exprs[expr_id] { + if let Expr::Literal(l) = &self.body[expr_id] { let ty = self.expr_ty_without_adjust(expr_id); return Ok(Some((self.lower_literal_to_operand(ty, l)?, current))); } @@ -411,7 +411,7 @@ impl<'ctx> MirLowerCtx<'ctx> { place: Place, mut current: BasicBlockId, ) -> Result<Option<BasicBlockId>> { - match &self.body.exprs[expr_id] { + match &self.body[expr_id] { Expr::OffsetOf(_) => { not_supported!("builtin#offset_of") } @@ -1374,7 +1374,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn lower_literal_or_const_to_operand(&mut self, ty: Ty, loc: &ExprId) -> Result<Operand> { - match &self.body.exprs[*loc] { + match &self.body[*loc] { Expr::Literal(l) => self.lower_literal_to_operand(ty, l), Expr::Path(c) => { let owner = self.owner; @@ -1850,7 +1850,7 @@ impl<'ctx> 
MirLowerCtx<'ctx> { self.drop_scopes.last_mut().unwrap().locals.push(local_id); if let Pat::Bind { id, subpat: None } = self.body[it] { if matches!( - self.body.bindings[id].mode, + self.body[id].mode, BindingAnnotation::Unannotated | BindingAnnotation::Mutable ) { self.result.binding_locals.insert(id, local_id); @@ -1859,7 +1859,7 @@ impl<'ctx> MirLowerCtx<'ctx> { local_id })); // and then rest of bindings - for (id, _) in self.body.bindings.iter() { + for (id, _) in self.body.bindings() { if !pick_binding(id) { continue; } @@ -2126,7 +2126,7 @@ pub fn mir_body_for_closure_query( .result .binding_locals .into_iter() - .filter(|it| ctx.body.binding_owners.get(&it.0).copied() == Some(expr)) + .filter(|it| ctx.body.binding_owner(it.0) == Some(expr)) .collect(); if let Some(err) = err { return Err(MirLowerError::UnresolvedUpvar(err)); @@ -2191,7 +2191,7 @@ pub fn lower_to_mir( // 0 is return local ctx.result.locals.alloc(Local { ty: ctx.expr_ty_after_adjustments(root_expr) }); let binding_picker = |b: BindingId| { - let owner = ctx.body.binding_owners.get(&b).copied(); + let owner = ctx.body.binding_owner(b); if root_expr == body.body_expr { owner.is_none() } else { owner == Some(root_expr) } }; // 1 to param_len is for params diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index e7bffead931..e074c2d558e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -133,7 +133,7 @@ impl MirLowerCtx<'_> { } this.lower_expr_to_some_place_without_adjust(expr_id, current) }; - match &self.body.exprs[expr_id] { + match &self.body[expr_id] { Expr::Path(p) => { let resolver_guard = self.resolver.update_to_inner_scope(self.db, self.owner, expr_id); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index 61c0685c48a..3325226b1d3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -130,7 +130,7 @@ impl MirLowerCtx<'_> { .collect::<Vec<_>>() .into(), ); - Ok(match &self.body.pats[pattern] { + Ok(match &self.body[pattern] { Pat::Missing => return Err(MirLowerError::IncompletePattern), Pat::Wild => (current, current_else), Pat::Tuple { args, ellipsis } => { @@ -436,7 +436,7 @@ impl MirLowerCtx<'_> { (next, Some(else_target)) } }, - Pat::Lit(l) => match &self.body.exprs[*l] { + Pat::Lit(l) => match &self.body[*l] { Expr::Literal(l) => { if mode == MatchingMode::Check { let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index 78a69cf4509..aad54f88438 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -219,7 +219,7 @@ impl<'a> MirPrettyCtx<'a> { fn local_name(&self, local: LocalId) -> LocalName { match self.local_to_binding.get(local) { - Some(b) => LocalName::Binding(self.hir_body.bindings[*b].name.clone(), local), + Some(b) => LocalName::Binding(self.hir_body[*b].name.clone(), local), None => LocalName::Unknown(local), } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 79754bc8a09..9605a0b4124 100644 --- 
a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -168,7 +168,7 @@ fn check_impl( let inference_result = db.infer(def); for (pat, mut ty) in inference_result.type_of_pat.iter() { - if let Pat::Bind { id, .. } = body.pats[pat] { + if let Pat::Bind { id, .. } = body[pat] { ty = &inference_result.type_of_binding[id]; } let node = match pat_node(&body_source_map, pat, &db) { @@ -316,7 +316,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { } for (pat, mut ty) in inference_result.type_of_pat.iter() { - if let Pat::Bind { id, .. } = body.pats[pat] { + if let Pat::Bind { id, .. } = body[pat] { ty = &inference_result.type_of_binding[id]; } let node = match body_source_map.pat_syntax(pat) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs index 87d9df611bd..08a215fecf6 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/variance.rs @@ -54,14 +54,14 @@ pub(crate) fn variances_of(db: &dyn HirDatabase, def: GenericDefId) -> Option<Ar variances.is_empty().not().then(|| Arc::from_iter(variances)) } -pub(crate) fn variances_of_cycle_fn( - _db: &dyn HirDatabase, - _result: &Option<Arc<[Variance]>>, - _count: u32, - _def: GenericDefId, -) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> { - salsa::CycleRecoveryAction::Iterate -} +// pub(crate) fn variances_of_cycle_fn( +// _db: &dyn HirDatabase, +// _result: &Option<Arc<[Variance]>>, +// _count: u32, +// _def: GenericDefId, +// ) -> salsa::CycleRecoveryAction<Option<Arc<[Variance]>>> { +// salsa::CycleRecoveryAction::Iterate +// } pub(crate) fn variances_of_cycle_initial( db: &dyn HirDatabase, @@ -965,7 +965,7 @@ struct S3<T>(S<T, T>); struct FixedPoint<T, U, V>(&'static FixedPoint<(), T, U>, V); "#, expect![[r#" - FixedPoint[T: covariant, U: covariant, V: covariant] + FixedPoint[T: bivariant, U: bivariant, V: bivariant] "#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 5c6f622e6c3..1b2b76999f7 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -2036,7 +2036,7 @@ impl DefWithBody { ) } let mol = &borrowck_result.mutability_of_locals; - for (binding_id, binding_data) in body.bindings.iter() { + for (binding_id, binding_data) in body.bindings() { if binding_data.problems.is_some() { // We should report specific diagnostics for these problems, not `need-mut` and `unused-mut`. continue; @@ -3222,7 +3222,8 @@ impl Macro { } } - pub fn is_asm_or_global_asm(&self, db: &dyn HirDatabase) -> bool { + /// Is this `asm!()`, or a variant of it (e.g. `global_asm!()`)? 
+ pub fn is_asm_like(&self, db: &dyn HirDatabase) -> bool { match self.id { MacroId::Macro2Id(it) => { matches!(it.lookup(db).expander, MacroExpander::BuiltIn(m) if m.is_asm()) diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 247bb693983..adba59236a4 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -677,8 +677,7 @@ impl<'db> SemanticsImpl<'db> { pub fn rename_conflicts(&self, to_be_renamed: &Local, new_name: &Name) -> Vec<Local> { let body = self.db.body(to_be_renamed.parent); let resolver = to_be_renamed.parent.resolver(self.db); - let starting_expr = - body.binding_owners.get(&to_be_renamed.binding_id).copied().unwrap_or(body.body_expr); + let starting_expr = body.binding_owner(to_be_renamed.binding_id).unwrap_or(body.body_expr); let mut visitor = RenameConflictsVisitor { body: &body, conflicts: FxHashSet::default(), @@ -1776,7 +1775,7 @@ impl<'db> SemanticsImpl<'db> { pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool { let Some(mac) = self.resolve_macro_call(macro_call) else { return false }; - if mac.is_asm_or_global_asm(self.db) { + if mac.is_asm_like(self.db) { return true; } diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index 0662bfddcf8..ecc6e5f3d03 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -242,11 +242,7 @@ impl<'db> SourceAnalyzer<'db> { fn binding_id_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingId> { let pat_id = self.pat_id(&pat.clone().into())?; - if let Pat::Bind { id, .. } = self.store()?.pats[pat_id.as_pat()?] { - Some(id) - } else { - None - } + if let Pat::Bind { id, .. } = self.store()?[pat_id.as_pat()?] { Some(id) } else { None } } pub(crate) fn expr_adjustments(&self, expr: &ast::Expr) -> Option<&[Adjustment]> { @@ -995,7 +991,7 @@ impl<'db> SourceAnalyzer<'db> { let parent_hir_path = path .parent_path() .and_then(|p| collector.lower_path(p, &mut ExprCollector::impl_trait_error_allocator)); - let store = collector.store.finish(); + let (store, _) = collector.store.finish(); // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are // trying to resolve foo::bar. 
@@ -1204,7 +1200,7 @@ impl<'db> SourceAnalyzer<'db> { let mut collector = ExprCollector::new(db, self.resolver.module(), self.file_id); let hir_path = collector.lower_path(path.clone(), &mut ExprCollector::impl_trait_error_allocator)?; - let store = collector.store.finish(); + let (store, _) = collector.store.finish(); Some(resolve_hir_path_( db, &self.resolver, @@ -1439,9 +1435,11 @@ fn scope_for( ) -> Option<ScopeId> { node.ancestors_with_macros(db) .take_while(|it| { - !ast::Item::can_cast(it.kind()) - || ast::MacroCall::can_cast(it.kind()) - || ast::Use::can_cast(it.kind()) + let kind = it.kind(); + !ast::Item::can_cast(kind) + || ast::MacroCall::can_cast(kind) + || ast::Use::can_cast(kind) + || ast::AsmExpr::can_cast(kind) }) .filter_map(|it| it.map(ast::Expr::cast).transpose()) .filter_map(|it| source_map.node_expr(it.as_ref())?.as_expr()) diff --git a/src/tools/rust-analyzer/crates/hir/src/symbols.rs b/src/tools/rust-analyzer/crates/hir/src/symbols.rs index 756650891d4..dca10193e29 100644 --- a/src/tools/rust-analyzer/crates/hir/src/symbols.rs +++ b/src/tools/rust-analyzer/crates/hir/src/symbols.rs @@ -125,6 +125,13 @@ impl<'a> SymbolCollector<'a> { } ModuleDefId::AdtId(AdtId::EnumId(id)) => { this.push_decl(id, name, false, None); + let enum_name = this.db.enum_signature(id).name.as_str().to_smolstr(); + this.with_container_name(Some(enum_name), |this| { + let variants = id.enum_variants(this.db); + for (variant_id, variant_name, _) in &variants.variants { + this.push_decl(*variant_id, variant_name, true, None); + } + }); } ModuleDefId::AdtId(AdtId::UnionId(id)) => { this.push_decl(id, name, false, None); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs index efcbcef00e9..9126e869b9a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_match_to_let_else.rs @@ -1,8 +1,8 @@ use ide_db::defs::{Definition, NameRefClass}; use syntax::{ AstNode, SyntaxNode, - ast::{self, HasName, Name}, - ted, + ast::{self, HasName, Name, syntax_factory::SyntaxFactory}, + syntax_editor::SyntaxEditor, }; use crate::{ @@ -121,34 +121,36 @@ fn find_extracted_variable(ctx: &AssistContext<'_>, arm: &ast::MatchArm) -> Opti // Rename `extracted` with `binding` in `pat`. fn rename_variable(pat: &ast::Pat, extracted: &[Name], binding: ast::Pat) -> SyntaxNode { - let syntax = pat.syntax().clone_for_update(); + let syntax = pat.syntax().clone_subtree(); + let mut editor = SyntaxEditor::new(syntax.clone()); + let make = SyntaxFactory::with_mappings(); let extracted = extracted .iter() - .map(|e| syntax.covering_element(e.syntax().text_range())) + .map(|e| e.syntax().text_range() - pat.syntax().text_range().start()) + .map(|r| syntax.covering_element(r)) .collect::<Vec<_>>(); for extracted_syntax in extracted { // If `extracted` variable is a record field, we should rename it to `binding`, // otherwise we just need to replace `extracted` with `binding`. 
- if let Some(record_pat_field) = extracted_syntax.ancestors().find_map(ast::RecordPatField::cast) { if let Some(name_ref) = record_pat_field.field_name() { - ted::replace( + editor.replace( record_pat_field.syntax(), - ast::make::record_pat_field( - ast::make::name_ref(&name_ref.text()), - binding.clone(), + make.record_pat_field( + make.name_ref(&name_ref.text()), + binding.clone_for_update(), ) - .syntax() - .clone_for_update(), + .syntax(), ); } } else { - ted::replace(extracted_syntax, binding.clone().syntax().clone_for_update()); + editor.replace(extracted_syntax, binding.syntax().clone_for_update()); } } - syntax + editor.add_mappings(make.finish_with_mappings()); + editor.finish().new_root().clone() } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs index 32c4ae2e869..8d27574eb2c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -4,7 +4,8 @@ use itertools::Itertools; use syntax::{ SyntaxKind, ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility}, - match_ast, ted, + match_ast, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; @@ -97,11 +98,14 @@ fn edit_struct_def( // Note that we don't need to consider macro files in this function because this is // currently not triggered for struct definitions inside macro calls. let tuple_fields = record_fields.fields().filter_map(|f| { - let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update(); - ted::insert_all( - ted::Position::first_child_of(field.syntax()), + let field = ast::make::tuple_field(f.visibility(), f.ty()?); + let mut editor = SyntaxEditor::new(field.syntax().clone()); + editor.insert_all( + Position::first_child_of(field.syntax()), f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(), ); + let field_syntax = editor.finish().new_root().clone(); + let field = ast::TupleField::cast(field_syntax)?; Some(field) }); let tuple_fields = ast::make::tuple_field_list(tuple_fields); @@ -1086,8 +1090,7 @@ pub struct $0Foo { } "#, r#" -pub struct Foo(#[my_custom_attr] -u32); +pub struct Foo(#[my_custom_attr]u32); "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs index 79a78ab3698..47233fb399d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_default_from_new.rs @@ -2,7 +2,7 @@ use ide_db::famous_defs::FamousDefs; use stdx::format_to; use syntax::{ AstNode, - ast::{self, HasGenericParams, HasName, Impl, make}, + ast::{self, HasGenericParams, HasName, HasTypeBounds, Impl, make}, }; use crate::{ @@ -88,20 +88,19 @@ fn generate_trait_impl_text_from_impl( let generic_params = impl_.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().map(|param| { + let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { // remove defaults since they can't 
be specified in impls - match param { + let param = match param { ast::TypeOrConstParam::Type(param) => { - let param = param.clone_for_update(); - param.remove_default(); + let param = make::type_param(param.name()?, param.type_bound_list()); ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { - let param = param.clone_for_update(); - param.remove_default(); + let param = make::const_param(param.name()?, param.ty()?); ast::GenericParam::ConstParam(param) } - } + }; + Some(param) }); make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index c7e5e41aac4..20ee9253d37 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -294,7 +294,7 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI let self_expr = make::ext::expr_self(); let lhs = make::expr_field(self_expr, field_name); let rhs = make::expr_path(make::ext::ident_path(field_name)); - let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs)); + let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into()); let body = make::block_expr([assign_stmt.into()], None); // Make the setter fn diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 2862e6d5afb..14601ca0207 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,14 +1,14 @@ use syntax::{ ast::{self, AstNode, HasName, edit_in_place::Indent, make}, - ted, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists, utils}; -fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) { +fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &ast::Adt) { let indent = nominal.indent_level(); - ted::insert_all_raw( - ted::Position::after(nominal.syntax()), + editor.insert_all( + Position::after(nominal.syntax()), vec![ // Add a blank line after the ADT, and indentation for the impl to match the ADT make::tokens::whitespace(&format!("\n\n{indent}")).into(), @@ -51,14 +51,17 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio // Generate the impl let impl_ = utils::generate_impl(&nominal); + let mut editor = edit.make_editor(nominal.syntax()); // Add a tabstop after the left curly brace if let Some(cap) = ctx.config.snippet_cap { if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { - edit.add_tabstop_after_token(cap, l_curly); + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); } } - insert_impl(impl_, &edit.make_mut(nominal)); + insert_impl(&mut editor, &impl_, &nominal); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } @@ -97,18 +100,22 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> // Generate the impl let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder()); + let mut editor = edit.make_editor(nominal.syntax()); // Make the trait type a placeholder snippet if let Some(cap) = ctx.config.snippet_cap { if let Some(trait_) = impl_.trait_() 
{ - edit.add_placeholder_snippet(cap, trait_); + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(trait_.syntax(), placeholder); } if let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) { - edit.add_tabstop_after_token(cap, l_curly); + let tabstop = edit.make_tabstop_after(cap); + editor.add_annotation(l_curly, tabstop); } } - insert_impl(impl_, &edit.make_mut(nominal)); + insert_impl(&mut editor, &impl_, &nominal); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index bab2ccf3f33..4ddab2cfad0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -1,6 +1,6 @@ -use ide_db::famous_defs::FamousDefs; +use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use syntax::{ - AstNode, + AstNode, T, ast::{self, edit_in_place::Indent, make}, ted, }; @@ -32,7 +32,7 @@ use crate::{AssistContext, AssistId, Assists}; // // $0impl<T> core::ops::IndexMut<Axis> for [T; 3] { // fn index_mut(&mut self, index: Axis) -> &mut Self::Output { -// &self[index as usize] +// &mut self[index as usize] // } // } // @@ -48,36 +48,34 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let impl_def = ctx.find_node_at_offset::<ast::Impl>()?.clone_for_update(); let indent = impl_def.indent_level(); - let trait_ = impl_def.trait_()?; - if let ast::Type::PathType(trait_path) = trait_ { - let trait_type = ctx.sema.resolve_trait(&trait_path.path()?)?; - let scope = ctx.sema.scope(trait_path.syntax())?; - if trait_type != FamousDefs(&ctx.sema, scope.krate()).core_convert_Index()? { - return None; - } - } + let ast::Type::PathType(path) = impl_def.trait_()? 
else { + return None; + }; + let trait_name = path.path()?.segment()?.name_ref()?; + + let scope = ctx.sema.scope(impl_def.trait_()?.syntax())?; + let famous = FamousDefs(&ctx.sema, scope.krate()); + + let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?; + let trait_new = get_trait_mut(&trait_, famous)?; // Index -> IndexMut - let index_trait = impl_def - .syntax() - .descendants() - .filter_map(ast::NameRef::cast) - .find(|it| it.text() == "Index")?; - ted::replace( - index_trait.syntax(), - make::path_segment(make::name_ref("IndexMut")).clone_for_update().syntax(), - ); + ted::replace(trait_name.syntax(), make::name_ref(trait_new).clone_for_update().syntax()); // index -> index_mut - let trait_method_name = impl_def + let (trait_method_name, new_trait_method_name) = impl_def .syntax() .descendants() .filter_map(ast::Name::cast) - .find(|it| it.text() == "index")?; - ted::replace(trait_method_name.syntax(), make::name("index_mut").clone_for_update().syntax()); + .find_map(process_method_name)?; + ted::replace( + trait_method_name.syntax(), + make::name(new_trait_method_name).clone_for_update().syntax(), + ); - let type_alias = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast)?; - ted::remove(type_alias.syntax()); + if let Some(type_alias) = impl_def.syntax().descendants().find_map(ast::TypeAlias::cast) { + ted::remove(type_alias.syntax()); + } // &self -> &mut self let mut_self_param = make::mut_self_param(); @@ -87,15 +85,14 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> // &Self::Output -> &mut Self::Output let ret_type = impl_def.syntax().descendants().find_map(ast::RetType::cast)?; - ted::replace( - ret_type.syntax(), - make::ret_type(make::ty("&mut Self::Output")).clone_for_update().syntax(), - ); + let new_ret_type = process_ret_type(&ret_type)?; + ted::replace(ret_type.syntax(), make::ret_type(new_ret_type).clone_for_update().syntax()); let fn_ = impl_def.assoc_item_list()?.assoc_items().find_map(|it| match it { ast::AssocItem::Fn(f) => Some(f), _ => None, })?; + let _ = process_ref_mut(&fn_); let assoc_list = make::assoc_item_list().clone_for_update(); ted::replace(impl_def.assoc_item_list()?.syntax(), assoc_list.syntax()); @@ -104,7 +101,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let target = impl_def.syntax().text_range(); acc.add( AssistId::generate("generate_mut_trait_impl"), - "Generate `IndexMut` impl from this `Index` trait", + format!("Generate `{trait_new}` impl from this `{trait_name}` trait"), target, |edit| { edit.insert(target.start(), format!("$0{impl_def}\n\n{indent}")); @@ -112,6 +109,52 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> ) } +fn process_ref_mut(fn_: &ast::Fn) -> Option<()> { + let expr = fn_.body()?.tail_expr()?; + match &expr { + ast::Expr::RefExpr(ref_expr) if ref_expr.mut_token().is_none() => { + ted::insert_all_raw( + ted::Position::after(ref_expr.amp_token()?), + vec![make::token(T![mut]).into(), make::tokens::whitespace(" ").into()], + ); + } + _ => {} + } + None +} + +fn get_trait_mut(apply_trait: &hir::Trait, famous: FamousDefs<'_, '_>) -> Option<&'static str> { + let trait_ = Some(apply_trait); + if trait_ == famous.core_convert_Index().as_ref() { + return Some("IndexMut"); + } + if trait_ == famous.core_convert_AsRef().as_ref() { + return Some("AsMut"); + } + if trait_ == famous.core_borrow_Borrow().as_ref() { + return Some("BorrowMut"); + } + None +} + +fn process_method_name(name: ast::Name) -> 
Option<(ast::Name, &'static str)> { + let new_name = match &*name.text() { + "index" => "index_mut", + "as_ref" => "as_mut", + "borrow" => "borrow_mut", + _ => return None, + }; + Some((name, new_name)) +} + +fn process_ret_type(ref_ty: &ast::RetType) -> Option<ast::Type> { + let ty = ref_ty.ty()?; + let ast::Type::RefType(ref_type) = ty else { + return None; + }; + Some(make::ty_ref(ref_type.ty()?, true)) +} + #[cfg(test)] mod tests { use crate::tests::{check_assist, check_assist_not_applicable}; @@ -139,7 +182,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 } $0impl<T> core::ops::IndexMut<Axis> for [T; 3] { fn index_mut(&mut self, index: Axis) -> &mut Self::Output { - &self[index as usize] + &mut self[index as usize] } } @@ -188,6 +231,35 @@ impl<T> core::ops::Index<Axis> for [T; 3] where T: Copy { } "#, ); + + check_assist( + generate_mut_trait_impl, + r#" +//- minicore: as_ref +struct Foo(i32); + +impl core::convert::AsRef$0<i32> for Foo { + fn as_ref(&self) -> &i32 { + &self.0 + } +} +"#, + r#" +struct Foo(i32); + +$0impl core::convert::AsMut<i32> for Foo { + fn as_mut(&mut self) -> &mut i32 { + &mut self.0 + } +} + +impl core::convert::AsRef<i32> for Foo { + fn as_ref(&self) -> &i32 { + &self.0 + } +} +"#, + ); } #[test] @@ -287,5 +359,13 @@ pub trait Index<Idx: ?Sized> {} impl<T> Index$0<i32> for [T; 3] {} "#, ); + check_assist_not_applicable( + generate_mut_trait_impl, + r#" +pub trait AsRef<T: ?Sized> {} + +impl AsRef$0<i32> for [T; 3] {} +"#, + ); } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 4837f92f934..51c2f65e025 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -1,5 +1,6 @@ use ide_db::{ - imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, + imports::import_assets::item_for_path_search, syntax_helpers::suggest_name::NameGenerator, + use_trivial_constructor::use_trivial_constructor, }; use syntax::{ ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make}, @@ -35,10 +36,30 @@ use crate::{ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let strukt = ctx.find_node_at_offset::<ast::Struct>()?; - // We want to only apply this to non-union structs with named fields let field_list = match strukt.kind() { - StructKind::Record(named) => named, - _ => return None, + StructKind::Record(named) => { + named.fields().filter_map(|f| Some((f.name()?, f.ty()?))).collect::<Vec<_>>() + } + StructKind::Tuple(tuple) => { + let mut name_generator = NameGenerator::default(); + tuple + .fields() + .enumerate() + .filter_map(|(i, f)| { + let ty = f.ty()?; + let name = match name_generator.for_type( + &ctx.sema.resolve_type(&ty)?, + ctx.db(), + ctx.edition(), + ) { + Some(name) => name, + None => name_generator.suggest_name(&format!("_{i}")), + }; + Some((make::name(name.as_str()), f.ty()?)) + }) + .collect::<Vec<_>>() + } + StructKind::Unit => return None, }; // Return early if we've found an existing new fn @@ -50,11 +71,9 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let target = strukt.syntax().text_range(); acc.add(AssistId::generate("generate_new"), "Generate `new`", target, |builder| { let trivial_constructors = field_list - .fields() - .map(|f| { - let name = f.name()?; - - let ty = ctx.sema.resolve_type(&f.ty()?)?; 
+ .iter() + .map(|(name, ty)| { + let ty = ctx.sema.resolve_type(ty)?; let item_in_ns = hir::ItemInNs::from(hir::ModuleDef::from(ty.as_adt()?)); @@ -73,34 +92,44 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option edition, )?; - Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr))) + Some((make::name_ref(&name.text()), Some(expr))) }) .collect::<Vec<_>>(); - let params = field_list.fields().enumerate().filter_map(|(i, f)| { + let params = field_list.iter().enumerate().filter_map(|(i, (name, ty))| { if trivial_constructors[i].is_none() { - let name = f.name()?; - let ty = f.ty()?; - - Some(make::param(make::ident_pat(false, false, name).into(), ty)) + Some(make::param(make::ident_pat(false, false, name.clone()).into(), ty.clone())) } else { None } }); let params = make::param_list(None, params); - let fields = field_list.fields().enumerate().filter_map(|(i, f)| { - let constructor = trivial_constructors[i].clone(); - if constructor.is_some() { + let fields = field_list.iter().enumerate().map(|(i, (name, _))| { + if let Some(constructor) = trivial_constructors[i].clone() { constructor } else { - Some(make::record_expr_field(make::name_ref(&f.name()?.text()), None)) + (make::name_ref(&name.text()), None) } }); - let fields = make::record_expr_field_list(fields); - let record_expr = make::record_expr(make::ext::ident_path("Self"), fields); - let body = make::block_expr(None, Some(record_expr.into())); + let tail_expr: ast::Expr = match strukt.kind() { + StructKind::Record(_) => { + let fields = fields.map(|(name, expr)| make::record_expr_field(name, expr)); + let fields = make::record_expr_field_list(fields); + make::record_expr(make::ext::ident_path("Self"), fields).into() + } + StructKind::Tuple(_) => { + let args = fields.map(|(arg, expr)| { + let arg = || make::expr_path(make::path_unqualified(make::path_segment(arg))); + expr.unwrap_or_else(arg) + }); + let arg_list = make::arg_list(args); + make::expr_call(make::expr_path(make::ext::ident_path("Self")), arg_list).into() + } + StructKind::Unit => unreachable!(), + }; + let body = make::block_expr(None, tail_expr.into()); let ret_type = make::ret_type(make::ty_path(make::ext::ident_path("Self"))); @@ -120,8 +149,35 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option .clone_for_update(); fn_.indent(1.into()); - // Add a tabstop before the name if let Some(cap) = ctx.config.snippet_cap { + match strukt.kind() { + StructKind::Tuple(_) => { + let struct_args = fn_ + .body() + .unwrap() + .syntax() + .descendants() + .filter(|it| syntax::ast::ArgList::can_cast(it.kind())) + .flat_map(|args| args.children()) + .filter(|it| syntax::ast::PathExpr::can_cast(it.kind())) + .enumerate() + .filter_map(|(i, node)| { + if trivial_constructors[i].is_none() { Some(node) } else { None } + }); + if let Some(fn_params) = fn_.param_list() { + for (struct_arg, fn_param) in struct_args.zip(fn_params.params()) { + if let Some(fn_pat) = fn_param.pat() { + let fn_pat = fn_pat.syntax().clone(); + builder + .add_placeholder_snippet_group(cap, vec![struct_arg, fn_pat]); + } + } + } + } + _ => {} + } + + // Add a tabstop before the name if let Some(name) = fn_.name() { builder.add_tabstop_before(cap, name); } @@ -157,7 +213,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option } #[cfg(test)] -mod tests { +mod record_tests { use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; use super::*; @@ -695,3 +751,308 @@ impl<T> 
Source<T> { ); } } + +#[cfg(test)] +mod tuple_tests { + use crate::tests::{check_assist, check_assist_not_applicable, check_assist_target}; + + use super::*; + + #[test] + fn test_generate_new_with_zst_fields() { + check_assist( + generate_new, + r#" +struct Empty; + +struct Foo(Empty$0); +"#, + r#" +struct Empty; + +struct Foo(Empty); + +impl Foo { + fn $0new() -> Self { + Self(Empty) + } +} +"#, + ); + check_assist( + generate_new, + r#" +struct Empty; + +struct Foo(String, Empty$0); +"#, + r#" +struct Empty; + +struct Foo(String, Empty); + +impl Foo { + fn $0new(${1:_0}: String) -> Self { + Self(${1:_0}, Empty) + } +} +"#, + ); + check_assist( + generate_new, + r#" +enum Empty { Bar } + +struct Foo(Empty$0); +"#, + r#" +enum Empty { Bar } + +struct Foo(Empty); + +impl Foo { + fn $0new() -> Self { + Self(Empty::Bar) + } +} +"#, + ); + + // make sure the assist only works on unit variants + check_assist( + generate_new, + r#" +struct Empty {} + +struct Foo(Empty$0); +"#, + r#" +struct Empty {} + +struct Foo(Empty); + +impl Foo { + fn $0new(${1:empty}: Empty) -> Self { + Self(${1:empty}) + } +} +"#, + ); + check_assist( + generate_new, + r#" +enum Empty { Bar {} } + +struct Foo(Empty$0); +"#, + r#" +enum Empty { Bar {} } + +struct Foo(Empty); + +impl Foo { + fn $0new(${1:empty}: Empty) -> Self { + Self(${1:empty}) + } +} +"#, + ); + } + + #[test] + fn test_generate_new() { + check_assist( + generate_new, + r#" +struct Foo($0); +"#, + r#" +struct Foo(); + +impl Foo { + fn $0new() -> Self { + Self() + } +} +"#, + ); + check_assist( + generate_new, + r#" +struct Foo<T: Clone>($0); +"#, + r#" +struct Foo<T: Clone>(); + +impl<T: Clone> Foo<T> { + fn $0new() -> Self { + Self() + } +} +"#, + ); + check_assist( + generate_new, + r#" +struct Foo<'a, T: Foo<'a>>($0); +"#, + r#" +struct Foo<'a, T: Foo<'a>>(); + +impl<'a, T: Foo<'a>> Foo<'a, T> { + fn $0new() -> Self { + Self() + } +} +"#, + ); + check_assist( + generate_new, + r#" +struct Foo(String$0); +"#, + r#" +struct Foo(String); + +impl Foo { + fn $0new(${1:_0}: String) -> Self { + Self(${1:_0}) + } +} +"#, + ); + check_assist( + generate_new, + r#" +struct Vec<T> { }; +struct Foo(String, Vec<i32>$0); +"#, + r#" +struct Vec<T> { }; +struct Foo(String, Vec<i32>); + +impl Foo { + fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self { + Self(${1:_0}, ${2:items}) + } +} +"#, + ); + } + + #[test] + fn check_that_visibility_modifiers_dont_get_brought_in() { + check_assist( + generate_new, + r#" +struct Vec<T> { }; +struct Foo(pub String, pub Vec<i32>$0); +"#, + r#" +struct Vec<T> { }; +struct Foo(pub String, pub Vec<i32>); + +impl Foo { + fn $0new(${1:_0}: String, ${2:items}: Vec<i32>) -> Self { + Self(${1:_0}, ${2:items}) + } +} +"#, + ); + } + + #[test] + fn generate_new_not_applicable_if_fn_exists() { + check_assist_not_applicable( + generate_new, + r#" +struct Foo($0); + +impl Foo { + fn new() -> Self { + Self + } +} +"#, + ); + + check_assist_not_applicable( + generate_new, + r#" +struct Foo($0); + +impl Foo { + fn New() -> Self { + Self + } +} +"#, + ); + } + + #[test] + fn generate_new_target() { + check_assist_target( + generate_new, + r#" +struct SomeThingIrrelevant; +/// Has a lifetime parameter +struct Foo<'a, T: Foo<'a>>($0); +struct EvenMoreIrrelevant; +"#, + "/// Has a lifetime parameter +struct Foo<'a, T: Foo<'a>>();", + ); + } + + #[test] + fn test_unrelated_new() { + check_assist( + generate_new, + r#" +pub struct AstId<N: AstNode> { + file_id: HirFileId, + file_ast_id: FileAstId<N>, +} + +impl<N: AstNode> AstId<N> { + pub fn 
new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> { + AstId { file_id, file_ast_id } + } +} + +pub struct Source<T>(pub HirFileId,$0 pub T); + +impl<T> Source<T> { + pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { + Source(self.file_id, f(self.ast)) + } +} +"#, + r#" +pub struct AstId<N: AstNode> { + file_id: HirFileId, + file_ast_id: FileAstId<N>, +} + +impl<N: AstNode> AstId<N> { + pub fn new(file_id: HirFileId, file_ast_id: FileAstId<N>) -> AstId<N> { + AstId { file_id, file_ast_id } + } +} + +pub struct Source<T>(pub HirFileId, pub T); + +impl<T> Source<T> { + pub fn $0new(${1:_0}: HirFileId, ${2:_1}: T) -> Self { + Self(${1:_0}, ${2:_1}) + } + + pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> { + Source(self.file_id, f(self.ast)) + } +} +"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs new file mode 100644 index 00000000000..4e95ceb2e85 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_single_field_struct_from.rs @@ -0,0 +1,1000 @@ +use ast::make; +use hir::{HasCrate, ModuleDef, Semantics}; +use ide_db::{ + RootDatabase, famous_defs::FamousDefs, helpers::mod_path_to_ast, + imports::import_assets::item_for_path_search, use_trivial_constructor::use_trivial_constructor, +}; +use syntax::{ + TokenText, + ast::{self, AstNode, HasGenericParams, HasName, edit, edit_in_place::Indent}, +}; + +use crate::{ + AssistId, + assist_context::{AssistContext, Assists}, + utils::add_cfg_attrs_to, +}; + +// Assist: generate_single_field_struct_from +// +// Implement From for a single field structure, ignore trivial types. +// +// ``` +// # //- minicore: from, phantom_data +// use core::marker::PhantomData; +// struct $0Foo<T> { +// id: i32, +// _phantom_data: PhantomData<T>, +// } +// ``` +// -> +// ``` +// use core::marker::PhantomData; +// struct Foo<T> { +// id: i32, +// _phantom_data: PhantomData<T>, +// } +// +// impl<T> From<i32> for Foo<T> { +// fn from(id: i32) -> Self { +// Self { id, _phantom_data: PhantomData } +// } +// } +// ``` +pub(crate) fn generate_single_field_struct_from( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + let strukt_name = ctx.find_node_at_offset::<ast::Name>()?; + let adt = ast::Adt::cast(strukt_name.syntax().parent()?)?; + let ast::Adt::Struct(strukt) = adt else { + return None; + }; + + let sema = &ctx.sema; + let (names, types) = get_fields(&strukt)?; + + let module = sema.scope(strukt.syntax())?.module(); + let constructors = make_constructors(ctx, module, &types); + + if constructors.iter().filter(|expr| expr.is_none()).count() != 1 { + return None; + } + let main_field_i = constructors.iter().position(Option::is_none)?; + if from_impl_exists(&strukt, main_field_i, &ctx.sema).is_some() { + return None; + } + + let main_field_name = + names.as_ref().map_or(TokenText::borrowed("value"), |names| names[main_field_i].text()); + let main_field_ty = types[main_field_i].clone(); + + acc.add( + AssistId::generate("generate_single_field_struct_from"), + "Generate single field `From`", + strukt.syntax().text_range(), + |builder| { + let indent = strukt.indent_level(); + let ty_where_clause = strukt.where_clause(); + let type_gen_params = strukt.generic_param_list(); + let type_gen_args = type_gen_params.as_ref().map(|params| params.to_generic_args()); + let trait_gen_args = 
Some(make::generic_arg_list([ast::GenericArg::TypeArg( + make::type_arg(main_field_ty.clone()), + )])); + + let ty = make::ty(&strukt_name.text()); + + let constructor = + make_adt_constructor(names.as_deref(), constructors, &main_field_name); + let body = make::block_expr([], Some(constructor)); + + let fn_ = make::fn_( + None, + make::name("from"), + None, + None, + make::param_list( + None, + [make::param( + make::path_pat(make::path_from_text(&main_field_name)), + main_field_ty, + )], + ), + body, + Some(make::ret_type(make::ty("Self"))), + false, + false, + false, + false, + ) + .clone_for_update(); + + fn_.indent(1.into()); + + let impl_ = make::impl_trait( + false, + None, + trait_gen_args, + type_gen_params, + type_gen_args, + false, + make::ty("From"), + ty.clone(), + None, + ty_where_clause.map(|wc| edit::AstNodeEdit::reset_indent(&wc)), + None, + ) + .clone_for_update(); + + impl_.get_or_create_assoc_item_list().add_item(fn_.into()); + + add_cfg_attrs_to(&strukt, &impl_); + + impl_.reindent_to(indent); + + builder.insert(strukt.syntax().text_range().end(), format!("\n\n{indent}{impl_}")); + }, + ) +} + +fn make_adt_constructor( + names: Option<&[ast::Name]>, + constructors: Vec<Option<ast::Expr>>, + main_field_name: &TokenText<'_>, +) -> ast::Expr { + if let Some(names) = names { + let fields = make::record_expr_field_list(names.iter().zip(constructors).map( + |(name, initializer)| { + make::record_expr_field(make::name_ref(&name.text()), initializer) + }, + )); + make::record_expr(make::path_from_text("Self"), fields).into() + } else { + let arg_list = make::arg_list(constructors.into_iter().map(|expr| { + expr.unwrap_or_else(|| make::expr_path(make::path_from_text(main_field_name))) + })); + make::expr_call(make::expr_path(make::path_from_text("Self")), arg_list).into() + } +} + +fn make_constructors( + ctx: &AssistContext<'_>, + module: hir::Module, + types: &[ast::Type], +) -> Vec<Option<ast::Expr>> { + let (db, sema) = (ctx.db(), &ctx.sema); + types + .iter() + .map(|ty| { + let ty = sema.resolve_type(ty)?; + if ty.is_unit() { + return Some(make::expr_tuple([]).into()); + } + let item_in_ns = ModuleDef::Adt(ty.as_adt()?).into(); + let edition = module.krate().edition(db); + + let ty_path = module.find_path( + db, + item_for_path_search(db, item_in_ns)?, + ctx.config.import_path_config(), + )?; + + use_trivial_constructor(db, mod_path_to_ast(&ty_path, edition), &ty, edition) + }) + .collect() +} + +fn get_fields(strukt: &ast::Struct) -> Option<(Option<Vec<ast::Name>>, Vec<ast::Type>)> { + Some(match strukt.kind() { + ast::StructKind::Unit => return None, + ast::StructKind::Record(fields) => { + let names = fields.fields().map(|field| field.name()).collect::<Option<_>>()?; + let types = fields.fields().map(|field| field.ty()).collect::<Option<_>>()?; + (Some(names), types) + } + ast::StructKind::Tuple(fields) => { + (None, fields.fields().map(|field| field.ty()).collect::<Option<_>>()?) 
+ } + }) +} + +fn from_impl_exists( + strukt: &ast::Struct, + main_field_i: usize, + sema: &Semantics<'_, RootDatabase>, +) -> Option<()> { + let db = sema.db; + let strukt = sema.to_def(strukt)?; + let krate = strukt.krate(db); + let from_trait = FamousDefs(sema, krate).core_convert_From()?; + let ty = strukt.fields(db).get(main_field_i)?.ty(db); + + strukt.ty(db).impls_trait(db, from_trait, &[ty]).then_some(()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::generate_single_field_struct_from; + + #[test] + fn works() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo { + foo: i32, + } + "#, + r#" + struct Foo { + foo: i32, + } + + impl From<i32> for Foo { + fn from(foo: i32) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from, phantom_data + struct $0Foo { + b1: (), + b2: core::marker::PhantomData, + foo: i32, + a1: (), + a2: core::marker::PhantomData, + } + "#, + r#" + struct Foo { + b1: (), + b2: core::marker::PhantomData, + foo: i32, + a1: (), + a2: core::marker::PhantomData, + } + + impl From<i32> for Foo { + fn from(foo: i32) -> Self { + Self { b1: (), b2: core::marker::PhantomData, foo, a1: (), a2: core::marker::PhantomData } + } + } + "#, + ); + } + + #[test] + fn cfgs() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + #[cfg(feature = "foo")] + #[cfg(test)] + struct $0Foo { + foo: i32, + } + "#, + r#" + #[cfg(feature = "foo")] + #[cfg(test)] + struct Foo { + foo: i32, + } + + #[cfg(feature = "foo")] + #[cfg(test)] + impl From<i32> for Foo { + fn from(foo: i32) -> Self { + Self { foo } + } + } + "#, + ); + } + + #[test] + fn indent() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + mod foo { + struct $0Foo { + foo: i32, + } + } + "#, + r#" + mod foo { + struct Foo { + foo: i32, + } + + impl From<i32> for Foo { + fn from(foo: i32) -> Self { + Self { foo } + } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + mod foo { + mod bar { + struct $0Foo { + foo: i32, + } + } + } + "#, + r#" + mod foo { + mod bar { + struct Foo { + foo: i32, + } + + impl From<i32> for Foo { + fn from(foo: i32) -> Self { + Self { foo } + } + } + } + } + "#, + ); + } + + #[test] + fn where_clause_indent() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + mod foo { + mod bar { + trait Trait {} + struct $0Foo<T> + where + T: Trait, + { + foo: T, + } + } + } + "#, + r#" + mod foo { + mod bar { + trait Trait {} + struct Foo<T> + where + T: Trait, + { + foo: T, + } + + impl<T> From<T> for Foo<T> + where + T: Trait, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + mod foo { + mod bar { + trait Trait<const B: bool> {} + struct $0Foo<T> + where + T: Trait<{ + true + }> + { + foo: T, + } + } + } + "#, + r#" + mod foo { + mod bar { + trait Trait<const B: bool> {} + struct Foo<T> + where + T: Trait<{ + true + }> + { + foo: T, + } + + impl<T> From<T> for Foo<T> + where + T: Trait<{ + true + }> + { + fn from(foo: T) -> Self { + Self { foo } + } + } + } + } + "#, + ); + } + + #[test] + fn generics() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T> { + foo: T, + } + "#, + r#" + struct Foo<T> { + foo: T, + } + + impl<T> From<T> for Foo<T> { 
+ fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> { + foo: T, + } + "#, + r#" + struct Foo<T: Send> { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> where T: Sync,{ + foo: T, + } + "#, + r#" + struct Foo<T: Send> where T: Sync,{ + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> where T: Sync { + foo: T, + } + "#, + r#" + struct Foo<T: Send> where T: Sync { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> where T: Sync, Self: Send { + foo: T, + } + "#, + r#" + struct Foo<T: Send> where T: Sync, Self: Send { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync, Self: Send + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> + where T: Sync, Self: Send + { + foo: T, + } + "#, + r#" + struct Foo<T: Send> + where T: Sync, Self: Send + { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync, Self: Send + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> + where T: Sync, Self: Send, + { + foo: T, + } + "#, + r#" + struct Foo<T: Send> + where T: Sync, Self: Send, + { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync, Self: Send, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> + where T: Sync, + Self: Send, + { + foo: T, + } + "#, + r#" + struct Foo<T: Send> + where T: Sync, + Self: Send, + { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where T: Sync, + Self: Send, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send> + where + T: Sync, + Self: Send, + { + foo: T, + } + "#, + r#" + struct Foo<T: Send> + where + T: Sync, + Self: Send, + { + foo: T, + } + + impl<T: Send> From<T> for Foo<T> + where + T: Sync, + Self: Send, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T: Send + Sync> + where + T: Sync, + Self: Send, + { + foo: T, + } + "#, + r#" + struct Foo<T: Send + Sync> + where + T: Sync, + Self: Send, + { + foo: T, + } + + impl<T: Send + Sync> From<T> for Foo<T> + where + T: Sync, + Self: Send, + { + fn from(foo: T) -> Self { + Self { foo } + } + } + "#, + ); + } + + #[test] + fn tuple() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32); + "#, + r#" + struct Foo(i32); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T>(T); + "#, 
+ r#" + struct Foo<T>(T); + + impl<T> From<T> for Foo<T> { + fn from(value: T) -> Self { + Self(value) + } + } + "#, + ); + } + + #[test] + fn trivial() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from, phantom_data + use core::marker::PhantomData; + struct $0Foo(i32, PhantomData<i32>); + "#, + r#" + use core::marker::PhantomData; + struct Foo(i32, PhantomData<i32>); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value, PhantomData) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from, phantom_data + use core::marker::PhantomData; + struct $0Foo(i32, PhantomData<()>); + "#, + r#" + use core::marker::PhantomData; + struct Foo(i32, PhantomData<()>); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value, PhantomData) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from, phantom_data + use core::marker::PhantomData; + struct $0Foo(PhantomData<()>, i32, PhantomData<()>); + "#, + r#" + use core::marker::PhantomData; + struct Foo(PhantomData<()>, i32, PhantomData<()>); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(PhantomData, value, PhantomData) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from, phantom_data + use core::marker::PhantomData; + struct $0Foo<T>(PhantomData<T>, i32, PhantomData<()>); + "#, + r#" + use core::marker::PhantomData; + struct Foo<T>(PhantomData<T>, i32, PhantomData<()>); + + impl<T> From<i32> for Foo<T> { + fn from(value: i32) -> Self { + Self(PhantomData, value, PhantomData) + } + } + "#, + ); + } + + #[test] + fn unit() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32, ()); + "#, + r#" + struct Foo(i32, ()); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value, ()) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo((), i32, ()); + "#, + r#" + struct Foo((), i32, ()); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self((), value, ()) + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo((), (), i32, ()); + "#, + r#" + struct Foo((), (), i32, ()); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self((), (), value, ()) + } + } + "#, + ); + } + + #[test] + fn invalid_multiple_main_field() { + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32, i32); + "#, + ); + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T>(i32, T); + "#, + ); + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T>(T, T); + "#, + ); + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo<T> { foo: T, bar: i32 } + "#, + ); + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo { foo: i32, bar: i64 } + "#, + ); + } + + #[test] + fn exists_other_from() { + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32); + + impl From<&i32> for Foo { + fn from(value: &i32) -> Self { + todo!() + } + } + "#, + r#" + struct Foo(i32); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value) + } + } + + impl From<&i32> for 
Foo { + fn from(value: &i32) -> Self { + todo!() + } + } + "#, + ); + check_assist( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32); + + type X = i32; + + impl From<&X> for Foo { + fn from(value: &X) -> Self { + todo!() + } + } + "#, + r#" + struct Foo(i32); + + impl From<i32> for Foo { + fn from(value: i32) -> Self { + Self(value) + } + } + + type X = i32; + + impl From<&X> for Foo { + fn from(value: &X) -> Self { + todo!() + } + } + "#, + ); + } + + #[test] + fn exists_from() { + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32); + + impl From<i32> for Foo { + fn from(_: i32) -> Self { + todo!() + } + } + "#, + ); + check_assist_not_applicable( + generate_single_field_struct_from, + r#" + //- minicore: from + struct $0Foo(i32); + + type X = i32; + + impl From<X> for Foo { + fn from(_: X) -> Self { + todo!() + } + } + "#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs index 5f626d29571..1b0c3139353 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs @@ -1,7 +1,8 @@ use syntax::{ AstNode, - ast::{self, make}, - ted, + algo::find_node_at_range, + ast::{self, syntax_factory::SyntaxFactory}, + syntax_editor::SyntaxEditor, }; use crate::{ @@ -66,33 +67,51 @@ pub(crate) fn pull_assignment_up(acc: &mut Assists, ctx: &AssistContext<'_>) -> return None; } } - + let target = tgt.syntax().text_range(); + + let edit_tgt = tgt.syntax().clone_subtree(); + let assignments: Vec<_> = collector + .assignments + .into_iter() + .filter_map(|(stmt, rhs)| { + Some(( + find_node_at_range::<ast::BinExpr>( + &edit_tgt, + stmt.syntax().text_range() - target.start(), + )?, + find_node_at_range::<ast::Expr>( + &edit_tgt, + rhs.syntax().text_range() - target.start(), + )?, + )) + }) + .collect(); + + let mut editor = SyntaxEditor::new(edit_tgt); + for (stmt, rhs) in assignments { + let mut stmt = stmt.syntax().clone(); + if let Some(parent) = stmt.parent() { + if ast::ExprStmt::cast(parent.clone()).is_some() { + stmt = parent.clone(); + } + } + editor.replace(stmt, rhs.syntax()); + } + let new_tgt_root = editor.finish().new_root().clone(); + let new_tgt = ast::Expr::cast(new_tgt_root)?; acc.add( AssistId::refactor_extract("pull_assignment_up"), "Pull assignment up", - tgt.syntax().text_range(), + target, move |edit| { - let assignments: Vec<_> = collector - .assignments - .into_iter() - .map(|(stmt, rhs)| (edit.make_mut(stmt), rhs.clone_for_update())) - .collect(); - - let tgt = edit.make_mut(tgt); - - for (stmt, rhs) in assignments { - let mut stmt = stmt.syntax().clone(); - if let Some(parent) = stmt.parent() { - if ast::ExprStmt::cast(parent.clone()).is_some() { - stmt = parent.clone(); - } - } - ted::replace(stmt, rhs.syntax()); - } - let assign_expr = make::expr_assignment(collector.common_lhs, tgt.clone()); - let assign_stmt = make::expr_stmt(assign_expr); - - ted::replace(tgt.syntax(), assign_stmt.syntax().clone_for_update()); + let make = SyntaxFactory::with_mappings(); + let mut editor = edit.make_editor(tgt.syntax()); + let assign_expr = make.expr_assignment(collector.common_lhs, new_tgt.clone()); + let assign_stmt = make.expr_stmt(assign_expr.into()); + + editor.replace(tgt.syntax(), assign_stmt.syntax()); + 
editor.add_mappings(make.finish_with_mappings()); + edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs index 52ace03f3cf..9356d02706c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/remove_dbg.rs @@ -1,8 +1,9 @@ use itertools::Itertools; use syntax::{ - Edition, NodeOrToken, SyntaxElement, T, TextRange, TextSize, - ast::{self, AstNode, AstToken, make}, - match_ast, ted, + Edition, NodeOrToken, SyntaxNode, SyntaxToken, T, + ast::{self, AstNode, make}, + match_ast, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{AssistContext, AssistId, Assists}; @@ -40,21 +41,23 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( let replacements = macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>(); - - acc.add( - AssistId::quick_fix("remove_dbg"), - "Remove dbg!()", - replacements.iter().map(|&(range, _)| range).reduce(|acc, range| acc.cover(range))?, - |builder| { - for (range, expr) in replacements { - if let Some(expr) = expr { - builder.replace(range, expr.to_string()); - } else { - builder.delete(range); - } + let target = replacements + .iter() + .flat_map(|(node_or_token, _)| node_or_token.iter()) + .map(|t| t.text_range()) + .reduce(|acc, range| acc.cover(range))?; + acc.add(AssistId::quick_fix("remove_dbg"), "Remove dbg!()", target, |builder| { + let mut editor = builder.make_editor(ctx.source_file().syntax()); + for (range, expr) in replacements { + if let Some(expr) = expr { + editor.insert(Position::before(range[0].clone()), expr.syntax().clone_for_update()); + } + for node_or_token in range { + editor.delete(node_or_token); } - }, - ) + } + builder.add_file_edits(ctx.vfs_file_id(), editor); + }) } /// Returns `None` when either @@ -63,7 +66,9 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<( /// - (`macro_expr` has no parent - is that possible?) /// /// Returns `Some(_, None)` when the macro call should just be removed. -fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Option<ast::Expr>)> { +fn compute_dbg_replacement( + macro_expr: ast::MacroExpr, +) -> Option<(Vec<NodeOrToken<SyntaxNode, SyntaxToken>>, Option<ast::Expr>)> { let macro_call = macro_expr.macro_call()?; let tt = macro_call.token_tree()?; let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?); @@ -88,22 +93,22 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt match_ast! 
{ match parent { ast::StmtList(_) => { - let range = macro_expr.syntax().text_range(); - let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) { - Some(start) => range.cover_offset(start), - None => range, - }; - (range, None) + let mut replace = vec![macro_expr.syntax().clone().into()]; + if let Some(prev_sibling) = macro_expr.syntax().prev_sibling_or_token() + && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE { + replace.push(prev_sibling); + } + (replace, None) }, ast::ExprStmt(it) => { - let range = it.syntax().text_range(); - let range = match whitespace_start(it.syntax().prev_sibling_or_token()) { - Some(start) => range.cover_offset(start), - None => range, - }; - (range, None) + let mut replace = vec![it.syntax().clone().into()]; + if let Some(prev_sibling) = it.syntax().prev_sibling_or_token() + && prev_sibling.kind() == syntax::SyntaxKind::WHITESPACE { + replace.push(prev_sibling); + } + (replace, None) }, - _ => (macro_call.syntax().text_range(), Some(make::ext::expr_unit())), + _ => (vec![macro_call.syntax().clone().into()], Some(make::ext::expr_unit())), } } } @@ -147,13 +152,13 @@ fn compute_dbg_replacement(macro_expr: ast::MacroExpr) -> Option<(TextRange, Opt }; let expr = replace_nested_dbgs(expr.clone()); let expr = if wrap { make::expr_paren(expr).into() } else { expr.clone_subtree() }; - (macro_call.syntax().text_range(), Some(expr)) + (vec![macro_call.syntax().clone().into()], Some(expr)) } // dbg!(expr0, expr1, ...) exprs => { let exprs = exprs.iter().cloned().map(replace_nested_dbgs); let expr = make::expr_tuple(exprs); - (macro_call.syntax().text_range(), Some(expr.into())) + (vec![macro_call.syntax().clone().into()], Some(expr.into())) } }) } @@ -178,8 +183,8 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr { return replaced; } - let expanded = expanded.clone_for_update(); - + let expanded = expanded.clone_subtree(); + let mut editor = SyntaxEditor::new(expanded.syntax().clone()); // We need to collect to avoid mutation during traversal. 
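// The clone_subtree → SyntaxEditor → finish() sequence used here is the same
// migration applied throughout this diff (convert_match_to_let_else,
// generate_impl, pull_assignment_up): instead of mutating the tree in place
// with `ted`, handlers now edit a detached copy and re-cast the new root.
// A condensed sketch of that shape, with `target` and `replacement` standing
// in for whatever nodes a given handler computes:
//
//     let root = expr.syntax().clone_subtree();
//     let mut editor = SyntaxEditor::new(root.clone());
//     let range = target.text_range() - expr.syntax().text_range().start();
//     editor.replace(root.covering_element(range), replacement.clone_for_update());
//     let new_expr = ast::Expr::cast(editor.finish().new_root().clone());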
let macro_exprs: Vec<_> = expanded.syntax().descendants().filter_map(ast::MacroExpr::cast).collect(); @@ -191,17 +196,13 @@ fn replace_nested_dbgs(expanded: ast::Expr) -> ast::Expr { }; if let Some(expr) = expr_opt { - ted::replace(mac.syntax(), expr.syntax().clone_for_update()); + editor.replace(mac.syntax(), expr.syntax().clone_for_update()); } else { - ted::remove(mac.syntax()); + editor.delete(mac.syntax()); } } - - expanded -} - -fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> { - Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start()) + let expanded_syntax = editor.finish().new_root().clone(); + ast::Expr::cast(expanded_syntax).unwrap() } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs index 62914ee7f38..5ef8ba46b9e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_is_method_with_if_let_method.rs @@ -64,13 +64,12 @@ pub(crate) fn replace_is_method_with_if_let_method( let pat = make.tuple_struct_pat(make.ident_path(text), [var_pat.into()]); let let_expr = make.expr_let(pat.into(), receiver); - if let Some(cap) = ctx.config.snippet_cap { - if let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() { - if let Some(first_var) = pat.fields().next() { - let placeholder = edit.make_placeholder_snippet(cap); - editor.add_annotation(first_var.syntax(), placeholder); - } - } + if let Some(cap) = ctx.config.snippet_cap + && let Some(ast::Pat::TupleStructPat(pat)) = let_expr.pat() + && let Some(first_var) = pat.fields().next() + { + let placeholder = edit.make_placeholder_snippet(cap); + editor.add_annotation(first_var.syntax(), placeholder); } editor.replace(call_expr.syntax(), let_expr.syntax()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index c2604432032..cde0d875e0d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -172,6 +172,7 @@ mod handlers { mod generate_is_empty_from_len; mod generate_mut_trait_impl; mod generate_new; + mod generate_single_field_struct_from; mod generate_trait_from_impl; mod inline_call; mod inline_const_as_literal; @@ -305,6 +306,7 @@ mod handlers { generate_mut_trait_impl::generate_mut_trait_impl, generate_new::generate_new, generate_trait_from_impl::generate_trait_from_impl, + generate_single_field_struct_from::generate_single_field_struct_from, inline_call::inline_call, inline_call::inline_into_callers, inline_const_as_literal::inline_const_as_literal, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index 72f7195cbd7..fc1c6928ff3 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1933,7 +1933,7 @@ pub enum Axis { X = 0, Y = 1, Z = 2 } $0impl<T> core::ops::IndexMut<Axis> for [T; 3] { fn index_mut(&mut self, index: Axis) -> &mut Self::Output { - &self[index as usize] + &mut self[index as usize] } } @@ -1995,6 +1995,34 @@ impl Person { } #[test] +fn doctest_generate_single_field_struct_from() { + check_doc_test( + "generate_single_field_struct_from", + r#####" +//- 
minicore: from, phantom_data +use core::marker::PhantomData; +struct $0Foo<T> { + id: i32, + _phantom_data: PhantomData<T>, +} +"#####, + r#####" +use core::marker::PhantomData; +struct Foo<T> { + id: i32, + _phantom_data: PhantomData<T>, +} + +impl<T> From<i32> for Foo<T> { + fn from(id: i32) -> Self { + Self { id, _phantom_data: PhantomData } + } +} +"#####, + ) +} + +#[test] fn doctest_generate_trait_from_impl() { check_doc_test( "generate_trait_from_impl", diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 87a4c2ef758..2c8cb6e4d91 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -594,12 +594,10 @@ fn generate_impl_text_inner( let generic_params = adt.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().map(|param| { - match param { + let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + let param = match param { ast::TypeOrConstParam::Type(param) => { - let param = param.clone_for_update(); // remove defaults since they can't be specified in impls - param.remove_default(); let mut bounds = param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect()); if let Some(trait_) = trait_text { @@ -610,17 +608,16 @@ fn generate_impl_text_inner( } }; // `{ty_param}: {bounds}` - let param = - make::type_param(param.name().unwrap(), make::type_bound_list(bounds)); + let param = make::type_param(param.name()?, make::type_bound_list(bounds)); ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { - let param = param.clone_for_update(); // remove defaults since they can't be specified in impls - param.remove_default(); + let param = make::const_param(param.name()?, param.ty()?); ast::GenericParam::ConstParam(param) } - } + }; + Some(param) }); make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) @@ -695,12 +692,10 @@ fn generate_impl_inner( let generic_params = adt.generic_param_list().map(|generic_params| { let lifetime_params = generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); - let ty_or_const_params = generic_params.type_or_const_params().map(|param| { - match param { + let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + let param = match param { ast::TypeOrConstParam::Type(param) => { - let param = param.clone_for_update(); // remove defaults since they can't be specified in impls - param.remove_default(); let mut bounds = param.type_bound_list().map_or_else(Vec::new, |it| it.bounds().collect()); if let Some(trait_) = &trait_ { @@ -711,17 +706,16 @@ fn generate_impl_inner( } }; // `{ty_param}: {bounds}` - let param = - make::type_param(param.name().unwrap(), make::type_bound_list(bounds)); + let param = make::type_param(param.name()?, make::type_bound_list(bounds)); ast::GenericParam::TypeParam(param) } ast::TypeOrConstParam::Const(param) => { - let param = param.clone_for_update(); // remove defaults since they can't be specified in impls - param.remove_default(); + let param = make::const_param(param.name()?, param.ty()?); ast::GenericParam::ConstParam(param) } - } + }; + Some(param) }); make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) @@ -749,16 +743,23 @@ fn generate_impl_inner( .clone_for_update(); // 
Copy any cfg attrs from the original adt - let cfg_attrs = adt - .attrs() - .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false)); - for attr in cfg_attrs { - impl_.add_attr(attr.clone_for_update()); - } + add_cfg_attrs_to(adt, &impl_); impl_ } +pub(crate) fn add_cfg_attrs_to<T, U>(from: &T, to: &U) +where + T: HasAttrs, + U: AttrsOwnerEdit, +{ + let cfg_attrs = + from.attrs().filter(|attr| attr.as_simple_call().is_some_and(|(name, _arg)| name == "cfg")); + for attr in cfg_attrs { + to.add_attr(attr.clone_for_update()); + } +} + pub(crate) fn add_method_to_adt( builder: &mut SourceChangeBuilder, adt: &ast::Adt, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs index 092219a058a..975c2f02259 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -37,6 +37,7 @@ use ide_db::{ SymbolKind, documentation::HasDocs, path_transform::PathTransform, syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, }; +use syntax::ast::HasGenericParams; use syntax::{ AstNode, SmolStr, SyntaxElement, SyntaxKind, T, TextRange, ToSmolStr, ast::{self, HasGenericArgs, HasTypeBounds, edit_in_place::AttrsOwnerEdit, make}, @@ -390,6 +391,12 @@ fn add_type_alias_impl( } else if let Some(end) = transformed_ty.eq_token().map(|tok| tok.text_range().start()) { end + } else if let Some(end) = transformed_ty + .where_clause() + .and_then(|wc| wc.where_token()) + .map(|tok| tok.text_range().start()) + { + end } else if let Some(end) = transformed_ty.semicolon_token().map(|tok| tok.text_range().start()) { @@ -400,17 +407,29 @@ fn add_type_alias_impl( let len = end - start; let mut decl = transformed_ty.syntax().text().slice(..len).to_string(); - if !decl.ends_with(' ') { - decl.push(' '); - } - decl.push_str("= "); + decl.truncate(decl.trim_end().len()); + decl.push_str(" = "); + + let wc = transformed_ty + .where_clause() + .map(|wc| { + let ws = wc + .where_token() + .and_then(|it| it.prev_token()) + .filter(|token| token.kind() == SyntaxKind::WHITESPACE) + .map(|token| token.to_string()) + .unwrap_or_else(|| " ".into()); + format!("{ws}{wc}") + }) + .unwrap_or_default(); match ctx.config.snippet_cap { Some(cap) => { - let snippet = format!("{decl}$0;"); + let snippet = format!("{decl}$0{wc};"); item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet)); } None => { + decl.push_str(&wc); item.text_edit(TextEdit::replace(replacement_range, decl)); } }; @@ -1440,6 +1459,30 @@ impl<'b> Tr<'b> for () { "#, ); } + #[test] + fn includes_where_clause() { + check_edit( + "type Ty", + r#" +trait Tr { + type Ty where Self: Copy; +} + +impl Tr for () { + $0 +} +"#, + r#" +trait Tr { + type Ty where Self: Copy; +} + +impl Tr for () { + type Ty = $0 where Self: Copy; +} +"#, + ); + } #[test] fn strips_comments() { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs index 179d6693602..ac32649d4ff 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/item_list.rs @@ -458,6 +458,33 @@ type O = $0; r" struct A; trait B { +type O<'a> +where +Self: 'a; +} +impl B for A { +$0 +} +", + r#" +struct A; +trait B { +type O<'a> 
+where +Self: 'a; +} +impl B for A { +type O<'a> = $0 +where +Self: 'a; +} +"#, + ); + check_edit( + "type O", + r" +struct A; +trait B { type O: ?Sized = u32; } impl B for A { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt index de046e70c67..973256c470f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt +++ b/src/tools/rust-analyzer/crates/ide-db/src/test_data/test_symbol_index_collection.txt @@ -11,6 +11,40 @@ }, [ FileSymbol { + name: "A", + def: Variant( + Variant { + id: EnumVariantId( + 7800, + ), + }, + ), + loc: DeclarationLocation { + hir_file_id: FileId( + EditionedFileId( + Id(2000), + ), + ), + ptr: SyntaxNodePtr { + kind: VARIANT, + range: 201..202, + }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 201..202, + }, + ), + }, + container_name: Some( + "Enum", + ), + is_alias: false, + is_assoc: true, + is_import: false, + do_not_complete: Yes, + }, + FileSymbol { name: "Alias", def: TypeAlias( TypeAlias { @@ -43,6 +77,40 @@ do_not_complete: Yes, }, FileSymbol { + name: "B", + def: Variant( + Variant { + id: EnumVariantId( + 7801, + ), + }, + ), + loc: DeclarationLocation { + hir_file_id: FileId( + EditionedFileId( + Id(2000), + ), + ), + ptr: SyntaxNodePtr { + kind: VARIANT, + range: 204..205, + }, + name_ptr: AstPtr( + SyntaxNodePtr { + kind: NAME, + range: 204..205, + }, + ), + }, + container_name: Some( + "Enum", + ), + is_alias: false, + is_assoc: true, + is_import: false, + do_not_complete: Yes, + }, + FileSymbol { name: "CONST", def: Const( Const { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs index 06f35759420..7402133f747 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_generics_len.rs @@ -183,4 +183,28 @@ fn main() { "#, ); } + + #[test] + fn generic_assoc_type_infer_lifetime_in_expr_position() { + check_diagnostics( + r#" +//- minicore: sized +struct Player; + +struct Foo<'c, C> { + _v: &'c C, +} +trait WithSignals: Sized { + type SignalCollection<'c, C>; + fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self>; +} +impl WithSignals for Player { + type SignalCollection<'c, C> = Foo<'c, C>; + fn __signals_from_external(&self) -> Self::SignalCollection<'_, Self> { + Self::SignalCollection { _v: self } + } +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index d8f6e813d80..17caf630182 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -983,4 +983,19 @@ fn test() { "#, ); } + + #[test] + fn naked_asm_is_safe() { + check_diagnostics( + r#" +#[rustc_builtin_macro] +macro_rules! 
naked_asm { () => {} } + +#[unsafe(naked)] +extern "C" fn naked() { + naked_asm!(""); +} + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index f58202a4213..a5d9a10d2e5 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -505,7 +505,7 @@ fn map_links<'e>( Event::End(Tag::Link(link_type, target, _)) => { in_link = false; Event::End(Tag::Link( - end_link_type.unwrap_or(link_type), + end_link_type.take().unwrap_or(link_type), end_link_target.take().unwrap_or(target), CowStr::Borrowed(""), )) @@ -514,7 +514,7 @@ fn map_links<'e>( let (link_type, link_target_s, link_name) = callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap()); end_link_target = Some(CowStr::Boxed(link_target_s.into())); - if !matches!(end_link_type, Some(LinkType::Autolink)) { + if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() { end_link_type = link_type; } Event::Text(CowStr::Boxed(link_name.into())) @@ -523,7 +523,7 @@ fn map_links<'e>( let (link_type, link_target_s, link_name) = callback(&end_link_target.take().unwrap(), &s, range, end_link_type.unwrap()); end_link_target = Some(CowStr::Boxed(link_target_s.into())); - if !matches!(end_link_type, Some(LinkType::Autolink)) { + if !matches!(end_link_type, Some(LinkType::Autolink)) && link_type.is_some() { end_link_type = link_type; } Event::Code(CowStr::Boxed(link_name.into())) diff --git a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs index 9bd8504733a..c081796d078 100755 --- a/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs +++ b/src/tools/rust-analyzer/crates/ide/src/folding_ranges.rs @@ -23,6 +23,7 @@ pub enum FoldKind { WhereClause, ReturnType, MatchArm, + Function, // region: item runs Modules, Consts, @@ -47,6 +48,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { let mut res = vec![]; let mut visited_comments = FxHashSet::default(); let mut visited_nodes = FxHashSet::default(); + let mut merged_fn_bodies = FxHashSet::default(); // regions can be nested, here is a LIFO buffer let mut region_starts: Vec<TextSize> = vec![]; @@ -59,6 +61,32 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec<Fold> { NodeOrToken::Token(token) => token.text().contains('\n'), }; if is_multiline { + // for the func with multiline param list + if matches!(element.kind(), FN) { + if let NodeOrToken::Node(node) = &element { + if let Some(fn_node) = ast::Fn::cast(node.clone()) { + if !fn_node + .param_list() + .map(|param_list| param_list.syntax().text().contains_char('\n')) + .unwrap_or(false) + { + continue; + } + + if let Some(body) = fn_node.body() { + res.push(Fold { + range: TextRange::new( + node.text_range().start(), + node.text_range().end(), + ), + kind: FoldKind::Function, + }); + merged_fn_bodies.insert(body.syntax().text_range()); + continue; + } + } + } + } res.push(Fold { range: element.text_range(), kind }); continue; } @@ -152,6 +180,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> { ARG_LIST | PARAM_LIST | GENERIC_ARG_LIST | GENERIC_PARAM_LIST => Some(FoldKind::ArgList), ARRAY_EXPR => Some(FoldKind::Array), RET_TYPE => Some(FoldKind::ReturnType), + FN => Some(FoldKind::Function), WHERE_CLAUSE => Some(FoldKind::WhereClause), ASSOC_ITEM_LIST | RECORD_FIELD_LIST @@ -291,6 +320,7 @@ mod tests { use super::*; + #[track_caller] fn check(#[rust_analyzer::rust_fixture] 
ra_fixture: &str) { let (ranges, text) = extract_tags(ra_fixture, "fold"); @@ -322,6 +352,7 @@ mod tests { FoldKind::WhereClause => "whereclause", FoldKind::ReturnType => "returntype", FoldKind::MatchArm => "matcharm", + FoldKind::Function => "function", FoldKind::TraitAliases => "traitaliases", FoldKind::ExternCrates => "externcrates", }; @@ -330,6 +361,23 @@ mod tests { } #[test] + fn test_fold_func_with_multiline_param_list() { + check( + r#" +<fold function>fn func<fold arglist>( + a: i32, + b: i32, + c: i32, +)</fold> <fold block>{ + + + +}</fold></fold> +"#, + ); + } + + #[test] fn test_fold_comments() { check( r#" @@ -541,10 +589,10 @@ const _: S = S <fold block>{ fn fold_multiline_params() { check( r#" -fn foo<fold arglist>( +<fold function>fn foo<fold arglist>( x: i32, y: String, -)</fold> {} +)</fold> {}</fold> "#, ) } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index f63499aa0fd..c5480217a91 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -10958,3 +10958,68 @@ fn bar$0() -> Foo { "#]], ); } + +#[test] +fn regression_20190() { + check( + r#" +struct Foo; + +/// [`foo` bar](Foo). +fn has_docs$0() {} + "#, + expect. + "#]], + ); +} + +#[test] +fn regression_20225() { + check( + r#" +//- minicore: coerce_unsized +trait Trait { + type Type<'a, T: ?Sized + 'a>; +} + +enum Borrowed {} + +impl Trait for Borrowed { + type Type<'a, T: ?Sized + 'a> = &'a T; +} + +enum Enum<'a, T: Trait + 'a> { + Variant1(T::Type<'a, [Enum<'a, T>]>), + Variant2, +} + +impl Enum<'_, Borrowed> { + const CONSTANT$0: Self = Self::Variant1(&[Self::Variant2]); +} + "#, + expect![[r#" + *CONSTANT* + + ```rust + ra_test_fixture::Enum + ``` + + ```rust + const CONSTANT: Self = Variant1(&[Variant2]) + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs index bf4688e9d82..d0539abe282 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implicit_drop.rs @@ -92,7 +92,7 @@ pub(super) fn hints( }, MirSpan::Unknown => continue, }; - let binding = &hir.bindings[binding_idx]; + let binding = &hir[binding_idx]; let name = binding.name.display_no_db(display_target.edition).to_smolstr(); if name.starts_with("<ra@") { continue; // Ignore desugared variables diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs index cd01c075832..0da1785234a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/implied_dyn_trait.rs @@ -17,8 +17,12 @@ pub(super) fn hints( let parent = path.syntax().parent()?; let range = match path { Either::Left(path) => { - let paren = - parent.ancestors().take_while(|it| ast::ParenType::can_cast(it.kind())).last(); + let paren = parent + .ancestors() + .take_while(|it| { + ast::ParenType::can_cast(it.kind()) || ast::ForType::can_cast(it.kind()) + }) + .last(); let parent = paren.as_ref().and_then(|it| it.parent()).unwrap_or(parent); if ast::TypeBound::can_cast(parent.kind()) || ast::TypeAnchor::can_cast(parent.kind()) @@ -34,7 +38,7 @@ pub(super) fn hints( return None; } sema.resolve_trait(&path.path()?)?; - paren.map_or_else(|| path.syntax().text_range(), |it| 
it.text_range()) + path.syntax().text_range() } Either::Right(dyn_) => { if dyn_.dyn_token().is_some() { @@ -89,7 +93,7 @@ fn foo(_: &T, _: for<'a> T) {} impl T {} // ^ dyn impl T for (T) {} - // ^^^ dyn + // ^ dyn impl T "#, ); @@ -112,7 +116,7 @@ fn foo( _: &mut (T + T) // ^^^^^ dyn _: *mut (T), - // ^^^ dyn + // ^ dyn ) {} "#, ); @@ -136,4 +140,26 @@ fn foo( "#]], ); } + + #[test] + fn hrtb_bound_does_not_add_dyn() { + check( + r#" +//- minicore: fn +fn test<F>(f: F) where F: for<'a> FnOnce(&'a i32) {} + // ^: Sized + "#, + ); + } + + #[test] + fn with_parentheses() { + check( + r#" +trait T {} +fn foo(v: &(T)) {} + // ^ dyn + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs index 0ac25da3294..2b4151e3b75 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions.rs @@ -4,7 +4,7 @@ use crate::grammar::attributes::ATTRIBUTE_FIRST; use super::*; -pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal}; +pub(super) use atom::{EXPR_RECOVERY_SET, LITERAL_FIRST, literal, parse_asm_expr}; pub(crate) use atom::{block_expr, match_arm_list}; #[derive(PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs index 8ed0fc6729f..76656567e7f 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/expressions/atom.rs @@ -253,8 +253,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> { let m = p.start(); p.bump_remap(T![builtin]); p.bump(T![#]); - if p.at_contextual_kw(T![offset_of]) { - p.bump_remap(T![offset_of]); + if p.eat_contextual_kw(T![offset_of]) { p.expect(T!['(']); type_(p); p.expect(T![,]); @@ -278,8 +277,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> { p.expect(T![')']); } Some(m.complete(p, OFFSET_OF_EXPR)) - } else if p.at_contextual_kw(T![format_args]) { - p.bump_remap(T![format_args]); + } else if p.eat_contextual_kw(T![format_args]) { p.expect(T!['(']); expr(p); if p.eat(T![,]) { @@ -302,7 +300,16 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> { } p.expect(T![')']); Some(m.complete(p, FORMAT_ARGS_EXPR)) - } else if p.at_contextual_kw(T![asm]) { + } else if p.eat_contextual_kw(T![asm]) + || p.eat_contextual_kw(T![global_asm]) + || p.eat_contextual_kw(T![naked_asm]) + { + // test asm_kinds + // fn foo() { + // builtin#asm(""); + // builtin#global_asm(""); + // builtin#naked_asm(""); + // } parse_asm_expr(p, m) } else { m.abandon(p); @@ -321,8 +328,7 @@ fn builtin_expr(p: &mut Parser<'_>) -> Option<CompletedMarker> { // tmp = out(reg) _, // ); // } -fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> { - p.bump_remap(T![asm]); +pub(crate) fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> { p.expect(T!['(']); if expr(p).is_none() { p.err_and_bump("expected asm template"); @@ -411,11 +417,10 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> { dir_spec.abandon(p); op.abandon(p); op_n.abandon(p); - p.err_and_bump("expected asm operand"); - // improves error recovery and handles err_and_bump recovering from `{` which gets - // the parser stuck here + // improves error recovery if p.at(T!['{']) { + p.error("expected asm operand"); // test_err bad_asm_expr // fn 
foo() { // builtin#asm( @@ -423,6 +428,8 @@ fn parse_asm_expr(p: &mut Parser<'_>, m: Marker) -> Option<CompletedMarker> { // ); // } expr(p); + } else { + p.err_and_bump("expected asm operand"); } if p.at(T!['}']) { diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs index b9f4866574a..8e551b0b961 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/items.rs @@ -261,6 +261,19 @@ fn opt_item_without_modifiers(p: &mut Parser<'_>, m: Marker) -> Result<(), Marke T![const] if (la == IDENT || la == T![_] || la == T![mut]) => consts::konst(p, m), T![static] if (la == IDENT || la == T![_] || la == T![mut]) => consts::static_(p, m), + IDENT + if p.at_contextual_kw(T![builtin]) + && p.nth_at(1, T![#]) + && p.nth_at_contextual_kw(2, T![global_asm]) => + { + p.bump_remap(T![builtin]); + p.bump(T![#]); + p.bump_remap(T![global_asm]); + // test global_asm + // builtin#global_asm("") + expressions::parse_asm_expr(p, m); + } + _ => return Err(m), }; Ok(()) diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index bff9acd78fa..8fff1c3db74 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -11,8 +11,8 @@ use std::ops; use rustc_literal_escaper::{ - unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, unescape_str, EscapeError, - Mode, + EscapeError, Mode, unescape_byte, unescape_byte_str, unescape_c_str, unescape_char, + unescape_str, }; use crate::{ diff --git a/src/tools/rust-analyzer/crates/parser/src/parser.rs b/src/tools/rust-analyzer/crates/parser/src/parser.rs index 36a363afe93..ca02d9fdfde 100644 --- a/src/tools/rust-analyzer/crates/parser/src/parser.rs +++ b/src/tools/rust-analyzer/crates/parser/src/parser.rs @@ -29,7 +29,7 @@ pub(crate) struct Parser<'t> { edition: Edition, } -const PARSER_STEP_LIMIT: usize = 15_000_000; +const PARSER_STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 }; impl<'t> Parser<'t> { pub(super) fn new(inp: &'t Input, edition: Edition) -> Parser<'t> { @@ -254,7 +254,10 @@ impl<'t> Parser<'t> { /// Create an error node and consume the next token. pub(crate) fn err_and_bump(&mut self, message: &str) { - self.err_recover(message, TokenSet::EMPTY); + let m = self.start(); + self.error(message); + self.bump_any(); + m.complete(self, ERROR); } /// Create an error node and consume the next token unless it is in the recovery set. 
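The parser.rs hunk above makes the step limit debug-dependent and has `err_and_bump` wrap the offending token in an ERROR node directly. As a rough illustration of the `cfg!`-in-a-const pattern the limit change relies on, here is a standalone sketch with made-up names; it is not the rust-analyzer parser itself:

// Illustrative sketch only: a budget that is deliberately small under
// debug_assertions so stuck loops surface quickly in tests, while release
// builds keep generous headroom. `cfg!` expands to a bool literal, so it is
// usable in a const initializer.
const STEP_LIMIT: usize = if cfg!(debug_assertions) { 150_000 } else { 15_000_000 };

fn main() {
    let mut steps = 0usize;
    loop {
        steps += 1;
        assert!(steps < STEP_LIMIT, "parser seems stuck");
        if steps >= 10 {
            break; // stand-in for "parsing made real progress and finished"
        }
    }
    println!("finished after {steps} steps");
}
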
diff --git a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs index f534546ea07..12a13caa4d9 100644 --- a/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs +++ b/src/tools/rust-analyzer/crates/parser/src/syntax_kind/generated.rs @@ -120,12 +120,14 @@ pub enum SyntaxKind { DYN_KW, FORMAT_ARGS_KW, GEN_KW, + GLOBAL_ASM_KW, INLATEOUT_KW, INOUT_KW, LABEL_KW, LATEOUT_KW, MACRO_RULES_KW, MAY_UNWIND_KW, + NAKED_ASM_KW, NOMEM_KW, NORETURN_KW, NOSTACK_KW, @@ -599,12 +601,14 @@ impl SyntaxKind { DEFAULT_KW => "default", DYN_KW => "dyn", FORMAT_ARGS_KW => "format_args", + GLOBAL_ASM_KW => "global_asm", INLATEOUT_KW => "inlateout", INOUT_KW => "inout", LABEL_KW => "label", LATEOUT_KW => "lateout", MACRO_RULES_KW => "macro_rules", MAY_UNWIND_KW => "may_unwind", + NAKED_ASM_KW => "naked_asm", NOMEM_KW => "nomem", NORETURN_KW => "noreturn", NOSTACK_KW => "nostack", @@ -699,12 +703,14 @@ impl SyntaxKind { DEFAULT_KW => true, DYN_KW if edition < Edition::Edition2018 => true, FORMAT_ARGS_KW => true, + GLOBAL_ASM_KW => true, INLATEOUT_KW => true, INOUT_KW => true, LABEL_KW => true, LATEOUT_KW => true, MACRO_RULES_KW => true, MAY_UNWIND_KW => true, + NAKED_ASM_KW => true, NOMEM_KW => true, NORETURN_KW => true, NOSTACK_KW => true, @@ -787,12 +793,14 @@ impl SyntaxKind { DEFAULT_KW => true, DYN_KW if edition < Edition::Edition2018 => true, FORMAT_ARGS_KW => true, + GLOBAL_ASM_KW => true, INLATEOUT_KW => true, INOUT_KW => true, LABEL_KW => true, LATEOUT_KW => true, MACRO_RULES_KW => true, MAY_UNWIND_KW => true, + NAKED_ASM_KW => true, NOMEM_KW => true, NORETURN_KW => true, NOSTACK_KW => true, @@ -938,12 +946,14 @@ impl SyntaxKind { "default" => DEFAULT_KW, "dyn" if edition < Edition::Edition2018 => DYN_KW, "format_args" => FORMAT_ARGS_KW, + "global_asm" => GLOBAL_ASM_KW, "inlateout" => INLATEOUT_KW, "inout" => INOUT_KW, "label" => LABEL_KW, "lateout" => LATEOUT_KW, "macro_rules" => MACRO_RULES_KW, "may_unwind" => MAY_UNWIND_KW, + "naked_asm" => NAKED_ASM_KW, "nomem" => NOMEM_KW, "noreturn" => NORETURN_KW, "nostack" => NOSTACK_KW, @@ -998,7 +1008,7 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: 
SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; } +macro_rules ! T_ { [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] 
=> { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [abstract] => { $ crate :: SyntaxKind :: ABSTRACT_KW } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [final] => { $ crate :: SyntaxKind :: FINAL_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [override] => { $ crate :: SyntaxKind :: OVERRIDE_KW } ; [priv] => { $ crate :: SyntaxKind :: PRIV_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [typeof] => { $ crate :: SyntaxKind :: TYPEOF_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [unsized] => { $ crate :: SyntaxKind :: UNSIZED_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [virtual] => { $ crate :: SyntaxKind :: VIRTUAL_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: 
SyntaxKind :: YIELD_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [att_syntax] => { $ crate :: SyntaxKind :: ATT_SYNTAX_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [clobber_abi] => { $ crate :: SyntaxKind :: CLOBBER_ABI_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [global_asm] => { $ crate :: SyntaxKind :: GLOBAL_ASM_KW } ; [inlateout] => { $ crate :: SyntaxKind :: INLATEOUT_KW } ; [inout] => { $ crate :: SyntaxKind :: INOUT_KW } ; [label] => { $ crate :: SyntaxKind :: LABEL_KW } ; [lateout] => { $ crate :: SyntaxKind :: LATEOUT_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [may_unwind] => { $ crate :: SyntaxKind :: MAY_UNWIND_KW } ; [naked_asm] => { $ crate :: SyntaxKind :: NAKED_ASM_KW } ; [nomem] => { $ crate :: SyntaxKind :: NOMEM_KW } ; [noreturn] => { $ crate :: SyntaxKind :: NORETURN_KW } ; [nostack] => { $ crate :: SyntaxKind :: NOSTACK_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [options] => { $ crate :: SyntaxKind :: OPTIONS_KW } ; [out] => { $ crate :: SyntaxKind :: OUT_KW } ; [preserves_flags] => { $ crate :: SyntaxKind :: PRESERVES_FLAGS_KW } ; [pure] => { $ crate :: SyntaxKind :: PURE_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [readonly] => { $ crate :: SyntaxKind :: READONLY_KW } ; [safe] => { $ crate :: SyntaxKind :: SAFE_KW } ; [sym] => { $ crate :: SyntaxKind :: SYM_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [gen] => { $ crate :: SyntaxKind :: GEN_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [int_number] => { $ crate :: SyntaxKind :: INT_NUMBER } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [string] => { $ crate :: SyntaxKind :: STRING } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; [frontmatter] => { $ crate :: SyntaxKind :: FRONTMATTER } ; } impl ::core::marker::Copy for SyntaxKind {} impl ::core::clone::Clone for SyntaxKind { #[inline] diff --git a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs index 6ec4192830b..cef7b0ee239 100644 --- a/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs +++ b/src/tools/rust-analyzer/crates/parser/test_data/generated/runner.rs @@ -21,6 +21,8 @@ mod ok { #[test] fn asm_expr() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_expr.rs"); } #[test] + fn asm_kinds() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_kinds.rs"); } + #[test] fn asm_label() { run_and_expect_no_errors("test_data/parser/inline/ok/asm_label.rs"); } #[test] fn assoc_const_eq() { @@ -298,6 +300,8 @@ mod ok { run_and_expect_no_errors("test_data/parser/inline/ok/generic_param_list.rs"); } #[test] + fn global_asm() { run_and_expect_no_errors("test_data/parser/inline/ok/global_asm.rs"); } + #[test] fn half_open_range_pat() { run_and_expect_no_errors("test_data/parser/inline/ok/half_open_range_pat.rs"); } diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rast new file mode 100644 
index 00000000000..c337d89aa50 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rast @@ -0,0 +1,48 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + ASM_EXPR + BUILTIN_KW "builtin" + POUND "#" + ASM_KW "asm" + L_PAREN "(" + LITERAL + STRING "\"\"" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + ASM_EXPR + BUILTIN_KW "builtin" + POUND "#" + GLOBAL_ASM_KW "global_asm" + L_PAREN "(" + LITERAL + STRING "\"\"" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n " + EXPR_STMT + ASM_EXPR + BUILTIN_KW "builtin" + POUND "#" + NAKED_ASM_KW "naked_asm" + L_PAREN "(" + LITERAL + STRING "\"\"" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rs new file mode 100644 index 00000000000..9c03e9de689 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/asm_kinds.rs @@ -0,0 +1,5 @@ +fn foo() { + builtin#asm(""); + builtin#global_asm(""); + builtin#naked_asm(""); +} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rast new file mode 100644 index 00000000000..5337c56be17 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rast @@ -0,0 +1,10 @@ +SOURCE_FILE + ASM_EXPR + BUILTIN_KW "builtin" + POUND "#" + GLOBAL_ASM_KW "global_asm" + L_PAREN "(" + LITERAL + STRING "\"\"" + R_PAREN ")" + WHITESPACE "\n" diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rs new file mode 100644 index 00000000000..967ce1f5fd9 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/global_asm.rs @@ -0,0 +1 @@ +builtin#global_asm("") diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs new file mode 100644 index 00000000000..7966f74df30 --- /dev/null +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_config_file.rs @@ -0,0 +1,34 @@ +//! 
Read `.cargo/config.toml` as a JSON object +use rustc_hash::FxHashMap; +use toolchain::Tool; + +use crate::{ManifestPath, Sysroot, utf8_stdout}; + +pub(crate) type CargoConfigFile = serde_json::Map<String, serde_json::Value>; + +pub(crate) fn read( + manifest: &ManifestPath, + extra_env: &FxHashMap<String, Option<String>>, + sysroot: &Sysroot, +) -> Option<CargoConfigFile> { + let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); + cargo_config + .args(["-Z", "unstable-options", "config", "get", "--format", "json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if manifest.is_rust_manifest() { + cargo_config.arg("-Zscript"); + } + + tracing::debug!("Discovering cargo config by {:?}", cargo_config); + let json: serde_json::Map<String, serde_json::Value> = utf8_stdout(&mut cargo_config) + .inspect(|json| { + tracing::debug!("Discovered cargo config: {:?}", json); + }) + .inspect_err(|err| { + tracing::debug!("Failed to discover cargo config: {:?}", err); + }) + .ok() + .and_then(|stdout| serde_json::from_str(&stdout).ok())?; + + Some(json) +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 4bacc904174..daadcd9d79a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -300,8 +300,6 @@ pub struct CargoMetadataConfig { pub extra_args: Vec<String>, /// Extra env vars to set when invoking the cargo command pub extra_env: FxHashMap<String, Option<String>>, - /// The target dir for this workspace load. - pub target_dir: Utf8PathBuf, /// What kind of metadata are we fetching: workspace, rustc, or sysroot. pub kind: &'static str, /// The toolchain version, if known. @@ -317,188 +315,6 @@ struct PackageMetadata { } impl CargoWorkspace { - /// Fetches the metadata for the given `cargo_toml` manifest. - /// A successful result may contain another metadata error if the initial fetching failed but - /// the `--no-deps` retry succeeded. - /// - /// The sysroot is used to set the `RUSTUP_TOOLCHAIN` env var when invoking cargo - /// to ensure that the rustup proxy uses the correct toolchain. 
- pub fn fetch_metadata( - cargo_toml: &ManifestPath, - current_dir: &AbsPath, - config: &CargoMetadataConfig, - sysroot: &Sysroot, - no_deps: bool, - locked: bool, - progress: &dyn Fn(String), - ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> { - let res = Self::fetch_metadata_( - cargo_toml, - current_dir, - config, - sysroot, - no_deps, - locked, - progress, - ); - if let Ok((_, Some(ref e))) = res { - tracing::warn!( - %cargo_toml, - ?e, - "`cargo metadata` failed, but retry with `--no-deps` succeeded" - ); - } - res - } - - fn fetch_metadata_( - cargo_toml: &ManifestPath, - current_dir: &AbsPath, - config: &CargoMetadataConfig, - sysroot: &Sysroot, - no_deps: bool, - locked: bool, - progress: &dyn Fn(String), - ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> { - let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); - let mut meta = MetadataCommand::new(); - meta.cargo_path(cargo.get_program()); - cargo.get_envs().for_each(|(var, val)| _ = meta.env(var, val.unwrap_or_default())); - meta.manifest_path(cargo_toml.to_path_buf()); - match &config.features { - CargoFeatures::All => { - meta.features(CargoOpt::AllFeatures); - } - CargoFeatures::Selected { features, no_default_features } => { - if *no_default_features { - meta.features(CargoOpt::NoDefaultFeatures); - } - if !features.is_empty() { - meta.features(CargoOpt::SomeFeatures(features.clone())); - } - } - } - meta.current_dir(current_dir); - - let mut other_options = vec![]; - // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually - // the only relevant flags for metadata here are unstable ones, so we pass those along - // but nothing else - let mut extra_args = config.extra_args.iter(); - while let Some(arg) = extra_args.next() { - if arg == "-Z" { - if let Some(arg) = extra_args.next() { - other_options.push("-Z".to_owned()); - other_options.push(arg.to_owned()); - } - } - } - - if !config.targets.is_empty() { - other_options.extend( - config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]), - ); - } - if no_deps { - other_options.push("--no-deps".to_owned()); - } - - let mut using_lockfile_copy = false; - // The manifest is a rust file, so this means its a script manifest - if cargo_toml.is_rust_manifest() { - other_options.push("-Zscript".to_owned()); - } else if config - .toolchain_version - .as_ref() - .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH) - { - let lockfile = <_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock"); - let target_lockfile = config - .target_dir - .join("rust-analyzer") - .join("metadata") - .join(config.kind) - .join("Cargo.lock"); - match std::fs::copy(&lockfile, &target_lockfile) { - Ok(_) => { - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) if e.kind() == std::io::ErrorKind::NotFound => { - // There exists no lockfile yet - using_lockfile_copy = true; - other_options.push("--lockfile-path".to_owned()); - other_options.push(target_lockfile.to_string()); - } - Err(e) => { - tracing::warn!( - "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}", - ); - } - } - } - if using_lockfile_copy { - other_options.push("-Zunstable-options".to_owned()); - meta.env("RUSTC_BOOTSTRAP", "1"); - } - // No need to lock it if we copied the lockfile, we won't modify the original after all/ - // This way cargo cannot error out on us if the 
lockfile requires updating. - if !using_lockfile_copy && locked { - other_options.push("--locked".to_owned()); - } - meta.other_options(other_options); - - // FIXME: Fetching metadata is a slow process, as it might require - // calling crates.io. We should be reporting progress here, but it's - // unclear whether cargo itself supports it. - progress("cargo metadata: started".to_owned()); - - let res = (|| -> anyhow::Result<(_, _)> { - let mut errored = false; - let output = - spawn_with_streaming_output(meta.cargo_command(), &mut |_| (), &mut |line| { - errored = errored || line.starts_with("error") || line.starts_with("warning"); - if errored { - progress("cargo metadata: ?".to_owned()); - return; - } - progress(format!("cargo metadata: {line}")); - })?; - if !output.status.success() { - progress(format!("cargo metadata: failed {}", output.status)); - let error = cargo_metadata::Error::CargoMetadata { - stderr: String::from_utf8(output.stderr)?, - } - .into(); - if !no_deps { - // If we failed to fetch metadata with deps, try again without them. - // This makes r-a still work partially when offline. - if let Ok((metadata, _)) = Self::fetch_metadata_( - cargo_toml, - current_dir, - config, - sysroot, - true, - locked, - progress, - ) { - return Ok((metadata, Some(error))); - } - } - return Err(error); - } - let stdout = from_utf8(&output.stdout)? - .lines() - .find(|line| line.starts_with('{')) - .ok_or(cargo_metadata::Error::NoJson)?; - Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None)) - })() - .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command())); - progress("cargo metadata: finished".to_owned()); - res - } - pub fn new( mut meta: cargo_metadata::Metadata, ws_manifest_path: ManifestPath, @@ -733,3 +549,214 @@ impl CargoWorkspace { self.requires_rustc_private } } + +pub(crate) struct FetchMetadata { + command: cargo_metadata::MetadataCommand, + lockfile_path: Option<Utf8PathBuf>, + kind: &'static str, + no_deps: bool, + no_deps_result: anyhow::Result<cargo_metadata::Metadata>, + other_options: Vec<String>, +} + +impl FetchMetadata { + /// Builds a command to fetch metadata for the given `cargo_toml` manifest. + /// + /// Performs a lightweight pre-fetch using the `--no-deps` option, + /// available via [`FetchMetadata::no_deps_metadata`], to gather basic + /// information such as the `target-dir`. + /// + /// The provided sysroot is used to set the `RUSTUP_TOOLCHAIN` + /// environment variable when invoking Cargo, ensuring that the + /// rustup proxy selects the correct toolchain. 
+ pub(crate) fn new( + cargo_toml: &ManifestPath, + current_dir: &AbsPath, + config: &CargoMetadataConfig, + sysroot: &Sysroot, + no_deps: bool, + ) -> Self { + let cargo = sysroot.tool(Tool::Cargo, current_dir, &config.extra_env); + let mut command = MetadataCommand::new(); + command.cargo_path(cargo.get_program()); + cargo.get_envs().for_each(|(var, val)| _ = command.env(var, val.unwrap_or_default())); + command.manifest_path(cargo_toml.to_path_buf()); + match &config.features { + CargoFeatures::All => { + command.features(CargoOpt::AllFeatures); + } + CargoFeatures::Selected { features, no_default_features } => { + if *no_default_features { + command.features(CargoOpt::NoDefaultFeatures); + } + if !features.is_empty() { + command.features(CargoOpt::SomeFeatures(features.clone())); + } + } + } + command.current_dir(current_dir); + + let mut needs_nightly = false; + let mut other_options = vec![]; + // cargo metadata only supports a subset of flags of what cargo usually accepts, and usually + // the only relevant flags for metadata here are unstable ones, so we pass those along + // but nothing else + let mut extra_args = config.extra_args.iter(); + while let Some(arg) = extra_args.next() { + if arg == "-Z" { + if let Some(arg) = extra_args.next() { + needs_nightly = true; + other_options.push("-Z".to_owned()); + other_options.push(arg.to_owned()); + } + } + } + + let mut lockfile_path = None; + if cargo_toml.is_rust_manifest() { + needs_nightly = true; + other_options.push("-Zscript".to_owned()); + } else if config + .toolchain_version + .as_ref() + .is_some_and(|v| *v >= MINIMUM_TOOLCHAIN_VERSION_SUPPORTING_LOCKFILE_PATH) + { + lockfile_path = Some(<_ as AsRef<Utf8Path>>::as_ref(cargo_toml).with_extension("lock")); + } + + if !config.targets.is_empty() { + other_options.extend( + config.targets.iter().flat_map(|it| ["--filter-platform".to_owned(), it.clone()]), + ); + } + + command.other_options(other_options.clone()); + + if needs_nightly { + command.env("RUSTC_BOOTSTRAP", "1"); + } + + // Pre-fetch basic metadata using `--no-deps`, which: + // - avoids fetching registries like crates.io, + // - skips dependency resolution and does not modify lockfiles, + // - and thus doesn't require progress reporting or copying lockfiles. + // + // Useful as a fast fallback to extract info like `target-dir`. + let cargo_command; + let no_deps_result = if no_deps { + command.no_deps(); + cargo_command = command.cargo_command(); + command.exec() + } else { + let mut no_deps_command = command.clone(); + no_deps_command.no_deps(); + cargo_command = no_deps_command.cargo_command(); + no_deps_command.exec() + } + .with_context(|| format!("Failed to run `{cargo_command:?}`")); + + Self { command, lockfile_path, kind: config.kind, no_deps, no_deps_result, other_options } + } + + pub(crate) fn no_deps_metadata(&self) -> Option<&cargo_metadata::Metadata> { + self.no_deps_result.as_ref().ok() + } + + /// Executes the metadata-fetching command. + /// + /// A successful result may still contain a metadata error if the full fetch failed, + /// but the fallback `--no-deps` pre-fetch succeeded during command construction. 
+ pub(crate) fn exec( + self, + target_dir: &Utf8Path, + locked: bool, + progress: &dyn Fn(String), + ) -> anyhow::Result<(cargo_metadata::Metadata, Option<anyhow::Error>)> { + let Self { mut command, lockfile_path, kind, no_deps, no_deps_result, mut other_options } = + self; + + if no_deps { + return no_deps_result.map(|m| (m, None)); + } + + let mut using_lockfile_copy = false; + // The manifest is a rust file, so this means its a script manifest + if let Some(lockfile) = lockfile_path { + let target_lockfile = + target_dir.join("rust-analyzer").join("metadata").join(kind).join("Cargo.lock"); + match std::fs::copy(&lockfile, &target_lockfile) { + Ok(_) => { + using_lockfile_copy = true; + other_options.push("--lockfile-path".to_owned()); + other_options.push(target_lockfile.to_string()); + } + Err(e) if e.kind() == std::io::ErrorKind::NotFound => { + // There exists no lockfile yet + using_lockfile_copy = true; + other_options.push("--lockfile-path".to_owned()); + other_options.push(target_lockfile.to_string()); + } + Err(e) => { + tracing::warn!( + "Failed to copy lock file from `{lockfile}` to `{target_lockfile}`: {e}", + ); + } + } + } + if using_lockfile_copy { + other_options.push("-Zunstable-options".to_owned()); + command.env("RUSTC_BOOTSTRAP", "1"); + } + // No need to lock it if we copied the lockfile, we won't modify the original after all/ + // This way cargo cannot error out on us if the lockfile requires updating. + if !using_lockfile_copy && locked { + other_options.push("--locked".to_owned()); + } + command.other_options(other_options); + + // FIXME: Fetching metadata is a slow process, as it might require + // calling crates.io. We should be reporting progress here, but it's + // unclear whether cargo itself supports it. + progress("cargo metadata: started".to_owned()); + + let res = (|| -> anyhow::Result<(_, _)> { + let mut errored = false; + let output = + spawn_with_streaming_output(command.cargo_command(), &mut |_| (), &mut |line| { + errored = errored || line.starts_with("error") || line.starts_with("warning"); + if errored { + progress("cargo metadata: ?".to_owned()); + return; + } + progress(format!("cargo metadata: {line}")); + })?; + if !output.status.success() { + progress(format!("cargo metadata: failed {}", output.status)); + let error = cargo_metadata::Error::CargoMetadata { + stderr: String::from_utf8(output.stderr)?, + } + .into(); + if !no_deps { + // If we failed to fetch metadata with deps, return pre-fetched result without them. + // This makes r-a still work partially when offline. + if let Ok(metadata) = no_deps_result { + tracing::warn!( + ?error, + "`cargo metadata` failed and returning succeeded result with `--no-deps`" + ); + return Ok((metadata, Some(error))); + } + } + return Err(error); + } + let stdout = from_utf8(&output.stdout)? + .lines() + .find(|line| line.starts_with('{')) + .ok_or(cargo_metadata::Error::NoJson)?; + Ok((cargo_metadata::MetadataCommand::parse(stdout)?, None)) + })() + .with_context(|| format!("Failed to run `{:?}`", command.cargo_command())); + progress("cargo metadata: finished".to_owned()); + res + } +} diff --git a/src/tools/rust-analyzer/crates/project-model/src/env.rs b/src/tools/rust-analyzer/crates/project-model/src/env.rs index 9e0415c3b39..d281492fc98 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/env.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/env.rs @@ -1,10 +1,9 @@ //! Cargo-like environment variables injection. 
use base_db::Env; -use paths::{Utf8Path, Utf8PathBuf}; -use rustc_hash::FxHashMap; +use paths::Utf8Path; use toolchain::Tool; -use crate::{ManifestPath, PackageData, Sysroot, TargetKind, utf8_stdout}; +use crate::{ManifestPath, PackageData, TargetKind, cargo_config_file::CargoConfigFile}; /// Recreates the compile-time environment variables that Cargo sets. /// @@ -61,104 +60,68 @@ pub(crate) fn inject_rustc_tool_env(env: &mut Env, cargo_name: &str, kind: Targe env.set("CARGO_CRATE_NAME", cargo_name.replace('-', "_")); } -pub(crate) fn cargo_config_env( - manifest: &ManifestPath, - extra_env: &FxHashMap<String, Option<String>>, - sysroot: &Sysroot, -) -> Env { - let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); - cargo_config - .args(["-Z", "unstable-options", "config", "get", "env"]) - .env("RUSTC_BOOTSTRAP", "1"); - if manifest.is_rust_manifest() { - cargo_config.arg("-Zscript"); - } - // if successful we receive `env.key.value = "value" per entry - tracing::debug!("Discovering cargo config env by {:?}", cargo_config); - utf8_stdout(&mut cargo_config) - .map(|stdout| parse_output_cargo_config_env(manifest, &stdout)) - .inspect(|env| { - tracing::debug!("Discovered cargo config env: {:?}", env); - }) - .inspect_err(|err| { - tracing::debug!("Failed to discover cargo config env: {:?}", err); - }) - .unwrap_or_default() -} - -fn parse_output_cargo_config_env(manifest: &ManifestPath, stdout: &str) -> Env { +pub(crate) fn cargo_config_env(manifest: &ManifestPath, config: &Option<CargoConfigFile>) -> Env { let mut env = Env::default(); - let mut relatives = vec![]; - for (key, val) in - stdout.lines().filter_map(|l| l.strip_prefix("env.")).filter_map(|l| l.split_once(" = ")) - { - let val = val.trim_matches('"').to_owned(); - if let Some((key, modifier)) = key.split_once('.') { - match modifier { - "relative" => relatives.push((key, val)), - "value" => _ = env.insert(key, val), - _ => { - tracing::warn!( - "Unknown modifier in cargo config env: {}, expected `relative` or `value`", - modifier - ); - continue; - } - } - } else { - env.insert(key, val); - } - } + let Some(serde_json::Value::Object(env_json)) = config.as_ref().and_then(|c| c.get("env")) + else { + return env; + }; + // FIXME: The base here should be the parent of the `.cargo/config` file, not the manifest. // But cargo does not provide this information. 
let base = <_ as AsRef<Utf8Path>>::as_ref(manifest.parent()); - for (key, relative) in relatives { - if relative != "true" { + + for (key, entry) in env_json { + let serde_json::Value::Object(entry) = entry else { continue; - } - if let Some(suffix) = env.get(key) { - env.insert(key, base.join(suffix).to_string()); - } - } - env -} + }; + let Some(value) = entry.get("value").and_then(|v| v.as_str()) else { + continue; + }; -pub(crate) fn cargo_config_build_target_dir( - manifest: &ManifestPath, - extra_env: &FxHashMap<String, Option<String>>, - sysroot: &Sysroot, -) -> Option<Utf8PathBuf> { - let mut cargo_config = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); - cargo_config - .args(["-Z", "unstable-options", "config", "get", "build.target-dir"]) - .env("RUSTC_BOOTSTRAP", "1"); - if manifest.is_rust_manifest() { - cargo_config.arg("-Zscript"); + let value = if entry + .get("relative") + .and_then(|v| v.as_bool()) + .is_some_and(std::convert::identity) + { + base.join(value).to_string() + } else { + value.to_owned() + }; + env.insert(key, value); } - utf8_stdout(&mut cargo_config) - .map(|stdout| { - Utf8Path::new(stdout.trim_start_matches("build.target-dir = ").trim_matches('"')) - .to_owned() - }) - .ok() + + env } #[test] fn parse_output_cargo_config_env_works() { - let stdout = r#" -env.CARGO_WORKSPACE_DIR.relative = true -env.CARGO_WORKSPACE_DIR.value = "" -env.RELATIVE.relative = true -env.RELATIVE.value = "../relative" -env.INVALID.relative = invalidbool -env.INVALID.value = "../relative" -env.TEST.value = "test" -"# - .trim(); + let raw = r#" +{ + "env": { + "CARGO_WORKSPACE_DIR": { + "relative": true, + "value": "" + }, + "INVALID": { + "relative": "invalidbool", + "value": "../relative" + }, + "RELATIVE": { + "relative": true, + "value": "../relative" + }, + "TEST": { + "value": "test" + } + } +} +"#; + let config: CargoConfigFile = serde_json::from_str(raw).unwrap(); let cwd = paths::Utf8PathBuf::try_from(std::env::current_dir().unwrap()).unwrap(); let manifest = paths::AbsPathBuf::assert(cwd.join("Cargo.toml")); let manifest = ManifestPath::try_from(manifest).unwrap(); - let env = parse_output_cargo_config_env(&manifest, stdout); + let env = cargo_config_env(&manifest, &Some(config)); assert_eq!(env.get("CARGO_WORKSPACE_DIR").as_deref(), Some(cwd.join("").as_str())); assert_eq!(env.get("RELATIVE").as_deref(), Some(cwd.join("../relative").as_str())); assert_eq!(env.get("INVALID").as_deref(), Some("../relative")); diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 436af64cf13..3bf3d06e6b1 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -24,7 +24,7 @@ pub mod toolchain_info { use std::path::Path; - use crate::{ManifestPath, Sysroot}; + use crate::{ManifestPath, Sysroot, cargo_config_file::CargoConfigFile}; #[derive(Copy, Clone)] pub enum QueryConfig<'a> { @@ -32,11 +32,12 @@ pub mod toolchain_info { Rustc(&'a Sysroot, &'a Path), /// Attempt to use cargo to query the desired information, honoring cargo configurations. /// If this fails, falls back to invoking `rustc` directly. 
- Cargo(&'a Sysroot, &'a ManifestPath), + Cargo(&'a Sysroot, &'a ManifestPath, &'a Option<CargoConfigFile>), } } mod build_dependencies; +mod cargo_config_file; mod cargo_workspace; mod env; mod manifest_path; diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index 9f19260d309..9781c46737d 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -9,14 +9,15 @@ use std::{env, fs, ops::Not, path::Path, process::Command}; use anyhow::{Result, format_err}; use itertools::Itertools; -use paths::{AbsPath, AbsPathBuf, Utf8PathBuf}; +use paths::{AbsPath, AbsPathBuf, Utf8Path, Utf8PathBuf}; use rustc_hash::FxHashMap; use stdx::format_to; use toolchain::{Tool, probe_for_binary}; use crate::{ CargoWorkspace, ManifestPath, ProjectJson, RustSourceWorkspaceConfig, - cargo_workspace::CargoMetadataConfig, utf8_stdout, + cargo_workspace::{CargoMetadataConfig, FetchMetadata}, + utf8_stdout, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -211,6 +212,7 @@ impl Sysroot { sysroot_source_config: &RustSourceWorkspaceConfig, no_deps: bool, current_dir: &AbsPath, + target_dir: &Utf8Path, progress: &dyn Fn(String), ) -> Option<RustLibSrcWorkspace> { assert!(matches!(self.workspace, RustLibSrcWorkspace::Empty), "workspace already loaded"); @@ -224,6 +226,7 @@ impl Sysroot { match self.load_library_via_cargo( &library_manifest, current_dir, + target_dir, cargo_config, no_deps, progress, @@ -319,6 +322,7 @@ impl Sysroot { &self, library_manifest: &ManifestPath, current_dir: &AbsPath, + target_dir: &Utf8Path, cargo_config: &CargoMetadataConfig, no_deps: bool, progress: &dyn Fn(String), @@ -331,16 +335,11 @@ impl Sysroot { Some("nightly".to_owned()), ); - let (mut res, _) = CargoWorkspace::fetch_metadata( - library_manifest, - current_dir, - &cargo_config, - self, - no_deps, - // Make sure we never attempt to write to the sysroot - true, - progress, - )?; + // Make sure we never attempt to write to the sysroot + let locked = true; + let (mut res, _) = + FetchMetadata::new(library_manifest, current_dir, &cargo_config, self, no_deps) + .exec(target_dir, locked, progress)?; // Patch out `rustc-std-workspace-*` crates to point to the real crates. // This is done prior to `CrateGraph` construction to prevent de-duplication logic from failing. 
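For context on the env.rs rewrite above: the `env` table is now read out of a single `cargo -Zunstable-options config get --format json` dump instead of scraping line-oriented `config get env` output. A hedged, standalone sketch of that resolution logic follows; the function and crate names here are assumptions for illustration, not the actual rust-analyzer code:

// Sketch under assumptions: given the parsed JSON from
// `cargo -Zunstable-options config get --format json`, pull out the `env`
// entries and join `relative = true` values onto a base directory, much like
// the new `cargo_config_env` does. Requires the `serde_json` crate.
use std::collections::BTreeMap;
use std::path::Path;

fn resolve_env(config: &serde_json::Value, base: &Path) -> BTreeMap<String, String> {
    let mut out = BTreeMap::new();
    let Some(env) = config.get("env").and_then(|v| v.as_object()) else {
        return out;
    };
    for (key, entry) in env {
        // Each entry looks like {"value": "...", "relative": true/false}.
        let Some(value) = entry.get("value").and_then(|v| v.as_str()) else {
            continue;
        };
        let relative = entry.get("relative").and_then(|v| v.as_bool()).unwrap_or(false);
        let resolved = if relative {
            base.join(value).to_string_lossy().into_owned()
        } else {
            value.to_owned()
        };
        out.insert(key.clone(), resolved);
    }
    out
}

fn main() {
    let raw = r#"{ "env": { "RELATIVE": { "relative": true, "value": "../x" },
                            "PLAIN": { "value": "test" } } }"#;
    let config: serde_json::Value = serde_json::from_str(raw).unwrap();
    let env = resolve_env(&config, Path::new("/workspace/crate"));
    assert_eq!(env["PLAIN"], "test");
    println!("{env:?}"); // RELATIVE is joined onto the base dir, unnormalized
}
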
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index f229e9a650d..ed72520f40d 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -239,8 +239,13 @@ fn smoke_test_real_sysroot_cargo() { ); let cwd = AbsPathBuf::assert_utf8(temp_dir().join("smoke_test_real_sysroot_cargo")); std::fs::create_dir_all(&cwd).unwrap(); - let loaded_sysroot = - sysroot.load_workspace(&RustSourceWorkspaceConfig::default_cargo(), false, &cwd, &|_| ()); + let loaded_sysroot = sysroot.load_workspace( + &RustSourceWorkspaceConfig::default_cargo(), + false, + &cwd, + &Utf8PathBuf::default(), + &|_| (), + ); if let Some(loaded_sysroot) = loaded_sysroot { sysroot.set_workspace(loaded_sysroot); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs index a77f76797fc..6e06e88bf7a 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/rustc_cfg.rs @@ -63,7 +63,7 @@ fn rustc_print_cfg( ) -> anyhow::Result<String> { const RUSTC_ARGS: [&str; 2] = ["--print", "cfg"]; let (sysroot, current_dir) = match config { - QueryConfig::Cargo(sysroot, cargo_toml) => { + QueryConfig::Cargo(sysroot, cargo_toml, _) => { let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS); if let Some(target) = target { @@ -109,7 +109,7 @@ mod tests { let sysroot = Sysroot::empty(); let manifest_path = ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap(); - let cfg = QueryConfig::Cargo(&sysroot, &manifest_path); + let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None); assert_ne!(get(cfg, None, &FxHashMap::default()), vec![]); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs index a4d0ec69537..a28f468e692 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_data_layout.rs @@ -20,7 +20,7 @@ pub fn get( }) }; let (sysroot, current_dir) = match config { - QueryConfig::Cargo(sysroot, cargo_toml) => { + QueryConfig::Cargo(sysroot, cargo_toml, _) => { let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); cmd.env("RUSTC_BOOTSTRAP", "1"); cmd.args(["rustc", "-Z", "unstable-options"]).args(RUSTC_ARGS).args([ @@ -66,7 +66,7 @@ mod tests { let sysroot = Sysroot::empty(); let manifest_path = ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap(); - let cfg = QueryConfig::Cargo(&sysroot, &manifest_path); + let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None); assert!(get(cfg, None, &FxHashMap::default()).is_ok()); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs index f6ab8532197..9f12ededb61 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/target_tuple.rs @@ -5,7 +5,9 @@ use anyhow::Context; use 
rustc_hash::FxHashMap; use toolchain::Tool; -use crate::{ManifestPath, Sysroot, toolchain_info::QueryConfig, utf8_stdout}; +use crate::{ + Sysroot, cargo_config_file::CargoConfigFile, toolchain_info::QueryConfig, utf8_stdout, +}; /// For cargo, runs `cargo -Zunstable-options config get build.target` to get the configured project target(s). /// For rustc, runs `rustc --print -vV` to get the host target. @@ -20,8 +22,8 @@ pub fn get( } let (sysroot, current_dir) = match config { - QueryConfig::Cargo(sysroot, cargo_toml) => { - match cargo_config_build_target(cargo_toml, extra_env, sysroot) { + QueryConfig::Cargo(sysroot, cargo_toml, config_file) => { + match config_file.as_ref().and_then(cargo_config_build_target) { Some(it) => return Ok(it), None => (sysroot, cargo_toml.parent().as_ref()), } @@ -50,30 +52,30 @@ fn rustc_discover_host_tuple( } } -fn cargo_config_build_target( - cargo_toml: &ManifestPath, - extra_env: &FxHashMap<String, Option<String>>, - sysroot: &Sysroot, -) -> Option<Vec<String>> { - let mut cmd = sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env); - cmd.current_dir(cargo_toml.parent()).env("RUSTC_BOOTSTRAP", "1"); - cmd.args(["-Z", "unstable-options", "config", "get", "build.target"]); - // if successful we receive `build.target = "target-tuple"` - // or `build.target = ["<target 1>", ..]` - // this might be `error: config value `build.target` is not set` in which case we - // don't wanna log the error - utf8_stdout(&mut cmd).and_then(parse_output_cargo_config_build_target).ok() +fn cargo_config_build_target(config: &CargoConfigFile) -> Option<Vec<String>> { + match parse_json_cargo_config_build_target(config) { + Ok(v) => v, + Err(e) => { + tracing::debug!("Failed to discover cargo config build target {e:?}"); + None + } + } } // Parses `"build.target = [target-tuple, target-tuple, ...]"` or `"build.target = "target-tuple"` -fn parse_output_cargo_config_build_target(stdout: String) -> anyhow::Result<Vec<String>> { - let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"'); - - if !trimmed.starts_with('[') { - return Ok([trimmed.to_owned()].to_vec()); +fn parse_json_cargo_config_build_target( + config: &CargoConfigFile, +) -> anyhow::Result<Option<Vec<String>>> { + let target = config.get("build").and_then(|v| v.as_object()).and_then(|m| m.get("target")); + match target { + Some(serde_json::Value::String(s)) => Ok(Some(vec![s.to_owned()])), + Some(v) => serde_json::from_value(v.clone()) + .map(Option::Some) + .context("Failed to parse `build.target` as an array of target"), + // t`error: config value `build.target` is not set`, in which case we + // don't wanna log the error + None => Ok(None), } - - serde_json::from_str(trimmed).context("Failed to parse `build.target` as an array of target") } #[cfg(test)] @@ -90,7 +92,7 @@ mod tests { let sysroot = Sysroot::empty(); let manifest_path = ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap(); - let cfg = QueryConfig::Cargo(&sysroot, &manifest_path); + let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None); assert!(get(cfg, None, &FxHashMap::default()).is_ok()); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs index 91ba8598591..357053d8e82 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/toolchain_info/version.rs @@ -12,7 +12,7 @@ pub(crate) fn get( 
extra_env: &FxHashMap<String, Option<String>>, ) -> Result<Option<Version>, anyhow::Error> { let (mut cmd, prefix) = match config { - QueryConfig::Cargo(sysroot, cargo_toml) => { + QueryConfig::Cargo(sysroot, cargo_toml, _) => { (sysroot.tool(Tool::Cargo, cargo_toml.parent(), extra_env), "cargo ") } QueryConfig::Rustc(sysroot, current_dir) => { @@ -44,7 +44,7 @@ mod tests { let sysroot = Sysroot::empty(); let manifest_path = ManifestPath::try_from(AbsPathBuf::assert(Utf8PathBuf::from(manifest_path))).unwrap(); - let cfg = QueryConfig::Cargo(&sysroot, &manifest_path); + let cfg = QueryConfig::Cargo(&sysroot, &manifest_path, &None); assert!(get(cfg, &FxHashMap::default()).is_ok()); } diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 43db84b4fa3..677f29e3c60 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -25,11 +25,9 @@ use crate::{ ProjectJson, ProjectManifest, RustSourceWorkspaceConfig, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, build_dependencies::BuildScriptOutput, - cargo_workspace::{CargoMetadataConfig, DepKind, PackageData, RustLibSource}, - env::{ - cargo_config_build_target_dir, cargo_config_env, inject_cargo_env, - inject_cargo_package_env, inject_rustc_tool_env, - }, + cargo_config_file, + cargo_workspace::{CargoMetadataConfig, DepKind, FetchMetadata, PackageData, RustLibSource}, + env::{cargo_config_env, inject_cargo_env, inject_cargo_package_env, inject_rustc_tool_env}, project_json::{Crate, CrateArrayIdx}, sysroot::RustLibSrcWorkspace, toolchain_info::{QueryConfig, rustc_cfg, target_data_layout, target_tuple, version}, @@ -270,7 +268,9 @@ impl ProjectWorkspace { tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.rust_lib_src_root(), root = ?sysroot.root(), "Using sysroot"); progress("querying project metadata".to_owned()); - let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml); + let config_file = cargo_config_file::read(cargo_toml, extra_env, &sysroot); + let config_file_ = config_file.clone(); + let toolchain_config = QueryConfig::Cargo(&sysroot, cargo_toml, &config_file_); let targets = target_tuple::get(toolchain_config, target.as_deref(), extra_env).unwrap_or_default(); let toolchain = version::get(toolchain_config, extra_env) @@ -282,10 +282,24 @@ impl ProjectWorkspace { .ok() .flatten(); + let fetch_metadata = FetchMetadata::new( + cargo_toml, + workspace_dir, + &CargoMetadataConfig { + features: features.clone(), + targets: targets.clone(), + extra_args: extra_args.clone(), + extra_env: extra_env.clone(), + toolchain_version: toolchain.clone(), + kind: "workspace", + }, + &sysroot, + *no_deps, + ); let target_dir = config .target_dir .clone() - .or_else(|| cargo_config_build_target_dir(cargo_toml, extra_env, &sysroot)) + .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone())) .unwrap_or_else(|| workspace_dir.join("target").into()); // We spawn a bunch of processes to query various information about the workspace's @@ -319,7 +333,7 @@ impl ProjectWorkspace { }; rustc_dir.and_then(|rustc_dir| { info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); - match CargoWorkspace::fetch_metadata( + match FetchMetadata::new( &rustc_dir, workspace_dir, &CargoMetadataConfig { @@ -327,15 +341,12 @@ impl ProjectWorkspace { targets: targets.clone(), extra_args: extra_args.clone(), extra_env: extra_env.clone(), - 
target_dir: target_dir.clone(), toolchain_version: toolchain.clone(), kind: "rustc-dev" }, &sysroot, *no_deps, - true, - progress, - ) { + ).exec(&target_dir, true, progress) { Ok((meta, _error)) => { let workspace = CargoWorkspace::new( meta, @@ -364,40 +375,22 @@ impl ProjectWorkspace { }) }); - let cargo_metadata = s.spawn(|| { - CargoWorkspace::fetch_metadata( - cargo_toml, - workspace_dir, - &CargoMetadataConfig { - features: features.clone(), - targets: targets.clone(), - extra_args: extra_args.clone(), - extra_env: extra_env.clone(), - target_dir: target_dir.clone(), - toolchain_version: toolchain.clone(), - kind: "workspace", - }, - &sysroot, - *no_deps, - false, - progress, - ) - }); + let cargo_metadata = s.spawn(|| fetch_metadata.exec(&target_dir, false, progress)); let loaded_sysroot = s.spawn(|| { sysroot.load_workspace( &RustSourceWorkspaceConfig::CargoMetadata(sysroot_metadata_config( config, &targets, toolchain.clone(), - target_dir.clone(), )), config.no_deps, workspace_dir, + &target_dir, progress, ) }); let cargo_config_extra_env = - s.spawn(|| cargo_config_env(cargo_toml, extra_env, &sysroot)); + s.spawn(move || cargo_config_env(cargo_toml, &config_file)); thread::Result::Ok(( rustc_cfg.join()?, data_layout.join()?, @@ -476,9 +469,7 @@ impl ProjectWorkspace { let target_dir = config .target_dir .clone() - .or_else(|| { - cargo_config_build_target_dir(project_json.manifest()?, &config.extra_env, &sysroot) - }) + .or_else(|| cargo_target_dir(project_json.manifest()?, &config.extra_env, &sysroot)) .unwrap_or_else(|| project_root.join("target").into()); // We spawn a bunch of processes to query various information about the workspace's @@ -502,6 +493,7 @@ impl ProjectWorkspace { &RustSourceWorkspaceConfig::Json(*sysroot_project), config.no_deps, project_root, + &target_dir, progress, ) } else { @@ -510,10 +502,10 @@ impl ProjectWorkspace { config, &targets, toolchain.clone(), - target_dir, )), config.no_deps, project_root, + &target_dir, progress, ) } @@ -554,7 +546,8 @@ impl ProjectWorkspace { None => Sysroot::empty(), }; - let query_config = QueryConfig::Cargo(&sysroot, detached_file); + let config_file = cargo_config_file::read(detached_file, &config.extra_env, &sysroot); + let query_config = QueryConfig::Cargo(&sysroot, detached_file, &config_file); let toolchain = version::get(query_config, &config.extra_env).ok().flatten(); let targets = target_tuple::get(query_config, config.target.as_deref(), &config.extra_env) .unwrap_or_default(); @@ -563,7 +556,7 @@ impl ProjectWorkspace { let target_dir = config .target_dir .clone() - .or_else(|| cargo_config_build_target_dir(detached_file, &config.extra_env, &sysroot)) + .or_else(|| cargo_target_dir(detached_file, &config.extra_env, &sysroot)) .unwrap_or_else(|| dir.join("target").into()); let loaded_sysroot = sysroot.load_workspace( @@ -571,17 +564,17 @@ impl ProjectWorkspace { config, &targets, toolchain.clone(), - target_dir.clone(), )), config.no_deps, dir, + &target_dir, &|_| (), ); if let Some(loaded_sysroot) = loaded_sysroot { sysroot.set_workspace(loaded_sysroot); } - let cargo_script = CargoWorkspace::fetch_metadata( + let fetch_metadata = FetchMetadata::new( detached_file, dir, &CargoMetadataConfig { @@ -589,25 +582,26 @@ impl ProjectWorkspace { targets, extra_args: config.extra_args.clone(), extra_env: config.extra_env.clone(), - target_dir, toolchain_version: toolchain.clone(), kind: "detached-file", }, &sysroot, config.no_deps, - false, - &|_| (), - ) - .ok() - .map(|(ws, error)| { - let cargo_config_extra_env = - 
cargo_config_env(detached_file, &config.extra_env, &sysroot); - ( - CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false), - WorkspaceBuildScripts::default(), - error.map(Arc::new), - ) - }); + ); + let target_dir = config + .target_dir + .clone() + .or_else(|| fetch_metadata.no_deps_metadata().map(|m| m.target_directory.clone())) + .unwrap_or_else(|| dir.join("target").into()); + let cargo_script = + fetch_metadata.exec(&target_dir, false, &|_| ()).ok().map(|(ws, error)| { + let cargo_config_extra_env = cargo_config_env(detached_file, &config_file); + ( + CargoWorkspace::new(ws, detached_file.clone(), cargo_config_extra_env, false), + WorkspaceBuildScripts::default(), + error.map(Arc::new), + ) + }); Ok(ProjectWorkspace { kind: ProjectWorkspaceKind::DetachedFile { @@ -1889,15 +1883,33 @@ fn sysroot_metadata_config( config: &CargoConfig, targets: &[String], toolchain_version: Option<Version>, - target_dir: Utf8PathBuf, ) -> CargoMetadataConfig { CargoMetadataConfig { features: Default::default(), targets: targets.to_vec(), extra_args: Default::default(), extra_env: config.extra_env.clone(), - target_dir, toolchain_version, kind: "sysroot", } } + +fn cargo_target_dir( + manifest: &ManifestPath, + extra_env: &FxHashMap<String, Option<String>>, + sysroot: &Sysroot, +) -> Option<Utf8PathBuf> { + let cargo = sysroot.tool(Tool::Cargo, manifest.parent(), extra_env); + let mut meta = cargo_metadata::MetadataCommand::new(); + meta.cargo_path(cargo.get_program()); + meta.manifest_path(manifest); + // `--no-deps` doesn't (over)write lockfiles as it doesn't do any package resolve. + // So we can use it to get `target_directory` before copying lockfiles + let mut other_options = vec!["--no-deps".to_owned()]; + if manifest.is_rust_manifest() { + meta.env("RUSTC_BOOTSTRAP", "1"); + other_options.push("-Zscript".to_owned()); + } + meta.other_options(other_options); + meta.exec().map(|m| m.target_directory).ok() +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 0ee01982fea..fc89f486f84 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -796,7 +796,7 @@ impl flags::AnalysisStats { // region:expressions let (previous_exprs, previous_unknown, previous_partially_unknown) = (num_exprs, num_exprs_unknown, num_exprs_partially_unknown); - for (expr_id, _) in body.exprs.iter() { + for (expr_id, _) in body.exprs() { let ty = &inference_result[expr_id]; num_exprs += 1; let unknown_or_partial = if ty.is_unknown() { @@ -901,7 +901,7 @@ impl flags::AnalysisStats { // region:patterns let (previous_pats, previous_unknown, previous_partially_unknown) = (num_pats, num_pats_unknown, num_pats_partially_unknown); - for (pat_id, _) in body.pats.iter() { + for (pat_id, _) in body.pats() { let ty = &inference_result[pat_id]; num_pats += 1; let unknown_or_partial = if ty.is_unknown() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index f97bf832442..30ac93fb6f8 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -9,6 +9,7 @@ use hir::{ChangeWithProcMacros, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use ide_db::base_db; use itertools::Either; +use 
paths::Utf8PathBuf; use profile::StopWatch; use project_model::toolchain_info::{QueryConfig, target_data_layout}; use project_model::{ @@ -79,6 +80,7 @@ impl Tester { &RustSourceWorkspaceConfig::default_cargo(), false, &path, + &Utf8PathBuf::default(), &|_| (), ); if let Some(loaded_sysroot) = loaded_sysroot { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index 8a848fb848c..292be1d5315 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -911,7 +911,8 @@ pub(crate) fn folding_range( | FoldKind::Array | FoldKind::TraitAliases | FoldKind::ExternCrates - | FoldKind::MatchArm => None, + | FoldKind::MatchArm + | FoldKind::Function => None, }; let range = range(line_index, fold.range); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs index 59073af983b..1b940c70da6 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/main.rs @@ -880,7 +880,8 @@ fn main() {{}} #[test] fn diagnostics_dont_block_typing() { - if skip_slow_tests() { + if skip_slow_tests() || std::env::var("CI").is_ok() { + // FIXME: This test is failing too frequently (therefore we disable it on CI). return; } diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs index 121d2e33243..a9288ecd6fa 100644 --- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs +++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs @@ -92,6 +92,7 @@ impl fmt::Debug for ErasedFileAstId { Use, Impl, BlockExpr, + AsmExpr, Fixup, ); if f.alternate() { @@ -144,6 +145,10 @@ enum ErasedFileAstIdKind { Impl, /// Associated with [`BlockExprFileAstId`]. BlockExpr, + // `global_asm!()` is an item, so we need to give it an `AstId`. So we give to all inline asm + // because incrementality is not a problem, they will always be the only item in the macro file, + // and memory usage also not because they're rare. + AsmExpr, /// Keep this last. Root, } @@ -204,14 +209,17 @@ impl ErasedFileAstId { .or_else(|| extern_block_ast_id(node, index_map)) .or_else(|| use_ast_id(node, index_map)) .or_else(|| impl_ast_id(node, index_map)) + .or_else(|| asm_expr_ast_id(node, index_map)) } fn should_alloc(node: &SyntaxNode) -> bool { - should_alloc_has_name(node) - || should_alloc_assoc_item(node) - || ast::ExternBlock::can_cast(node.kind()) - || ast::Use::can_cast(node.kind()) - || ast::Impl::can_cast(node.kind()) + let kind = node.kind(); + should_alloc_has_name(kind) + || should_alloc_assoc_item(kind) + || ast::ExternBlock::can_cast(kind) + || ast::Use::can_cast(kind) + || ast::Impl::can_cast(kind) + || ast::AsmExpr::can_cast(kind) } #[inline] @@ -278,7 +286,6 @@ impl<N> FileAstId<N> { #[derive(Hash)] struct ErasedHasNameFileAstId<'a> { - kind: SyntaxKind, name: &'a str, } @@ -332,6 +339,19 @@ fn use_ast_id( } } +impl AstIdNode for ast::AsmExpr {} + +fn asm_expr_ast_id( + node: &SyntaxNode, + index_map: &mut ErasedAstIdNextIndexMap, +) -> Option<ErasedFileAstId> { + if ast::AsmExpr::can_cast(node.kind()) { + Some(index_map.new_id(ErasedFileAstIdKind::AsmExpr, ())) + } else { + None + } +} + impl AstIdNode for ast::Impl {} fn impl_ast_id( @@ -433,7 +453,6 @@ macro_rules! 
register_has_name_ast_id { )+ fn has_name_ast_id(node: &SyntaxNode, index_map: &mut ErasedAstIdNextIndexMap) -> Option<ErasedFileAstId> { - let kind = node.kind(); match_ast! { match node { $( @@ -441,7 +460,6 @@ macro_rules! register_has_name_ast_id { let name = node.$name_method(); let name = name.as_ref().map_or("", |it| it.text_non_mutable()); let result = ErasedHasNameFileAstId { - kind, name, }; Some(index_map.new_id(ErasedFileAstIdKind::$ident, result)) @@ -452,8 +470,7 @@ macro_rules! register_has_name_ast_id { } } - fn should_alloc_has_name(node: &SyntaxNode) -> bool { - let kind = node.kind(); + fn should_alloc_has_name(kind: SyntaxKind) -> bool { false $( || ast::$ident::can_cast(kind) )* } }; @@ -483,7 +500,6 @@ macro_rules! register_assoc_item_ast_id { index_map: &mut ErasedAstIdNextIndexMap, parent: Option<&ErasedFileAstId>, ) -> Option<ErasedFileAstId> { - let kind = node.kind(); match_ast! { match node { $( @@ -491,7 +507,6 @@ macro_rules! register_assoc_item_ast_id { let name = $name_callback(node); let name = name.as_ref().map_or("", |it| it.text_non_mutable()); let properties = ErasedHasNameFileAstId { - kind, name, }; let result = ErasedAssocItemFileAstId { @@ -506,8 +521,7 @@ macro_rules! register_assoc_item_ast_id { } } - fn should_alloc_assoc_item(node: &SyntaxNode) -> bool { - let kind = node.kind(); + fn should_alloc_assoc_item(kind: SyntaxKind) -> bool { false $( || ast::$ident::can_cast(kind) )* } }; diff --git a/src/tools/rust-analyzer/crates/syntax/rust.ungram b/src/tools/rust-analyzer/crates/syntax/rust.ungram index 3f439472337..4cbc88cfb5e 100644 --- a/src/tools/rust-analyzer/crates/syntax/rust.ungram +++ b/src/tools/rust-analyzer/crates/syntax/rust.ungram @@ -158,6 +158,7 @@ Item = | TypeAlias | Union | Use +| AsmExpr MacroRules = Attr* Visibility? @@ -409,7 +410,8 @@ OffsetOfExpr = // global_asm := "global_asm!(" format_string *("," format_string) *("," operand) [","] ")" // format_string := STRING_LITERAL / RAW_STRING_LITERAL AsmExpr = - Attr* 'builtin' '#' 'asm' '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')' + Attr* 'builtin' '#' ( 'asm' | 'global_asm' | 'naked_asm' ) + '(' template:(Expr (',' Expr)*) (AsmPiece (',' AsmPiece)*)? ','? ')' // operand_expr := expr / "_" / expr "=>" expr / expr "=>" "_" AsmOperandExpr = in_expr:Expr ('=>' out_expr:Expr)? 
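The grammar change above, together with the new `global_asm`/`naked_asm` contextual keywords and the `Item = ... | AsmExpr` alternative, covers both item-position and expression-position assembly. A hedged illustration of the source shapes this corresponds to, with placeholder, target-dependent assembly strings that are not taken from the patch:

```rust
use core::arch::{asm, global_asm};

// Item position: this is why `AsmExpr` is now also an `Item` in the grammar
// and gets its own `AstId` kind. (`naked_asm!`, used inside naked functions,
// is handled the same way via the new contextual keyword.)
global_asm!(".text");

fn pause() {
    // Expression position, unchanged by this patch.
    unsafe { asm!("nop") };
}

fn main() {
    pause();
}
```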
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index e60243f2c91..e902516471d 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -406,42 +406,6 @@ impl ast::WhereClause { } } -impl ast::TypeParam { - pub fn remove_default(&self) { - if let Some((eq, last)) = self - .syntax() - .children_with_tokens() - .find(|it| it.kind() == T![=]) - .zip(self.syntax().last_child_or_token()) - { - ted::remove_all(eq..=last); - - // remove any trailing ws - if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) { - last.detach(); - } - } - } -} - -impl ast::ConstParam { - pub fn remove_default(&self) { - if let Some((eq, last)) = self - .syntax() - .children_with_tokens() - .find(|it| it.kind() == T![=]) - .zip(self.syntax().last_child_or_token()) - { - ted::remove_all(eq..=last); - - // remove any trailing ws - if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) { - last.detach(); - } - } - } -} - pub trait Removable: AstNode { fn remove(&self); } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs index 79a9f4da338..2b862465420 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/generated/nodes.rs @@ -118,6 +118,14 @@ impl AsmExpr { pub fn asm_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![asm]) } #[inline] pub fn builtin_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![builtin]) } + #[inline] + pub fn global_asm_token(&self) -> Option<SyntaxToken> { + support::token(&self.syntax, T![global_asm]) + } + #[inline] + pub fn naked_asm_token(&self) -> Option<SyntaxToken> { + support::token(&self.syntax, T![naked_asm]) + } } pub struct AsmLabel { pub(crate) syntax: SyntaxNode, @@ -2087,6 +2095,7 @@ impl ast::HasAttrs for GenericParam {} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Item { + AsmExpr(AsmExpr), Const(Const), Enum(Enum), ExternBlock(ExternBlock), @@ -2106,7 +2115,6 @@ pub enum Item { Use(Use), } impl ast::HasAttrs for Item {} -impl ast::HasDocComments for Item {} #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Pat { @@ -8409,6 +8417,10 @@ impl AstNode for GenericParam { } } } +impl From<AsmExpr> for Item { + #[inline] + fn from(node: AsmExpr) -> Item { Item::AsmExpr(node) } +} impl From<Const> for Item { #[inline] fn from(node: Const) -> Item { Item::Const(node) } @@ -8482,7 +8494,8 @@ impl AstNode for Item { fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, - CONST + ASM_EXPR + | CONST | ENUM | EXTERN_BLOCK | EXTERN_CRATE @@ -8504,6 +8517,7 @@ impl AstNode for Item { #[inline] fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { + ASM_EXPR => Item::AsmExpr(AsmExpr { syntax }), CONST => Item::Const(Const { syntax }), ENUM => Item::Enum(Enum { syntax }), EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }), @@ -8528,6 +8542,7 @@ impl AstNode for Item { #[inline] fn syntax(&self) -> &SyntaxNode { match self { + Item::AsmExpr(it) => &it.syntax, Item::Const(it) => &it.syntax, Item::Enum(it) => &it.syntax, Item::ExternBlock(it) => &it.syntax, diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 309332873cb..d67f24fda96 
100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -680,7 +680,7 @@ pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::TupleEx let expr = elements.into_iter().format(", "); expr_from_text(&format!("({expr})")) } -pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr { +pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr { expr_from_text(&format!("{lhs} = {rhs}")) } fn expr_from_text<E: Into<ast::Expr> + AstNode>(text: &str) -> E { diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs index 17cc5f9c057..1ba61073151 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -440,6 +440,19 @@ impl SyntaxFactory { ast } + pub fn expr_assignment(&self, lhs: ast::Expr, rhs: ast::Expr) -> ast::BinExpr { + let ast = make::expr_assignment(lhs.clone(), rhs.clone()).clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(lhs.syntax().clone(), ast.lhs().unwrap().syntax().clone()); + builder.map_node(rhs.syntax().clone(), ast.rhs().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + + ast + } + pub fn expr_bin(&self, lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::BinExpr { let ast::Expr::BinExpr(ast) = make::expr_bin_op(lhs.clone(), op, rhs.clone()).clone_for_update() diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index dc1eba1a1ab..7b719b5dec7 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -1011,8 +1011,7 @@ pub mod ops { } #[lang = "add_assign"] - #[const_trait] - pub trait AddAssign<Rhs = Self> { + pub const trait AddAssign<Rhs = Self> { fn add_assign(&mut self, rhs: Rhs); } @@ -1941,6 +1940,7 @@ pub mod prelude { clone::Clone, // :clone cmp::{Eq, PartialEq}, // :eq cmp::{Ord, PartialOrd}, // :ord + convert::AsMut, // :as_mut convert::AsRef, // :as_ref convert::{From, Into, TryFrom, TryInto}, // :from default::Default, // :default diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml index 35a5a4d82b2..1fc1da50a0a 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml +++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml @@ -16,6 +16,9 @@ crossbeam-channel.workspace = true [dev-dependencies] lsp-types = "=0.95" ctrlc = "3.4.7" +anyhow.workspace = true +rustc-hash.workspace = true +toolchain.workspace = true [lints] workspace = true diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs b/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs deleted file mode 100644 index 6b3acda7bcd..00000000000 --- a/src/tools/rust-analyzer/lib/lsp-server/examples/goto_def.rs +++ /dev/null @@ -1,132 +0,0 @@ -//! A minimal example LSP server that can only respond to the `gotoDefinition` request. To use -//! this example, execute it and then send an `initialize` request. -//! -//! ```no_run -//! Content-Length: 85 -//! -//! {"jsonrpc": "2.0", "method": "initialize", "id": 1, "params": {"capabilities": {}}} -//! ``` -//! -//! This will respond with a server response. 
Then send it a `initialized` notification which will -//! have no response. -//! -//! ```no_run -//! Content-Length: 59 -//! -//! {"jsonrpc": "2.0", "method": "initialized", "params": {}} -//! ``` -//! -//! Once these two are sent, then we enter the main loop of the server. The only request this -//! example can handle is `gotoDefinition`: -//! -//! ```no_run -//! Content-Length: 159 -//! -//! {"jsonrpc": "2.0", "method": "textDocument/definition", "id": 2, "params": {"textDocument": {"uri": "file://temp"}, "position": {"line": 1, "character": 1}}} -//! ``` -//! -//! To finish up without errors, send a shutdown request: -//! -//! ```no_run -//! Content-Length: 67 -//! -//! {"jsonrpc": "2.0", "method": "shutdown", "id": 3, "params": null} -//! ``` -//! -//! The server will exit the main loop and finally we send a `shutdown` notification to stop -//! the server. -//! -//! ``` -//! Content-Length: 54 -//! -//! {"jsonrpc": "2.0", "method": "exit", "params": null} -//! ``` - -#![allow(clippy::print_stderr)] - -use std::error::Error; - -use lsp_types::OneOf; -use lsp_types::{ - GotoDefinitionResponse, InitializeParams, ServerCapabilities, request::GotoDefinition, -}; - -use lsp_server::{Connection, ExtractError, Message, Request, RequestId, Response}; - -fn main() -> Result<(), Box<dyn Error + Sync + Send>> { - // Note that we must have our logging only write out to stderr. - eprintln!("starting generic LSP server"); - - // Create the transport. Includes the stdio (stdin and stdout) versions but this could - // also be implemented to use sockets or HTTP. - let (connection, io_threads) = Connection::stdio(); - - // Run the server and wait for the two threads to end (typically by trigger LSP Exit event). - let server_capabilities = serde_json::to_value(&ServerCapabilities { - definition_provider: Some(OneOf::Left(true)), - ..Default::default() - }) - .unwrap(); - let initialization_params = match connection.initialize(server_capabilities) { - Ok(it) => it, - Err(e) => { - if e.channel_is_disconnected() { - io_threads.join()?; - } - return Err(e.into()); - } - }; - main_loop(connection, initialization_params)?; - io_threads.join()?; - - // Shut down gracefully. - eprintln!("shutting down server"); - Ok(()) -} - -fn main_loop( - connection: Connection, - params: serde_json::Value, -) -> Result<(), Box<dyn Error + Sync + Send>> { - let _params: InitializeParams = serde_json::from_value(params).unwrap(); - eprintln!("starting example main loop"); - for msg in &connection.receiver { - eprintln!("got msg: {msg:?}"); - match msg { - Message::Request(req) => { - if connection.handle_shutdown(&req)? { - return Ok(()); - } - eprintln!("got request: {req:?}"); - match cast::<GotoDefinition>(req) { - Ok((id, params)) => { - eprintln!("got gotoDefinition request #{id}: {params:?}"); - let result = Some(GotoDefinitionResponse::Array(Vec::new())); - let result = serde_json::to_value(&result).unwrap(); - let resp = Response { id, result: Some(result), error: None }; - connection.sender.send(Message::Response(resp))?; - continue; - } - Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"), - Err(ExtractError::MethodMismatch(req)) => req, - }; - // ... 
- } - Message::Response(resp) => { - eprintln!("got response: {resp:?}"); - } - Message::Notification(not) => { - eprintln!("got notification: {not:?}"); - } - } - } - Ok(()) -} - -fn cast<R>(req: Request) -> Result<(RequestId, R::Params), ExtractError<Request>> -where - R: lsp_types::request::Request, - R::Params: serde::de::DeserializeOwned, -{ - req.extract(R::METHOD) -} diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/manual_test.sh b/src/tools/rust-analyzer/lib/lsp-server/examples/manual_test.sh new file mode 100755 index 00000000000..d028ac43301 --- /dev/null +++ b/src/tools/rust-analyzer/lib/lsp-server/examples/manual_test.sh @@ -0,0 +1,53 @@ +#!/usr/bin/env bash +# Simple nine-packet LSP test for examples/minimal_lsp.rs +# Usage (two tabs): +# +# mkfifo /tmp/lsp_pipe # one-time setup +# # tab 1 – run the server +# cat /tmp/lsp_pipe | cargo run --example minimal_lsp +# +# # tab 2 – fire the packets (this script) +# bash examples/manual_test.sh # blocks until server exits +# +# If you don’t use a second tab, run the script in the background: +# +# bash examples/manual_test.sh & # writer in background +# cat /tmp/lsp_pipe | cargo run --example minimal_lsp +# +# The script opens /tmp/lsp_pipe for writing (exec 3>) and sends each JSON +# packet with a correct Content-Length header. +# +# One-liner alternative (single terminal, no FIFO): +# +# cargo run --example minimal_lsp <<'EOF' +# … nine packets … +# EOF +# +# Both approaches feed identical bytes to minimal_lsp via stdin. + +set -eu +PIPE=${1:-/tmp/lsp_pipe} + +mkfifo -m 600 "$PIPE" 2>/dev/null || true # create once, ignore if exists + +# open write end so the fifo stays open +exec 3> "$PIPE" + +send() { + local body=$1 + local len=$(printf '%s' "$body" | wc -c) + printf 'Content-Length: %d\r\n\r\n%s' "$len" "$body" >&3 +} + +send '{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}' +send '{"jsonrpc":"2.0","method":"initialized","params":{}}' +send '{"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}}' +send '{"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}' +send '{"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}' +send '{"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}}' +send '{"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}}' +send '{"jsonrpc":"2.0","id":6,"method":"shutdown","params":null}' +send '{"jsonrpc":"2.0","method":"exit","params":null}' + +exec 3>&- +echo "Packets sent – watch the other terminal for responses." diff --git a/src/tools/rust-analyzer/lib/lsp-server/examples/minimal_lsp.rs b/src/tools/rust-analyzer/lib/lsp-server/examples/minimal_lsp.rs new file mode 100644 index 00000000000..5eef999e062 --- /dev/null +++ b/src/tools/rust-analyzer/lib/lsp-server/examples/minimal_lsp.rs @@ -0,0 +1,335 @@ +//! Minimal Language‑Server‑Protocol example: **`minimal_lsp.rs`** +//! ============================================================= +//! +//! | ↔ / ← | LSP method | What the implementation does | +//! |-------|------------|------------------------------| +//! 
| ↔ | `initialize` / `initialized` | capability handshake | +//! | ← | `textDocument/publishDiagnostics` | pushes a dummy info diagnostic whenever the buffer changes | +//! | ← | `textDocument/definition` | echoes an empty location array so the jump works | +//! | ← | `textDocument/completion` | offers one hard‑coded item `HelloFromLSP` | +//! | ← | `textDocument/hover` | shows *Hello from minimal_lsp* markdown | +//! | ← | `textDocument/formatting` | pipes the doc through **rustfmt** and returns a full‑file edit | +//! +//! ### Quick start +//! ```bash +//! cd rust-analyzer/lib/lsp-server +//! cargo run --example minimal_lsp +//! ``` +//! +//! ### Minimal manual session (all nine packets) +//! ```no_run +//! # 1. initialize - server replies with capabilities +//! Content-Length: 85 + +//! {"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}} +//! +//! # 2. initialized - no response expected +//! Content-Length: 59 + +//! {"jsonrpc":"2.0","method":"initialized","params":{}} +//! +//! # 3. didOpen - provide initial buffer text +//! Content-Length: 173 + +//! {"jsonrpc":"2.0","method":"textDocument/didOpen","params":{"textDocument":{"uri":"file:///tmp/foo.rs","languageId":"rust","version":1,"text":"fn main( ){println!(\"hi\") }"}}} +//! +//! # 4. completion - expect HelloFromLSP +//! Content-Length: 139 + +//! {"jsonrpc":"2.0","id":2,"method":"textDocument/completion","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}} +//! +//! # 5. hover - expect markdown greeting +//! Content-Length: 135 + +//! {"jsonrpc":"2.0","id":3,"method":"textDocument/hover","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}} +//! +//! # 6. goto-definition - dummy empty array +//! Content-Length: 139 + +//! {"jsonrpc":"2.0","id":4,"method":"textDocument/definition","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"position":{"line":0,"character":0}}} +//! +//! # 7. formatting - rustfmt full document +//! Content-Length: 157 + +//! {"jsonrpc":"2.0","id":5,"method":"textDocument/formatting","params":{"textDocument":{"uri":"file:///tmp/foo.rs"},"options":{"tabSize":4,"insertSpaces":true}}} +//! +//! # 8. shutdown request - server acks and prepares to exit +//! Content-Length: 67 + +//! {"jsonrpc":"2.0","id":6,"method":"shutdown","params":null} +//! +//! # 9. exit notification - terminates the server +//! Content-Length: 54 + +//! {"jsonrpc":"2.0","method":"exit","params":null} +//! ``` +//! 
+ +use std::{error::Error, io::Write}; + +use rustc_hash::FxHashMap; // fast hash map +use std::process::Stdio; +use toolchain::command; // clippy-approved wrapper + +#[allow(clippy::print_stderr, clippy::disallowed_types, clippy::disallowed_methods)] +use anyhow::{Context, Result, anyhow, bail}; +use lsp_server::{Connection, Message, Request as ServerRequest, RequestId, Response}; +use lsp_types::notification::Notification as _; // for METHOD consts +use lsp_types::request::Request as _; +use lsp_types::{ + CompletionItem, + CompletionItemKind, + // capability helpers + CompletionOptions, + CompletionResponse, + Diagnostic, + DiagnosticSeverity, + DidChangeTextDocumentParams, + DidOpenTextDocumentParams, + DocumentFormattingParams, + Hover, + HoverContents, + HoverProviderCapability, + // core + InitializeParams, + MarkedString, + OneOf, + Position, + PublishDiagnosticsParams, + Range, + ServerCapabilities, + TextDocumentSyncCapability, + TextDocumentSyncKind, + TextEdit, + Url, + // notifications + notification::{DidChangeTextDocument, DidOpenTextDocument, PublishDiagnostics}, + // requests + request::{Completion, Formatting, GotoDefinition, HoverRequest}, +}; // for METHOD consts + +// ===================================================================== +// main +// ===================================================================== + +#[allow(clippy::print_stderr)] +fn main() -> std::result::Result<(), Box<dyn Error + Sync + Send>> { + log::error!("starting minimal_lsp"); + + // transport + let (connection, io_thread) = Connection::stdio(); + + // advertised capabilities + let caps = ServerCapabilities { + text_document_sync: Some(TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL)), + completion_provider: Some(CompletionOptions::default()), + definition_provider: Some(OneOf::Left(true)), + hover_provider: Some(HoverProviderCapability::Simple(true)), + document_formatting_provider: Some(OneOf::Left(true)), + ..Default::default() + }; + let init_value = serde_json::json!({ + "capabilities": caps, + "offsetEncoding": ["utf-8"], + }); + + let init_params = connection.initialize(init_value)?; + main_loop(connection, init_params)?; + io_thread.join()?; + log::error!("shutting down server"); + Ok(()) +} + +// ===================================================================== +// event loop +// ===================================================================== + +fn main_loop( + connection: Connection, + params: serde_json::Value, +) -> std::result::Result<(), Box<dyn Error + Sync + Send>> { + let _init: InitializeParams = serde_json::from_value(params)?; + let mut docs: FxHashMap<Url, String> = FxHashMap::default(); + + for msg in &connection.receiver { + match msg { + Message::Request(req) => { + if connection.handle_shutdown(&req)? 
{ + break; + } + if let Err(err) = handle_request(&connection, &req, &mut docs) { + log::error!("[lsp] request {} failed: {err}", &req.method); + } + } + Message::Notification(note) => { + if let Err(err) = handle_notification(&connection, ¬e, &mut docs) { + log::error!("[lsp] notification {} failed: {err}", note.method); + } + } + Message::Response(resp) => log::error!("[lsp] response: {resp:?}"), + } + } + Ok(()) +} + +// ===================================================================== +// notifications +// ===================================================================== + +fn handle_notification( + conn: &Connection, + note: &lsp_server::Notification, + docs: &mut FxHashMap<Url, String>, +) -> Result<()> { + match note.method.as_str() { + DidOpenTextDocument::METHOD => { + let p: DidOpenTextDocumentParams = serde_json::from_value(note.params.clone())?; + let uri = p.text_document.uri; + docs.insert(uri.clone(), p.text_document.text); + publish_dummy_diag(conn, &uri)?; + } + DidChangeTextDocument::METHOD => { + let p: DidChangeTextDocumentParams = serde_json::from_value(note.params.clone())?; + if let Some(change) = p.content_changes.into_iter().next() { + let uri = p.text_document.uri; + docs.insert(uri.clone(), change.text); + publish_dummy_diag(conn, &uri)?; + } + } + _ => {} + } + Ok(()) +} + +// ===================================================================== +// requests +// ===================================================================== + +fn handle_request( + conn: &Connection, + req: &ServerRequest, + docs: &mut FxHashMap<Url, String>, +) -> Result<()> { + match req.method.as_str() { + GotoDefinition::METHOD => { + send_ok(conn, req.id.clone(), &lsp_types::GotoDefinitionResponse::Array(Vec::new()))?; + } + Completion::METHOD => { + let item = CompletionItem { + label: "HelloFromLSP".into(), + kind: Some(CompletionItemKind::FUNCTION), + detail: Some("dummy completion".into()), + ..Default::default() + }; + send_ok(conn, req.id.clone(), &CompletionResponse::Array(vec![item]))?; + } + HoverRequest::METHOD => { + let hover = Hover { + contents: HoverContents::Scalar(MarkedString::String( + "Hello from *minimal_lsp*".into(), + )), + range: None, + }; + send_ok(conn, req.id.clone(), &hover)?; + } + Formatting::METHOD => { + let p: DocumentFormattingParams = serde_json::from_value(req.params.clone())?; + let uri = p.text_document.uri; + let text = docs + .get(&uri) + .ok_or_else(|| anyhow!("document not in cache – did you send DidOpen?"))?; + let formatted = run_rustfmt(text)?; + let edit = TextEdit { range: full_range(text), new_text: formatted }; + send_ok(conn, req.id.clone(), &vec![edit])?; + } + _ => send_err( + conn, + req.id.clone(), + lsp_server::ErrorCode::MethodNotFound, + "unhandled method", + )?, + } + Ok(()) +} + +// ===================================================================== +// diagnostics +// ===================================================================== +fn publish_dummy_diag(conn: &Connection, uri: &Url) -> Result<()> { + let diag = Diagnostic { + range: Range::new(Position::new(0, 0), Position::new(0, 1)), + severity: Some(DiagnosticSeverity::INFORMATION), + code: None, + code_description: None, + source: Some("minimal_lsp".into()), + message: "dummy diagnostic".into(), + related_information: None, + tags: None, + data: None, + }; + let params = + PublishDiagnosticsParams { uri: uri.clone(), diagnostics: vec![diag], version: None }; + conn.sender.send(Message::Notification(lsp_server::Notification::new( + 
PublishDiagnostics::METHOD.to_owned(), + params, + )))?; + Ok(()) +} + +// ===================================================================== +// helpers +// ===================================================================== + +fn run_rustfmt(input: &str) -> Result<String> { + let cwd = std::env::current_dir().expect("can't determine CWD"); + let mut child = command("rustfmt", &cwd, &FxHashMap::default()) + .arg("--emit") + .arg("stdout") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()) + .spawn() + .context("failed to spawn rustfmt – is it installed?")?; + + let Some(stdin) = child.stdin.as_mut() else { + bail!("stdin unavailable"); + }; + stdin.write_all(input.as_bytes())?; + let output = child.wait_with_output()?; + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + bail!("rustfmt failed: {stderr}"); + } + Ok(String::from_utf8(output.stdout)?) +} + +fn full_range(text: &str) -> Range { + let last_line_idx = text.lines().count().saturating_sub(1) as u32; + let last_col = text.lines().last().map_or(0, |l| l.chars().count()) as u32; + Range::new(Position::new(0, 0), Position::new(last_line_idx, last_col)) +} + +fn send_ok<T: serde::Serialize>(conn: &Connection, id: RequestId, result: &T) -> Result<()> { + let resp = Response { id, result: Some(serde_json::to_value(result)?), error: None }; + conn.sender.send(Message::Response(resp))?; + Ok(()) +} + +fn send_err( + conn: &Connection, + id: RequestId, + code: lsp_server::ErrorCode, + msg: &str, +) -> Result<()> { + let resp = Response { + id, + result: None, + error: Some(lsp_server::ResponseError { + code: code as i32, + message: msg.into(), + data: None, + }), + }; + conn.sender.send(Message::Response(resp))?; + Ok(()) +} diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 902793225ea..57ff326ce5a 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -ad3b7257615c28aaf8212a189ec032b8af75de51 +a9fb6103b05c6ad6eee6bed4c0bb5a2e8e1024c6 diff --git a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs index d8cbf894520..b9f570fe0e3 100644 --- a/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs +++ b/src/tools/rust-analyzer/xtask/src/codegen/grammar/ast_src.rs @@ -116,6 +116,8 @@ const CONTEXTUAL_KEYWORDS: &[&str] = // keywords we use for special macro expansions const CONTEXTUAL_BUILTIN_KEYWORDS: &[&str] = &[ "asm", + "naked_asm", + "global_asm", "att_syntax", "builtin", "clobber_abi", diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock index 050ddf47bae..27798d6aeb0 100644 --- a/src/tools/rustbook/Cargo.lock +++ b/src/tools/rustbook/Cargo.lock @@ -885,9 +885,9 @@ dependencies = [ [[package]] name = "mdbook" -version = "0.4.51" +version = "0.4.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a87e65420ab45ca9c1b8cdf698f95b710cc826d373fa550f0f7fad82beac9328" +checksum = "93c284d2855916af7c5919cf9ad897cfc77d3c2db6f55429c7cfb769182030ec" dependencies = [ "ammonia", "anyhow", diff --git a/src/tools/rustbook/Cargo.toml b/src/tools/rustbook/Cargo.toml index 69c0cfaf5c9..c7c6e39f157 100644 --- a/src/tools/rustbook/Cargo.toml +++ b/src/tools/rustbook/Cargo.toml @@ -15,6 +15,6 @@ mdbook-i18n-helpers = "0.3.3" mdbook-spec = { path = "../../doc/reference/mdbook-spec" } [dependencies.mdbook] -version = "0.4.51" +version = "0.4.52" default-features = false 
features = ["search"] diff --git a/src/tools/rustdoc-gui-test/src/main.rs b/src/tools/rustdoc-gui-test/src/main.rs index 0e35861fbf7..5b86bea8932 100644 --- a/src/tools/rustdoc-gui-test/src/main.rs +++ b/src/tools/rustdoc-gui-test/src/main.rs @@ -1,63 +1,15 @@ +use std::env; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::Arc; -use std::{env, fs}; +use build_helper::npm; use build_helper::util::try_run; use compiletest::directives::TestProps; use config::Config; mod config; -fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String> { - let mut command = Command::new(&npm); - command.arg("list").arg("--parseable").arg("--long").arg("--depth=0"); - if global { - command.arg("--global"); - } - let lines = match command.output() { - Ok(output) => String::from_utf8_lossy(&output.stdout).into_owned(), - Err(e) => { - eprintln!( - "path to npm can be wrong, provided path: {npm:?}. Try to set npm path \ - in bootstrap.toml in [build.npm]", - ); - panic!("{:?}", e) - } - }; - lines - .lines() - .find_map(|l| l.rsplit(':').next()?.strip_prefix("browser-ui-test@")) - .map(|v| v.to_owned()) -} - -fn get_browser_ui_test_version(npm: &Path) -> Option<String> { - get_browser_ui_test_version_inner(npm, false) - .or_else(|| get_browser_ui_test_version_inner(npm, true)) -} - -fn compare_browser_ui_test_version(installed_version: &str, src: &Path) { - match fs::read_to_string( - src.join("src/ci/docker/host-x86_64/x86_64-gnu-tools/browser-ui-test.version"), - ) { - Ok(v) => { - if v.trim() != installed_version { - eprintln!( - "⚠️ Installed version of browser-ui-test (`{}`) is different than the \ - one used in the CI (`{}`)", - installed_version, v - ); - eprintln!( - "You can install this version using `npm update browser-ui-test` or by using \ - `npm install browser-ui-test@{}`", - v, - ); - } - } - Err(e) => eprintln!("Couldn't find the CI browser-ui-test version: {:?}", e), - } -} - fn find_librs<P: AsRef<Path>>(path: P) -> Option<PathBuf> { for entry in walkdir::WalkDir::new(path) { let entry = entry.ok()?; @@ -71,27 +23,6 @@ fn find_librs<P: AsRef<Path>>(path: P) -> Option<PathBuf> { fn main() -> Result<(), ()> { let config = Arc::new(Config::from_args(env::args().collect())); - // The goal here is to check if the necessary packages are installed, and if not, we - // panic. - match get_browser_ui_test_version(&config.npm) { - Some(version) => { - // We also check the version currently used in CI and emit a warning if it's not the - // same one. - compare_browser_ui_test_version(&version, &config.rust_src); - } - None => { - eprintln!( - r#" -error: rustdoc-gui test suite cannot be run because npm `browser-ui-test` dependency is missing. 
- -If you want to install the `browser-ui-test` dependency, run `npm install browser-ui-test` -"#, - ); - - panic!("Cannot run rustdoc-gui tests"); - } - } - let src_path = config.rust_src.join("tests/rustdoc-gui/src"); for entry in src_path.read_dir().expect("read_dir call failed") { if let Ok(entry) = entry { @@ -134,16 +65,12 @@ If you want to install the `browser-ui-test` dependency, run `npm install browse } } - let mut command = Command::new(&config.nodejs); + // FIXME(binarycat): once we get package.json in version control, this should be updated to install via that instead + let local_node_modules = + npm::install_one(&config.out_dir, &config.npm, "browser-ui-test", "0.21.1") + .expect("unable to install browser-ui-test"); - if let Ok(current_dir) = env::current_dir() { - let local_node_modules = current_dir.join("node_modules"); - if local_node_modules.exists() { - // Link the local node_modules if exists. - // This is useful when we run rustdoc-gui-test from outside of the source root. - env::set_var("NODE_PATH", local_node_modules); - } - } + let mut command = Command::new(&config.nodejs); command .arg(config.rust_src.join("src/tools/rustdoc-gui/tester.js")) @@ -154,6 +81,12 @@ If you want to install the `browser-ui-test` dependency, run `npm install browse .arg("--tests-folder") .arg(config.rust_src.join("tests/rustdoc-gui")); + if local_node_modules.exists() { + // Link the local node_modules if exists. + // This is useful when we run rustdoc-gui-test from outside of the source root. + command.env("NODE_PATH", local_node_modules); + } + for file in &config.goml_files { command.arg("--file").arg(file); } diff --git a/src/tools/rustfmt/src/items.rs b/src/tools/rustfmt/src/items.rs index 1a3897b51cb..7084639aca9 100644 --- a/src/tools/rustfmt/src/items.rs +++ b/src/tools/rustfmt/src/items.rs @@ -1172,6 +1172,7 @@ pub(crate) fn format_trait( unreachable!(); }; let ast::Trait { + constness, is_auto, safety, ident, @@ -1182,7 +1183,8 @@ pub(crate) fn format_trait( let mut result = String::with_capacity(128); let header = format!( - "{}{}{}trait ", + "{}{}{}{}trait ", + format_constness(constness), format_visibility(context, &item.vis), format_safety(safety), format_auto(is_auto), diff --git a/src/tools/tidy/src/filenames.rs b/src/tools/tidy/src/filenames.rs new file mode 100644 index 00000000000..53115f4eaa4 --- /dev/null +++ b/src/tools/tidy/src/filenames.rs @@ -0,0 +1,40 @@ +//! Tidy check to ensure that there are no filenames containing forbidden characters +//! checked into the source tree by accident: +//! - Non-UTF8 filenames +//! - Control characters such as CR or TAB +//! - Filenames containing ":" as they are not supported on Windows +//! +//! Only files added to git are checked, as it may be acceptable to have temporary +//! invalid filenames in the local directory during development. 
+ +use std::path::Path; +use std::process::Command; + +pub fn check(root_path: &Path, bad: &mut bool) { + let stat_output = Command::new("git") + .arg("-C") + .arg(root_path) + .args(["ls-files", "-z"]) + .output() + .unwrap() + .stdout; + for filename in stat_output.split(|&b| b == 0) { + match str::from_utf8(filename) { + Err(_) => tidy_error!( + bad, + r#"non-UTF8 file names are not supported: "{}""#, + String::from_utf8_lossy(filename), + ), + Ok(name) if name.chars().any(|c| c.is_control()) => tidy_error!( + bad, + r#"control characters are not supported in file names: "{}""#, + String::from_utf8_lossy(filename), + ), + Ok(name) if name.contains(':') => tidy_error!( + bad, + r#"":" is not supported in file names because of Windows compatibility: "{name}""#, + ), + _ => (), + } + } +} diff --git a/src/tools/tidy/src/issues.txt b/src/tools/tidy/src/issues.txt index 8b57db23d01..77414bec82d 100644 --- a/src/tools/tidy/src/issues.txt +++ b/src/tools/tidy/src/issues.txt @@ -1021,7 +1021,6 @@ ui/foreign/issue-91370-foreign-fn-block-impl.rs ui/foreign/issue-99276-same-type-lifetimes.rs ui/function-pointer/issue-102289.rs ui/functions-closures/closure-expected-type/issue-38714.rs -ui/generic-associated-types/bugs/issue-100013.rs ui/generic-associated-types/bugs/issue-80626.rs ui/generic-associated-types/bugs/issue-87735.rs ui/generic-associated-types/bugs/issue-87755.rs @@ -1099,7 +1098,6 @@ ui/generic-associated-types/issue-90729.rs ui/generic-associated-types/issue-91139.rs ui/generic-associated-types/issue-91883.rs ui/generic-associated-types/issue-92033.rs -ui/generic-associated-types/issue-92096.rs ui/generic-associated-types/issue-92280.rs ui/generic-associated-types/issue-92954.rs ui/generic-associated-types/issue-93141.rs diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index 77855392b4d..ade4055b5bd 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -167,6 +167,7 @@ pub mod error_codes; pub mod ext_tool_checks; pub mod extdeps; pub mod features; +pub mod filenames; pub mod fluent_alphabetical; pub mod fluent_period; mod fluent_used; diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index a67f7a511b5..0f1116a632e 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -155,6 +155,8 @@ fn main() { check!(triagebot, &root_path); + check!(filenames, &root_path); + let collected = { drain_handles(&mut handles); |
