diff options
| author | Alexander Regueiro <alexreg@me.com> | 2019-02-08 14:53:55 +0100 |
|---|---|---|
| committer | Alexander Regueiro <alexreg@me.com> | 2019-02-10 23:42:32 +0000 |
| commit | c3e182cf43aea2c010a1915eb37293a458df2228 (patch) | |
| tree | 225aa2dfceff56d10c0b31f6966fbf7ec5da8180 | |
| parent | 0b7af2668a80fb2fa720a06ca44aff4dd1e9de38 (diff) | |
| download | rust-c3e182cf43aea2c010a1915eb37293a458df2228.tar.gz rust-c3e182cf43aea2c010a1915eb37293a458df2228.zip | |
rustc: doc comments
343 files changed, 2260 insertions, 2241 deletions
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index f512e1d7a0c..78ba1d376be 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -60,17 +60,17 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { /// Run this rule for all hosts without cross compiling. const ONLY_HOSTS: bool = false; - /// Primary function to execute this rule. Can call `builder.ensure(...)` + /// Primary function to execute this rule. Can call `builder.ensure()` /// with other steps to run those. fn run(self, builder: &Builder) -> Self::Output; /// When bootstrap is passed a set of paths, this controls whether this rule /// will execute. However, it does not get called in a "default" context - /// when we are not passed any paths; in that case, make_run is called + /// when we are not passed any paths; in that case, `make_run` is called /// directly. fn should_run(run: ShouldRun) -> ShouldRun; - /// Build up a "root" rule, either as a default rule or from a path passed + /// Builds up a "root" rule, either as a default rule or from a path passed /// to us. /// /// When path is `None`, we are executing in a context where no paths were @@ -648,7 +648,7 @@ impl<'a> Builder<'a> { add_lib_path(vec![self.rustc_libdir(compiler)], cmd); } - /// Get a path to the compiler specified. + /// Gets a path to the compiler specified. pub fn rustc(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { self.initial_rustc.clone() @@ -659,7 +659,7 @@ impl<'a> Builder<'a> { } } - /// Get the paths to all of the compiler's codegen backends. + /// Gets the paths to all of the compiler's codegen backends. fn codegen_backends(&self, compiler: Compiler) -> impl Iterator<Item = PathBuf> { fs::read_dir(self.sysroot_codegen_backends(compiler)) .into_iter() diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs index ea8bc657a57..5f84816789a 100644 --- a/src/bootstrap/cache.rs +++ b/src/bootstrap/cache.rs @@ -227,10 +227,10 @@ lazy_static! 
{ pub static ref INTERNER: Interner = Interner::default(); } -/// This is essentially a HashMap which allows storing any type in its input and +/// This is essentially a `HashMap` which allows storing any type in its input and /// any type in its output. It is a write-once cache; values are never evicted, /// which means that references to the value can safely be returned from the -/// get() method. +/// `get()` method. #[derive(Debug)] pub struct Cache( RefCell<HashMap< diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index cc539d4c895..2a2533a3c14 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -66,7 +66,7 @@ impl Step for Rustc { }); } - /// Build the compiler. + /// Builds the compiler. /// /// This will build the compiler for a particular stage of the build using /// the `compiler` targeting the `target` architecture. The artifacts diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 74a2b7e4aa9..b52e1a7b0e6 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -3,7 +3,7 @@ //! Responsible for cleaning out a build directory of all old and stale //! artifacts to prepare for a fresh build. Currently doesn't remove the //! `build/cache` directory (download cache) or the `build/$target/llvm` -//! directory unless the --all flag is present. +//! directory unless the `--all` flag is present. use std::fs; use std::io::{self, ErrorKind}; diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index ddae3cb0d60..8fabb8c3fd0 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -48,7 +48,7 @@ impl Step for Std { }); } - /// Build the standard library. + /// Builds the standard library. /// /// This will build the standard library for a particular stage of the build /// using the `compiler` targeting the `target` architecture. 
The artifacts @@ -269,7 +269,7 @@ impl Step for StartupObjects { }); } - /// Build and prepare startup objects like rsbegin.o and rsend.o + /// Builds and prepare startup objects like rsbegin.o and rsend.o /// /// These are primarily used on Windows right now for linking executables/dlls. /// They don't require any library support as they're just plain old object @@ -334,7 +334,7 @@ impl Step for Test { }); } - /// Build libtest. + /// Builds libtest. /// /// This will build libtest and supporting libraries for a particular stage of /// the build using the `compiler` targeting the `target` architecture. The @@ -455,7 +455,7 @@ impl Step for Rustc { }); } - /// Build the compiler. + /// Builds the compiler. /// /// This will build the compiler for a particular stage of the build using /// the `compiler` targeting the `target` architecture. The artifacts diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index d9bf95d13ac..bc1fdad356b 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -342,7 +342,7 @@ impl Step for Mingw { run.builder.ensure(Mingw { host: run.target }); } - /// Build the `rust-mingw` installer component. + /// Builds the `rust-mingw` installer component. /// /// This contains all the bits and pieces to run the MinGW Windows targets /// without any extra installed software (e.g., we bundle gcc, libraries, etc). diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index f1d8fca71cd..d14b23e5988 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -259,7 +259,7 @@ impl Step for TheBook { }); } - /// Build the book and associated stuff. + /// Builds the book and associated stuff. /// /// We need to build: /// @@ -611,7 +611,7 @@ impl Step for WhitelistedRustc { }); } - /// Generate whitelisted compiler crate documentation. + /// Generates whitelisted compiler crate documentation. /// /// This will generate all documentation for crates that are whitelisted /// to be included in the standard documentation. 
This documentation is @@ -683,7 +683,7 @@ impl Step for Rustc { }); } - /// Generate compiler documentation. + /// Generates compiler documentation. /// /// This will generate all documentation for compiler and dependencies. /// Compiler documentation is distributed separately, so we make sure @@ -784,7 +784,7 @@ impl Step for Rustdoc { }); } - /// Generate compiler documentation. + /// Generates compiler documentation. /// /// This will generate all documentation for compiler and dependencies. /// Compiler documentation is distributed separately, so we make sure diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 1aa2e116a5a..6a93c95c3d9 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -69,7 +69,7 @@ //! ## Copying stage0 {std,test,rustc} //! //! This copies the build output from Cargo into -//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: This step's +//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's //! documentation should be expanded -- the information already here may be //! incorrect. //! @@ -504,7 +504,7 @@ impl Build { cleared } - /// Get the space-separated set of activated features for the standard + /// Gets the space-separated set of activated features for the standard /// library. fn std_features(&self) -> String { let mut features = "panic-unwind".to_string(); @@ -521,7 +521,7 @@ impl Build { features } - /// Get the space-separated set of activated features for the compiler. + /// Gets the space-separated set of activated features for the compiler. fn rustc_features(&self) -> String { let mut features = String::new(); if self.config.jemalloc { @@ -609,7 +609,7 @@ impl Build { self.out.join(&*target).join("crate-docs") } - /// Returns true if no custom `llvm-config` is set for the specified target. + /// Returns `true` if no custom `llvm-config` is set for the specified target. /// /// If no custom `llvm-config` was specified then Rust's llvm will be used. 
fn is_rust_llvm(&self, target: Interned<String>) -> bool { @@ -857,13 +857,13 @@ impl Build { .map(|p| &**p) } - /// Returns true if this is a no-std `target`, if defined + /// Returns `true` if this is a no-std `target`, if defined fn no_std(&self, target: Interned<String>) -> Option<bool> { self.config.target_config.get(&target) .map(|t| t.no_std) } - /// Returns whether the target will be tested using the `remote-test-client` + /// Returns `true` if the target will be tested using the `remote-test-client` /// and `remote-test-server` binaries. fn remote_tested(&self, target: Interned<String>) -> bool { self.qemu_rootfs(target).is_some() || target.contains("android") || @@ -1059,7 +1059,7 @@ impl Build { self.rust_info.version(self, channel::CFG_RELEASE_NUM) } - /// Return the full commit hash + /// Returns the full commit hash. fn rust_sha(&self) -> Option<&str> { self.rust_info.sha() } @@ -1079,7 +1079,7 @@ impl Build { panic!("failed to find version in {}'s Cargo.toml", package) } - /// Returns whether unstable features should be enabled for the compiler + /// Returns `true` if unstable features should be enabled for the compiler /// we're building. fn unstable_features(&self) -> bool { match &self.config.channel[..] { @@ -1327,7 +1327,7 @@ impl<'a> Compiler { self } - /// Returns whether this is a snapshot compiler for `build`'s configuration + /// Returns `true` if this is a snapshot compiler for `build`'s configuration pub fn is_snapshot(&self, build: &Build) -> bool { self.stage == 0 && self.host == build.build } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index bb00f6f6251..a882550f734 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -30,9 +30,9 @@ const ADB_TEST_DIR: &str = "/data/tmp/work"; /// The two modes of the test runner; tests or benchmarks. #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)] pub enum TestKind { - /// Run `cargo test` + /// Run `cargo test`. 
Test, - /// Run `cargo bench` + /// Run `cargo bench`. Bench, } @@ -1288,7 +1288,7 @@ impl Step for DocTest { run.never() } - /// Run `rustdoc --test` for all documentation in `src/doc`. + /// Runs `rustdoc --test` for all documentation in `src/doc`. /// /// This will run all tests in our markdown documentation (e.g., the book) /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to @@ -1408,7 +1408,7 @@ impl Step for ErrorIndex { }); } - /// Run the error index generator tool to execute the tests located in the error + /// Runs the error index generator tool to execute the tests located in the error /// index. /// /// The `error_index_generator` tool lives in `src/tools` and is used to @@ -1614,7 +1614,7 @@ impl Step for Crate { } } - /// Run all unit tests plus documentation tests for a given crate defined + /// Runs all unit tests plus documentation tests for a given crate defined /// by a `Cargo.toml` (single manifest) /// /// This is what runs tests for crates like the standard library, compiler, etc. @@ -1833,7 +1833,7 @@ fn envify(s: &str) -> String { /// the standard library and such to the emulator ahead of time. This step /// represents this and is a dependency of all test suites. /// -/// Most of the time this is a noop. For some steps such as shipping data to +/// Most of the time this is a no-op. For some steps such as shipping data to /// QEMU we have to build our own tools so we've got conditional dependencies /// on those programs as well. Note that the remote test client is built for /// the build target (us) and the server is built for the target. 
@@ -1904,7 +1904,7 @@ impl Step for Distcheck { run.builder.ensure(Distcheck); } - /// Run "distcheck", a 'make check' from a tarball + /// Runs "distcheck", a 'make check' from a tarball fn run(self, builder: &Builder) { builder.info("Distcheck"); let dir = builder.out.join("tmp").join("distcheck"); @@ -1965,7 +1965,7 @@ impl Step for Bootstrap { const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - /// Test the build system itself + /// Tests the build system itself. fn run(self, builder: &Builder) { let mut cmd = Command::new(&builder.initial_cargo); cmd.arg("test") diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index cd3afc59e56..c09e9332895 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -40,7 +40,7 @@ impl Step for ToolBuild { run.never() } - /// Build a tool in `src/tools` + /// Builds a tool in `src/tools` /// /// This will build the specified tool with the specified `host` compiler in /// `stage` into the normal cargo output directory. @@ -621,7 +621,7 @@ tool_extended!((self, builder), ); impl<'a> Builder<'a> { - /// Get a `Command` which is ready to run `tool` in `stage` built for + /// Gets a `Command` which is ready to run `tool` in `stage` built for /// `host`. pub fn tool_cmd(&self, tool: Tool) -> Command { let mut cmd = Command::new(self.tool_exe(tool)); diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 37c6c040da8..29aa98971fb 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -33,7 +33,7 @@ pub fn exe(name: &str, target: &str) -> String { } } -/// Returns whether the file name given looks like a dynamic library. +/// Returns `true` if the file name given looks like a dynamic library. 
pub fn is_dylib(name: &str) -> bool { name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll") } diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 93aa9176812..bd99dc118e6 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -163,7 +163,7 @@ pub fn mtime(path: &Path) -> SystemTime { .unwrap_or(UNIX_EPOCH) } -/// Returns whether `dst` is up to date given that the file or files in `src` +/// Returns `true` if `dst` is up to date given that the file or files in `src` /// are used to generate it. /// /// Uses last-modified time checks to verify this. @@ -190,12 +190,12 @@ pub struct NativeLibBoilerplate { } impl NativeLibBoilerplate { - /// On OSX we don't want to ship the exact filename that compiler-rt builds. + /// On macOS we don't want to ship the exact filename that compiler-rt builds. /// This conflicts with the system and ours is likely a wildly different /// version, so they can't be substituted. /// /// As a result, we rename it here but we need to also use - /// `install_name_tool` on OSX to rename the commands listed inside of it to + /// `install_name_tool` on macOS to rename the commands listed inside of it to /// ensure it's linked against correctly. pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) { if env::var("TARGET").unwrap() != "x86_64-apple-darwin" { diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index e9190cc3ddf..81c351be305 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -1,6 +1,6 @@ -//! String manipulation +//! String manipulation. //! -//! For more details, see std::str +//! For more details, see the `std::str` module. #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcore/str/pattern.rs b/src/libcore/str/pattern.rs index 55a7ba181e5..e5a75cdbbcc 100644 --- a/src/libcore/str/pattern.rs +++ b/src/libcore/str/pattern.rs @@ -1,7 +1,7 @@ //! The string Pattern API. //! -//! 
For more details, see the traits `Pattern`, `Searcher`, -//! `ReverseSearcher` and `DoubleEndedSearcher`. +//! For more details, see the traits [`Pattern`], [`Searcher`], +//! [`ReverseSearcher`], and [`DoubleEndedSearcher`]. #![unstable(feature = "pattern", reason = "API not fully fleshed out and ready to be stabilized", diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 8ce0f755df0..a445e70ca99 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -392,7 +392,7 @@ impl<'a> Id<'a> { /// digit (i.e., the regular expression `[a-zA-Z_][a-zA-Z_0-9]*`). /// /// (Note: this format is a strict subset of the `ID` format - /// defined by the DOT language. This function may change in the + /// defined by the DOT language. This function may change in the /// future to accept a broader subset, or the entirety, of DOT's /// `ID` format.) /// @@ -529,7 +529,7 @@ impl<'a> LabelText<'a> { } /// Decomposes content into string suitable for making EscStr that - /// yields same content as self. The result obeys the law + /// yields same content as self. The result obeys the law /// render(`lt`) == render(`EscStr(lt.pre_escaped_content())`) for /// all `lt: LabelText`. fn pre_escaped_content(self) -> Cow<'a, str> { diff --git a/src/libpanic_unwind/dummy.rs b/src/libpanic_unwind/dummy.rs index b052f76e2a3..3a00d637665 100644 --- a/src/libpanic_unwind/dummy.rs +++ b/src/libpanic_unwind/dummy.rs @@ -1,6 +1,6 @@ -//! Unwinding for wasm32 +//! Unwinding for *wasm32* target. //! -//! Right now we don't support this, so this is just stubs +//! Right now we don't support this, so this is just stubs. use alloc::boxed::Box; use core::any::Any; diff --git a/src/libpanic_unwind/dwarf/eh.rs b/src/libpanic_unwind/dwarf/eh.rs index ce7fab8584a..ce24406b556 100644 --- a/src/libpanic_unwind/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -6,7 +6,7 @@ //! http://www.airs.com/blog/archives/464 //! //! A reference implementation may be found in the GCC source tree -//! 
(<root>/libgcc/unwind-c.c as of this writing) +//! (`<root>/libgcc/unwind-c.c` as of this writing). #![allow(non_upper_case_globals)] #![allow(unused)] diff --git a/src/libpanic_unwind/dwarf/mod.rs b/src/libpanic_unwind/dwarf/mod.rs index eb5fb81f61b..0360696426d 100644 --- a/src/libpanic_unwind/dwarf/mod.rs +++ b/src/libpanic_unwind/dwarf/mod.rs @@ -1,5 +1,5 @@ //! Utilities for parsing DWARF-encoded data streams. -//! See http://www.dwarfstd.org, +//! See <http://www.dwarfstd.org>, //! DWARF-4 standard, Section 7 - "Data Representation" // This module is used only by x86_64-pc-windows-gnu for now, but we diff --git a/src/libpanic_unwind/emcc.rs b/src/libpanic_unwind/emcc.rs index 45c9244a46f..1f5ccfb0f12 100644 --- a/src/libpanic_unwind/emcc.rs +++ b/src/libpanic_unwind/emcc.rs @@ -1,9 +1,9 @@ -//! Unwinding for emscripten +//! Unwinding for *emscripten* target. //! //! Whereas Rust's usual unwinding implementation for Unix platforms -//! calls into the libunwind APIs directly, on emscripten we instead +//! calls into the libunwind APIs directly, on Emscripten we instead //! call into the C++ unwinding APIs. This is just an expedience since -//! emscripten's runtime always implements those APIs and does not +//! Emscripten's runtime always implements those APIs and does not //! implement libunwind. #![allow(private_no_mangle_fns)] diff --git a/src/libpanic_unwind/gcc.rs b/src/libpanic_unwind/gcc.rs index 065403aba1b..607fe28e3f2 100644 --- a/src/libpanic_unwind/gcc.rs +++ b/src/libpanic_unwind/gcc.rs @@ -1,4 +1,4 @@ -//! Implementation of panics backed by libgcc/libunwind (in some form) +//! Implementation of panics backed by libgcc/libunwind (in some form). //! //! For background on exception handling and stack unwinding please see //! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and @@ -23,14 +23,14 @@ //! //! In the search phase, the job of a personality routine is to examine //! 
exception object being thrown, and to decide whether it should be caught at -//! that stack frame. Once the handler frame has been identified, cleanup phase +//! that stack frame. Once the handler frame has been identified, cleanup phase //! begins. //! //! In the cleanup phase, the unwinder invokes each personality routine again. //! This time it decides which (if any) cleanup code needs to be run for -//! the current stack frame. If so, the control is transferred to a special +//! the current stack frame. If so, the control is transferred to a special //! branch in the function body, the "landing pad", which invokes destructors, -//! frees memory, etc. At the end of the landing pad, control is transferred +//! frees memory, etc. At the end of the landing pad, control is transferred //! back to the unwinder and unwinding resumes. //! //! Once stack has been unwound down to the handler frame level, unwinding stops @@ -39,7 +39,7 @@ //! ## `eh_personality` and `eh_unwind_resume` //! //! These language items are used by the compiler when generating unwind info. -//! The first one is the personality routine described above. The second one +//! The first one is the personality routine described above. The second one //! allows compilation target to customize the process of resuming unwind at the //! end of the landing pads. `eh_unwind_resume` is used only if //! `custom_unwind_resume` flag in the target options is set. diff --git a/src/librustc/dep_graph/debug.rs b/src/librustc/dep_graph/debug.rs index a9ad22c5e91..f18ee3dced7 100644 --- a/src/librustc/dep_graph/debug.rs +++ b/src/librustc/dep_graph/debug.rs @@ -22,7 +22,7 @@ impl DepNodeFilter { } } - /// True if all nodes always pass the filter. + /// Returns `true` if all nodes always pass the filter. 
pub fn accepts_all(&self) -> bool { self.text.is_empty() } diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 58087b76266..796739c8721 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -302,7 +302,7 @@ macro_rules! define_dep_nodes { } } - /// Create a new, parameterless DepNode. This method will assert + /// Creates a new, parameterless DepNode. This method will assert /// that the DepNode corresponding to the given DepKind actually /// does not require any parameters. #[inline(always)] @@ -314,7 +314,7 @@ macro_rules! define_dep_nodes { } } - /// Extract the DefId corresponding to this DepNode. This will work + /// Extracts the DefId corresponding to this DepNode. This will work /// if two conditions are met: /// /// 1. The Fingerprint of the DepNode actually is a DefPathHash, and @@ -798,7 +798,7 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId { } /// A "work product" corresponds to a `.o` (or other) file that we -/// save in between runs. These ids do not have a DefId but rather +/// save in between runs. These IDs do not have a `DefId` but rather /// some independent path or string that persists between runs without /// the need to be mapped or unmapped. (This ensures we can serialize /// them even in the absence of a tcx.) diff --git a/src/librustc/dep_graph/dep_tracking_map.rs b/src/librustc/dep_graph/dep_tracking_map.rs index a296a3379c2..94b832bea62 100644 --- a/src/librustc/dep_graph/dep_tracking_map.rs +++ b/src/librustc/dep_graph/dep_tracking_map.rs @@ -43,7 +43,7 @@ impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> { /// /// Here, `[op]` represents whatever nodes `op` reads in the /// course of execution; `Map(key)` represents the node for this - /// map; and `CurrentTask` represents the current task when + /// map, and `CurrentTask` represents the current task when /// `memoize` is invoked. 
/// /// **Important:** when `op` is invoked, the current task will be diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index e8c1cd36064..59ec459de96 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -61,13 +61,13 @@ struct DepGraphData { colors: DepNodeColorMap, - /// A set of loaded diagnostics which has been emitted. + /// A set of loaded diagnostics that have been emitted. emitted_diagnostics: Mutex<FxHashSet<DepNodeIndex>>, /// Used to wait for diagnostics to be emitted. emitted_diagnostics_cond_var: Condvar, - /// When we load, there may be `.o` files, cached mir, or other such + /// When we load, there may be `.o` files, cached MIR, or other such /// things available to us. If we find that they are not dirty, we /// load the path to the file storing those work-products here into /// this map. We can later look for and extract that data. @@ -115,7 +115,7 @@ impl DepGraph { } } - /// True if we are actually building the full dep-graph. + /// Returns `true` if we are actually building the full dep-graph, and `false` otherwise. #[inline] pub fn is_fully_enabled(&self) -> bool { self.data.is_some() @@ -320,8 +320,8 @@ impl DepGraph { } } - /// Execute something within an "anonymous" task, that is, a task the - /// DepNode of which is determined by the list of inputs it read from. + /// Executes something within an "anonymous" task, that is, a task the + /// `DepNode` of which is determined by the list of inputs it read from. pub fn with_anon_task<OP,R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex) where OP: FnOnce() -> R { @@ -356,8 +356,8 @@ impl DepGraph { } } - /// Execute something within an "eval-always" task which is a task - // that runs whenever anything changes. + /// Executes something within an "eval-always" task which is a task + /// that runs whenever anything changes. 
pub fn with_eval_always_task<'a, C, A, R>( &self, key: DepNode, @@ -438,7 +438,7 @@ impl DepGraph { self.data.as_ref().unwrap().previous.node_to_index(dep_node) } - /// Check whether a previous work product exists for `v` and, if + /// Checks whether a previous work product exists for `v` and, if /// so, return the path that leads to it. Used to skip doing work. pub fn previous_work_product(&self, v: &WorkProductId) -> Option<WorkProduct> { self.data @@ -589,7 +589,7 @@ impl DepGraph { } } - /// Try to mark a dep-node which existed in the previous compilation session as green + /// Try to mark a dep-node which existed in the previous compilation session as green. fn try_mark_previous_green<'tcx>( &self, tcx: TyCtxt<'_, 'tcx, 'tcx>, @@ -773,8 +773,8 @@ impl DepGraph { Some(dep_node_index) } - /// Atomically emits some loaded diagnotics assuming that this only gets called with - /// did_allocation set to true on one thread + /// Atomically emits some loaded diagnotics, assuming that this only gets called with + /// `did_allocation` set to `true` on a single thread. #[cold] #[inline(never)] fn emit_diagnostics<'tcx>( @@ -913,7 +913,7 @@ impl DepGraph { #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct WorkProduct { pub cgu_name: String, - /// Saved files associated with this CGU + /// Saved files associated with this CGU. pub saved_files: Vec<(WorkProductFileKind, String)>, } @@ -937,17 +937,17 @@ pub(super) struct CurrentDepGraph { #[allow(dead_code)] forbidden_edge: Option<EdgeFilter>, - // Anonymous DepNodes are nodes the ID of which we compute from the list of - // their edges. This has the beneficial side-effect that multiple anonymous - // nodes can be coalesced into one without changing the semantics of the - // dependency graph. 
However, the merging of nodes can lead to a subtle - // problem during red-green marking: The color of an anonymous node from - // the current session might "shadow" the color of the node with the same - // ID from the previous session. In order to side-step this problem, we make - // sure that anon-node IDs allocated in different sessions don't overlap. - // This is implemented by mixing a session-key into the ID fingerprint of - // each anon node. The session-key is just a random number generated when - // the DepGraph is created. + /// Anonymous `DepNode`s are nodes whose IDs we compute from the list of + /// their edges. This has the beneficial side-effect that multiple anonymous + /// nodes can be coalesced into one without changing the semantics of the + /// dependency graph. However, the merging of nodes can lead to a subtle + /// problem during red-green marking: The color of an anonymous node from + /// the current session might "shadow" the color of the node with the same + /// ID from the previous session. In order to side-step this problem, we make + /// sure that anonymous `NodeId`s allocated in different sessions don't overlap. + /// This is implemented by mixing a session-key into the ID fingerprint of + /// each anon node. The session-key is just a random number generated when + /// the `DepGraph` is created. anon_id_seed: Fingerprint, total_read_count: u64, diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index ba340ad251f..ddc1eebe645 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -91,7 +91,7 @@ struct CheckAttrVisitor<'a, 'tcx: 'a> { } impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { - /// Check any attribute. + /// Checks any attribute. 
fn check_attributes(&self, item: &hir::Item, target: Target) { if target == Target::Fn || target == Target::Const { self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.id)); @@ -115,7 +115,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { self.check_used(item, target); } - /// Check if an `#[inline]` is applied to a function or a closure. + /// Checks if an `#[inline]` is applied to a function or a closure. fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) { if target != Target::Fn && target != Target::Closure { struct_span_err!(self.tcx.sess, @@ -127,7 +127,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[non_exhaustive]` attribute on an `item` is valid. + /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { Target::Struct | Target::Enum => { /* Valid */ }, @@ -143,7 +143,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[marker]` attribute on an `item` is valid. + /// Checks if the `#[marker]` attribute on an `item` is valid. fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { Target::Trait => { /* Valid */ }, @@ -157,7 +157,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[repr]` attributes on `item` are valid. + /// Checks if the `#[repr]` attributes on `item` are valid. fn check_repr(&self, item: &hir::Item, target: Target) { // Extract the names of all repr hints, e.g., [foo, bar, align] for: // ``` diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index 15efa765029..b15bea01776 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -182,7 +182,7 @@ impl<T> ::std::ops::IndexMut<Namespace> for PerNS<T> { } impl<T> PerNS<Option<T>> { - /// Returns whether all the items in this collection are `None`. 
+ /// Returns `true` if all the items in this collection are `None`. pub fn is_empty(&self) -> bool { self.type_ns.is_none() && self.value_ns.is_none() && self.macro_ns.is_none() } diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index e06f09e21cb..ed1c15a73c2 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -229,7 +229,7 @@ impl fmt::Debug for DefId { } impl DefId { - /// Make a local `DefId` with the given index. + /// Makes a local `DefId` from the given `DefIndex`. #[inline] pub fn local(index: DefIndex) -> DefId { DefId { krate: LOCAL_CRATE, index: index } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 86c3fb9e4fc..9436c600c9f 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -4,7 +4,7 @@ //! `super::itemlikevisit::ItemLikeVisitor` trait.** //! //! If you have decided to use this visitor, here are some general -//! notes on how to do it: +//! notes on how to do so: //! //! Each overridden visit method has full control over what //! happens with its node, it can do its own traversal of the node's children, @@ -86,7 +86,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> { /// using this setting. OnlyBodies(&'this Map<'tcx>), - /// Visit all nested things, including item-likes. + /// Visits all nested things, including item-likes. /// /// **This is an unusual choice.** It is used when you want to /// process everything within their lexical context. Typically you @@ -96,7 +96,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> { impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { /// Returns the map to use for an "intra item-like" thing (if any). - /// e.g., function body. + /// E.g., function body. pub fn intra(self) -> Option<&'this Map<'tcx>> { match self { NestedVisitorMap::None => None, @@ -106,7 +106,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { } /// Returns the map to use for an "item-like" thing (if any). - /// e.g., item, impl-item. 
+ /// E.g., item, impl-item. pub fn inter(self) -> Option<&'this Map<'tcx>> { match self { NestedVisitorMap::None => None, @@ -117,7 +117,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { } /// Each method of the Visitor trait is a hook to be potentially -/// overridden. Each method's default implementation recursively visits +/// overridden. Each method's default implementation recursively visits /// the substructure of the input via the corresponding `walk` method; /// e.g., the `visit_mod` method by default calls `intravisit::walk_mod`. /// @@ -129,7 +129,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { /// on `visit_nested_item` for details on how to visit nested items. /// /// If you want to ensure that your code handles every variant -/// explicitly, you need to override each method. (And you also need +/// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) pub trait Visitor<'v> : Sized { @@ -203,7 +203,7 @@ pub trait Visitor<'v> : Sized { } } - /// Visit the top-level item and (optionally) nested items / impl items. See + /// Visits the top-level item and (optionally) nested items / impl items. See /// `visit_nested_item` for details. fn visit_item(&mut self, i: &'v Item) { walk_item(self, i) @@ -214,7 +214,7 @@ pub trait Visitor<'v> : Sized { } /// When invoking `visit_all_item_likes()`, you need to supply an - /// item-like visitor. This method converts a "intra-visit" + /// item-like visitor. This method converts a "intra-visit" /// visitor into an item-like visitor that walks the entire tree. 
/// If you use this, you probably don't want to process the /// contents of nested item-like things, since the outer loop will diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 3de41b1665d..8ce6d140122 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -3,24 +3,24 @@ //! Since the AST and HIR are fairly similar, this is mostly a simple procedure, //! much like a fold. Where lowering involves a bit more work things get more //! interesting and there are some invariants you should know about. These mostly -//! concern spans and ids. +//! concern spans and IDs. //! //! Spans are assigned to AST nodes during parsing and then are modified during //! expansion to indicate the origin of a node and the process it went through -//! being expanded. Ids are assigned to AST nodes just before lowering. +//! being expanded. IDs are assigned to AST nodes just before lowering. //! -//! For the simpler lowering steps, ids and spans should be preserved. Unlike +//! For the simpler lowering steps, IDs and spans should be preserved. Unlike //! expansion we do not preserve the process of lowering in the spans, so spans //! should not be modified here. When creating a new node (as opposed to -//! 'folding' an existing one), then you create a new id using `next_id()`. +//! 'folding' an existing one), then you create a new ID using `next_id()`. //! -//! You must ensure that ids are unique. That means that you should only use the -//! id from an AST node in a single HIR node (you can assume that AST node ids -//! are unique). Every new node must have a unique id. Avoid cloning HIR nodes. -//! If you do, you must then set the new node's id to a fresh one. +//! You must ensure that IDs are unique. That means that you should only use the +//! ID from an AST node in a single HIR node (you can assume that AST node IDs +//! are unique). Every new node must have a unique ID. Avoid cloning HIR nodes. +//! 
If you do, you must then set the new node's ID to a fresh one. //! //! Spans are used for error messages and for tools to map semantics back to -//! source code. It is therefore not as important with spans as ids to be strict +//! source code. It is therefore not as important with spans as IDs to be strict //! about use (you can't break the compiler by screwing up a span). Obviously, a //! HIR node can only have a single span. But multiple nodes can have the same //! span and spans don't need to be kept in order, etc. Where code is preserved @@ -144,7 +144,7 @@ pub trait Resolver { is_value: bool, ) -> hir::Path; - /// Obtain the resolution for a node-id. + /// Obtain the resolution for a `NodeId`. fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>; /// Obtain the possible resolutions for the given `use` statement. @@ -273,10 +273,10 @@ enum ParenthesizedGenericArgs { } /// What to do when we encounter an **anonymous** lifetime -/// reference. Anonymous lifetime references come in two flavors. You +/// reference. Anonymous lifetime references come in two flavors. You /// have implicit, or fully elided, references to lifetimes, like the /// one in `&T` or `Ref<T>`, and you have `'_` lifetimes, like `&'_ T` -/// or `Ref<'_, T>`. These often behave the same, but not always: +/// or `Ref<'_, T>`. These often behave the same, but not always: /// /// - certain usages of implicit references are deprecated, like /// `Ref<T>`, and we sometimes just give hard errors in those cases @@ -3287,7 +3287,7 @@ impl<'a> LoweringContext<'a> { /// Paths like the visibility path in `pub(super) use foo::{bar, baz}` are repeated /// many times in the HIR tree; for each occurrence, we need to assign distinct - /// node-ids. (See e.g., #56128.) + /// `NodeId`s. (See, e.g., #56128.) 
fn renumber_segment_ids(&mut self, path: &P<hir::Path>) -> P<hir::Path> { debug!("renumber_segment_ids(path = {:?})", path); let mut path = path.clone(); diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index d5fb578d8d4..6919628c767 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -1,9 +1,9 @@ //! This module provides a simplified abstraction for working with -//! code blocks identified by their integer node-id. In particular, +//! code blocks identified by their integer `NodeId`. In particular, //! it captures a common set of attributes that all "function-like -//! things" (represented by `FnLike` instances) share. For example, +//! things" (represented by `FnLike` instances) share. For example, //! all `FnLike` instances have a type signature (be it explicit or -//! inferred). And all `FnLike` instances have a body, i.e., the code +//! inferred). And all `FnLike` instances have a body, i.e., the code //! that is run when the function-like thing it represents is invoked. //! //! With the above abstraction in place, one can treat the program diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 02fb503e752..8fe10a85ef3 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -12,7 +12,7 @@ use syntax_pos::Span; use crate::hir::map::{ITEM_LIKE_SPACE, REGULAR_SPACE}; -/// Creates def ids for nodes in the AST. +/// Creates `DefId`s for nodes in the AST. pub struct DefCollector<'a> { definitions: &'a mut Definitions, parent_def: Option<DefIndex>, diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 84e9cde6df1..f454d691d41 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -1,5 +1,5 @@ -//! For each definition, we track the following data. A definition -//! here is defined somewhat circularly as "something with a def-id", +//! 
For each definition, we track the following data. A definition +//! here is defined somewhat circularly as "something with a `DefId`", //! but it generally corresponds to things like structs, enums, etc. //! There are also some rather random cases (like const initializer //! expressions) that are mostly just leftovers. @@ -163,10 +163,10 @@ pub struct Definitions { /// any) with a `DisambiguatedDefPathData`. #[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DefKey { - /// Parent path. + /// The parent path. pub parent: Option<DefIndex>, - /// Identifier of this node. + /// The identifier of this node. pub disambiguated_data: DisambiguatedDefPathData, } @@ -207,12 +207,12 @@ impl DefKey { } } -/// Pair of `DefPathData` and an integer disambiguator. The integer is +/// A pair of `DefPathData` and an integer disambiguator. The integer is /// normally 0, but in the event that there are multiple defs with the /// same `parent` and `data`, we use this field to disambiguate /// between them. This introduces some artificial ordering dependency /// but means that if you have (e.g.) two impls for the same type in -/// the same module, they do get distinct def-ids. +/// the same module, they do get distinct `DefId`s. #[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DisambiguatedDefPathData { pub data: DefPathData, @@ -221,10 +221,10 @@ pub struct DisambiguatedDefPathData { #[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DefPath { - /// the path leading from the crate root to the item + /// The path leading from the crate root to the item. pub data: Vec<DisambiguatedDefPathData>, - /// what krate root is this path relative to? + /// The crate root this path is relative to. 
pub krate: CrateNum, } @@ -260,9 +260,9 @@ impl DefPath { DefPath { data: data, krate: krate } } - /// Returns a string representation of the DefPath without + /// Returns a string representation of the `DefPath` without /// the crate-prefix. This method is useful if you don't have - /// a TyCtxt available. + /// a `TyCtxt` available. pub fn to_string_no_crate(&self) -> String { let mut s = String::with_capacity(self.data.len() * 16); @@ -277,7 +277,7 @@ impl DefPath { s } - /// Return filename friendly string of the DefPah with the + /// Returns a filename-friendly string for the `DefPath`, with the /// crate-prefix. pub fn to_string_friendly<F>(&self, crate_imported_name: F) -> String where F: FnOnce(CrateNum) -> Symbol @@ -302,9 +302,9 @@ impl DefPath { s } - /// Return filename friendly string of the DefPah without + /// Returns a filename-friendly string of the `DefPath`, without /// the crate-prefix. This method is useful if you don't have - /// a TyCtxt available. + /// a `TyCtxt` available. pub fn to_filename_friendly_no_crate(&self) -> String { let mut s = String::with_capacity(self.data.len() * 16); @@ -394,18 +394,18 @@ impl Borrow<Fingerprint> for DefPathHash { } impl Definitions { - /// Create new empty definition map. + /// Creates new empty definition map. /// - /// The DefIndex returned from a new Definitions are as follows: - /// 1. At DefIndexAddressSpace::Low, + /// The `DefIndex` returned from a new `Definitions` are as follows: + /// 1. At `DefIndexAddressSpace::Low`, /// CRATE_ROOT has index 0:0, and then new indexes are allocated in /// ascending order. - /// 2. At DefIndexAddressSpace::High, - /// the first FIRST_FREE_HIGH_DEF_INDEX indexes are reserved for - /// internal use, then 1:FIRST_FREE_HIGH_DEF_INDEX are allocated in + /// 2. At `DefIndexAddressSpace::High`, + /// the first `FIRST_FREE_HIGH_DEF_INDEX` indexes are reserved for + /// internal use, then `1:FIRST_FREE_HIGH_DEF_INDEX` are allocated in /// ascending order. 
- /// - /// FIXME: there is probably a better place to put this comment. + // + // FIXME: there is probably a better place to put this comment. pub fn new() -> Self { Self::default() } @@ -414,7 +414,7 @@ impl Definitions { &self.table } - /// Get the number of definitions. + /// Gets the number of definitions. pub fn def_index_counts_lo_hi(&self) -> (usize, usize) { (self.table.index_to_key[DefIndexAddressSpace::Low.index()].len(), self.table.index_to_key[DefIndexAddressSpace::High.index()].len()) @@ -497,8 +497,8 @@ impl Definitions { self.node_to_hir_id[node_id] } - /// Retrieve the span of the given `DefId` if `DefId` is in the local crate, the span exists and - /// it's not DUMMY_SP + /// Retrieves the span of the given `DefId` if `DefId` is in the local crate, the span exists + /// and it's not `DUMMY_SP`. #[inline] pub fn opt_span(&self, def_id: DefId) -> Option<Span> { if def_id.krate == LOCAL_CRATE { @@ -508,7 +508,7 @@ impl Definitions { } } - /// Add a definition with a parent definition. + /// Adds a root definition (no parent). pub fn create_root_def(&mut self, crate_name: &str, crate_disambiguator: CrateDisambiguator) @@ -606,7 +606,7 @@ impl Definitions { index } - /// Initialize the ast::NodeId to HirId mapping once it has been generated during + /// Initialize the `ast::NodeId` to `HirId` mapping once it has been generated during /// AST to HIR lowering. pub fn init_node_id_to_hir_id_mapping(&mut self, mapping: IndexVec<ast::NodeId, hir::HirId>) { diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 955f834e403..bf89eada4a5 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -36,7 +36,7 @@ mod hir_id_validator; pub const ITEM_LIKE_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::Low; pub const REGULAR_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::High; -/// Represents an entry and its parent NodeId. +/// Represents an entry and its parent `NodeId`. 
#[derive(Copy, Clone, Debug)] pub struct Entry<'hir> { parent: NodeId, @@ -162,8 +162,7 @@ impl Forest { } } -/// Represents a mapping from Node IDs to AST elements and their parent -/// Node IDs +/// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s. #[derive(Clone)] pub struct Map<'hir> { /// The backing storage for all the AST nodes. @@ -473,7 +472,7 @@ impl<'hir> Map<'hir> { self.local_def_id(self.body_owner(id)) } - /// Given a node id, returns the `BodyId` associated with it, + /// Given a `NodeId`, returns the `BodyId` associated with it, /// if the node is a body owner, otherwise returns `None`. pub fn maybe_body_owned_by(&self, id: NodeId) -> Option<BodyId> { if let Some(entry) = self.find_entry(id) { @@ -558,7 +557,7 @@ impl<'hir> Map<'hir> { self.trait_auto_impl(trait_did).is_some() } - /// Get the attributes on the krate. This is preferable to + /// Gets the attributes on the crate. This is preferable to /// invoking `krate.attrs` because it registers a tighter /// dep-graph access. pub fn krate_attrs(&self) -> &'hir [ast::Attribute] { @@ -653,8 +652,7 @@ impl<'hir> Map<'hir> { self.get_generics(id).map(|generics| generics.span).filter(|sp| *sp != DUMMY_SP) } - /// Retrieve the Node corresponding to `id`, returning None if - /// cannot be found. + /// Retrieves the `Node` corresponding to `id`, returning `None` if cannot be found. pub fn find(&self, id: NodeId) -> Option<Node<'hir>> { let result = self.find_entry(id).and_then(|entry| { if let Node::Crate = entry.node { @@ -683,8 +681,8 @@ impl<'hir> Map<'hir> { /// returns the enclosing item. Note that this might not be the actual parent /// node in the AST - some kinds of nodes are not in the map and these will /// never appear as the parent_node. So you can always walk the `parent_nodes` - /// from a node to the root of the ast (unless you get the same id back here - /// that can happen if the id is not in the map itself or is just weird). 
+ /// from a node to the root of the ast (unless you get the same ID back here + /// that can happen if the ID is not in the map itself or is just weird). pub fn get_parent_node(&self, id: NodeId) -> NodeId { if self.dep_graph.is_fully_enabled() { let hir_id_owner = self.node_to_hir_id(id).owner; @@ -725,7 +723,7 @@ impl<'hir> Map<'hir> { /// If there is some error when walking the parents (e.g., a node does not /// have a parent in the map or a node can't be found), then we return the - /// last good node id we found. Note that reaching the crate root (`id == 0`), + /// last good `NodeId` we found. Note that reaching the crate root (`id == 0`), /// is not an error, since items in the crate module have the crate root as /// parent. fn walk_parent_nodes<F, F2>(&self, @@ -761,7 +759,7 @@ impl<'hir> Map<'hir> { } } - /// Retrieve the `NodeId` for `id`'s enclosing method, unless there's a + /// Retrieves the `NodeId` for `id`'s enclosing method, unless there's a /// `while` or `loop` before reaching it, as block tail returns are not /// available in them. /// @@ -809,7 +807,7 @@ impl<'hir> Map<'hir> { self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok() } - /// Retrieve the `NodeId` for `id`'s parent item, or `id` itself if no + /// Retrieves the `NodeId` for `id`'s parent item, or `id` itself if no /// parent item is in this map. The "parent item" is the closest parent node /// in the HIR which is recorded by the map and is an item, either an item /// in a module, trait, or impl. @@ -1122,7 +1120,7 @@ pub struct NodesMatchingSuffix<'a, 'hir:'a> { } impl<'a, 'hir> NodesMatchingSuffix<'a, 'hir> { - /// Returns true only if some suffix of the module path for parent + /// Returns `true` only if some suffix of the module path for parent /// matches `self.in_which`. 
/// /// In other words: let `[x_0,x_1,...,x_k]` be `self.in_which`; diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 3e7dd1432e1..d9759da9dfc 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -62,14 +62,14 @@ pub mod map; pub mod pat_util; pub mod print; -/// A HirId uniquely identifies a node in the HIR of the current crate. It is -/// composed of the `owner`, which is the DefIndex of the directly enclosing -/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e., the closest "item-like"), +/// Uniquely identifies a node in the HIR of the current crate. It is +/// composed of the `owner`, which is the `DefIndex` of the directly enclosing +/// `hir::Item`, `hir::TraitItem`, or `hir::ImplItem` (i.e., the closest "item-like"), /// and the `local_id` which is unique within the given owner. /// /// This two-level structure makes for more stable values: One can move an item /// around within the source code, or add or remove stuff before it, without -/// the local_id part of the HirId changing, which is a very useful property in +/// the `local_id` part of the `HirId` changing, which is a very useful property in /// incremental compilation where we have to persist things through changes to /// the code base. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -130,7 +130,7 @@ mod item_local_id_inner { pub use self::item_local_id_inner::ItemLocalId; -/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX +/// The `HirId` corresponding to `CRATE_NODE_ID` and `CRATE_DEF_INDEX`. pub const CRATE_HIR_ID: HirId = HirId { owner: CRATE_DEF_INDEX, local_id: ItemLocalId::from_u32_const(0) @@ -149,8 +149,8 @@ pub struct Lifetime { pub hir_id: HirId, pub span: Span, - /// Either "'a", referring to a named lifetime definition, - /// or "" (aka keywords::Invalid), for elision placeholders. + /// Either "`'a`", referring to a named lifetime definition, + /// or "``" (i.e., `keywords::Invalid`), for elision placeholders. 
/// /// HIR lowering inserts these placeholders in type paths that /// refer to type definitions needing lifetime parameters, @@ -163,8 +163,9 @@ pub enum ParamName { /// Some user-given name like `T` or `'x`. Plain(Ident), - /// Synthetic name generated when user elided a lifetime in an impl header, - /// e.g., the lifetimes in cases like these: + /// Synthetic name generated when user elided a lifetime in an impl header. + /// + /// E.g., the lifetimes in cases like these: /// /// impl Foo for &u32 /// impl Foo<'_> for u32 @@ -180,7 +181,7 @@ pub enum ParamName { /// Indicates an illegal name was given and an error has been /// repored (so we should squelch other derived errors). Occurs - /// when e.g., `'_` is used in the wrong place. + /// when, e.g., `'_` is used in the wrong place. Error, } @@ -205,17 +206,17 @@ pub enum LifetimeName { /// User-given names or fresh (synthetic) names. Param(ParamName), - /// User typed nothing. e.g., the lifetime in `&u32`. + /// User wrote nothing (e.g., the lifetime in `&u32`). Implicit, /// Indicates an error during lowering (usually `'_` in wrong place) /// that was already reported. Error, - /// User typed `'_`. + /// User wrote specifies `'_`. Underscore, - /// User wrote `'static` + /// User wrote `'static`. Static, } @@ -280,7 +281,7 @@ impl Lifetime { } } -/// A "Path" is essentially Rust's notion of a name; for instance: +/// A `Path` is essentially Rust's notion of a name; for instance, /// `std::cmp::PartialEq`. It's represented as a sequence of identifiers, /// along with a bunch of supporting information. #[derive(Clone, RustcEncodable, RustcDecodable)] @@ -340,7 +341,7 @@ pub struct PathSegment { } impl PathSegment { - /// Convert an identifier to the corresponding segment. + /// Converts an identifier to the corresponding segment. 
pub fn from_ident(ident: Ident) -> PathSegment { PathSegment { ident, @@ -597,14 +598,14 @@ impl Generics { } } -/// Synthetic Type Parameters are converted to an other form during lowering, this allows -/// to track the original form they had. Useful for error messages. +/// Synthetic type parameters are converted to another form during lowering; this allows +/// us to track the original form they had, and is useful for error messages. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum SyntheticTyParamKind { ImplTrait } -/// A `where` clause in a definition +/// A where-clause in a definition. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereClause { pub id: NodeId, @@ -624,7 +625,7 @@ impl WhereClause { } } -/// A single predicate in a `where` clause +/// A single predicate in a where-clause. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum WherePredicate { /// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`). @@ -645,19 +646,19 @@ impl WherePredicate { } } -/// A type bound, eg `for<'c> Foo: Send+Clone+'c` +/// A type bound (e.g., `for<'c> Foo: Send + Clone + 'c`). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereBoundPredicate { pub span: Span, - /// Any generics from a `for` binding + /// Any generics from a `for` binding. pub bound_generic_params: HirVec<GenericParam>, - /// The type being bounded + /// The type being bounded. pub bounded_ty: P<Ty>, - /// Trait and lifetime bounds (`Clone+Send+'static`) + /// Trait and lifetime bounds (e.g., `Clone + Send + 'static`). pub bounds: GenericBounds, } -/// A lifetime predicate, e.g., `'a: 'b+'c` +/// A lifetime predicate (e.g., `'a: 'b + 'c`). 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereRegionPredicate { pub span: Span, @@ -665,7 +666,7 @@ pub struct WhereRegionPredicate { pub bounds: GenericBounds, } -/// An equality predicate (unsupported), e.g., `T=int` +/// An equality predicate (e.g., `T = int`); currently unsupported. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereEqPredicate { pub id: NodeId, @@ -759,7 +760,7 @@ impl Crate { } } - /// A parallel version of visit_all_item_likes + /// A parallel version of `visit_all_item_likes`. pub fn par_visit_all_item_likes<'hir, V>(&'hir self, visitor: &V) where V: itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send { @@ -800,14 +801,14 @@ pub struct MacroDef { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Block { - /// Statements in a block + /// Statements in a block. pub stmts: HirVec<Stmt>, /// An expression at the end of the block - /// without a semicolon, if any + /// without a semicolon, if any. pub expr: Option<P<Expr>>, pub id: NodeId, pub hir_id: HirId, - /// Distinguishes between `unsafe { ... }` and `{ ... }` + /// Distinguishes between `unsafe { ... }` and `{ ... }`. pub rules: BlockCheckMode, pub span: Span, /// If true, then there may exist `break 'a` values that aim to @@ -874,18 +875,18 @@ impl Pat { } } -/// A single field in a struct pattern +/// A single field in a struct pattern. /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as` x: x, y: ref y, z: ref mut z`, -/// except is_shorthand is true +/// except `is_shorthand` is true. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FieldPat { pub id: NodeId, pub hir_id: HirId, - /// The identifier for the field + /// The identifier for the field. pub ident: Ident, - /// The pattern the field is destructured to + /// The pattern the field is destructured to. 
pub pat: P<Pat>, pub is_shorthand: bool, } @@ -922,41 +923,41 @@ pub enum RangeEnd { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum PatKind { - /// Represents a wildcard pattern (`_`) + /// Represents a wildcard pattern (i.e., `_`). Wild, /// A fresh binding `ref mut binding @ OPT_SUBPATTERN`. /// The `NodeId` is the canonical ID for the variable being bound, - /// e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID, + /// (e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID), /// which is the pattern ID of the first `x`. Binding(BindingAnnotation, NodeId, HirId, Ident, Option<P<Pat>>), - /// A struct or struct variant pattern, e.g., `Variant {x, y, ..}`. + /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`). /// The `bool` is `true` in the presence of a `..`. Struct(QPath, HirVec<Spanned<FieldPat>>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. - /// 0 <= position <= subpats.len() + /// `0 <= position <= subpats.len()` TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>), /// A path pattern for an unit struct/variant or a (maybe-associated) constant. Path(QPath), - /// A tuple pattern `(a, b)`. + /// A tuple pattern (e.g., `(a, b)`). /// If the `..` pattern fragment is present, then `Option<usize>` denotes its position. - /// 0 <= position <= subpats.len() + /// `0 <= position <= subpats.len()` Tuple(HirVec<P<Pat>>, Option<usize>), - /// A `box` pattern + /// A `box` pattern. Box(P<Pat>), - /// A reference pattern, e.g., `&mut (a, b)` + /// A reference pattern (e.g., `&mut (a, b)`). Ref(P<Pat>, Mutability), - /// A literal + /// A literal. Lit(P<Expr>), - /// A range pattern, e.g., `1...2` or `1..2` + /// A range pattern (e.g., `1...2` or `1..2`). 
Range(P<Expr>, P<Expr>, RangeEnd), /// `[a, b, ..i, y, z]` is represented as: - /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` + /// `PatKind::Slice(box [a, b], Some(i), box [y, z])`. Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>), } @@ -967,7 +968,7 @@ pub enum Mutability { } impl Mutability { - /// Return MutMutable only if both arguments are mutable. + /// Returns `MutMutable` only if both arguments are mutable. pub fn and(self, other: Self) -> Self { match self { MutMutable => other, @@ -978,41 +979,41 @@ impl Mutability { #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)] pub enum BinOpKind { - /// The `+` operator (addition) + /// The `+` operator (addition). Add, - /// The `-` operator (subtraction) + /// The `-` operator (subtraction). Sub, - /// The `*` operator (multiplication) + /// The `*` operator (multiplication). Mul, - /// The `/` operator (division) + /// The `/` operator (division). Div, - /// The `%` operator (modulus) + /// The `%` operator (modulus). Rem, - /// The `&&` operator (logical and) + /// The `&&` operator (logical and). And, - /// The `||` operator (logical or) + /// The `||` operator (logical or). Or, - /// The `^` operator (bitwise xor) + /// The `^` operator (bitwise xor). BitXor, - /// The `&` operator (bitwise and) + /// The `&` operator (bitwise and). BitAnd, - /// The `|` operator (bitwise or) + /// The `|` operator (bitwise or). BitOr, - /// The `<<` operator (shift left) + /// The `<<` operator (shift left). Shl, - /// The `>>` operator (shift right) + /// The `>>` operator (shift right). Shr, - /// The `==` operator (equality) + /// The `==` operator (equality). Eq, - /// The `<` operator (less than) + /// The `<` operator (less than). Lt, - /// The `<=` operator (less than or equal to) + /// The `<=` operator (less than or equal to). Le, - /// The `!=` operator (not equal to) + /// The `!=` operator (not equal to). 
Ne, - /// The `>=` operator (greater than or equal to) + /// The `>=` operator (greater than or equal to). Ge, - /// The `>` operator (greater than) + /// The `>` operator (greater than). Gt, } @@ -1077,7 +1078,7 @@ impl BinOpKind { } } - /// Returns `true` if the binary operator takes its arguments by value + /// Returns `true` if the binary operator takes its arguments by value. pub fn is_by_value(self) -> bool { !self.is_comparison() } @@ -1112,11 +1113,11 @@ pub type BinOp = Spanned<BinOpKind>; #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)] pub enum UnOp { - /// The `*` operator for dereferencing + /// The `*` operator (deferencing). UnDeref, - /// The `!` operator for logical inversion + /// The `!` operator (logical negation). UnNot, - /// The `-` operator for negation + /// The `-` operator (negation). UnNeg, } @@ -1129,7 +1130,7 @@ impl UnOp { } } - /// Returns `true` if the unary operator takes its argument by value + /// Returns `true` if the unary operator takes its argument by value. pub fn is_by_value(self) -> bool { match self { UnNeg | UnNot => true, @@ -1138,7 +1139,7 @@ impl UnOp { } } -/// A statement +/// A statement. #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Stmt { pub id: NodeId, @@ -1156,15 +1157,15 @@ impl fmt::Debug for Stmt { #[derive(Clone, RustcEncodable, RustcDecodable)] pub enum StmtKind { - /// A local (let) binding: + /// A local (`let`) binding. Local(P<Local>), - /// An item binding: + /// An item binding. Item(P<ItemId>), - /// Expr without trailing semi-colon (must have unit type): + /// An expression without a trailing semi-colon (must have unit type). Expr(P<Expr>), - /// Expr with trailing semi-colon (may have any type): + /// An expression with a trailing semi-colon (may have any type). 
Semi(P<Expr>), } @@ -1179,12 +1180,12 @@ impl StmtKind { } } -/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;` +/// Represents a `let` statement (i.e., `let <pat>:<ty> = <expr>;`). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Local { pub pat: P<Pat>, pub ty: Option<P<Ty>>, - /// Initializer expression to set the value, if any + /// Initializer expression to set the value, if any. pub init: Option<P<Expr>>, pub id: NodeId, pub hir_id: HirId, @@ -1193,7 +1194,7 @@ pub struct Local { pub source: LocalSource, } -/// represents one arm of a 'match' +/// Represents a single arm of a `match` expression. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arm { pub attrs: HirVec<Attribute>, @@ -1419,16 +1420,16 @@ impl fmt::Debug for Expr { pub enum ExprKind { /// A `box x` expression. Box(P<Expr>), - /// An array (`[a, b, c, d]`) + /// An array (e.g., `[a, b, c, d]`). Array(HirVec<Expr>), - /// A function call + /// A function call. /// /// The first field resolves to the function itself (usually an `ExprKind::Path`), /// and the second field is the list of arguments. /// This also represents calling the constructor of /// tuple-like ADTs such as tuple structs and enum variants. Call(P<Expr>, HirVec<Expr>), - /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`) + /// A method call (e.g., `x.foo::<'static, Bar, Baz>(a, b, c, d)`). /// /// The `PathSegment`/`Span` represent the method name and its generic arguments /// (within the angle brackets). @@ -1438,63 +1439,64 @@ pub enum ExprKind { /// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`. MethodCall(PathSegment, Span, HirVec<Expr>), - /// A tuple (`(a, b, c ,d)`) + /// A tuple (e.g., `(a, b, c ,d)`). Tup(HirVec<Expr>), - /// A binary operation (For example: `a + b`, `a * b`) + /// A binary operation (e.g., `a + b`, `a * b`). 
Binary(BinOp, P<Expr>, P<Expr>), - /// A unary operation (For example: `!x`, `*x`) + /// A unary operation (e.g., `!x`, `*x`). Unary(UnOp, P<Expr>), - /// A literal (For example: `1`, `"foo"`) + /// A literal (e.g., `1`, `"foo"`). Lit(Lit), - /// A cast (`foo as f64`) + /// A cast (e.g., `foo as f64`). Cast(P<Expr>, P<Ty>), + /// A type reference (e.g., `Foo`). Type(P<Expr>, P<Ty>), - /// An `if` block, with an optional else block + /// An `if` block, with an optional else block. /// - /// `if expr { expr } else { expr }` + /// I.e., `if <expr> { <expr> } else { <expr> }`. If(P<Expr>, P<Expr>, Option<P<Expr>>), /// A while loop, with an optional label /// - /// `'label: while expr { block }` + /// I.e., `'label: while expr { <block> }`. While(P<Expr>, P<Block>, Option<Label>), - /// Conditionless loop (can be exited with break, continue, or return) + /// A conditionless loop (can be exited with `break`, `continue`, or `return`). /// - /// `'label: loop { block }` + /// I.e., `'label: loop { <block> }`. Loop(P<Block>, Option<Label>, LoopSource), /// A `match` block, with a source that indicates whether or not it is /// the result of a desugaring, and if so, which kind. Match(P<Expr>, HirVec<Arm>, MatchSource), - /// A closure (for example, `move |a, b, c| {a + b + c}`). + /// A closure (e.g., `move |a, b, c| {a + b + c}`). /// - /// The final span is the span of the argument block `|...|` + /// The final span is the span of the argument block `|...|`. /// /// This may also be a generator literal, indicated by the final boolean, - /// in that case there is an GeneratorClause. + /// in that case there is an `GeneratorClause`. Closure(CaptureClause, P<FnDecl>, BodyId, Span, Option<GeneratorMovability>), - /// A block (`'label: { ... }`) + /// A block (e.g., `'label: { ... }`). Block(P<Block>, Option<Label>), - /// An assignment (`a = foo()`) + /// An assignment (e.g., `a = foo()`). 
Assign(P<Expr>, P<Expr>), - /// An assignment with an operator + /// An assignment with an operator. /// - /// For example, `a += 1`. + /// E.g., `a += 1`. AssignOp(BinOp, P<Expr>, P<Expr>), - /// Access of a named (`obj.foo`) or unnamed (`obj.0`) struct or tuple field + /// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct or tuple field. Field(P<Expr>, Ident), - /// An indexing operation (`foo[2]`) + /// An indexing operation (`foo[2]`). Index(P<Expr>, P<Expr>), /// Path to a definition, possibly containing lifetime or type parameters. Path(QPath), - /// A referencing operation (`&a` or `&mut a`) + /// A referencing operation (i.e., `&a` or `&mut a`). AddrOf(Mutability, P<Expr>), - /// A `break`, with an optional label to break + /// A `break`, with an optional label to break. Break(Destination, Option<P<Expr>>), - /// A `continue`, with an optional label + /// A `continue`, with an optional label. Continue(Destination), - /// A `return`, with an optional value to be returned + /// A `return`, with an optional value to be returned. Ret(Option<P<Expr>>), /// Inline assembly (from `asm!`), with its outputs and inputs. @@ -1512,10 +1514,10 @@ pub enum ExprKind { /// to be repeated; the second is the number of times to repeat it. Repeat(P<Expr>, AnonConst), - /// A suspension point for generators. This is `yield <expr>` in Rust. + /// A suspension point for generators (i.e., `yield <expr>`). Yield(P<Expr>), - /// Placeholder for an expression that wasn't syntactically well formed in some way. + /// A placeholder for an expression that wasn't syntactically well formed in some way. Err, } @@ -1525,12 +1527,12 @@ pub enum QPath { /// Path to a definition, optionally "fully-qualified" with a `Self` /// type, if the path points to an associated item in a trait. 
/// - /// e.g., an unqualified path like `Clone::clone` has `None` for `Self`, + /// E.g., an unqualified path like `Clone::clone` has `None` for `Self`, /// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`, /// even though they both have the same two-segment `Clone::clone` `Path`. Resolved(Option<P<Ty>>, P<Path>), - /// Type-related paths, e.g., `<T>::default` or `<T>::Output`. + /// Type-related paths (e.g., `<T>::default` or `<T>::Output`). /// Will be resolved by type-checking to an associated item. /// /// UFCS source paths can desugar into this, with `Vec::new` turning into @@ -1539,41 +1541,41 @@ pub enum QPath { TypeRelative(P<Ty>, P<PathSegment>) } -/// Hints at the original code for a let statement +/// Hints at the original code for a let statement. #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum LocalSource { - /// A `match _ { .. }` + /// A `match _ { .. }`. Normal, - /// A desugared `for _ in _ { .. }` loop + /// A desugared `for _ in _ { .. }` loop. ForLoopDesugar, } -/// Hints at the original code for a `match _ { .. }` +/// Hints at the original code for a `match _ { .. }`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum MatchSource { - /// A `match _ { .. }` + /// A `match _ { .. }`. Normal, - /// An `if let _ = _ { .. }` (optionally with `else { .. }`) + /// An `if let _ = _ { .. }` (optionally with `else { .. }`). IfLetDesugar { contains_else_clause: bool, }, /// A `while let _ = _ { .. }` (which was desugared to a - /// `loop { match _ { .. } }`) + /// `loop { match _ { .. } }`). WhileLetDesugar, - /// A desugared `for _ in _ { .. }` loop + /// A desugared `for _ in _ { .. }` loop. ForLoopDesugar, - /// A desugared `?` operator + /// A desugared `?` operator. TryDesugar, } -/// The loop type that yielded an ExprKind::Loop +/// The loop type that yielded an `ExprKind::Loop`. 
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum LoopSource { - /// A `loop { .. }` loop + /// A `loop { .. }` loop. Loop, - /// A `while let _ = _ { .. }` loop + /// A `while let _ = _ { .. }` loop. WhileLet, - /// A `for _ in _ { .. }` loop + /// A `for _ in _ { .. }` loop. ForLoop, } @@ -1739,7 +1741,7 @@ impl fmt::Debug for Ty { } } -/// Not represented directly in the AST, referred to by name through a ty_path. +/// Not represented directly in the AST; referred to by name through a `ty_path`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum PrimTy { Int(IntTy), @@ -1766,38 +1768,38 @@ pub struct ExistTy { pub impl_trait_fn: Option<DefId>, } +/// The various kinds of types recognized by the compiler. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] -/// The different kinds of types recognized by the compiler pub enum TyKind { - /// A variable length slice (`[T]`) + /// A variable length slice (i.e., `[T]`). Slice(P<Ty>), - /// A fixed length array (`[T; n]`) + /// A fixed length array (i.e., `[T; n]`). Array(P<Ty>, AnonConst), - /// A raw pointer (`*const T` or `*mut T`) + /// A raw pointer (i.e., `*const T` or `*mut T`). Ptr(MutTy), - /// A reference (`&'a T` or `&'a mut T`) + /// A reference (i.e., `&'a T` or `&'a mut T`). Rptr(Lifetime, MutTy), - /// A bare function (e.g., `fn(usize) -> bool`) + /// A bare function (e.g., `fn(usize) -> bool`). BareFn(P<BareFnTy>), - /// The never type (`!`) + /// The never type (`!`). Never, - /// A tuple (`(A, B, C, D,...)`) + /// A tuple (`(A, B, C, D,...)`). Tup(HirVec<Ty>), /// A path to a type definition (`module::module::...::Type`), or an - /// associated type, e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`. + /// associated type (e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`). /// /// Type parameters may be stored in each `PathSegment`. Path(QPath), /// A type definition itself. 
This is currently only used for the `existential type` /// item that `impl Trait` in return position desugars to. /// - /// The generic arg list are the lifetimes (and in the future possibly parameters) that are - /// actually bound on the `impl Trait`. + /// The generic argument list contains the lifetimes (and in the future possibly parameters) + /// that are actually bound on the `impl Trait`. Def(ItemId, HirVec<GenericArg>), /// A trait object type `Bound1 + Bound2 + Bound3` /// where `Bound` is a trait or a lifetime. TraitObject(HirVec<PolyTraitRef>, Lifetime), - /// Unused for now + /// Unused for now. Typeof(AnonConst), /// `TyKind::Infer` means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. @@ -1827,7 +1829,7 @@ pub struct InlineAsm { pub ctxt: SyntaxContext, } -/// represents an argument in a function header +/// Represents an argument in a function header. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arg { pub pat: P<Pat>, @@ -1835,7 +1837,7 @@ pub struct Arg { pub hir_id: HirId, } -/// Represents the header (not the body) of a function declaration +/// Represents the header (not the body) of a function declaration. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FnDecl { pub inputs: HirVec<Ty>, @@ -1958,7 +1960,7 @@ pub enum FunctionRetTy { /// closures default to inference. Span points to where return /// type would be inserted. DefaultReturn(Span), - /// Everything else + /// Everything else. Return(P<Ty>), } @@ -2011,7 +2013,7 @@ pub struct VariantKind { pub ident: Ident, pub attrs: HirVec<Attribute>, pub data: VariantData, - /// Explicit discriminant, e.g., `Foo = 1` + /// Explicit discriminant (e.g., `Foo = 1`). pub disr_expr: Option<AnonConst>, } @@ -2047,7 +2049,7 @@ pub struct TraitRef { } impl TraitRef { - /// Get the `DefId` of the referenced trait. It _must_ actually be a trait or trait alias. + /// Gets the `DefId` of the referenced trait. 
It _must_ actually be a trait or trait alias. pub fn trait_def_id(&self) -> DefId { match self.path.def { Def::Trait(did) => did, @@ -2062,10 +2064,10 @@ impl TraitRef { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct PolyTraitRef { - /// The `'a` in `<'a> Foo<&'a T>` + /// The `'a` in `<'a> Foo<&'a T>`. pub bound_generic_params: HirVec<GenericParam>, - /// The `Foo<&'a T>` in `<'a> Foo<&'a T>` + /// The `Foo<&'a T>` in `<'a> Foo<&'a T>`. pub trait_ref: TraitRef, pub span: Span, @@ -2223,7 +2225,7 @@ pub struct FnHeader { pub enum ItemKind { /// An `extern crate` item, with optional *original* crate name if the crate was renamed. /// - /// e.g., `extern crate foo` or `extern crate foo_bar as foo` + /// E.g., `extern crate foo` or `extern crate foo_bar as foo`. ExternCrate(Option<Name>), /// `use foo::bar::*;` or `use foo::bar::baz as quux;` @@ -2320,7 +2322,7 @@ impl ItemKind { /// contains the item's id, naturally, but also the item's name and /// some other high-level details (like whether it is an associated /// type or method, and whether it is public). This allows other -/// passes to find the impl they want without loading the id (which +/// passes to find the impl they want without loading the ID (which /// means fewer edges in the incremental compilation graph). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct TraitItemRef { @@ -2332,10 +2334,10 @@ pub struct TraitItemRef { } /// A reference from an impl to one of its associated items. This -/// contains the item's id, naturally, but also the item's name and +/// contains the item's ID, naturally, but also the item's name and /// some other high-level details (like whether it is an associated /// type or method, and whether it is public). This allows other -/// passes to find the impl they want without loading the id (which +/// passes to find the impl they want without loading the ID (which /// means fewer edges in the incremental compilation graph). 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ImplItemRef { @@ -2366,15 +2368,15 @@ pub struct ForeignItem { pub vis: Visibility, } -/// An item within an `extern` block +/// An item within an `extern` block. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum ForeignItemKind { - /// A foreign function + /// A foreign function. Fn(P<FnDecl>, HirVec<Ident>, Generics), /// A foreign static item (`static ext: u8`), with optional mutability - /// (the boolean is true when mutable) + /// (the boolean is true when mutable). Static(P<Ty>, bool), - /// A foreign type + /// A foreign type. Type, } @@ -2458,36 +2460,37 @@ pub struct CodegenFnAttrs { bitflags! { #[derive(RustcEncodable, RustcDecodable)] pub struct CodegenFnAttrFlags: u32 { - /// #[cold], a hint to LLVM that this function, when called, is never on - /// the hot path + /// `#[cold]`: a hint to LLVM that this function, when called, is never on + /// the hot path. const COLD = 1 << 0; - /// #[allocator], a hint to LLVM that the pointer returned from this - /// function is never null + /// `#[allocator]`: a hint to LLVM that the pointer returned from this + /// function is never null. const ALLOCATOR = 1 << 1; - /// #[unwind], an indicator that this function may unwind despite what - /// its ABI signature may otherwise imply + /// `#[unwind]`: an indicator that this function may unwind despite what + /// its ABI signature may otherwise imply. const UNWIND = 1 << 2; - /// #[rust_allocator_nounwind], an indicator that an imported FFI + /// `#[rust_allocator_nounwind]`, an indicator that an imported FFI /// function will never unwind. Probably obsolete by recent changes with /// #[unwind], but hasn't been removed/migrated yet const RUSTC_ALLOCATOR_NOUNWIND = 1 << 3; - /// #[naked], indicates to LLVM that no function prologue/epilogue - /// should be generated + /// `#[naked]`: an indicator to LLVM that no function prologue/epilogue + /// should be generated. 
const NAKED = 1 << 4; - /// #[no_mangle], the function's name should be the same as its symbol + /// `#[no_mangle]`: an indicator that the function's name should be the same + /// as its symbol. const NO_MANGLE = 1 << 5; - /// #[rustc_std_internal_symbol], and indicator that this symbol is a + /// `#[rustc_std_internal_symbol]`: an indicator that this symbol is a /// "weird symbol" for the standard library in that it has slightly /// different linkage, visibility, and reachability rules. const RUSTC_STD_INTERNAL_SYMBOL = 1 << 6; - /// #[no_debug], indicates that no debugging information should be - /// generated for this function by LLVM + /// `#[no_debug]`: an indicator that no debugging information should be + /// generated for this function by LLVM. const NO_DEBUG = 1 << 7; - /// #[thread_local], indicates a static is actually a thread local + /// `#[thread_local]`: indicates a static is actually a thread local /// piece of memory const THREAD_LOCAL = 1 << 8; - /// #[used], indicates that LLVM can't eliminate this function (but the - /// linker can!) + /// `#[used]`: indicates that LLVM can't eliminate this function (but the + /// linker can!). const USED = 1 << 9; } } @@ -2506,7 +2509,7 @@ impl CodegenFnAttrs { } } - /// True if `#[inline]` or `#[inline(always)]` is present. + /// Returns `true` if `#[inline]` or `#[inline(always)]` is present. pub fn requests_inline(&self) -> bool { match self.inline { InlineAttr::Hint | InlineAttr::Always => true, diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs index c92cbc9b96c..e2df290a455 100644 --- a/src/librustc/hir/pat_util.rs +++ b/src/librustc/hir/pat_util.rs @@ -129,7 +129,7 @@ impl hir::Pat { } } - /// Return variants that are necessary to exist for the pattern to match. + /// Returns variants that are necessary to exist for the pattern to match. 
pub fn necessary_variants(&self) -> Vec<DefId> { let mut variants = vec![]; self.walk(|p| { @@ -154,11 +154,9 @@ impl hir::Pat { /// Checks if the pattern contains any `ref` or `ref mut` bindings, and if /// yes whether it contains mutable or just immutables ones. - /// - /// FIXME(tschottdorf): this is problematic as the HIR is being scraped, but - /// ref bindings are be implicit after #42640 (default match binding modes). - /// - /// See #44848. + // + // FIXME(tschottdorf): this is problematic as the HIR is being scraped, but + // ref bindings are be implicit after #42640 (default match binding modes). See issue #44848. pub fn contains_explicit_ref_binding(&self) -> Option<hir::Mutability> { let mut result = None; self.each_binding(|annotation, _, _, _| { diff --git a/src/librustc/infer/at.rs b/src/librustc/infer/at.rs index 7b2b1184a63..34cd3ae5427 100644 --- a/src/librustc/infer/at.rs +++ b/src/librustc/infer/at.rs @@ -1,6 +1,6 @@ -//! A nice interface for working with the infcx. The basic idea is to +//! A nice interface for working with the infcx. The basic idea is to //! do `infcx.at(cause, param_env)`, which sets the "cause" of the -//! operation as well as the surrounding parameter environment. Then +//! operation as well as the surrounding parameter environment. Then //! you can do something like `.sub(a, b)` or `.eq(a, b)` to create a //! subtype or equality relationship respectively. The first argument //! is always the "expected" output from the POV of diagnostics. @@ -78,7 +78,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { } } - /// Make `a <: b` where `a` may or may not be expected + /// Makes `a <: b`, where `a` may or may not be expected. pub fn sub_exp<T>(self, a_is_expected: bool, a: T, @@ -89,7 +89,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { self.trace_exp(a_is_expected, a, b).sub(&a, &b) } - /// Make `actual <: expected`. For example, if type-checking a + /// Makes `actual <: expected`. 
For example, if type-checking a /// call like `foo(x)`, where `foo: fn(i32)`, you might have /// `sup(i32, x)`, since the "expected" type is the type that /// appears in the signature. @@ -102,7 +102,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { self.sub_exp(false, actual, expected) } - /// Make `expected <: actual` + /// Makes `expected <: actual`. pub fn sub<T>(self, expected: T, actual: T) @@ -112,7 +112,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { self.sub_exp(true, expected, actual) } - /// Make `expected <: actual` + /// Makes `expected <: actual`. pub fn eq_exp<T>(self, a_is_expected: bool, a: T, @@ -123,7 +123,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { self.trace_exp(a_is_expected, a, b).eq(&a, &b) } - /// Make `expected <: actual` + /// Makes `expected <: actual`. pub fn eq<T>(self, expected: T, actual: T) @@ -155,7 +155,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { } } - /// Compute the least-upper-bound, or mutual supertype, of two + /// Computes the least-upper-bound, or mutual supertype, of two /// values. The order of the arguments doesn't matter, but since /// this can result in an error (e.g., if asked to compute LUB of /// u32 and i32), it is meaningful to call one of them the @@ -169,7 +169,7 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { self.trace(expected, actual).lub(&expected, &actual) } - /// Compute the greatest-lower-bound, or mutual subtype, of two + /// Computes the greatest-lower-bound, or mutual subtype, of two /// values. As with `lub` order doesn't matter, except for error /// cases. pub fn glb<T>(self, @@ -210,9 +210,9 @@ impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> { - /// Make `a <: b` where `a` may or may not be expected (if + /// Makes `a <: b` where `a` may or may not be expected (if /// `a_is_expected` is true, then `a` is expected). - /// Make `expected <: actual` + /// Makes `expected <: actual`. 
pub fn sub<T>(self, a: &T, b: &T) @@ -229,7 +229,7 @@ impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> { }) } - /// Make `a == b`; the expectation is set by the call to + /// Makes `a == b`; the expectation is set by the call to /// `trace()`. pub fn eq<T>(self, a: &T, diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs index 4e1c797a2c7..d06334c3ba6 100644 --- a/src/librustc/infer/canonical/canonicalizer.rs +++ b/src/librustc/infer/canonical/canonicalizer.rs @@ -112,14 +112,14 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { } /// A hacky variant of `canonicalize_query` that does not - /// canonicalize `'static`. Unfortunately, the existing leak + /// canonicalize `'static`. Unfortunately, the existing leak /// check treaks `'static` differently in some cases (see also /// #33684), so if we are performing an operation that may need to /// prove "leak-check" related things, we leave `'static` /// alone. - /// - /// FIXME(#48536) -- once we have universes, we can remove this and just use - /// `canonicalize_query`. + // + // FIXME(#48536): once we have universes, we can remove this and just use + // `canonicalize_query`. pub fn canonicalize_hr_query_hack<V>( &self, value: &V, @@ -595,7 +595,7 @@ impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> { .var_universe(vid) } - /// Create a canonical variable (with the given `info`) + /// Creates a canonical variable (with the given `info`) /// representing the region `r`; return a region referencing it. 
fn canonical_var_for_region( &mut self, diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs index 6f28c0b131f..613e153ae33 100644 --- a/src/librustc/infer/canonical/mod.rs +++ b/src/librustc/infer/canonical/mod.rs @@ -289,7 +289,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// /// This is only meant to be invoked as part of constructing an /// inference context at the start of a query (see - /// `InferCtxtBuilder::enter_with_canonical`). It basically + /// `InferCtxtBuilder::enter_with_canonical`). It basically /// brings the canonical value "into scope" within your new infcx. /// /// At the end of processing, the substitution S (once @@ -424,7 +424,7 @@ impl<'tcx> CanonicalVarValues<'tcx> { self.var_values.len() } - /// Make an identity substitution from this one: each bound var + /// Makes an identity substitution from this one: each bound var /// is matched to the same bound var, preserving the original kinds. /// For example, if we have: /// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]` diff --git a/src/librustc/infer/canonical/query_response.rs b/src/librustc/infer/canonical/query_response.rs index 409afca4320..aef0152b6ed 100644 --- a/src/librustc/infer/canonical/query_response.rs +++ b/src/librustc/infer/canonical/query_response.rs @@ -119,7 +119,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// If you DO want to keep track of pending obligations (which /// include all region obligations, so this includes all cases /// that care about regions) with this function, you have to - /// do it yourself, by e.g. having them be a part of the answer. + /// do it yourself, by e.g., having them be a part of the answer. pub fn make_query_response_ignoring_pending_obligations<T>( &self, inference_vars: CanonicalVarValues<'tcx>, @@ -267,7 +267,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// they should be ignored). 
/// - It **can happen** (though it rarely does currently) that /// equating types and things will give rise to subobligations - /// that must be processed. In this case, those subobligations + /// that must be processed. In this case, those subobligations /// are propagated back in the return value. /// - Finally, the query result (of type `R`) is propagated back, /// after applying the substitution `S`. @@ -506,7 +506,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// Given a "guess" at the values for the canonical variables in /// the input, try to unify with the *actual* values found in the - /// query result. Often, but not always, this is a no-op, because + /// query result. Often, but not always, this is a no-op, because /// we already found the mapping in the "guessing" step. /// /// See also: `query_response_substitution_guess` diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index 7e225214734..361fbfea097 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -165,8 +165,8 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { Glb::new(self, a_is_expected) } - /// Here dir is either EqTo, SubtypeOf, or SupertypeOf. The - /// idea is that we should ensure that the type `a_ty` is equal + /// Here, `dir` is either `EqTo`, `SubtypeOf`, or `SupertypeOf`. + /// The idea is that we should ensure that the type `a_ty` is equal /// to, a subtype of, or a supertype of (respectively) the type /// to which `b_vid` is bound. /// @@ -280,7 +280,7 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, - /// Span, used when creating new type variables and things. + /// The span, used when creating new type variables and things. 
span: Span, /// The vid of the type variable that is in the process of being @@ -310,7 +310,7 @@ struct Generalization<'tcx> { /// particular around 'bivariant' type parameters that are only /// constrained by a where-clause. As an example, imagine a type: /// - /// struct Foo<A, B> where A: Iterator<Item=B> { + /// struct Foo<A, B> where A: Iterator<Item = B> { /// data: A /// } /// @@ -323,7 +323,7 @@ struct Generalization<'tcx> { /// <: ?C`, but no particular relationship between `?B` and `?D` /// (after all, we do not know the variance of the normalized form /// of `A::Item` with respect to `A`). If we do nothing else, this - /// may mean that `?D` goes unconstrained (as in #41677). So, in + /// may mean that `?D` goes unconstrained (as in #41677). So, in /// this scenario where we create a new type variable in a /// bivariant context, we set the `needs_wf` flag to true. This /// will force the calling code to check that `WF(Foo<?C, ?D>)` diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 1c23438a3b4..89237b34c7f 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -659,7 +659,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { None } - /// Add a `,` to the type representation only if it is appropriate. + /// Adds a `,` to the type representation only if it is appropriate. fn push_comma( &self, value: &mut DiagnosticStyledString, @@ -715,7 +715,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { substs.truncate_to(self.tcx, &generics) } - /// Compare two given types, eliding parts that are the same between them and highlighting + /// Compares two given types, eliding parts that are the same between them and highlighting /// relevant differences, and return two representation of those types for highlighted printing. 
fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> (DiagnosticStyledString, DiagnosticStyledString) { fn equals<'tcx>(a: &Ty<'tcx>, b: &Ty<'tcx>) -> bool { diff --git a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs index 0f440151779..5d5a9b36087 100644 --- a/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs +++ b/src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs @@ -39,7 +39,7 @@ impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> { /// x.push(y); /// ^ ...but data from `y` flows into `x` here /// } - /// ```` + /// ``` /// /// It will later be extended to trait objects. pub(super) fn try_report_anon_anon_conflict(&self) -> Option<ErrorReported> { diff --git a/src/librustc/infer/fudge.rs b/src/librustc/infer/fudge.rs index d205cfcf73b..5f6a8802b4d 100644 --- a/src/librustc/infer/fudge.rs +++ b/src/librustc/infer/fudge.rs @@ -22,13 +22,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// closure `f`. In our example above, what this closure will do /// is to unify the expectation (`Option<&[u32]>`) with the actual /// return type (`Option<?T>`, where `?T` represents the variable - /// instantiated for `T`). This will cause `?T` to be unified + /// instantiated for `T`). This will cause `?T` to be unified /// with `&?a [u32]`, where `?a` is a fresh lifetime variable. The /// input type (`?T`) is then returned by `f()`. /// /// At this point, `fudge_regions_if_ok` will normalize all type /// variables, converting `?T` to `&?a [u32]` and end the - /// snapshot. The problem is that we can't just return this type + /// snapshot. The problem is that we can't just return this type /// out, because it references the region variable `?a`, and that /// region variable was popped when we popped the snapshot. 
/// diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs index c7fc446b978..7f01078737d 100644 --- a/src/librustc/infer/higher_ranked/mod.rs +++ b/src/librustc/infer/higher_ranked/mod.rs @@ -54,7 +54,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - /// Replace all regions (resp. types) bound by `binder` with placeholder + /// Replaces all regions (resp. types) bound by `binder` with placeholder /// regions (resp. types) and return a map indicating which bound-region /// placeholder region. This is the first step of checking subtyping /// when higher-ranked things are involved. diff --git a/src/librustc/infer/lattice.rs b/src/librustc/infer/lattice.rs index dfa086a64de..e40bb972407 100644 --- a/src/librustc/infer/lattice.rs +++ b/src/librustc/infer/lattice.rs @@ -1,7 +1,7 @@ //! # Lattice Variables //! //! This file contains generic code for operating on inference variables -//! that are characterized by an upper- and lower-bound. The logic and +//! that are characterized by an upper- and lower-bound. The logic and //! reasoning is explained in detail in the large comment in `infer.rs`. //! //! The code in here is defined quite generically so that it can be @@ -13,7 +13,7 @@ //! //! Although all the functions are generic, we generally write the //! comments in a way that is specific to type variables and the LUB -//! operation. It's just easier that way. +//! operation. It's just easier that way. //! //! In general all of the functions are defined parametrically //! over a `LatticeValue`, which is a value defined with respect to diff --git a/src/librustc/infer/lexical_region_resolve/mod.rs b/src/librustc/infer/lexical_region_resolve/mod.rs index 7add8a26ede..03ade882536 100644 --- a/src/librustc/infer/lexical_region_resolve/mod.rs +++ b/src/librustc/infer/lexical_region_resolve/mod.rs @@ -1,4 +1,4 @@ -//! The code to do lexical region resolution. +//! 
Lexical region resolution. use crate::infer::region_constraints::Constraint; use crate::infer::region_constraints::GenericKind; @@ -492,20 +492,20 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> { match *value { VarValue::Value(_) => { /* Inference successful */ } VarValue::ErrorValue => { - /* Inference impossible, this value contains + /* Inference impossible: this value contains inconsistent constraints. I think that in this case we should report an - error now---unlike the case above, we can't + error now -- unlike the case above, we can't wait to see whether the user needs the result - of this variable. The reason is that the mere + of this variable. The reason is that the mere existence of this variable implies that the region graph is inconsistent, whether or not it is used. For example, we may have created a region variable that is the GLB of two other regions - which do not have a GLB. Even if that variable + which do not have a GLB. Even if that variable is not used, it implies that those two regions *should* have a GLB. diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 06c94d13334..334eccb9564 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -221,7 +221,7 @@ pub struct InferCtxt<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { /// replaced with. pub type PlaceholderMap<'tcx> = BTreeMap<ty::BoundRegion, ty::Region<'tcx>>; -/// See `error_reporting` module for more details +/// See the `error_reporting` module for more details. #[derive(Clone, Debug, PartialEq, Eq)] pub enum ValuePairs<'tcx> { Types(ExpectedFound<Ty<'tcx>>), @@ -233,7 +233,7 @@ pub enum ValuePairs<'tcx> { /// The trace designates the path through inference that we took to /// encounter an error or subtyping constraint. /// -/// See `error_reporting` module for more details. +/// See the `error_reporting` module for more details. 
#[derive(Clone)] pub struct TypeTrace<'tcx> { cause: ObligationCause<'tcx>, @@ -454,9 +454,9 @@ impl fmt::Display for FixupError { } } -/// Helper type of a temporary returned by tcx.infer_ctxt(). +/// Helper type of a temporary returned by `tcx.infer_ctxt()`. /// Necessary because we can't write the following bound: -/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>). +/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>)`. pub struct InferCtxtBuilder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { global_tcx: TyCtxt<'a, 'gcx, 'gcx>, arena: SyncDroplessArena, @@ -487,7 +487,7 @@ impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> { /// inference context that contains each of the bound values /// within instantiated as a fresh variable. The `f` closure is /// invoked with the new infcx, along with the instantiated value - /// `V` and a substitution `S`. This substitution `S` maps from + /// `V` and a substitution `S`. This substitution `S` maps from /// the bound values in `C` to their instantiated values in `V` /// (in other words, `S(C) = V`). pub fn enter_with_canonical<T, R>( @@ -563,7 +563,7 @@ impl<'tcx, T> InferOk<'tcx, T> { } } - /// Extract `value`, registering any obligations into `fulfill_cx` + /// Extracts `value`, registering any obligations into `fulfill_cx`. pub fn into_value_registering_obligations( self, infcx: &InferCtxt<'_, '_, 'tcx>, @@ -794,7 +794,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { .commit(region_constraints_snapshot); } - /// Execute `f` and commit the bindings + /// Executes `f` and commit the bindings. pub fn commit_unconditionally<R, F>(&self, f: F) -> R where F: FnOnce() -> R, @@ -806,7 +806,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { r } - /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)` + /// Executes `f` and commit the bindings if closure `f` returns `Ok(_)`. 
pub fn commit_if_ok<T, E, F>(&self, f: F) -> Result<T, E> where F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result<T, E>, @@ -838,7 +838,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { r } - /// Execute `f` then unroll any bindings it creates + /// Executes `f` then unroll any bindings it creates. pub fn probe<R, F>(&self, f: F) -> R where F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, @@ -996,14 +996,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.float_unification_table.borrow_mut().new_key(None) } - /// Create a fresh region variable with the next available index. + /// Creates a fresh region variable with the next available index. /// The variable will be created in the maximum universe created /// thus far, allowing it to name any region created thus far. pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> { self.next_region_var_in_universe(origin, self.universe()) } - /// Create a fresh region variable with the next available index + /// Creates a fresh region variable with the next available index /// in the given universe; typically, you can use /// `next_region_var` and just use the maximal universe. pub fn next_region_var_in_universe( @@ -1069,8 +1069,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { Substs::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) } - /// True if errors have been reported since this infcx was - /// created. This is sometimes used as a heuristic to skip + /// Returns `true` if errors have been reported since this infcx was + /// created. This is sometimes used as a heuristic to skip /// reporting errors that often occur as a result of earlier /// errors, but where it's hard to be 100% sure (e.g., unresolved /// inference variables, regionck errors). @@ -1278,7 +1278,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { value.fold_with(&mut r) } - /// Returns true if `T` contains unresolved type variables. 
In the + /// Returns `true` if `T` contains unresolved type variables. In the /// process of visiting `T`, this will resolve (where possible) /// type variables in `T`, but it never constructs the final, /// resolved type, so it's more efficient than @@ -1369,7 +1369,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.tcx.replace_bound_vars(value, fld_r, fld_t) } - /// See `verify_generic_bound` method in `region_constraints` + /// See the [`region_constraints::verify_generic_bound`] method. pub fn verify_generic_bound( &self, origin: SubregionOrigin<'tcx>, @@ -1421,7 +1421,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { closure_kind_ty.to_opt_closure_kind() } - /// Obtain the signature of a closure. For closures, unlike + /// Obtain the signature of a closure. For closures, unlike /// `tcx.fn_sig(def_id)`, this method will work during the /// type-checking of the enclosing function and return the closure /// signature in its partially inferred state. @@ -1466,8 +1466,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } /// Clears the selection, evaluation, and projection caches. This is useful when - /// repeatedly attempting to select an Obligation while changing only - /// its ParamEnv, since FulfillmentContext doesn't use 'probe' + /// repeatedly attempting to select an `Obligation` while changing only + /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. pub fn clear_caches(&self) { self.selection_cache.clear(); self.evaluation_cache.clear(); @@ -1478,7 +1478,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { self.universe.get() } - /// Create and return a fresh universe that extends all previous + /// Creates and return a fresh universe that extends all previous /// universes. Updates `self.universe` to that new universe. 
pub fn create_next_universe(&self) -> ty::UniverseIndex { let u = self.universe.get().next_universe(); diff --git a/src/librustc/infer/nll_relate/mod.rs b/src/librustc/infer/nll_relate/mod.rs index 7671a471357..f37e24b292e 100644 --- a/src/librustc/infer/nll_relate/mod.rs +++ b/src/librustc/infer/nll_relate/mod.rs @@ -47,17 +47,17 @@ where /// How are we relating `a` and `b`? /// - /// - covariant means `a <: b` - /// - contravariant means `b <: a` - /// - invariant means `a == b - /// - bivariant means that it doesn't matter + /// - Covariant means `a <: b`. + /// - Contravariant means `b <: a`. + /// - Invariant means `a == b. + /// - Bivariant means that it doesn't matter. ambient_variance: ty::Variance, /// When we pass through a set of binders (e.g., when looking into - /// a `fn` type), we push a new bound region scope onto here. This + /// a `fn` type), we push a new bound region scope onto here. This /// will contain the instantiated region for each region in those /// binders. When we then encounter a `ReLateBound(d, br)`, we can - /// use the debruijn index `d` to find the right scope, and then + /// use the De Bruijn index `d` to find the right scope, and then /// bound region name `br` to find the specific instantiation from /// within that scope. See `replace_bound_region`. /// @@ -114,7 +114,7 @@ pub trait TypeRelatingDelegate<'tcx> { /// Define the normalization strategy to use, eager or lazy. fn normalization() -> NormalizationStrategy; - /// Enable some optimizations if we do not expect inference variables + /// Enables some optimizations if we do not expect inference variables /// in the RHS of the relation. fn forbid_inference_vars() -> bool; } @@ -208,7 +208,7 @@ where /// When we encounter binders during the type traversal, we record /// the value to substitute for each of the things contained in /// that binder. (This will be either a universal placeholder or - /// an existential inference variable.) 
Given the debruijn index + /// an existential inference variable.) Given the De Bruijn index /// `debruijn` (and name `br`) of some binder we have now /// encountered, this routine finds the value that we instantiated /// the region with; to do so, it indexes backwards into the list diff --git a/src/librustc/infer/opaque_types/mod.rs b/src/librustc/infer/opaque_types/mod.rs index e28157f05f1..0e2c49a00da 100644 --- a/src/librustc/infer/opaque_types/mod.rs +++ b/src/librustc/infer/opaque_types/mod.rs @@ -46,7 +46,7 @@ pub struct OpaqueTypeDecl<'tcx> { /// lifetime parameter on `foo`.) pub concrete_ty: Ty<'tcx>, - /// True if the `impl Trait` bounds include region bounds. + /// Returns `true` if the `impl Trait` bounds include region bounds. /// For example, this would be true for: /// /// fn foo<'a, 'b, 'c>() -> impl Trait<'c> + 'a + 'b @@ -71,7 +71,7 @@ pub struct OpaqueTypeDecl<'tcx> { } impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { - /// Replace all opaque types in `value` with fresh inference variables + /// Replaces all opaque types in `value` with fresh inference variables /// and creates appropriate obligations. For example, given the input: /// /// impl Iterator<Item = impl Debug> @@ -88,7 +88,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// /// # Parameters /// - /// - `parent_def_id` -- the def-id of the function in which the opaque type + /// - `parent_def_id` -- the `DefId` of the function in which the opaque type /// is defined /// - `body_id` -- the body-id with which the resulting obligations should /// be associated @@ -132,7 +132,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// /// # The Problem /// - /// Let's work through an example to explain how it works. Assume + /// Let's work through an example to explain how it works. 
Assume /// the current function is as follows: /// /// ```text @@ -164,7 +164,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with /// fresh inference variables C1 and C2. We wish to use the values /// of these variables to infer the underlying types of `Foo1` and - /// `Foo2`. That is, this gives rise to higher-order (pattern) unification + /// `Foo2`. That is, this gives rise to higher-order (pattern) unification /// constraints like: /// /// ```text @@ -199,7 +199,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { /// /// Ordinarily, the subtyping rules would ensure that these are /// sufficiently large. But since `impl Bar<'a>` isn't a specific - /// type per se, we don't get such constraints by default. This + /// type per se, we don't get such constraints by default. This /// is where this function comes into play. It adds extra /// constraints to ensure that all the regions which appear in the /// inferred type are regions that could validly appear. @@ -813,7 +813,7 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> { } } -/// Whether `opaque_node_id` is a sibling or a child of a sibling of `def_id` +/// Returns `true` if `opaque_node_id` is a sibling or a child of a sibling of `def_id`. /// /// ```rust /// pub mod foo { @@ -827,11 +827,10 @@ impl<'a, 'gcx, 'tcx> Instantiator<'a, 'gcx, 'tcx> { /// } /// ``` /// -/// Here, `def_id` will be the `DefId` of the existential type `Baz`. -/// `opaque_node_id` is the `NodeId` of the reference to Baz -- -/// so either the return type of f1 or f2. -/// We will return true if the reference is within the same module as the existential type -/// So true for f1, false for f2. +/// Here, `def_id` is the `DefId` of the existential type `Baz` and `opaque_node_id` is the +/// `NodeId` of the reference to `Baz` (i.e., the return type of both `f1` and `f2`). 
+/// We return `true` if the reference is within the same module as the existential type +/// (i.e., `true` for `f1`, `false` for `f2`). pub fn may_define_existential_type( tcx: TyCtxt<'_, '_, '_>, def_id: DefId, diff --git a/src/librustc/infer/outlives/env.rs b/src/librustc/infer/outlives/env.rs index 20d03f3c6ed..43afb60ee17 100644 --- a/src/librustc/infer/outlives/env.rs +++ b/src/librustc/infer/outlives/env.rs @@ -63,7 +63,7 @@ pub struct OutlivesEnvironment<'tcx> { } /// "Region-bound pairs" tracks outlives relations that are known to -/// be true, either because of explicit where clauses like `T: 'a` or +/// be true, either because of explicit where-clauses like `T: 'a` or /// because of implied bounds. pub type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>; diff --git a/src/librustc/infer/outlives/free_region_map.rs b/src/librustc/infer/outlives/free_region_map.rs index 7daf6d71980..78353e52ad4 100644 --- a/src/librustc/infer/outlives/free_region_map.rs +++ b/src/librustc/infer/outlives/free_region_map.rs @@ -24,7 +24,7 @@ impl<'tcx> FreeRegionMap<'tcx> { } } - /// Compute the least-upper-bound of two free regions. In some + /// Computes the least-upper-bound of two free regions. In some /// cases, this is more conservative than necessary, in order to /// avoid making arbitrary choices. See /// `TransitiveRelation::postdom_upper_bound` for more details. diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs index 884bd58b402..c40fbfb25e4 100644 --- a/src/librustc/infer/outlives/obligations.rs +++ b/src/librustc/infer/outlives/obligations.rs @@ -55,7 +55,7 @@ //! fn foo<U, F: for<'a> FnMut(&'a U)>(_f: F) {} //! ``` //! -//! the type of the closure's first argument would be `&'a ?U`. We +//! the type of the closure's first argument would be `&'a ?U`. We //! might later infer `?U` to something like `&'b u32`, which would //! imply that `'b: 'a`. 
diff --git a/src/librustc/infer/outlives/verify.rs b/src/librustc/infer/outlives/verify.rs index 0457e717946..494f708c6a7 100644 --- a/src/librustc/infer/outlives/verify.rs +++ b/src/librustc/infer/outlives/verify.rs @@ -74,7 +74,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { /// This is an "approximate" check -- it may not find all /// applicable bounds, and not all the bounds it returns can be /// relied upon. In particular, this check ignores region - /// identity. So, for example, if we have `<T as + /// identity. So, for example, if we have `<T as /// Trait<'0>>::Item` where `'0` is a region variable, and the /// user has `<T as Trait<'a>>::Item: 'b` in the environment, then /// the clause from the environment only applies if `'0 = 'a`, @@ -96,7 +96,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { }) } - /// Searches the where clauses in scope for regions that + /// Searches the where-clauses in scope for regions that /// `projection_ty` is known to outlive. Currently requires an /// exact match. pub fn projection_declared_bounds_from_trait( @@ -251,7 +251,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { .map(move |r| r.subst(tcx, projection_ty.substs)) } - /// Given the def-id of an associated item, returns any region + /// Given the `DefId` of an associated item, returns any region /// bounds attached to that associated item from the trait definition. /// /// For example: @@ -262,7 +262,7 @@ impl<'cx, 'gcx, 'tcx> VerifyBoundCx<'cx, 'gcx, 'tcx> { /// } /// ``` /// - /// If we were given the def-id of `Foo::Bar`, we would return + /// If we were given the `DefId` of `Foo::Bar`, we would return /// `'a`. You could then apply the substitutions from the /// projection to convert this into your namespace. 
This also /// works if the user writes `where <Self as Foo<'a>>::Bar: 'a` on diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 500497dc011..65d25333c71 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -1,4 +1,4 @@ -//! See README.md +//! See `README.md`. use self::CombineMapType::*; use self::UndoLog::*; @@ -108,16 +108,16 @@ pub struct RegionConstraintData<'tcx> { pub givens: FxHashSet<(Region<'tcx>, ty::RegionVid)>, } -/// A constraint that influences the inference process. +/// Represents a constraint that influences the inference process. #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)] pub enum Constraint<'tcx> { - /// One region variable is subregion of another + /// A region variable is a subregion of another. VarSubVar(RegionVid, RegionVid), - /// Concrete region is subregion of region variable + /// A concrete region is a subregion of region variable. RegSubVar(Region<'tcx>, RegionVid), - /// Region variable is subregion of concrete region. This does not + /// A region variable is a subregion of a concrete region. This does not /// directly affect inference, but instead is checked after /// inference is complete. VarSubReg(RegionVid, Region<'tcx>), @@ -138,9 +138,9 @@ impl Constraint<'_> { } } -/// VerifyGenericBound(T, _, R, RS): The parameter type `T` (or +/// `VerifyGenericBound(T, _, R, RS)`: the parameter type `T` (or /// associated type) must outlive the region `R`. `T` is known to -/// outlive `RS`. Therefore verify that `R <= RS[i]` for some +/// outlive `RS`. Therefore, verify that `R <= RS[i]` for some /// `i`. Inference variables may be involved (but this verification /// step doesn't influence inference). #[derive(Debug, Clone)] @@ -164,7 +164,7 @@ EnumTypeFoldableImpl! 
{ } } -/// Describes the things that some `GenericKind` value G is known to +/// Describes the things that some `GenericKind` value `G` is known to /// outlive. Each variant of `VerifyBound` can be thought of as a /// function: /// @@ -187,13 +187,15 @@ pub enum VerifyBound<'tcx> { /// following, where `G` is the generic for which this verify /// bound was created: /// - /// fn(min) -> bool { - /// if G == K { + /// ```rust + /// fn(min) -> bool { + /// if G == K { /// B(min) - /// } else { + /// } else { /// false - /// } /// } + /// } + /// ``` /// /// In other words, if the generic `G` that we are checking is /// equal to `K`, then check the associated verify bound @@ -202,14 +204,16 @@ pub enum VerifyBound<'tcx> { /// This is used when we have something in the environment that /// may or may not be relevant, depending on the region inference /// results. For example, we may have `where <T as - /// Trait<'a>>::Item: 'b` in our where clauses. If we are + /// Trait<'a>>::Item: 'b` in our where-clauses. If we are /// generating the verify-bound for `<T as Trait<'0>>::Item`, then /// this where-clause is only relevant if `'0` winds up inferred /// to `'a`. /// /// So we would compile to a verify-bound like /// - /// IfEq(<T as Trait<'a>>::Item, AnyRegion('a)) + /// ``` + /// IfEq(<T as Trait<'a>>::Item, AnyRegion('a)) + /// ``` /// /// meaning, if the subject G is equal to `<T as Trait<'a>>::Item` /// (after inference), and `'a: min`, then `G: min`. @@ -217,9 +221,11 @@ pub enum VerifyBound<'tcx> { /// Given a region `R`, expands to the function: /// - /// fn(min) -> bool { - /// R: min - /// } + /// ``` + /// fn(min) -> bool { + /// R: min + /// } + /// ``` /// /// This is used when we can establish that `G: R` -- therefore, /// if `R: min`, then by transitivity `G: min`. 
@@ -227,20 +233,23 @@ pub enum VerifyBound<'tcx> { /// Given a set of bounds `B`, expands to the function: /// - /// fn(min) -> bool { - /// exists (b in B) { b(min) } - /// } + /// ```rust + /// fn(min) -> bool { + /// exists (b in B) { b(min) } + /// } + /// ``` /// /// In other words, if we meet some bound in `B`, that suffices. - /// This is used when all the bounds in `B` are known to apply to - /// G. + /// This is used when all the bounds in `B` are known to apply to `G`. AnyBound(Vec<VerifyBound<'tcx>>), /// Given a set of bounds `B`, expands to the function: /// - /// fn(min) -> bool { - /// forall (b in B) { b(min) } - /// } + /// ```rust + /// fn(min) -> bool { + /// forall (b in B) { b(min) } + /// } + /// ``` /// /// In other words, if we meet *all* bounds in `B`, that suffices. /// This is used when *some* bound in `B` is known to suffice, but @@ -256,19 +265,19 @@ struct TwoRegions<'tcx> { #[derive(Copy, Clone, PartialEq)] enum UndoLog<'tcx> { - /// We added `RegionVid` + /// We added `RegionVid`. AddVar(RegionVid), - /// We added the given `constraint` + /// We added the given `constraint`. AddConstraint(Constraint<'tcx>), - /// We added the given `verify` + /// We added the given `verify`. AddVerify(usize), - /// We added the given `given` + /// We added the given `given`. AddGiven(Region<'tcx>, ty::RegionVid), - /// We added a GLB/LUB "combination variable" + /// We added a GLB/LUB "combination variable". AddCombination(CombineMapType, TwoRegions<'tcx>), /// During skolemization, we sometimes purge entries from the undo @@ -303,7 +312,7 @@ pub struct RegionSnapshot { /// When working with placeholder regions, we often wish to find all of /// the regions that are either reachable from a placeholder region, or /// which can reach a placeholder region, or both. We call such regions -/// *tainted* regions. This struct allows you to decide what set of +/// *tainted* regions. This struct allows you to decide what set of /// tainted regions you want. 
#[derive(Debug)] pub struct TaintDirections { @@ -359,7 +368,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> { /// Takes (and clears) the current set of constraints. Note that /// the set of variables remains intact, but all relationships - /// between them are reset. This is used during NLL checking to + /// between them are reset. This is used during NLL checking to /// grab the set of constraints that arose from a particular /// operation. /// @@ -707,7 +716,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> { } } - /// See `Verify::VerifyGenericBound` + /// See [`Verify::VerifyGenericBound`]. pub fn verify_generic_bound( &mut self, origin: SubregionOrigin<'tcx>, @@ -837,7 +846,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> { }).collect() } - /// See [`RegionInference::region_constraints_added_in_snapshot`] + /// See [`RegionInference::region_constraints_added_in_snapshot`]. pub fn region_constraints_added_in_snapshot(&self, mark: &RegionSnapshot) -> Option<bool> { self.undo_log[mark.length..] .iter() @@ -925,7 +934,8 @@ impl<'a, 'gcx, 'tcx> VerifyBound<'tcx> { } impl<'tcx> RegionConstraintData<'tcx> { - /// True if this region constraint data contains no constraints. + /// Returns `true` if this region constraint data contains no constraints, and `false` + /// otherwise. pub fn is_empty(&self) -> bool { let RegionConstraintData { constraints, diff --git a/src/librustc/infer/type_variable.rs b/src/librustc/infer/type_variable.rs index 14f3261bfc2..09a0a6ce9c9 100644 --- a/src/librustc/infer/type_variable.rs +++ b/src/librustc/infer/type_variable.rs @@ -218,7 +218,7 @@ impl<'tcx> TypeVariableTable<'tcx> { self.sub_relations.find(vid) } - /// True if `a` and `b` have same "sub-root" (i.e., exists some + /// Returns `true` if `a` and `b` have same "sub-root" (i.e., exists some /// type X such that `forall i in {a, b}. (i <: X || X <: i)`. 
pub fn sub_unified(&mut self, a: ty::TyVid, b: ty::TyVid) -> bool { self.sub_root_var(a) == self.sub_root_var(b) @@ -245,9 +245,9 @@ impl<'tcx> TypeVariableTable<'tcx> { } } - /// Creates a snapshot of the type variable state. This snapshot + /// Creates a snapshot of the type variable state. This snapshot /// must later be committed (`commit()`) or rolled back - /// (`rollback_to()`). Nested snapshots are permitted, but must + /// (`rollback_to()`). Nested snapshots are permitted, but must /// be processed in a stack-like fashion. pub fn snapshot(&mut self) -> Snapshot<'tcx> { Snapshot { @@ -306,7 +306,7 @@ impl<'tcx> TypeVariableTable<'tcx> { .collect() } - /// Find the set of type variables that existed *before* `s` + /// Finds the set of type variables that existed *before* `s` /// but which have only been unified since `s` started, and /// return the types with which they were unified. So if we had /// a type variable `V0`, then we started the snapshot, then we diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 27ead805d5d..b90ef4ea221 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -3,7 +3,7 @@ //! The lint checking is mostly consolidated into one pass which runs //! after all other analyses. Throughout compilation, lint warnings //! can be added via the `add_lint` method on the Session structure. This -//! requires a span and an id of the node that the lint is being added to. The +//! requires a span and an ID of the node that the lint is being added to. The //! lint isn't actually emitted at that time because it is unknown what the //! actual lint level at that location is. //! @@ -11,7 +11,7 @@ //! A context keeps track of the current state of all lint levels. //! Upon entering a node of the ast which can modify the lint settings, the //! previous lint state is pushed onto a stack and the ast is then recursed -//! upon. As the ast is traversed, this keeps track of the current lint level +//! upon. 
As the ast is traversed, this keeps track of the current lint level //! for all lint attributes. use self::TargetLint::*; @@ -703,7 +703,7 @@ impl<'a, T: EarlyLintPass> EarlyContextAndPass<'a, T> { impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> { type PassObject = LateLintPassObject; - /// Get the overall compiler `Session` object. + /// Gets the overall compiler `Session` object. fn sess(&self) -> &Session { &self.tcx.sess } @@ -736,7 +736,7 @@ impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> { impl<'a> LintContext<'a> for EarlyContext<'a> { type PassObject = EarlyLintPassObject; - /// Get the overall compiler `Session` object. + /// Gets the overall compiler `Session` object. fn sess(&self) -> &Session { &self.sess } @@ -1200,7 +1200,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> } -/// Perform lint checking on a crate. +/// Performs lint checking on a crate. /// /// Consumes the `lint_store` field of the `Session`. pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 8952ae98e59..9fcc3be66aa 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -72,7 +72,7 @@ pub struct Lint { /// `default_level`. pub edition_lint_opts: Option<(Edition, Level)>, - /// Whether this lint is reported even inside expansions of external macros + /// `true` if this lint is reported even inside expansions of external macros. pub report_in_external_macro: bool, } @@ -86,7 +86,7 @@ impl Lint { } } - /// Get the lint's name, with ASCII letters converted to lowercase. + /// Gets the lint's name, with ASCII letters converted to lowercase. pub fn name_lower(&self) -> String { self.name.to_ascii_lowercase() } @@ -99,7 +99,7 @@ impl Lint { } } -/// Declare a static item of type `&'static Lint`. +/// Declares a static item of type `&'static Lint`. #[macro_export] macro_rules! 
declare_lint { ($vis: vis $NAME: ident, $Level: ident, $desc: expr) => ( @@ -150,7 +150,7 @@ macro_rules! declare_tool_lint { ); } -/// Declare a static `LintArray` and return it as an expression. +/// Declares a static `LintArray` and return it as an expression. #[macro_export] macro_rules! lint_array { ($( $lint:expr ),* ,) => { lint_array!( $($lint),* ) }; @@ -164,7 +164,7 @@ pub type LintArray = Vec<&'static Lint>; pub trait LintPass { fn name(&self) -> &'static str; - /// Get descriptions of the lints this `LintPass` object can emit. + /// Gets descriptions of the lints this `LintPass` object can emit. /// /// N.B., there is no enforcement that the object only emits lints it registered. /// And some `rustc` internal `LintPass`es register lints to be emitted by other @@ -487,7 +487,7 @@ impl hash::Hash for LintId { } impl LintId { - /// Get the `LintId` for a `Lint`. + /// Gets the `LintId` for a `Lint`. pub fn of(lint: &'static Lint) -> LintId { LintId { lint, @@ -498,7 +498,7 @@ impl LintId { self.lint.name } - /// Get the name of the lint. + /// Gets the name of the lint. pub fn to_string(&self) -> String { self.lint.name_lower() } @@ -518,7 +518,7 @@ impl_stable_hash_for!(enum self::Level { }); impl Level { - /// Convert a level to a lower-case string. + /// Converts a level to a lower-case string. pub fn as_str(self) -> &'static str { match self { Allow => "allow", @@ -528,7 +528,7 @@ impl Level { } } - /// Convert a lower-case string to a level. + /// Converts a lower-case string to a level. pub fn from_str(x: &str) -> Option<Level> { match x { "allow" => Some(Allow), diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 0939f07f43b..8da20ba4266 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -1,4 +1,4 @@ -//! A different sort of visitor for walking fn bodies. Unlike the +//! A different sort of visitor for walking fn bodies. Unlike the //! 
normal visitor, which just walks the entire body in one shot, the //! `ExprUseVisitor` determines how expressions are being used. @@ -800,8 +800,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.consume_expr(&arm.body); } - /// Walks a pat that occurs in isolation (i.e., top-level of fn - /// arg or let binding. *Not* a match arm or nested pat.) + /// Walks a pat that occurs in isolation (i.e., top-level of fn argument or + /// let binding, and *not* a match arm or nested pat.) fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) { let mut mode = Unknown; self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode); diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs index e752643e842..fc345df6551 100644 --- a/src/librustc/middle/free_region.rs +++ b/src/librustc/middle/free_region.rs @@ -1,9 +1,7 @@ -//! This file handles the relationships between free regions -- -//! meaning lifetime parameters. Ordinarily, free regions are -//! unrelated to one another, but they can be related via implied or -//! explicit bounds. In that case, we track the bounds using the -//! `TransitiveRelation` type and use that to decide when one free -//! region outlives another and so forth. +//! This module handles the relationships between "free regions", i.e., lifetime parameters. +//! Ordinarily, free regions are unrelated to one another, but they can be related via implied +//! or explicit bounds. In that case, we track the bounds using the `TransitiveRelation` type, +//! and use that to decide when one free region outlives another, and so forth. use crate::infer::outlives::free_region_map::{FreeRegionMap, FreeRegionRelations}; use crate::hir::def_id::DefId; @@ -16,17 +14,17 @@ use crate::ty::{self, TyCtxt, Region}; /// regions. /// /// This stuff is a bit convoluted and should be refactored, but as we -/// move to NLL it'll all go away anyhow. +/// transition to NLL, it'll all go away anyhow. 
pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'gcx, 'tcx>, - /// context used to fetch the region maps + /// The context used to fetch the region maps. pub context: DefId, - /// region maps for the given context + /// The region maps for the given context. pub region_scope_tree: &'a region::ScopeTree, - /// free-region relationships + /// Free-region relationships. pub free_regions: &'a FreeRegionMap<'tcx>, } @@ -45,7 +43,7 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { } } - /// Determines whether one region is a subregion of another. This is intended to run *after + /// Determines whether one region is a subregion of another. This is intended to run *after /// inference* and sadly the logic is somewhat duplicated with the code in infer.rs. pub fn is_subregion_of(&self, sub_region: ty::Region<'tcx>, @@ -86,7 +84,7 @@ impl<'a, 'gcx, 'tcx> RegionRelations<'a, 'gcx, 'tcx> { result } - /// Determines whether this free-region is required to be 'static + /// Determines whether this free region is required to be `'static`. fn is_static(&self, super_region: ty::Region<'tcx>) -> bool { debug!("is_static(super_region={:?})", super_region); match *super_region { diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 4eb7b918dd8..a18574f030c 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -1,27 +1,27 @@ -//! A classic liveness analysis based on dataflow over the AST. Computes, +//! A classic liveness analysis based on dataflow over the AST. Computes, //! for each local variable in a function, whether that variable is live -//! at a given point. Program execution points are identified by their -//! id. +//! at a given point. Program execution points are identified by their +//! IDs. //! //! # Basic idea //! -//! The basic model is that each local variable is assigned an index. We +//! The basic model is that each local variable is assigned an index. We //! 
represent sets of local variables using a vector indexed by this -//! index. The value in the vector is either 0, indicating the variable -//! is dead, or the id of an expression that uses the variable. +//! index. The value in the vector is either 0, indicating the variable +//! is dead, or the ID of an expression that uses the variable. //! -//! We conceptually walk over the AST in reverse execution order. If we -//! find a use of a variable, we add it to the set of live variables. If +//! We conceptually walk over the AST in reverse execution order. If we +//! find a use of a variable, we add it to the set of live variables. If //! we find an assignment to a variable, we remove it from the set of live -//! variables. When we have to merge two flows, we take the union of -//! those two flows---if the variable is live on both paths, we simply -//! pick one id. In the event of loops, we continue doing this until a +//! variables. When we have to merge two flows, we take the union of +//! those two flows -- if the variable is live on both paths, we simply +//! pick one ID. In the event of loops, we continue doing this until a //! fixed point is reached. //! //! ## Checking initialization //! -//! At the function entry point, all variables must be dead. If this is -//! not the case, we can report an error using the id found in the set of +//! At the function entry point, all variables must be dead. If this is +//! not the case, we can report an error using the ID found in the set of //! live variables, which identifies a use of the variable which is not //! dominated by an assignment. //! @@ -38,20 +38,20 @@ //! //! The actual implementation contains two (nested) walks over the AST. //! The outer walk has the job of building up the ir_maps instance for the -//! enclosing function. On the way down the tree, it identifies those AST +//! enclosing function. On the way down the tree, it identifies those AST //! 
nodes and variable IDs that will be needed for the liveness analysis -//! and assigns them contiguous IDs. The liveness id for an AST node is -//! called a `live_node` (it's a newtype'd u32) and the id for a variable -//! is called a `variable` (another newtype'd u32). +//! and assigns them contiguous IDs. The liveness ID for an AST node is +//! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable +//! is called a `variable` (another newtype'd `u32`). //! //! On the way back up the tree, as we are about to exit from a function -//! declaration we allocate a `liveness` instance. Now that we know +//! declaration we allocate a `liveness` instance. Now that we know //! precisely how many nodes and variables we need, we can allocate all -//! the various arrays that we will need to precisely the right size. We then +//! the various arrays that we will need to precisely the right size. We then //! perform the actual propagation on the `liveness` instance. //! //! This propagation is encoded in the various `propagate_through_*()` -//! methods. It effectively does a reverse walk of the AST; whenever we +//! methods. It effectively does a reverse walk of the AST; whenever we //! reach a loop node, we iterate until a fixed point is reached. //! //! ## The `RWU` struct @@ -60,21 +60,21 @@ //! variable `V` (these are encapsulated in the `RWU` struct): //! //! - `reader`: the `LiveNode` ID of some node which will read the value -//! that `V` holds on entry to `N`. Formally: a node `M` such +//! that `V` holds on entry to `N`. Formally: a node `M` such //! that there exists a path `P` from `N` to `M` where `P` does not -//! write `V`. If the `reader` is `invalid_node()`, then the current +//! write `V`. If the `reader` is `invalid_node()`, then the current //! value will never be read (the variable is dead, essentially). //! //! - `writer`: the `LiveNode` ID of some node which will write the -//! variable `V` and which is reachable from `N`. 
Formally: a node `M` +//! variable `V` and which is reachable from `N`. Formally: a node `M` //! such that there exists a path `P` from `N` to `M` and `M` writes -//! `V`. If the `writer` is `invalid_node()`, then there is no writer +//! `V`. If the `writer` is `invalid_node()`, then there is no writer //! of `V` that follows `N`. //! -//! - `used`: a boolean value indicating whether `V` is *used*. We +//! - `used`: a boolean value indicating whether `V` is *used*. We //! distinguish a *read* from a *use* in that a *use* is some read that -//! is not just used to generate a new value. For example, `x += 1` is -//! a read but not a use. This is used to generate better warnings. +//! is not just used to generate a new value. For example, `x += 1` is +//! a read but not a use. This is used to generate better warnings. //! //! ## Special Variables //! @@ -87,7 +87,7 @@ //! - `fallthrough_ln`: a live node that represents a fallthrough //! //! - `clean_exit_var`: a synthetic variable that is only 'read' from the -//! fallthrough node. It is only live if the function could converge +//! fallthrough node. It is only live if the function could converge //! via means other than an explicit `return` expression. That is, it is //! only dead if the end of the function's block can never be reached. //! It is the responsibility of typeck to ensure that there are no diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 04e4a0b39a2..b98f094aef9 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -3,7 +3,7 @@ //! The job of the categorization module is to analyze an expression to //! determine what kind of memory is used in evaluating it (for example, //! where dereferences occur and what kind of pointer is dereferenced; -//! whether the memory is mutable; etc) +//! whether the memory is mutable, etc.). //! //! Categorization effectively transforms all of our expressions into //! 
expressions of the following forms (the actual enum has many more @@ -16,21 +16,21 @@ //! | E.comp // access to an interior component //! //! Imagine a routine ToAddr(Expr) that evaluates an expression and returns an -//! address where the result is to be found. If Expr is a place, then this -//! is the address of the place. If Expr is an rvalue, this is the address of +//! address where the result is to be found. If Expr is a place, then this +//! is the address of the place. If `Expr` is an rvalue, this is the address of //! some temporary spot in memory where the result is stored. //! -//! Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr) +//! Now, `cat_expr()` classifies the expression `Expr` and the address `A = ToAddr(Expr)` //! as follows: //! -//! - cat: what kind of expression was this? This is a subset of the +//! - `cat`: what kind of expression was this? This is a subset of the //! full expression forms which only includes those that we care about //! for the purpose of the analysis. -//! - mutbl: mutability of the address A -//! - ty: the type of data found at the address A +//! - `mutbl`: mutability of the address `A`. +//! - `ty`: the type of data found at the address `A`. //! //! The resulting categorization tree differs somewhat from the expressions -//! themselves. For example, auto-derefs are explicit. Also, an index a[b] is +//! themselves. For example, auto-derefs are explicit. Also, an index a[b] is //! decomposed into two operations: a dereference to reach the array data and //! then an index to jump forward to the relevant item. //! 
diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 788d2185d6d..fd188b33d7e 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -85,11 +85,11 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, /// values live long enough; phrased another way, the starting point /// of each range is not really the important thing in the above /// picture, but rather the ending point. -/// -/// FIXME (pnkfelix): This currently derives `PartialOrd` and `Ord` to -/// placate the same deriving in `ty::FreeRegion`, but we may want to -/// actually attach a more meaningful ordering to scopes than the one -/// generated via deriving here. +// +// FIXME(pnkfelix): this currently derives `PartialOrd` and `Ord` to +// placate the same deriving in `ty::FreeRegion`, but we may want to +// actually attach a more meaningful ordering to scopes than the one +// generated via deriving here. #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy, RustcEncodable, RustcDecodable)] pub struct Scope { pub id: hir::ItemLocalId, @@ -140,14 +140,14 @@ pub enum ScopeData { /// /// For example, given `{ let (a, b) = EXPR_1; let c = EXPR_2; ... }`: /// -/// * the subscope with `first_statement_index == 0` is scope of both +/// * The subscope with `first_statement_index == 0` is scope of both /// `a` and `b`; it does not include EXPR_1, but does include /// everything after that first `let`. (If you want a scope that /// includes EXPR_1 as well, then do not use `Scope::Remainder`, /// but instead another `Scope` that encompasses the whole block, /// e.g., `Scope::Node`. /// -/// * the subscope with `first_statement_index == 1` is scope of `c`, +/// * The subscope with `first_statement_index == 1` is scope of `c`, /// and thus does not include EXPR_2, but covers the `...`. newtype_index! 
{ @@ -160,7 +160,7 @@ impl_stable_hash_for!(struct crate::middle::region::FirstStatementIndex { privat static_assert!(ASSERT_SCOPE_DATA: mem::size_of::<ScopeData>() == 4); impl Scope { - /// Returns a item-local id associated with this scope. + /// Returns an item-local ID associated with this scope. /// /// N.B., likely to be replaced as API is refined; e.g., pnkfelix /// anticipates `fn entry_node_id` and `fn each_exit_node_id`. @@ -180,8 +180,8 @@ impl Scope { } } - /// Returns the span of this Scope. Note that in general the - /// returned span may not correspond to the span of any node id in + /// Returns the span of this `Scope`. Note that in general the + /// returned span may not correspond to the span of any `NodeId` in /// the AST. pub fn span(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> Span { let node_id = self.node_id(tcx, scope_tree); @@ -225,19 +225,19 @@ pub struct ScopeTree { /// have lifetime parameters free in this body. root_parent: Option<ast::NodeId>, - /// `parent_map` maps from a scope id to the enclosing scope id; + /// `parent_map` maps from a scope ID to the enclosing scope ID; /// this is usually corresponding to the lexical nesting, though /// in the case of closures the parent scope is the innermost /// conditional expression or repeating block. (Note that the - /// enclosing scope id for the block associated with a closure is + /// enclosing scope ID for the block associated with a closure is /// the closure itself.) parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>, - /// `var_map` maps from a variable or binding id to the block in + /// `var_map` maps from a variable or binding ID to the block in /// which that variable is declared. 
var_map: FxHashMap<hir::ItemLocalId, Scope>, - /// maps from a node-id to the associated destruction scope (if any) + /// maps from a `NodeId` to the associated destruction scope (if any) destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>, /// `rvalue_scopes` includes entries for those expressions whose cleanup scope is @@ -252,8 +252,8 @@ pub struct ScopeTree { /// Encodes the hierarchy of fn bodies. Every fn body (including /// closures) forms its own distinct region hierarchy, rooted in - /// the block that is the fn body. This map points from the id of - /// that root block to the id of the root block for the enclosing + /// the block that is the fn body. This map points from the ID of + /// that root block to the ID of the root block for the enclosing /// fn, if any. Thus the map structures the fn bodies into a /// hierarchy based on their lexical mapping. This is used to /// handle the relationships between regions in a fn and in a @@ -382,7 +382,7 @@ struct RegionResolutionVisitor<'a, 'tcx: 'a> { /// upon exiting the parent scope, we cannot statically know how /// many times the expression executed, and thus if the expression /// creates temporaries we cannot know statically how many such - /// temporaries we would have to cleanup. Therefore we ensure that + /// temporaries we would have to cleanup. Therefore, we ensure that /// the temporaries never outlast the conditional/repeating /// expression, preventing the need for dynamic checks and/or /// arbitrary amounts of stack space. Terminating scopes end @@ -465,7 +465,7 @@ impl<'tcx> ScopeTree { } /// Records that `sub_closure` is defined within `sup_closure`. These ids - /// should be the id of the block that is the fn body, which is + /// should be the ID of the block that is the fn body, which is /// also the root of the region hierarchy for that fn. 
fn record_closure_parent(&mut self, sub_closure: hir::ItemLocalId, @@ -551,8 +551,8 @@ impl<'tcx> ScopeTree { self.is_subscope_of(scope2, scope1) } - /// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false - /// otherwise. + /// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and + /// `false` otherwise. pub fn is_subscope_of(&self, subscope: Scope, superscope: Scope) @@ -575,7 +575,7 @@ impl<'tcx> ScopeTree { return true; } - /// Returns the id of the innermost containing body + /// Returns the ID of the innermost containing body pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> { loop { if let ScopeData::CallSite = scope.data { @@ -586,7 +586,7 @@ impl<'tcx> ScopeTree { } } - /// Finds the nearest common ancestor of two scopes. That is, finds the + /// Finds the nearest common ancestor of two scopes. That is, finds the /// smallest scope which is greater than or equal to both `scope_a` and /// `scope_b`. pub fn nearest_common_ancestor(&self, scope_a: Scope, scope_b: Scope) -> Scope { @@ -1051,7 +1051,7 @@ fn resolve_local<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, visitor.visit_pat(pat); } - /// True if `pat` match the `P&` nonterminal: + /// Returns `true` if `pat` matches the `P&` non-terminal. /// /// P& = ref X /// | StructName { ..., P&, ... } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index f187928e0d9..4e42816b3c6 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -3,7 +3,7 @@ //! Name resolution for lifetimes follows MUCH simpler rules than the //! full resolve. For example, lifetime names are never exported or //! used between functions, and they operate in a purely top-down -//! way. Therefore we break lifetime name resolution into a separate pass. +//! way. Therefore, we break lifetime name resolution into a separate pass. 
use crate::hir::def::Def; use crate::hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE}; @@ -207,7 +207,7 @@ struct NamedRegionMap { pub object_lifetime_defaults: NodeMap<Vec<ObjectLifetimeDefault>>, } -/// See `NamedRegionMap`. +/// See [`NamedRegionMap`]. #[derive(Default)] pub struct ResolveLifetimes { defs: FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Region>>>, @@ -227,21 +227,19 @@ struct LifetimeContext<'a, 'tcx: 'a> { map: &'a mut NamedRegionMap, scope: ScopeRef<'a>, - /// Deep breath. Our representation for poly trait refs contains a single + /// This is slightly complicated. Our representation for poly-trait-refs contains a single /// binder and thus we only allow a single level of quantification. However, /// the syntax of Rust permits quantification in two places, e.g., `T: for <'a> Foo<'a>` - /// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the de Bruijn indices + /// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the De Bruijn indices /// correct when representing these constraints, we should only introduce one /// scope. However, we want to support both locations for the quantifier and /// during lifetime resolution we want precise information (so we can't /// desugar in an earlier phase). /// - /// SO, if we encounter a quantifier at the outer scope, we set - /// trait_ref_hack to true (and introduce a scope), and then if we encounter - /// a quantifier at the inner scope, we error. If trait_ref_hack is false, + /// So, if we encounter a quantifier at the outer scope, we set + /// `trait_ref_hack` to `true` (and introduce a scope), and then if we encounter + /// a quantifier at the inner scope, we error. If `trait_ref_hack` is `false`, /// then we introduce the scope at the inner quantifier. - /// - /// I'm sorry. trait_ref_hack: bool, /// Used to disallow the use of in-band lifetimes in `fn` or `Fn` syntax. 
@@ -1676,7 +1674,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { /// If early bound lifetimes are present, we separate them into their own list (and likewise /// for late bound). They will be numbered sequentially, starting from the lowest index that is /// already in scope (for a fn item, that will be 0, but for a method it might not be). Late - /// bound lifetimes are resolved by name and associated with a binder id (`binder_id`), so the + /// bound lifetimes are resolved by name and associated with a binder ID (`binder_id`), so the /// ordering is not important there. fn visit_early_late<F>( &mut self, @@ -2610,7 +2608,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { } } - /// Returns true if, in the current scope, replacing `'_` would be + /// Returns `true` if, in the current scope, replacing `'_` would be /// equivalent to a single-use lifetime. fn track_lifetime_uses(&self) -> bool { let mut scope = self.scope; @@ -2714,7 +2712,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> { /// - it does not appear in a where-clause. /// /// "Constrained" basically means that it appears in any type but -/// not amongst the inputs to a projection. In other words, `<&'a +/// not amongst the inputs to a projection. In other words, `<&'a /// T as Trait<''b>>::Foo` does not constrain `'a` or `'b`. fn insert_late_bound_lifetimes( map: &mut NamedRegionMap, diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 34c77d08f5a..26b48873244 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -51,7 +51,7 @@ enum AnnotationKind { pub struct DeprecationEntry { /// The metadata of the attribute associated with this entry. pub attr: Deprecation, - /// The def id where the attr was originally attached. `None` for non-local + /// The `DefId` where the attr was originally attached. `None` for non-local /// `DefId`'s. 
origin: Option<HirId>, } @@ -475,7 +475,7 @@ pub fn provide(providers: &mut Providers<'_>) { }; } -/// Check whether an item marked with `deprecated(since="X")` is currently +/// Checks whether an item marked with `deprecated(since="X")` is currently /// deprecated (i.e., whether X is not greater than the current rustc version). pub fn deprecation_in_effect(since: &str) -> bool { fn parse_version(ver: &str) -> Vec<u32> { diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 119e855c585..312924e5e90 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -54,7 +54,7 @@ pub fn link_name(attrs: &[ast::Attribute]) -> Option<Symbol> { }) } -/// Returns whether the specified `lang_item` doesn't actually need to be +/// Returns `true` if the specified `lang_item` doesn't actually need to be /// present for this compilation. /// /// Not all lang items are always required for each compilation, particularly in diff --git a/src/librustc/mir/interpret/allocation.rs b/src/librustc/mir/interpret/allocation.rs index 7761e1fdafa..e96392edd64 100644 --- a/src/librustc/mir/interpret/allocation.rs +++ b/src/librustc/mir/interpret/allocation.rs @@ -1,4 +1,4 @@ -//! The virtual memory representation of the MIR interpreter +//! The virtual memory representation of the MIR interpreter. use super::{ Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar, @@ -54,7 +54,7 @@ pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone { /// Hook for performing extra checks on a memory read access. /// /// Takes read-only access to the allocation so we can keep all the memory read - /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you + /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you /// need to mutate. 
#[inline(always)] fn memory_read( @@ -133,7 +133,7 @@ impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {} /// Alignment and bounds checks impl<'tcx, Tag, Extra> Allocation<Tag, Extra> { - /// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end + /// Checks if the pointer is "in-bounds". Notice that a pointer pointing at the end /// of an allocation (i.e., at the first *inaccessible* location) *is* considered /// in-bounds! This follows C's/LLVM's rules. /// If you want to check bounds before doing a memory access, better use `check_bounds`. @@ -145,7 +145,7 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> { ptr.check_in_alloc(Size::from_bytes(allocation_size), InboundsCheck::Live) } - /// Check if the memory range beginning at `ptr` and of size `Size` is "in-bounds". + /// Checks if the memory range beginning at `ptr` and of size `Size` is "in-bounds". #[inline(always)] pub fn check_bounds( &self, @@ -161,7 +161,7 @@ impl<'tcx, Tag, Extra> Allocation<Tag, Extra> { /// Byte accessors impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> { /// The last argument controls whether we error out when there are undefined - /// or pointer bytes. You should never call this, call `get_bytes` or + /// or pointer bytes. You should never call this, call `get_bytes` or /// `get_bytes_with_undef_and_ptr` instead, /// /// This function also guarantees that the resulting pointer will remain stable @@ -462,7 +462,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> { /// Relocations impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> { - /// Return all relocations overlapping with the given ptr-offset pair. + /// Returns all relocations overlapping with the given ptr-offset pair. pub fn relocations( &self, cx: &impl HasDataLayout, @@ -476,7 +476,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> { self.relocations.range(Size::from_bytes(start)..end) } - /// Check that there are no relocations overlapping with the given range. 
+ /// Checks that there are no relocations overlapping with the given range. #[inline(always)] fn check_relocations( &self, @@ -491,10 +491,10 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> { } } - /// Remove all relocations inside the given range. + /// Removes all relocations inside the given range. /// If there are relocations overlapping with the edges, they /// are removed as well *and* the bytes they cover are marked as - /// uninitialized. This is a somewhat odd "spooky action at a distance", + /// uninitialized. This is a somewhat odd "spooky action at a distance", /// but it allows strictly more code to run than if we would just error /// immediately in that case. fn clear_relocations( @@ -633,7 +633,7 @@ impl UndefMask { m } - /// Check whether the range `start..end` (end-exclusive) is entirely defined. + /// Checks whether the range `start..end` (end-exclusive) is entirely defined. /// /// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte /// at which the first undefined access begins. diff --git a/src/librustc/mir/interpret/error.rs b/src/librustc/mir/interpret/error.rs index 870a51f95df..d456f29439d 100644 --- a/src/librustc/mir/interpret/error.rs +++ b/src/librustc/mir/interpret/error.rs @@ -19,7 +19,7 @@ use syntax::symbol::Symbol; #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ErrorHandled { - /// Already reported a lint or an error for this evaluation + /// Already reported a lint or an error for this evaluation. Reported, /// Don't emit an error, the evaluation failed because the MIR was generic /// and the substs didn't fully monomorphize it. @@ -212,7 +212,7 @@ pub type AssertMessage<'tcx> = EvalErrorKind<'tcx, mir::Operand<'tcx>>; #[derive(Clone, RustcEncodable, RustcDecodable)] pub enum EvalErrorKind<'tcx, O> { /// This variant is used by machines to signal their own errors that do not - /// match an existing variant + /// match an existing variant. 
MachineError(String), FunctionAbiMismatch(Abi, Abi), diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index efd233f1f38..83858829741 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -260,23 +260,23 @@ impl fmt::Display for AllocId { #[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable)] pub enum AllocKind<'tcx> { - /// The alloc id is used as a function pointer + /// The alloc ID is used as a function pointer. Function(Instance<'tcx>), - /// The alloc id points to a "lazy" static variable that did not get computed (yet). + /// The alloc ID points to a "lazy" static variable that did not get computed (yet). /// This is also used to break the cycle in recursive statics. Static(DefId), - /// The alloc id points to memory + /// The alloc ID points to memory. Memory(&'tcx Allocation), } pub struct AllocMap<'tcx> { - /// Lets you know what an AllocId refers to + /// Lets you know what an `AllocId` refers to. id_to_kind: FxHashMap<AllocId, AllocKind<'tcx>>, - /// Used to ensure that statics only get one associated AllocId + /// Used to ensure that statics only get one associated `AllocId`. type_interner: FxHashMap<AllocKind<'tcx>, AllocId>, - /// The AllocId to assign to the next requested id. + /// The `AllocId` to assign to the next requested ID. /// Always incremented, never gets smaller. next_id: AllocId, } @@ -345,7 +345,7 @@ impl<'tcx> AllocMap<'tcx> { } } - /// Generate an `AllocId` for a static or return a cached one in case this function has been + /// Generates an `AllocId` for a static or returns a cached one in case this function has been /// called on the same static before. 
pub fn intern_static(&mut self, static_id: DefId) -> AllocId { self.intern(AllocKind::Static(static_id)) diff --git a/src/librustc/mir/interpret/value.rs b/src/librustc/mir/interpret/value.rs index 73917342814..5ec7de4308a 100644 --- a/src/librustc/mir/interpret/value.rs +++ b/src/librustc/mir/interpret/value.rs @@ -13,16 +13,17 @@ pub struct RawConst<'tcx> { pub ty: Ty<'tcx>, } -/// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which -/// matches the LocalState optimizations for easy conversions between Value and ConstValue. +/// Represents a constant value in Rust. `Scalar` and `ScalarPair` are optimizations that +/// match the `LocalState` optimizations for easy conversions between `Value` and `ConstValue`. #[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)] pub enum ConstValue<'tcx> { - /// Used only for types with layout::abi::Scalar ABI and ZSTs + /// Used only for types with `layout::abi::Scalar` ABI and ZSTs. /// - /// Not using the enum `Value` to encode that this must not be `Undef` + /// Not using the enum `Value` to encode that this must not be `Undef`. Scalar(Scalar), - /// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`, ...) + /// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`, + /// etc.). /// /// Empty slices don't necessarily have an address backed by an `AllocId`, thus we also need to /// enable integer pointers. The `Scalar` type covers exactly those two cases. While we could @@ -30,8 +31,8 @@ pub enum ConstValue<'tcx> { /// it. Slice(Scalar, u64), - /// An allocation + offset into the allocation. - /// Invariant: The AllocId matches the allocation. + /// An allocation together with an offset into the allocation. + /// Invariant: the `AllocId` matches the allocation. 
ByRef(AllocId, &'tcx Allocation, Size), } diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index a0f16ae2715..3513d652b53 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -108,7 +108,7 @@ pub struct Mir<'tcx> { /// in scope, but a separate set of locals. pub promoted: IndexVec<Promoted, Mir<'tcx>>, - /// Yield type of the function, if it is a generator. + /// The yield type of the function, if it is a generator. pub yield_ty: Option<Ty<'tcx>>, /// Generator drop glue @@ -380,7 +380,7 @@ impl<'tcx> Mir<'tcx> { } } - /// Check if `sub` is a sub scope of `sup` + /// Checks if `sub` is a sub scope of `sup` pub fn is_sub_scope(&self, mut sub: SourceScope, sup: SourceScope) -> bool { while sub != sup { match self.source_scopes[sub].parent_scope { @@ -391,12 +391,12 @@ impl<'tcx> Mir<'tcx> { true } - /// Return the return type, it always return first element from `local_decls` array + /// Returns the return type; it always returns the first element from the `local_decls` array. pub fn return_ty(&self) -> Ty<'tcx> { self.local_decls[RETURN_PLACE].ty } - /// Get the location of the terminator for the given block + /// Gets the location of the terminator for the given block pub fn terminator_loc(&self, bb: BasicBlock) -> Location { Location { block: bb, @@ -526,7 +526,7 @@ pub enum BorrowKind { /// We can also report errors with this kind of borrow differently. Shallow, - /// Data must be immutable but not aliasable. This kind of borrow + /// Data must be immutable but not aliasable. This kind of borrow /// cannot currently be expressed by the user and is used only in /// implicit closure bindings. It is needed when the closure is /// borrowing or mutating a mutable referent, e.g.: @@ -565,8 +565,8 @@ pub enum BorrowKind { /// Data is mutable and not aliasable. 
Mut { - /// True if this borrow arose from method-call auto-ref - /// (i.e., `adjustment::Adjust::Borrow`) + /// `true` if this borrow arose from method-call auto-ref + /// (i.e., `adjustment::Adjust::Borrow`). allow_two_phase_borrow: bool, }, } @@ -610,7 +610,7 @@ pub struct VarBindingForm<'tcx> { /// If an explicit type was provided for this variable binding, /// this holds the source Span of that type. /// - /// NOTE: If you want to change this to a `HirId`, be wary that + /// NOTE: if you want to change this to a `HirId`, be wary that /// doing so breaks incremental compilation (as of this writing), /// while a `Span` does not cause our tests to fail. pub opt_ty_info: Option<Span>, @@ -737,7 +737,7 @@ pub struct LocalDecl<'tcx> { /// `ClearCrossCrate` as long as it carries as `HirId`. pub is_user_variable: Option<ClearCrossCrate<BindingForm<'tcx>>>, - /// True if this is an internal local + /// `true` if this is an internal local. /// /// These locals are not based on types in the source code and are only used /// for a few desugarings at the moment. @@ -864,7 +864,7 @@ pub struct LocalDecl<'tcx> { } impl<'tcx> LocalDecl<'tcx> { - /// Returns true only if local is a binding that can itself be + /// Returns `true` only if local is a binding that can itself be /// made mutable via the addition of the `mut` keyword, namely /// something like the occurrences of `x` in: /// - `fn foo(x: Type) { ... }`, @@ -886,7 +886,7 @@ impl<'tcx> LocalDecl<'tcx> { } } - /// Returns true if local is definitely not a `ref ident` or + /// Returns `true` if local is definitely not a `ref ident` or /// `ref mut ident` binding. (Such bindings cannot be made into /// mutable bindings, but the inverse does not necessarily hold). pub fn is_nonref_binding(&self) -> bool { @@ -904,7 +904,7 @@ impl<'tcx> LocalDecl<'tcx> { } } - /// Create a new `LocalDecl` for a temporary. + /// Creates a new `LocalDecl` for a temporary. 
#[inline] pub fn new_temp(ty: Ty<'tcx>, span: Span) -> Self { Self::new_local(ty, Mutability::Mut, false, span) @@ -925,7 +925,7 @@ impl<'tcx> LocalDecl<'tcx> { self } - /// Create a new `LocalDecl` for a internal temporary. + /// Creates a new `LocalDecl` for an internal temporary. #[inline] pub fn new_internal(ty: Ty<'tcx>, span: Span) -> Self { Self::new_local(ty, Mutability::Mut, true, span) @@ -1019,7 +1019,7 @@ pub struct BasicBlockData<'tcx> { /// Terminator for this block. /// - /// NB. This should generally ONLY be `None` during construction. + /// N.B., this should generally ONLY be `None` during construction. /// Therefore, you should generally access it via the /// `terminator()` or `terminator_mut()` methods. The only /// exception is that certain passes, such as `simplify_cfg`, swap @@ -1637,7 +1637,7 @@ impl<'tcx> TerminatorKind<'tcx> { } } - /// Return the list of labels for the edges to the successor basic blocks. + /// Returns the list of labels for the edges to the successor basic blocks. pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> { use self::TerminatorKind::*; match *self { @@ -1760,7 +1760,7 @@ pub enum StatementKind<'tcx> { /// error messages to these specific patterns. /// /// Note that this also is emitted for regular `let` bindings to ensure that locals that are - /// never accessed still get some sanity checks for e.g. `let x: ! = ..;` + /// never accessed still get some sanity checks for, e.g., `let x: ! = ..;` FakeRead(FakeReadCause, Place<'tcx>), /// Write the discriminant for a variant to the enum Place. @@ -1775,14 +1775,14 @@ pub enum StatementKind<'tcx> { /// End the current live range for the storage of the local. StorageDead(Local), - /// Execute a piece of inline Assembly. + /// Executes a piece of inline Assembly. InlineAsm { asm: Box<InlineAsm>, outputs: Box<[Place<'tcx>]>, inputs: Box<[(Span, Operand<'tcx>)]>, }, - /// Retag references in the given place, ensuring they got fresh tags. 
This is + /// Retag references in the given place, ensuring they got fresh tags. This is /// part of the Stacked Borrows model. These statements are currently only interpreted /// by miri and only generated when "-Z mir-emit-retag" is passed. /// See <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/> @@ -1904,7 +1904,7 @@ pub enum Place<'tcx> { Projection(Box<PlaceProjection<'tcx>>), } -/// The def-id of a static, along with its normalized type (which is +/// The `DefId` of a static, along with its normalized type (which is /// stored to avoid requiring normalization when reading MIR). #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] pub struct Static<'tcx> { @@ -2009,10 +2009,10 @@ impl<'tcx> Place<'tcx> { Place::Projection(Box::new(PlaceProjection { base: self, elem })) } - /// Find the innermost `Local` from this `Place`, *if* it is either a local itself or + /// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or /// a single deref of a local. - /// - /// FIXME: can we safely swap the semantics of `fn base_local` below in here instead? + // + // FIXME: can we safely swap the semantics of `fn base_local` below in here instead? pub fn local(&self) -> Option<Local> { match self { Place::Local(local) | @@ -2024,7 +2024,7 @@ impl<'tcx> Place<'tcx> { } } - /// Find the innermost `Local` from this `Place`. + /// Finds the innermost `Local` from this `Place`. pub fn base_local(&self) -> Option<Local> { match self { Place::Local(local) => Some(*local), @@ -2141,7 +2141,7 @@ impl<'tcx> Debug for Operand<'tcx> { impl<'tcx> Operand<'tcx> { /// Convenience helper to make a constant that refers to the fn - /// with given def-id and substs. Since this is used to synthesize + /// with given `DefId` and substs. Since this is used to synthesize /// MIR, assumes `user_ty` is None. 
pub fn function_handle<'a>( tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -2199,7 +2199,7 @@ pub enum Rvalue<'tcx> { /// be defined to return, say, a 0) if ADT is not an enum. Discriminant(Place<'tcx>), - /// Create an aggregate value, like a tuple or struct. This is + /// Creates an aggregate value, like a tuple or struct. This is /// only needed because we want to distinguish `dest = Foo { x: /// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case /// that `Foo` has a destructor. These rvalues can be optimized @@ -2211,13 +2211,13 @@ pub enum Rvalue<'tcx> { pub enum CastKind { Misc, - /// Convert unique, zero-sized type for a fn to fn() + /// Converts unique, zero-sized type for a fn to fn() ReifyFnPointer, - /// Convert non capturing closure to fn() + /// Converts non capturing closure to fn() ClosureFnPointer, - /// Convert safe fn() to unsafe fn() + /// Converts safe fn() to unsafe fn() UnsafeFnPointer, /// "Unsize" -- convert a thin-or-fat pointer to a fat pointer. @@ -2301,9 +2301,9 @@ impl BinOp { #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub enum NullOp { - /// Return the size of a value of that type + /// Returns the size of a value of that type SizeOf, - /// Create a new uninitialized box for a value of that type + /// Creates a new uninitialized box for a value of that type Box, } @@ -2847,7 +2847,7 @@ impl Location { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum UnsafetyViolationKind { General, - /// Permitted in const fn and regular fns + /// Permitted in const fn and regular fns. GeneralAndConstFn, ExternStatic(ast::NodeId), BorrowPacked(ast::NodeId), @@ -2884,7 +2884,7 @@ pub struct BorrowCheckResult<'gcx> { /// After we borrow check a closure, we are left with various /// requirements that we have inferred between the free regions that -/// appear in the closure's signature or on its field types. These +/// appear in the closure's signature or on its field types. 
These /// requirements are then verified and proved by the closure's /// creating function. This struct encodes those requirements. /// @@ -2934,7 +2934,7 @@ pub struct BorrowCheckResult<'gcx> { /// internally within the rest of the NLL code). #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct ClosureRegionRequirements<'gcx> { - /// The number of external regions defined on the closure. In our + /// The number of external regions defined on the closure. In our /// example above, it would be 3 -- one for `'static`, then `'1` /// and `'2`. This is just used for a sanity check later on, to /// make sure that the number of regions we see at the callsite diff --git a/src/librustc/mir/mono.rs b/src/librustc/mir/mono.rs index affa9f9fdd4..2296fe5763c 100644 --- a/src/librustc/mir/mono.rs +++ b/src/librustc/mir/mono.rs @@ -57,7 +57,7 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for MonoItem<'tcx> { pub struct CodegenUnit<'tcx> { /// A name for this CGU. Incremental compilation requires that - /// name be unique amongst **all** crates. Therefore, it should + /// name be unique amongst **all** crates. Therefore, it should /// contain something unique to this crate (e.g., a module path) /// as well as the crate name and disambiguator. name: InternedString, diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index ac3a97898b4..bf4ac7496d2 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -278,7 +278,7 @@ impl<'tcx> Rvalue<'tcx> { } #[inline] - /// Returns whether this rvalue is deeply initialized (most rvalues) or + /// Returns `true` if this rvalue is deeply initialized (most rvalues) or /// whether its only shallowly initialized (`Rvalue::Box`). 
pub fn initialization_state(&self) -> RvalueInitializationState { match *self { diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 1a92f2c0f7a..b6c7ca11f1f 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -475,7 +475,7 @@ impl BorrowckMode { } pub enum Input { - /// Load source from file + /// Loads source from file File(PathBuf), Str { /// String that is shown in place of a filename @@ -523,7 +523,7 @@ impl OutputFilenames { .unwrap_or_else(|| self.temp_path(flavor, None)) } - /// Get the path where a compilation artifact of the given type for the + /// Gets the path where a compilation artifact of the given type for the /// given codegen unit should be placed on disk. If codegen_unit_name is /// None, a path distinct from those of any codegen unit will be generated. pub fn temp_path(&self, flavor: OutputType, codegen_unit_name: Option<&str>) -> PathBuf { @@ -532,7 +532,7 @@ impl OutputFilenames { } /// Like temp_path, but also supports things where there is no corresponding - /// OutputType, like no-opt-bitcode or lto-bitcode. + /// OutputType, like noopt-bitcode or lto-bitcode. pub fn temp_path_ext(&self, ext: &str, codegen_unit_name: Option<&str>) -> PathBuf { let base = self.out_directory.join(&self.filestem()); @@ -616,7 +616,7 @@ impl Default for Options { } impl Options { - /// True if there is a reason to build the dep graph. + /// Returns `true` if there is a reason to build the dep graph. 
pub fn build_dep_graph(&self) -> bool { self.incremental.is_some() || self.debugging_opts.dump_dep_graph || self.debugging_opts.query_dep_graph @@ -632,7 +632,7 @@ impl Options { FilePathMapping::new(self.remap_path_prefix.clone()) } - /// True if there will be an output file generated + /// Returns `true` if there will be an output file generated pub fn will_create_output_file(&self) -> bool { !self.debugging_opts.parse_only && // The file is just being parsed !self.debugging_opts.ls // The file is just being queried diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 9f387e1eab1..51b6205facb 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -51,7 +51,7 @@ pub mod filesearch; pub mod search_paths; pub struct OptimizationFuel { - /// If -zfuel=crate=n is specified, initially set to n. Otherwise 0. + /// If `-zfuel=crate=n` is specified, initially set to `n`, otherwise `0`. remaining: u64, /// We're rejecting all further optimizations. out_of_fuel: bool, @@ -64,7 +64,7 @@ pub struct Session { pub host: Target, pub opts: config::Options, pub host_tlib_path: SearchPath, - /// This is `None` if the host and target are the same. + /// `None` if the host and target are the same. pub target_tlib_path: Option<SearchPath>, pub parse_sess: ParseSess, pub sysroot: PathBuf, @@ -104,7 +104,7 @@ pub struct Session { /// The maximum length of types during monomorphization. pub type_length_limit: Once<usize>, - /// The maximum number of stackframes allowed in const eval + /// The maximum number of stackframes allowed in const eval. pub const_eval_stack_frame_limit: usize, /// The metadata::creader module may inject an allocator/panic_runtime @@ -123,13 +123,13 @@ pub struct Session { /// `-Zquery-dep-graph` is specified. pub cgu_reuse_tracker: CguReuseTracker, - /// Used by -Z profile-queries in util::common + /// Used by `-Z profile-queries` in `util::common`. 
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>, - /// Used by -Z self-profile + /// Used by `-Z self-profile`. pub self_profiling_active: bool, - /// Used by -Z self-profile + /// Used by `-Z self-profile`. pub self_profiling: Lock<SelfProfiler>, /// Some measurements that are being gathered during compilation. @@ -140,14 +140,14 @@ pub struct Session { next_node_id: OneThread<Cell<ast::NodeId>>, - /// If -zfuel=crate=n is specified, Some(crate). + /// If `-zfuel=crate=n` is specified, `Some(crate)`. optimization_fuel_crate: Option<String>, - /// Tracks fuel info if If -zfuel=crate=n is specified + /// Tracks fuel info if `-zfuel=crate=n` is specified. optimization_fuel: Lock<OptimizationFuel>, // The next two are public because the driver needs to read them. - /// If -zprint-fuel=crate, Some(crate). + /// If `-zprint-fuel=crate`, `Some(crate)`. pub print_fuel_crate: Option<String>, /// Always set to zero and incremented so that we can print fuel expended by a crate. pub print_fuel: AtomicU64, @@ -156,10 +156,10 @@ pub struct Session { /// false positives about a job server in our environment. pub jobserver: Client, - /// Metadata about the allocators for the current crate being compiled + /// Metadata about the allocators for the current crate being compiled. pub has_global_allocator: Once<bool>, - /// Metadata about the panic handlers for the current crate being compiled + /// Metadata about the panic handlers for the current crate being compiled. pub has_panic_handler: Once<bool>, /// Cap lint level specified by a driver specifically. @@ -167,9 +167,9 @@ pub struct Session { } pub struct PerfStats { - /// The accumulated time spent on computing symbol hashes + /// The accumulated time spent on computing symbol hashes. pub symbol_hash_time: Lock<Duration>, - /// The accumulated time spent decoding def path tables from metadata + /// The accumulated time spent decoding def path tables from metadata. 
pub decode_def_path_tables_time: Lock<Duration>, /// Total number of values canonicalized queries constructed. pub queries_canonicalized: AtomicUsize, @@ -539,7 +539,7 @@ impl Session { self.opts.debugging_opts.print_llvm_passes } - /// Get the features enabled for the current compilation session. + /// Gets the features enabled for the current compilation session. /// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents /// dependency tracking. Use tcx.features() instead. #[inline] @@ -989,7 +989,7 @@ impl Session { self.opts.edition } - /// True if we cannot skip the PLT for shared library calls. + /// Returns `true` if we cannot skip the PLT for shared library calls. pub fn needs_plt(&self) -> bool { // Check if the current target usually needs PLT to be enabled. // The user can use the command line flag to override it. diff --git a/src/librustc/traits/auto_trait.rs b/src/librustc/traits/auto_trait.rs index d1db49e05f1..012b9e5034c 100644 --- a/src/librustc/traits/auto_trait.rs +++ b/src/librustc/traits/auto_trait.rs @@ -57,7 +57,7 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> { AutoTraitFinder { tcx } } - /// Make a best effort to determine whether and under which conditions an auto trait is + /// Makes a best effort to determine whether and under which conditions an auto trait is /// implemented for a type. For example, if you have /// /// ``` diff --git a/src/librustc/traits/codegen/mod.rs b/src/librustc/traits/codegen/mod.rs index eed9345afae..d6b7b3b99ca 100644 --- a/src/librustc/traits/codegen/mod.rs +++ b/src/librustc/traits/codegen/mod.rs @@ -14,8 +14,8 @@ use crate::ty::{self, Ty, TyCtxt}; use crate::ty::subst::{Subst, Substs}; use crate::ty::fold::TypeFoldable; -/// Attempts to resolve an obligation to a vtable.. The result is -/// a shallow vtable resolution -- meaning that we do not +/// Attempts to resolve an obligation to a vtable. 
The result is +/// a shallow vtable resolution, meaning that we do not /// (necessarily) resolve all nested obligations on the impl. Note /// that type check should guarantee to us that all nested /// obligations *could be* resolved if we wanted to. diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 4fe7a1507f7..a57007e51d3 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -1,4 +1,4 @@ -//! See rustc guide chapters on [trait-resolution] and [trait-specialization] for more info on how +//! See Rustc Guide chapters on [trait-resolution] and [trait-specialization] for more info on how //! this works. //! //! [trait-resolution]: https://rust-lang.github.io/rustc-guide/traits/resolution.html @@ -34,7 +34,7 @@ pub struct OverlapResult<'tcx> { pub impl_header: ty::ImplHeader<'tcx>, pub intercrate_ambiguity_causes: Vec<IntercrateAmbiguityCause>, - /// True if the overlap might've been permitted before the shift + /// `true` if the overlap might've been permitted before the shift /// to universes. pub involves_placeholder: bool, } @@ -111,7 +111,7 @@ fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, ' } /// Can both impl `a` and impl `b` be satisfied by a common type (including -/// `where` clauses)? If so, returns an `ImplHeader` that unifies the two impls. +/// where-clauses)? If so, returns an `ImplHeader` that unifies the two impls. fn overlap<'cx, 'gcx, 'tcx>( selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, a_def_id: DefId, @@ -242,7 +242,7 @@ pub enum OrphanCheckErr<'tcx> { } /// Checks the coherence orphan rules. `impl_def_id` should be the -/// def-id of a trait impl. To pass, either the trait must be local, or else +/// `DefId` of a trait impl. To pass, either the trait must be local, or else /// two conditions must be satisfied: /// /// 1. All type parameters in `Self` must be "covered" by some local type constructor. 
@@ -268,7 +268,7 @@ pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, orphan_check_trait_ref(tcx, trait_ref, InCrate::Local) } -/// Check whether a trait-ref is potentially implementable by a crate. +/// Checks whether a trait-ref is potentially implementable by a crate. /// /// The current rule is that a trait-ref orphan checks in a crate C: /// diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index c17adaf1f9f..3a47b554b2a 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -583,7 +583,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } - /// Get the parent trait chain start + /// Gets the parent trait chain start fn get_parent_trait_ref(&self, code: &ObligationCauseCode<'tcx>) -> Option<String> { match code { &ObligationCauseCode::BuiltinDerivedObligation(ref data) => { @@ -1376,7 +1376,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - /// Returns whether the trait predicate may apply for *some* assignment + /// Returns `true` if the trait predicate may apply for *some* assignment /// to the type parameters. fn predicate_can_apply(&self, param_env: ty::ParamEnv<'tcx>, diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 98784bccb6f..587f57bb09d 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -23,7 +23,7 @@ impl<'tcx> ForestObligation for PendingPredicateObligation<'tcx> { fn as_predicate(&self) -> &Self::Predicate { &self.obligation.predicate } } -/// The fulfillment context is used to drive trait resolution. It +/// The fulfillment context is used to drive trait resolution. It /// consists of a list of obligations that must be (eventually) /// satisfied. The job is to track which are satisfied, which yielded /// errors, and which are still pending. 
At any point, users can call @@ -140,7 +140,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { /// creating a fresh type variable `$0` as well as a projection /// predicate `<SomeType as SomeTrait>::X == $0`. When the /// inference engine runs, it will attempt to find an impl of - /// `SomeTrait` or a where clause that lets us unify `$0` with + /// `SomeTrait` or a where-clause that lets us unify `$0` with /// something concrete. If this fails, we'll unify `$0` with /// `projection_ty` again. fn normalize_projection_type<'a, 'gcx>(&mut self, @@ -509,7 +509,7 @@ impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, } } -/// Return the set of type variables contained in a trait ref +/// Returns the set of type variables contained in a trait ref fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, t: ty::PolyTraitRef<'tcx>) -> Vec<Ty<'tcx>> { diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index d1be8d377a8..a32838f0e4c 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -73,14 +73,14 @@ pub use self::FulfillmentErrorCode::*; pub use self::SelectionError::*; pub use self::Vtable::*; -// Whether to enable bug compatibility with issue #43355 +/// Whether to enable bug compatibility with issue #43355. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum IntercrateMode { Issue43355, Fixed } -// The mode that trait queries run in +/// The mode that trait queries run in. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TraitQueryMode { // Standard/un-canonicalized queries get accurate @@ -93,45 +93,45 @@ pub enum TraitQueryMode { Canonical, } -/// An `Obligation` represents some trait reference (e.g., `int:Eq`) for -/// which the vtable must be found. The process of finding a vtable is +/// An `Obligation` represents some trait reference (e.g., `int: Eq`) for +/// which the vtable must be found. 
The process of finding a vtable is /// called "resolving" the `Obligation`. This process consists of /// either identifying an `impl` (e.g., `impl Eq for int`) that /// provides the required vtable, or else finding a bound that is in /// scope. The eventual result is usually a `Selection` (defined below). #[derive(Clone, PartialEq, Eq, Hash)] pub struct Obligation<'tcx, T> { - /// Why do we have to prove this thing? + /// The reason we have to prove this thing. pub cause: ObligationCause<'tcx>, - /// In which environment should we prove this thing? + /// The environment in which we should prove this thing. pub param_env: ty::ParamEnv<'tcx>, - /// What are we trying to prove? + /// The thing we are trying to prove. pub predicate: T, /// If we started proving this as a result of trying to prove /// something else, track the total depth to ensure termination. /// If this goes over a certain threshold, we abort compilation -- /// in such cases, we can not say whether or not the predicate - /// holds for certain. Stupid halting problem. Such a drag. + /// holds for certain. Stupid halting problem; such a drag. pub recursion_depth: usize, } pub type PredicateObligation<'tcx> = Obligation<'tcx, ty::Predicate<'tcx>>; pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>; -/// Why did we incur this obligation? Used for error reporting. +/// The reason why we incurred this obligation; used for error reporting. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct ObligationCause<'tcx> { pub span: Span, - // The id of the fn body that triggered this obligation. This is - // used for region obligations to determine the precise - // environment in which the region obligation should be evaluated - // (in particular, closures can add new assumptions). See the - // field `region_obligations` of the `FulfillmentContext` for more - // information. + /// The ID of the fn body that triggered this obligation. 
This is + /// used for region obligations to determine the precise + /// environment in which the region obligation should be evaluated + /// (in particular, closures can add new assumptions). See the + /// field `region_obligations` of the `FulfillmentContext` for more + /// information. pub body_id: ast::NodeId, pub code: ObligationCauseCode<'tcx> @@ -152,20 +152,20 @@ impl<'tcx> ObligationCause<'tcx> { #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum ObligationCauseCode<'tcx> { - /// Not well classified or should be obvious from span. + /// Not well classified or should be obvious from the span. MiscObligation, - /// A slice or array is WF only if `T: Sized` + /// A slice or array is WF only if `T: Sized`. SliceOrArrayElem, - /// A tuple is WF only if its middle elements are Sized + /// A tuple is WF only if its middle elements are `Sized`. TupleElem, - /// This is the trait reference from the given projection + /// This is the trait reference from the given projection. ProjectionWf(ty::ProjectionTy<'tcx>), - /// In an impl of trait X for type Y, type Y must - /// also implement all supertraits of X. + /// In an impl of trait `X` for type `Y`, type `Y` must + /// also implement all supertraits of `X`. ItemObligation(DefId), /// A type like `&'a T` is WF only if `T: 'a`. @@ -271,7 +271,7 @@ pub struct DerivedObligationCause<'tcx> { /// directly. parent_trait_ref: ty::PolyTraitRef<'tcx>, - /// The parent trait had this cause + /// The parent trait had this cause. 
parent_code: Rc<ObligationCauseCode<'tcx>> } @@ -280,14 +280,14 @@ pub type PredicateObligations<'tcx> = Vec<PredicateObligation<'tcx>>; pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>; /// The following types: -/// * `WhereClause` -/// * `WellFormed` -/// * `FromEnv` -/// * `DomainGoal` -/// * `Goal` -/// * `Clause` -/// * `Environment` -/// * `InEnvironment` +/// * `WhereClause`, +/// * `WellFormed`, +/// * `FromEnv`, +/// * `DomainGoal`, +/// * `Goal`, +/// * `Clause`, +/// * `Environment`, +/// * `InEnvironment`, /// are used for representing the trait system in the form of /// logic programming clauses. They are part of the interface /// for the chalk SLG solver. @@ -399,10 +399,10 @@ pub type Clauses<'tcx> = &'tcx List<Clause<'tcx>>; /// with the goal to solve and proceeds from there). #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub struct ProgramClause<'tcx> { - /// This goal will be considered true... + /// This goal will be considered true ... pub goal: DomainGoal<'tcx>, - /// ...if we can prove these hypotheses (there may be no hypotheses at all): + /// ... if we can prove these hypotheses (there may be no hypotheses at all): pub hypotheses: Goals<'tcx>, /// Useful for filtering clauses. @@ -485,7 +485,6 @@ pub type SelectionResult<'tcx, T> = Result<Option<T>, SelectionError<'tcx>>; /// For example, the vtable may be tied to a specific impl (case A), /// or it may be relative to some bound that is in scope (case B). /// -/// /// ``` /// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1 /// impl<T:Clone> Clone<T> for Box<T> { ... } // Impl_2 @@ -517,7 +516,7 @@ pub enum Vtable<'tcx, N> { /// Vtable identifying a particular impl. VtableImpl(VtableImplData<'tcx, N>), - /// Vtable for auto trait implementations + /// Vtable for auto trait implementations. /// This carries the information and nested obligations with regards /// to an auto implementation for a trait `Trait`. 
The nested obligations /// ensure the trait implementation holds for all the constituent types. @@ -529,18 +528,18 @@ pub enum Vtable<'tcx, N> { /// any). VtableParam(Vec<N>), - /// Virtual calls through an object + /// Virtual calls through an object. VtableObject(VtableObjectData<'tcx, N>), /// Successful resolution for a builtin trait. VtableBuiltin(VtableBuiltinData<N>), - /// Vtable automatically generated for a closure. The def ID is the ID + /// Vtable automatically generated for a closure. The `DefId` is the ID /// of the closure expression. This is a `VtableImpl` in spirit, but the /// impl is generated by the compiler and does not appear in the source. VtableClosure(VtableClosureData<'tcx, N>), - /// Same as above, but for a fn pointer type with the given signature. + /// Same as above, but for a function pointer type with the given signature. VtableFnPointer(VtableFnPointerData<'tcx, N>), /// Vtable automatically generated for a generator. diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index 75eaa67e767..b31aa5998f3 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -6,7 +6,7 @@ //! - have a suitable receiver from which we can extract a vtable and coerce to a "thin" version //! that doesn't contain the vtable; //! - not reference the erased type `Self` except for in this receiver; -//! - not have generic type parameters +//! - not have generic type parameters. use super::elaborate_predicates; @@ -22,17 +22,17 @@ use syntax_pos::Span; #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] pub enum ObjectSafetyViolation { - /// Self : Sized declared on the trait + /// `Self: Sized` declared on the trait. SizedSelf, /// Supertrait reference references `Self` an in illegal location - /// (e.g., `trait Foo : Bar<Self>`) + /// (e.g., `trait Foo : Bar<Self>`). SupertraitSelf, - /// Method has something illegal + /// Method has something illegal. 
Method(ast::Name, MethodViolationCode), - /// Associated const + /// Associated const. AssociatedConst(ast::Name), } @@ -84,7 +84,7 @@ pub enum MethodViolationCode { impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// Returns the object safety violations that affect - /// astconv - currently, Self in supertraits. This is needed + /// astconv -- currently, `Self` in supertraits. This is needed /// because `object_safety_violations` can't be used during /// type collection. pub fn astconv_object_safety_violations(self, trait_def_id: DefId) @@ -399,8 +399,8 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { None } - /// performs a type substitution to produce the version of receiver_ty when `Self = self_ty` - /// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>` + /// Performs a type substitution to produce the version of receiver_ty when `Self = self_ty` + /// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>`. fn receiver_for_self_ty( self, receiver_ty: Ty<'tcx>, self_ty: Ty<'tcx>, method_def_id: DefId ) -> Ty<'tcx> { @@ -419,9 +419,9 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { result } - /// creates the object type for the current trait. For example, + /// Creates the object type for the current trait. For example, /// if the current trait is `Deref`, then this will be - /// `dyn Deref<Target=Self::Target> + 'static` + /// `dyn Deref<Target = Self::Target> + 'static`. fn object_ty_for_trait(self, trait_def_id: DefId, lifetime: ty::Region<'tcx>) -> Ty<'tcx> { debug!("object_ty_for_trait: trait_def_id={:?}", trait_def_id); @@ -470,25 +470,27 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { object_ty } - /// checks the method's receiver (the `self` argument) can be dispatched on when `Self` is a + /// Checks the method's receiver (the `self` argument) can be dispatched on when `Self` is a /// trait object. 
We require that `DispatchableFromDyn` be implemented for the receiver type /// in the following way: - /// - let `Receiver` be the type of the `self` argument, i.e `Self`, `&Self`, `Rc<Self>` + /// - let `Receiver` be the type of the `self` argument, i.e `Self`, `&Self`, `Rc<Self>`, /// - require the following bound: /// - /// Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]> + /// ``` + /// Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]> + /// ``` /// - /// where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`" + /// where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`" /// (substitution notation). /// - /// some examples of receiver types and their required obligation - /// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>` - /// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>` - /// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>` + /// Some examples of receiver types and their required obligation: + /// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`, + /// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`, + /// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`. /// /// The only case where the receiver is not dispatchable, but is still a valid receiver /// type (just not object-safe), is when there is more than one level of pointer indirection. - /// e.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there + /// E.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. 
In these cases, there /// is no way, or at least no inexpensive way, to coerce the receiver from the version where /// `Self = dyn Trait` to the version where `Self = T`, where `T` is the unknown erased type /// contained by the trait object, because the object that needs to be coerced is behind diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index 562a29f53f8..5a44d886e3c 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -55,7 +55,7 @@ pub enum Reveal { /// Also, `impl Trait` is normalized to the concrete type, /// which has to be already collected by type-checking. /// - /// NOTE: As `impl Trait`'s concrete type should *never* + /// NOTE: as `impl Trait`'s concrete type should *never* /// be observable directly by the user, `Reveal::All` /// should not be used by checks which may expose /// type equality or type contents to the user. @@ -751,9 +751,9 @@ fn prune_cache_value_obligations<'a, 'gcx, 'tcx>(infcx: &'a InferCtxt<'a, 'gcx, /// /// Concern #2. Even within the snapshot, if those original /// obligations are not yet proven, then we are able to do projections -/// that may yet turn out to be wrong. This *may* lead to some sort +/// that may yet turn out to be wrong. This *may* lead to some sort /// of trouble, though we don't have a concrete example of how that -/// can occur yet. But it seems risky at best. +/// can occur yet. But it seems risky at best. fn get_paranoid_cache_value_obligation<'a, 'gcx, 'tcx>( infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -786,7 +786,7 @@ fn get_paranoid_cache_value_obligation<'a, 'gcx, 'tcx>( /// cycles to arise, where you basically had a setup like `<MyType<$0> /// as Trait>::Foo == $0`. Here, normalizing `<MyType<$0> as /// Trait>::Foo> to `[type error]` would lead to an obligation of -/// `<MyType<[type error]> as Trait>::Foo`. We are supposed to report +/// `<MyType<[type error]> as Trait>::Foo`. 
We are supposed to report /// an error for this obligation, but we legitimately should not, /// because it contains `[type error]`. Yuck! (See issue #29857 for /// one case where this arose.) @@ -844,7 +844,7 @@ impl<'tcx> Progress<'tcx> { } } -/// Compute the result of a projection type (if we can). +/// Computes the result of a projection type (if we can). /// /// IMPORTANT: /// - `obligation` must be fully normalized @@ -1553,7 +1553,7 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( // # Cache /// The projection cache. Unlike the standard caches, this can include -/// infcx-dependent type variables - therefore, we have to roll the +/// infcx-dependent type variables, therefore we have to roll the /// cache back each time we roll a snapshot back, to avoid assumptions /// on yet-unresolved inference variables. Types with placeholder /// regions also have to be removed when the respective snapshot ends. @@ -1564,9 +1564,9 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( /// (for the lifetime of the infcx). /// /// Entries in the projection cache might contain inference variables -/// that will be resolved by obligations on the projection cache entry - e.g. +/// that will be resolved by obligations on the projection cache entry (e.g., /// when a type parameter in the associated type is constrained through -/// an "RFC 447" projection on the impl. +/// an "RFC 447" projection on the impl). /// /// When working with a fulfillment context, the derived obligations of each /// projection cache entry will be registered on the fulfillcx, so any users @@ -1578,10 +1578,9 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>( /// If that is done, after evaluation the obligations, it is a good idea to /// call `ProjectionCache::complete` to make sure the obligations won't be /// re-evaluated and avoid an exponential worst-case. -/// -/// FIXME: we probably also want some sort of cross-infcx cache here to -/// reduce the amount of duplication. Let's see what we get with the Chalk -/// reforms. 
+// +// FIXME: we probably also want some sort of cross-infcx cache here to +// reduce the amount of duplication. Let's see what we get with the Chalk reforms. #[derive(Default)] pub struct ProjectionCache<'tcx> { map: SnapshotMap<ProjectionCacheKey<'tcx>, ProjectionCacheEntry<'tcx>>, diff --git a/src/librustc/traits/query/dropck_outlives.rs b/src/librustc/traits/query/dropck_outlives.rs index 47ca416e6b5..e6f9c7ebe6f 100644 --- a/src/librustc/traits/query/dropck_outlives.rs +++ b/src/librustc/traits/query/dropck_outlives.rs @@ -184,7 +184,7 @@ impl_stable_hash_for!(struct DtorckConstraint<'tcx> { /// outlive. This is similar but not *quite* the same as the /// `needs_drop` test in the compiler already -- that is, for every /// type T for which this function return true, needs-drop would -/// return false. But the reverse does not hold: in particular, +/// return `false`. But the reverse does not hold: in particular, /// `needs_drop` returns false for `PhantomData`, but it is not /// trivial for dropck-outlives. /// diff --git a/src/librustc/traits/query/normalize.rs b/src/librustc/traits/query/normalize.rs index bcd11194b57..224076ce17e 100644 --- a/src/librustc/traits/query/normalize.rs +++ b/src/librustc/traits/query/normalize.rs @@ -24,7 +24,7 @@ impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> { /// the normalized value along with various outlives relations (in /// the form of obligations that must be discharged). /// - /// NB. This will *eventually* be the main means of + /// N.B., this will *eventually* be the main means of /// normalizing, but for now should be used only when we actually /// know that normalization will succeed, since error reporting /// and other details are still "under development". 
diff --git a/src/librustc/traits/query/normalize_erasing_regions.rs b/src/librustc/traits/query/normalize_erasing_regions.rs index 4fc61077e26..0c1252680c1 100644 --- a/src/librustc/traits/query/normalize_erasing_regions.rs +++ b/src/librustc/traits/query/normalize_erasing_regions.rs @@ -45,7 +45,7 @@ impl<'cx, 'tcx> TyCtxt<'cx, 'tcx, 'tcx> { /// a `T` (with regions erased). This is appropriate when the /// binder is being instantiated at the call site. /// - /// NB. Currently, higher-ranked type bounds inhibit + /// N.B., currently, higher-ranked type bounds inhibit /// normalization. Therefore, each time we erase them in /// codegen, we need to normalize the contents. pub fn normalize_erasing_late_bound_regions<T>( diff --git a/src/librustc/traits/query/outlives_bounds.rs b/src/librustc/traits/query/outlives_bounds.rs index e57236b999b..6fe361d5adf 100644 --- a/src/librustc/traits/query/outlives_bounds.rs +++ b/src/librustc/traits/query/outlives_bounds.rs @@ -13,7 +13,7 @@ use std::mem; /// Outlives bounds are relationships between generic parameters, /// whether they both be regions (`'a: 'b`) or whether types are -/// involved (`T: 'a`). These relationships can be extracted from the +/// involved (`T: 'a`). These relationships can be extracted from the /// full set of predicates we understand or also from types (in which /// case they are called implied bounds). They are fed to the /// `OutlivesEnv` which in turn is supplied to the region checker and @@ -66,7 +66,7 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for OutlivesBound<'tcx> { impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// Implied bounds are region relationships that we deduce - /// automatically. The idea is that (e.g.) a caller must check that a + /// automatically. The idea is that (e.g.) a caller must check that a /// function's argument types are well-formed immediately before /// calling that fn, and hence the *callee* can assume that its /// argument types are well-formed. 
This may imply certain relationships diff --git a/src/librustc/traits/query/type_op/normalize.rs b/src/librustc/traits/query/type_op/normalize.rs index 346c1851623..e3d7a4d57a5 100644 --- a/src/librustc/traits/query/type_op/normalize.rs +++ b/src/librustc/traits/query/type_op/normalize.rs @@ -52,7 +52,7 @@ pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx> canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>, ) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>>; - /// Convert from the `'gcx` (lifted) form of `Self` into the `tcx` + /// Converts from the `'gcx` (lifted) form of `Self` into the `tcx` /// form of `Self`. fn shrink_to_tcx_lifetime( v: &'a CanonicalizedQueryResponse<'gcx, Self>, diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 1e4cd145e17..557784b3e3c 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -162,11 +162,11 @@ pub struct SelectionCache<'tcx> { } /// The selection process begins by considering all impls, where -/// clauses, and so forth that might resolve an obligation. Sometimes +/// clauses, and so forth that might resolve an obligation. Sometimes /// we'll be able to say definitively that (e.g.) an impl does not /// apply to the obligation: perhaps it is defined for `usize` but the /// obligation is for `int`. In that case, we drop the impl out of the -/// list. But the other cases are considered *candidates*. +/// list. But the other cases are considered *candidates*. /// /// For selection to succeed, there must be exactly one matching /// candidate. 
If the obligation is fully known, this is guaranteed @@ -331,7 +331,7 @@ enum BuiltinImplConditions<'tcx> { /// - `EvaluatedToErr` implies `EvaluatedToRecur` /// - the "union" of evaluation results is equal to their maximum - /// all the "potential success" candidates can potentially succeed, -/// so they are no-ops when unioned with a definite error, and within +/// so they are noops when unioned with a definite error, and within /// the categories it's easy to see that the unions are correct. pub enum EvaluationResult { /// Evaluation successful @@ -383,31 +383,30 @@ pub enum EvaluationResult { /// ``` /// /// When we try to prove it, we first go the first option, which - /// recurses. This shows us that the impl is "useless" - it won't + /// recurses. This shows us that the impl is "useless" -- it won't /// tell us that `T: Trait` unless it already implemented `Trait` /// by some other means. However, that does not prevent `T: Trait` /// does not hold, because of the bound (which can indeed be satisfied /// by `SomeUnsizedType` from another crate). - /// - /// FIXME: when an `EvaluatedToRecur` goes past its parent root, we - /// ought to convert it to an `EvaluatedToErr`, because we know - /// there definitely isn't a proof tree for that obligation. Not - /// doing so is still sound - there isn't any proof tree, so the - /// branch still can't be a part of a minimal one - but does not - /// re-enable caching. + // + // FIXME: when an `EvaluatedToRecur` goes past its parent root, we + // ought to convert it to an `EvaluatedToErr`, because we know + // there definitely isn't a proof tree for that obligation. Not + // doing so is still sound -- there isn't any proof tree, so the + // branch still can't be a part of a minimal one -- but does not re-enable caching. EvaluatedToRecur, - /// Evaluation failed + /// Evaluation failed. 
EvaluatedToErr, } impl EvaluationResult { - /// True if this evaluation result is known to apply, even + /// Returns `true` if this evaluation result is known to apply, even /// considering outlives constraints. pub fn must_apply_considering_regions(self) -> bool { self == EvaluatedToOk } - /// True if this evaluation result is known to apply, ignoring + /// Returns `true` if this evaluation result is known to apply, ignoring /// outlives constraints. pub fn must_apply_modulo_regions(self) -> bool { self <= EvaluatedToOkModuloRegions @@ -981,8 +980,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { /// that recursion is ok. This routine returns true if the top of the /// stack (`cycle[0]`): /// - /// - is a defaulted trait, and - /// - it also appears in the backtrace at some position `X`; and, + /// - is a defaulted trait, + /// - it also appears in the backtrace at some position `X`, /// - all the predicates at positions `X..` between `X` an the top are /// also defaulted traits. pub fn coinductive_match<I>(&mut self, cycle: I) -> bool @@ -1003,7 +1002,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } /// Further evaluate `candidate` to decide whether all type parameters match and whether nested - /// obligations are met. Returns true if `candidate` remains viable after this further + /// obligations are met. Returns whether `candidate` remains viable after this further /// scrutiny. fn evaluate_candidate<'o>( &mut self, @@ -1434,7 +1433,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } } - /// Returns true if the global caches can be used. + /// Returns `true` if the global caches can be used. /// Do note that if the type itself is not in the /// global tcx, the local caches will be used. 
fn can_use_global_caches(&self, param_env: ty::ParamEnv<'tcx>) -> bool { @@ -1850,7 +1849,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { Ok(()) } - /// Check for the artificial impl that the compiler will create for an obligation like `X : + /// Checks for the artificial impl that the compiler will create for an obligation like `X : /// FnMut<..>` where `X` is a closure type. /// /// Note: the type parameters on a closure candidate are modeled as *output* type @@ -2231,8 +2230,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // type variables and then we also attempt to evaluate recursive // bounds to see if they are satisfied. - /// Returns true if `victim` should be dropped in favor of - /// `other`. Generally speaking we will drop duplicate + /// Returns `true` if `victim` should be dropped in favor of + /// `other`. Generally speaking we will drop duplicate /// candidates and prefer where-clause candidates. /// /// See the comment for "SelectionCandidate" for more details. @@ -3221,7 +3220,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { /// we currently treat the input type parameters on the trait as /// outputs. This means that when we have a match we have only /// considered the self type, so we have to go back and make sure - /// to relate the argument types too. This is kind of wrong, but + /// to relate the argument types too. This is kind of wrong, but /// since we control the full set of impls, also not that wrong, /// and it DOES yield better error messages (since we don't report /// errors as if there is no applicable impl, but rather report @@ -3235,7 +3234,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { /// impl Fn(int) for Closure { ... } /// /// Now imagine our obligation is `Fn(usize) for Closure`. So far - /// we have matched the self-type `Closure`. At this point we'll + /// we have matched the self type `Closure`. 
At this point we'll /// compare the `int` to `usize` and generate an error. /// /// Note that this checking occurs *after* the impl has selected, @@ -3597,7 +3596,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { } /// Normalize `where_clause_trait_ref` and try to match it against - /// `obligation`. If successful, return any predicates that + /// `obligation`. If successful, return any predicates that /// result from the normalization. Normalization is necessary /// because where-clauses are stored in the parameter environment /// unnormalized. diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index e7187005c13..804f1b9d820 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -58,12 +58,12 @@ pub struct OverlapError { /// Suppose we have selected "source impl" with `V` instantiated with `u32`. /// This function will produce a substitution with `T` and `U` both mapping to `u32`. /// -/// Where clauses add some trickiness here, because they can be used to "define" +/// where-clauses add some trickiness here, because they can be used to "define" /// an argument indirectly: /// /// ```rust /// impl<'a, I, T: 'a> Iterator for Cloned<I> -/// where I: Iterator<Item=&'a T>, T: Clone +/// where I: Iterator<Item = &'a T>, T: Clone /// ``` /// /// In a case like this, the substitution for `T` is determined indirectly, @@ -145,10 +145,10 @@ pub fn find_associated_item<'a, 'tcx>( } } -/// Is impl1 a specialization of impl2? +/// Is `impl1` a specialization of `impl2`? /// /// Specialization is determined by the sets of types to which the impls apply; -/// impl1 specializes impl2 if it applies to a subset of the types impl2 applies +/// `impl1` specializes `impl2` if it applies to a subset of the types `impl2` applies /// to. 
pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (impl1_def_id, impl2_def_id): (DefId, DefId)) diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 010555744b6..561859c7c31 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -97,7 +97,7 @@ impl<'a, 'gcx, 'tcx> Children { } } - /// Remove an impl from this set of children. Used when replacing + /// Removes an impl from this set of children. Used when replacing /// an impl with a parent. The impl must be present in the list of /// children already. fn remove_existing(&mut self, @@ -399,7 +399,7 @@ impl<'a, 'gcx, 'tcx> Graph { self.children.entry(parent).or_default().insert_blindly(tcx, child); } - /// The parent of a given impl, which is the def id of the trait when the + /// The parent of a given impl, which is the `DefId` of the trait when the /// impl is a "specialization root". pub fn parent(&self, child: DefId) -> DefId { *self.parent.get(&child).unwrap() diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs index 68e7bd6e16a..ff4fc87542d 100644 --- a/src/librustc/ty/adjustment.rs +++ b/src/librustc/ty/adjustment.rs @@ -15,7 +15,7 @@ use crate::ty::subst::Substs; /// Here the pointer will be dereferenced N times (where a dereference can /// happen to raw or borrowed pointers or any smart pointer which implements /// Deref, including Box<_>). The types of dereferences is given by -/// `autoderefs`. It can then be auto-referenced zero or one times, indicated +/// `autoderefs`. It can then be auto-referenced zero or one times, indicated /// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is /// `false`. /// @@ -38,7 +38,7 @@ use crate::ty::subst::Substs; /// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about /// the underlying conversions from `[i32; 4]` to `[i32]`. 
/// -/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In +/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In /// that case, we have the pointer we need coming in, so there are no /// autoderefs, and no autoref. Instead we just do the `Unsize` transformation. /// At some point, of course, `Box` should move out of the compiler, in which @@ -78,7 +78,7 @@ pub enum Adjust<'tcx> { /// This will do things like convert thin pointers to fat /// pointers, or convert structs containing thin pointers to /// structs containing fat pointers, or convert between fat - /// pointers. We don't store the details of how the transform is + /// pointers. We don't store the details of how the transform is /// done (in fact, we don't know that, because it might depend on /// the precise type parameters). We just store the target /// type. Codegen backends and miri figure out what has to be done @@ -110,12 +110,12 @@ impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> { } /// At least for initial deployment, we want to limit two-phase borrows to -/// only a few specific cases. Right now, those mostly "things that desugar" -/// into method calls -/// - using x.some_method() syntax, where some_method takes &mut self -/// - using Foo::some_method(&mut x, ...) syntax -/// - binary assignment operators (+=, -=, *=, etc.) -/// Anything else should be rejected until generalized two phase borrow support +/// only a few specific cases. Right now, those are mostly "things that desugar" +/// into method calls: +/// - using `x.some_method()` syntax, where some_method takes `&mut self`, +/// - using `Foo::some_method(&mut x, ...)` syntax, +/// - binary assignment operators (`+=`, `-=`, `*=`, etc.). +/// Anything else should be rejected until generalized two-phase borrow support /// is implemented. 
Right now, dataflow can't handle the general case where there /// is more than one use of a mutable borrow, and we don't want to accept too much /// new code via two-phase borrows, so we try to limit where we create two-phase @@ -144,10 +144,10 @@ impl From<AutoBorrowMutability> for hir::Mutability { #[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum AutoBorrow<'tcx> { - /// Convert from T to &T. + /// Converts from T to &T. Ref(ty::Region<'tcx>, AutoBorrowMutability), - /// Convert from T to *T. + /// Converts from T to *T. RawPtr(hir::Mutability), } diff --git a/src/librustc/ty/constness.rs b/src/librustc/ty/constness.rs index 1bb63867289..fff5dcf433e 100644 --- a/src/librustc/ty/constness.rs +++ b/src/librustc/ty/constness.rs @@ -37,7 +37,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { } } - /// Returns true if this function must conform to `min_const_fn` + /// Returns `true` if this function must conform to `min_const_fn` pub fn is_min_const_fn(self, def_id: DefId) -> bool { // Bail out if the signature doesn't contain `const` if !self.is_const_fn_raw(def_id) { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 11211c91c54..95287efd0ac 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1,4 +1,4 @@ -//! type context book-keeping +//! Type context book-keeping. use crate::dep_graph::DepGraph; use crate::dep_graph::{self, DepNode, DepConstructor}; @@ -332,13 +332,13 @@ pub struct TypeckTables<'tcx> { /// belongs, but it may not exist if it's a tuple field (`tuple.0`). field_indices: ItemLocalMap<usize>, - /// Stores the types for various nodes in the AST. Note that this table - /// is not guaranteed to be populated until after typeck. See + /// Stores the types for various nodes in the AST. Note that this table + /// is not guaranteed to be populated until after typeck. See /// typeck::check::fn_ctxt for details. 
node_types: ItemLocalMap<Ty<'tcx>>, /// Stores the type parameters which were substituted to obtain the type - /// of this node. This only applies to nodes that refer to entities + /// of this node. This only applies to nodes that refer to entities /// parameterized by type parameters, such as generic fns, types, or /// other items. node_substs: ItemLocalMap<&'tcx Substs<'tcx>>, @@ -413,7 +413,7 @@ pub struct TypeckTables<'tcx> { pub tainted_by_errors: bool, /// Stores the free-region relationships that were deduced from - /// its where clauses and parameter types. These are then + /// its where-clauses and parameter types. These are then /// read-again by borrowck. pub free_region_map: FreeRegionMap<'tcx>, @@ -837,7 +837,7 @@ pub type CanonicalUserType<'gcx> = Canonical<'gcx, UserType<'gcx>>; impl CanonicalUserType<'gcx> { /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, - /// i.e. each thing is mapped to a canonical variable with the same index. + /// i.e., each thing is mapped to a canonical variable with the same index. pub fn is_identity(&self) -> bool { match self.value { UserType::Ty(_) => false, @@ -872,7 +872,7 @@ impl CanonicalUserType<'gcx> { } } -/// A user-given type annotation attached to a constant. These arise +/// A user-given type annotation attached to a constant. These arise /// from constants that are named via paths, like `Foo::<A>::new` and /// so forth. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] @@ -1053,7 +1053,7 @@ pub struct GlobalCtxt<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Get the global TyCtxt. + /// Gets the global `TyCtxt`. #[inline] pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> { TyCtxt { @@ -1153,12 +1153,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { value.lift_to_tcx(self.global_tcx()) } - /// Returns true if self is the same as self.global_tcx(). + /// Returns `true` if self is the same as self.global_tcx(). 
fn is_global(self) -> bool { ptr::eq(self.interners, &self.global_interners) } - /// Create a type context and call the closure with a `TyCtxt` reference + /// Creates a type context and call the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. @@ -1353,7 +1353,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Convert a `DefId` into its fully expanded `DefPath` (every + /// Converts a `DefId` into its fully expanded `DefPath` (every /// `DefId` is really just an interned def-path). /// /// Note that if `id` is not local to this crate, the result will diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 306c69666e5..7aa1694db80 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -4,7 +4,7 @@ //! instance of a "folder" (a type which implements `TypeFolder`). Then //! the setup is intended to be: //! -//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F) +//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F) //! //! This way, when you define a new folder F, you can override //! `fold_T()` to customize the behavior, and invoke `T.super_fold_with()` @@ -25,9 +25,11 @@ //! proper thing. //! //! A `TypeFoldable` T can also be visited by a `TypeVisitor` V using similar setup: -//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V). -//! These methods return true to indicate that the visitor has found what it is looking for -//! and does not need to visit anything else. +//! +//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V). +//! +//! These methods return true to indicate that the visitor has found what it is +//! looking for, and does not need to visit anything else. 
use crate::hir::def_id::DefId; use crate::ty::{self, Binder, Ty, TyCtxt, TypeFlags}; @@ -52,7 +54,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { self.super_visit_with(visitor) } - /// True if `self` has any late-bound regions that are either + /// Returns `true` if `self` has any late-bound regions that are either /// bound by `binder` or bound by some binder outside of `binder`. /// If `binder` is `ty::INNERMOST`, this indicates whether /// there are any late-bound regions that appear free. @@ -60,7 +62,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }) } - /// True if this `self` has any regions that escape `binder` (and + /// Returns `true` if this `self` has any regions that escape `binder` (and /// hence are not bound by it). fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool { self.has_vars_bound_at_or_above(binder.shifted_in(1)) @@ -141,7 +143,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { } } -/// The TypeFolder trait defines the actual *folding*. There is a +/// The `TypeFolder` trait defines the actual *folding*. There is a /// method defined for every foldable type. Each of these has a /// default implementation that does an "identity" fold. Within each /// identity fold, it should invoke `foo.fold_with(self)` to fold each @@ -262,7 +264,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }); } - /// True if `callback` returns true for every region appearing free in `value`. + /// Returns `true` if `callback` returns true for every region appearing free in `value`. pub fn all_free_regions_meet( self, value: &impl TypeFoldable<'tcx>, @@ -271,7 +273,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { !self.any_free_region_meets(value, |r| !callback(r)) } - /// True if `callback` returns true for some region appearing free in `value`. + /// Returns `true` if `callback` returns true for some region appearing free in `value`. 
pub fn any_free_region_meets( self, value: &impl TypeFoldable<'tcx>, @@ -292,8 +294,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// ^ ^ ^ ^ /// | | | | here, would be shifted in 1 /// | | | here, would be shifted in 2 - /// | | here, would be INNERMOST shifted in by 1 - /// | here, initially, binder would be INNERMOST + /// | | here, would be `INNERMOST` shifted in by 1 + /// | here, initially, binder would be `INNERMOST` /// ``` /// /// You see that, initially, *any* bound value is free, @@ -496,12 +498,12 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Replace all regions bound by the given `Binder` with the + /// Replaces all regions bound by the given `Binder` with the /// results returned by the closure; the closure is expected to /// return a free region (relative to this binder), and hence the /// binder is removed in the return type. The closure is invoked /// once for each unique `BoundRegion`; multiple references to the - /// same `BoundRegion` will reuse the previous result. A map is + /// same `BoundRegion` will reuse the previous result. A map is /// returned at the end with each bound region and the free region /// that replaced it. /// @@ -520,7 +522,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t) } - /// Replace all escaping bound vars. The `fld_r` closure replaces escaping + /// Replaces all escaping bound vars. The `fld_r` closure replaces escaping /// bound regions while the `fld_t` closure replaces escaping bound types. pub fn replace_escaping_bound_vars<T, F, G>( self, @@ -554,7 +556,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Replace all types or regions bound by the given `Binder`. The `fld_r` + /// Replaces all types or regions bound by the given `Binder`. The `fld_r` /// closure replaces bound regions while the `fld_t` closure replaces bound /// types. 
pub fn replace_bound_vars<T, F, G>( @@ -570,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t) } - /// Replace any late-bound regions bound in `value` with + /// Replaces any late-bound regions bound in `value` with /// free variants attached to `all_outlive_scope`. pub fn liberate_late_bound_regions<T>( &self, @@ -640,7 +642,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { collector.regions } - /// Replace any late-bound regions bound in `value` with `'erased`. Useful in codegen but also + /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also /// method lookup and a few other places where precise region relationships are not required. pub fn erase_late_bound_regions<T>(self, value: &Binder<T>) -> T where T : TypeFoldable<'tcx> @@ -648,13 +650,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_late_bound_regions(value, |_| self.types.re_erased).0 } - /// Rewrite any late-bound regions so that they are anonymous. Region numbers are + /// Rewrite any late-bound regions so that they are anonymous. Region numbers are /// assigned starting at 1 and increasing monotonically in the order traversed /// by the fold operation. /// /// The chief purpose of this function is to canonicalize regions so that two /// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become - /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and + /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization. pub fn anonymize_late_bound_regions<T>(self, sig: &Binder<T>) -> Binder<T> where T : TypeFoldable<'tcx>, @@ -818,7 +820,7 @@ pub fn shift_out_vars<'a, 'gcx, 'tcx, T>( /// scope to which it is attached, etc. An escaping var represents /// a bound var for which this processing has not yet been done. 
struct HasEscapingVarsVisitor { - /// Anything bound by `outer_index` or "above" is escaping + /// Anything bound by `outer_index` or "above" is escaping. outer_index: ty::DebruijnIndex, } @@ -881,10 +883,10 @@ struct LateBoundRegionsCollector { current_index: ty::DebruijnIndex, regions: FxHashSet<ty::BoundRegion>, - /// If true, we only want regions that are known to be + /// `true` if we only want regions that are known to be /// "constrained" when you equate this type with another type. In /// particular, if you have e.g., `&'a u32` and `&'b u32`, equating - /// them constraints `'a == 'b`. But if you have `<&'a u32 as + /// them constraints `'a == 'b`. But if you have `<&'a u32 as /// Trait>::Foo` and `<&'b u32 as Trait>::Foo`, normalizing those /// types may mean that `'a` and `'b` don't appear in the results, /// so they are not considered *constrained*. diff --git a/src/librustc/ty/inhabitedness/def_id_forest.rs b/src/librustc/ty/inhabitedness/def_id_forest.rs index 73b7d74d9da..3b393c3ca15 100644 --- a/src/librustc/ty/inhabitedness/def_id_forest.rs +++ b/src/librustc/ty/inhabitedness/def_id_forest.rs @@ -22,14 +22,14 @@ pub struct DefIdForest { } impl<'a, 'gcx, 'tcx> DefIdForest { - /// Create an empty forest. + /// Creates an empty forest. pub fn empty() -> DefIdForest { DefIdForest { root_ids: SmallVec::new(), } } - /// Create a forest consisting of a single tree representing the entire + /// Creates a forest consisting of a single tree representing the entire /// crate. #[inline] pub fn full(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest { @@ -37,7 +37,7 @@ impl<'a, 'gcx, 'tcx> DefIdForest { DefIdForest::from_id(crate_id) } - /// Create a forest containing a DefId and all its descendants. + /// Creates a forest containing a DefId and all its descendants. pub fn from_id(id: DefId) -> DefIdForest { let mut root_ids = SmallVec::new(); root_ids.push(id); @@ -46,12 +46,12 @@ impl<'a, 'gcx, 'tcx> DefIdForest { } } - /// Test whether the forest is empty. 
+ /// Tests whether the forest is empty. pub fn is_empty(&self) -> bool { self.root_ids.is_empty() } - /// Test whether the forest contains a given DefId. + /// Tests whether the forest contains a given DefId. pub fn contains(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, id: DefId) -> bool diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs index e4fe93d5dea..5fc22e3c02b 100644 --- a/src/librustc/ty/instance.rs +++ b/src/librustc/ty/instance.rs @@ -22,17 +22,17 @@ pub enum InstanceDef<'tcx> { /// `<T as Trait>::method` where `method` receives unsizeable `self: Self`. VtableShim(DefId), - /// \<fn() as FnTrait>::call_* - /// def-id is FnTrait::call_* + /// `<fn() as FnTrait>::call_*` + /// `DefId` is `FnTrait::call_*` FnPtrShim(DefId, Ty<'tcx>), - /// <Trait as Trait>::fn + /// `<Trait as Trait>::fn` Virtual(DefId, usize), - /// <[mut closure] as FnOnce>::call_once + /// `<[mut closure] as FnOnce>::call_once` ClosureOnceShim { call_once: DefId }, - /// drop_in_place::<T>; None for empty drop glue. + /// `drop_in_place::<T>; None` for empty drop glue. DropGlue(DefId, Option<Ty<'tcx>>), ///`<T as Clone>::clone` shim. @@ -220,7 +220,7 @@ impl<'a, 'b, 'tcx> Instance<'tcx> { self.def.def_id() } - /// Resolve a (def_id, substs) pair to an (optional) instance -- most commonly, + /// Resolves a `(def_id, substs)` pair to an (optional) instance -- most commonly, /// this is used to find the precise code that will run for a trait method invocation, /// if known. /// diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 3f49c1b27ce..5dc31caf295 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -43,7 +43,7 @@ pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R { }) } -/// Add the `crate::` prefix to paths where appropriate. +/// Adds the `crate::` prefix to paths where appropriate. 
pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R { SHOULD_PREFIX_WITH_CRATE.with(|flag| { let old = flag.get(); @@ -55,7 +55,7 @@ pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Returns a string identifying this def-id. This string is + /// Returns a string identifying this `DefId`. This string is /// suitable for user output. It is relative to the current crate /// root, unless with_forced_absolute_paths was used. pub fn item_path_str(self, def_id: DefId) -> String { @@ -468,7 +468,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { buffer.push(&format!("<impl at {}>", span_str)); } - /// Returns the def-id of `def_id`'s parent in the def tree. If + /// Returns the `DefId` of `def_id`'s parent in the def tree. If /// this returns `None`, then `def_id` represents a crate root or /// inlined root. pub fn parent_def_id(self, def_id: DefId) -> Option<DefId> { @@ -478,9 +478,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } /// As a heuristic, when we see an impl, if we see that the -/// 'self-type' is a type defined in the same module as the impl, +/// 'self type' is a type defined in the same module as the impl, /// we can omit including the path to the impl itself. This -/// function tries to find a "characteristic def-id" for a +/// function tries to find a "characteristic `DefId`" for a /// type. It's just a heuristic so it makes some questionable /// decisions and we may want to adjust it later. pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option<DefId> { @@ -535,7 +535,7 @@ pub trait ItemPathBuffer { #[derive(Debug)] pub enum RootMode { - /// Try to make a path relative to the local crate. In + /// Try to make a path relative to the local crate. In /// particular, local paths have no prefix, and if the path comes /// from an extern crate, start with the path to the `extern /// crate` declaration. 
diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 8401d0861ca..6c507c0015d 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -46,7 +46,7 @@ impl IntegerExt for Integer { } } - /// Get the Integer type from an attr::IntType. + /// Gets the Integer type from an attr::IntType. fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer { let dl = cx.data_layout(); @@ -62,7 +62,7 @@ impl IntegerExt for Integer { } } - /// Find the appropriate Integer type and signedness for the given + /// Finds the appropriate Integer type and signedness for the given /// signed discriminant range and #[repr] attribute. /// N.B.: u128 values above i128::MAX will be treated as signed, but /// that shouldn't affect anything, other than maybe debuginfo. @@ -1686,7 +1686,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx> tcx.types.re_static, tcx.mk_array(tcx.types.usize, 3), ) - /* FIXME use actual fn pointers + /* FIXME: use actual fn pointers Warning: naively computing the number of entries in the vtable by counting the methods on the trait + methods on all parent traits does not work, because some methods can diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 1f08d930fbd..70f72acad1f 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -135,8 +135,8 @@ pub enum AssociatedItemContainer { } impl AssociatedItemContainer { - /// Asserts that this is the def-id of an associated item declared - /// in a trait, and returns the trait def-id. + /// Asserts that this is the `DefId` of an associated item declared + /// in a trait, and returns the trait `DefId`. pub fn assert_trait(&self) -> DefId { match *self { TraitContainer(id) => id, @@ -154,7 +154,7 @@ impl AssociatedItemContainer { /// The "header" of an impl is everything outside the body: a Self type, a trait /// ref (in the case of a trait impl), and a set of predicates (from the -/// bounds/where clauses). +/// bounds / where-clauses). 
#[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct ImplHeader<'tcx> { pub impl_def_id: DefId, @@ -328,7 +328,7 @@ pub enum Variance { /// item. pub struct CrateVariancesMap { /// For each item with generics, maps to a vector of the variance - /// of its generics. If an item has no generics, it will have no + /// of its generics. If an item has no generics, it will have no /// entry. pub variances: FxHashMap<DefId, Lrc<Vec<ty::Variance>>>, @@ -338,7 +338,7 @@ pub struct CrateVariancesMap { impl Variance { /// `a.xform(b)` combines the variance of a context with the - /// variance of a type with the following meaning. If we are in a + /// variance of a type with the following meaning. If we are in a /// context with variance `a`, and we encounter a type argument in /// a position with variance `b`, then `a.xform(b)` is the new /// variance with which the argument appears. @@ -362,10 +362,10 @@ impl Variance { /// The ambient variance is covariant. A `fn` type is /// contravariant with respect to its parameters, so the variance /// within which both pointer types appear is - /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const + /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const /// T` is covariant with respect to `T`, so the variance within /// which the first `Vec<i32>` appears is - /// `Contravariant.xform(Covariant)` or `Contravariant`. The same + /// `Contravariant.xform(Covariant)` or `Contravariant`. The same /// is true for its `i32` argument. In the `*mut T` case, the /// variance of `Vec<i32>` is `Contravariant.xform(Invariant)`, /// and hence the outermost type is `Invariant` with respect to @@ -489,12 +489,12 @@ pub struct TyS<'tcx> { /// So, for a type without any late-bound things, like `u32`, this /// will be *innermost*, because that is the innermost binder that /// captures nothing. 
But for a type `&'D u32`, where `'D` is a - /// late-bound region with debruijn index `D`, this would be `D + 1` + /// late-bound region with De Bruijn index `D`, this would be `D + 1` /// -- the binder itself does not capture `D`, but `D` is captured /// by an inner binder. /// /// We call this concept an "exclusive" binder `D` because all - /// debruijn indices within the type are contained within `0..D` + /// De Bruijn indices within the type are contained within `0..D` /// (exclusive). outer_exclusive_binder: ty::DebruijnIndex, } @@ -720,9 +720,9 @@ pub struct UpvarPath { pub hir_id: hir::HirId, } -/// Upvars do not get their own node-id. Instead, we use the pair of -/// the original var id (that is, the root variable that is referenced -/// by the upvar) and the id of the closure expression. +/// Upvars do not get their own `NodeId`. Instead, we use the pair of +/// the original var ID (that is, the root variable that is referenced +/// by the upvar) and the ID of the closure expression. #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UpvarId { pub var_path: UpvarPath, @@ -734,7 +734,7 @@ pub enum BorrowKind { /// Data must be immutable and is aliasable. ImmBorrow, - /// Data must be immutable but not aliasable. This kind of borrow + /// Data must be immutable but not aliasable. This kind of borrow /// cannot currently be expressed by the user and is used only in /// implicit closure bindings. It is needed when the closure /// is borrowing or mutating a mutable referent, e.g.: @@ -1096,7 +1096,7 @@ impl<'a, 'gcx, 'tcx> Predicate<'tcx> { /// Performs a substitution suitable for going from a /// poly-trait-ref to supertraits that must hold if that /// poly-trait-ref holds. This is slightly different from a normal - /// substitution in terms of what happens with bound regions. See + /// substitution in terms of what happens with bound regions. See /// lengthy comment below for details. 
pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, trait_ref: &ty::PolyTraitRef<'tcx>) @@ -1235,7 +1235,7 @@ pub type PolySubtypePredicate<'tcx> = ty::Binder<SubtypePredicate<'tcx>>; /// This kind of predicate has no *direct* correspondent in the /// syntax, but it roughly corresponds to the syntactic forms: /// -/// 1. `T: TraitRef<..., Item=Type>` +/// 1. `T: TraitRef<..., Item = Type>` /// 2. `<T as TraitRef<...>>::Item == Type` (NYI) /// /// In particular, form #1 is "desugared" to the combination of a @@ -1456,8 +1456,8 @@ impl<'tcx> Predicate<'tcx> { } /// Represents the bounds declared on a particular set of type -/// parameters. Should eventually be generalized into a flag list of -/// where clauses. You can obtain a `InstantiatedPredicates` list from a +/// parameters. Should eventually be generalized into a flag list of +/// where-clauses. You can obtain a `InstantiatedPredicates` list from a /// `GenericPredicates` by using the `instantiate` method. Note that this method /// reflects an important semantic invariant of `InstantiatedPredicates`: while /// the `GenericPredicates` are expressed in terms of the bound type @@ -1471,7 +1471,7 @@ impl<'tcx> Predicate<'tcx> { /// struct Foo<T,U:Bar<T>> { ... } /// /// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like -/// `[[], [U:Bar<T>]]`. Now if there were some particular reference +/// `[[], [U:Bar<T>]]`. Now if there were some particular reference /// like `Foo<isize,usize>`, then the `InstantiatedPredicates` would be `[[], /// [usize:Bar<isize>]]`. #[derive(Clone)] @@ -1537,7 +1537,7 @@ impl UniverseIndex { /// Returns the "next" universe index in order -- this new index /// is considered to extend all previous universes. This - /// corresponds to entering a `forall` quantifier. So, for + /// corresponds to entering a `forall` quantifier. 
So, for /// example, suppose we have this type in universe `U`: /// /// ``` @@ -1619,7 +1619,7 @@ pub struct ParamEnv<'tcx> { impl<'tcx> ParamEnv<'tcx> { /// Construct a trait environment suitable for contexts where - /// there are no where clauses in scope. Hidden types (like `impl + /// there are no where-clauses in scope. Hidden types (like `impl /// Trait`) are left hidden, so this is suitable for ordinary /// type-checking. #[inline] @@ -1627,12 +1627,12 @@ impl<'tcx> ParamEnv<'tcx> { Self::new(List::empty(), Reveal::UserFacing, None) } - /// Construct a trait environment with no where clauses in scope + /// Construct a trait environment with no where-clauses in scope /// where the values of all `impl Trait` and other hidden types /// are revealed. This is suitable for monomorphized, post-typeck /// environments like codegen or doing optimizations. /// - /// N.B. If you want to have predicates in scope, use `ParamEnv::new`, + /// N.B., if you want to have predicates in scope, use `ParamEnv::new`, /// or invoke `param_env.with_reveal_all()`. #[inline] pub fn reveal_all() -> Self { @@ -1651,7 +1651,7 @@ impl<'tcx> ParamEnv<'tcx> { /// Returns a new parameter environment with the same clauses, but /// which "reveals" the true results of projections in all cases - /// (even for associated types that are specializable). This is + /// (even for associated types that are specializable). This is /// the desired behavior during codegen and certain other special /// contexts; normally though we want to use `Reveal::UserFacing`, /// which is the default. @@ -1736,7 +1736,7 @@ impl<'a, 'gcx, T> HashStable<StableHashingContext<'a>> for ParamEnvAnd<'gcx, T> #[derive(Copy, Clone, Debug)] pub struct Destructor { - /// The def-id of the destructor method + /// The `DefId` of the destructor method pub did: DefId, } @@ -1781,20 +1781,21 @@ pub struct VariantDef { } impl<'a, 'gcx, 'tcx> VariantDef { - /// Create a new `VariantDef`. + /// Creates a new `VariantDef`. 
/// - /// - `did` is the DefId used for the variant - for tuple-structs, it is the constructor DefId, - /// and for everything else, it is the variant DefId. + /// - `did` is the `DefId` used for the variant. + /// This is the constructor `DefId` for tuple structs, and the variant `DefId` for everything + /// else. /// - `attribute_def_id` is the DefId that has the variant's attributes. - /// this is the struct DefId for structs, and the variant DefId for variants. + /// This is the struct `DefId` for structs, and the variant `DefId` for variants. /// - /// Note that we *could* use the constructor DefId, because the constructor attributes /// redirect to the base attributes, but compiling a small crate requires - /// loading the AdtDefs for all the structs in the universe (e.g., coherence for any + /// loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any /// built-in trait), and we do not want to load attributes twice. /// /// If someone speeds up attribute loading to not be a performance concern, they can - /// remove this hack and use the constructor DefId everywhere. + /// remove this hack and use the constructor `DefId` everywhere. pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, did: DefId, ident: Ident, @@ -2049,13 +2050,13 @@ impl ReprOptions { } /// Returns `true` if this `#[repr()]` should inhibit struct field reordering - /// optimizations, such as with repr(C), repr(packed(1)), or repr(<int>). + /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`. pub fn inhibit_struct_field_reordering_opt(&self) -> bool { self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 || self.int.is_some() } - /// Returns true if this `#[repr()]` should inhibit union abi optimisations + /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
pub fn inhibit_union_abi_opt(&self) -> bool { self.c() } @@ -2170,14 +2171,14 @@ impl<'a, 'gcx, 'tcx> AdtDef { self.flags.contains(AdtFlags::HAS_CTOR) } - /// Returns whether this type is `#[fundamental]` for the purposes + /// Returns `true` if this type is `#[fundamental]` for the purposes /// of coherence checking. #[inline] pub fn is_fundamental(&self) -> bool { self.flags.contains(AdtFlags::IS_FUNDAMENTAL) } - /// Returns `true` if this is PhantomData<T>. + /// Returns `true` if this is `PhantomData<T>`. #[inline] pub fn is_phantom_data(&self) -> bool { self.flags.contains(AdtFlags::IS_PHANTOM_DATA) @@ -2199,7 +2200,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { self.flags.contains(AdtFlags::IS_BOX) } - /// Returns whether this type has a destructor. + /// Returns `true` if this type has a destructor. pub fn has_dtor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool { self.destructor(tcx).is_some() } @@ -2320,7 +2321,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { }) } - /// Compute the discriminant value used by a specific variant. + /// Computes the discriminant value used by a specific variant. /// Unlike `discriminants`, this is (amortized) constant-time, /// only doing at most one query for evaluating an explicit /// discriminant (the last one before the requested variant), @@ -2336,9 +2337,9 @@ impl<'a, 'gcx, 'tcx> AdtDef { explicit_value.checked_add(tcx, offset as u128).0 } - /// Yields a DefId for the discriminant and an offset to add to it + /// Yields a `DefId` for the discriminant and an offset to add to it /// Alternatively, if there is no explicit discriminant, returns the - /// inferred discriminant directly + /// inferred discriminant directly. pub fn discriminant_def_for_variant( &self, variant_index: VariantIdx, @@ -2368,15 +2369,15 @@ impl<'a, 'gcx, 'tcx> AdtDef { } /// Returns a list of types such that `Self: Sized` if and only - /// if that type is Sized, or `TyErr` if this type is recursive. + /// if that type is `Sized`, or `TyErr` if this type is recursive. 
/// - /// Oddly enough, checking that the sized-constraint is Sized is + /// Oddly enough, checking that the sized-constraint is `Sized` is /// actually more expressive than checking all members: - /// the Sized trait is inductive, so an associated type that references - /// Self would prevent its containing ADT from being Sized. + /// the `Sized` trait is inductive, so an associated type that references + /// `Self` would prevent its containing ADT from being `Sized`. /// /// Due to normalization being eager, this applies even if - /// the associated type is behind a pointer, e.g., issue #31299. + /// the associated type is behind a pointer (e.g., issue #31299). pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx [Ty<'tcx>] { match tcx.try_adt_sized_constraint(DUMMY_SP, self.did) { Ok(tys) => tys, @@ -2480,7 +2481,7 @@ impl<'a, 'gcx, 'tcx> FieldDef { } } -/// Represents the various closure traits in the Rust language. This +/// Represents the various closure traits in the language. This /// will determine the type of the environment (`self`, in the /// desugaring) argument that the closure expects. /// @@ -2552,7 +2553,7 @@ impl<'tcx> TyS<'tcx> { TypeWalker::new(self) } - /// Iterator that walks the immediate children of `self`. Hence + /// Iterator that walks the immediate children of `self`. Hence /// `Foo<Bar<i32>, u32>` yields the sequence `[Bar<i32>, u32]` /// (but not `i32`, like `walk`). pub fn walk_shallow(&'tcx self) -> smallvec::IntoIter<walk::TypeWalkerArray<'tcx>> { @@ -2560,7 +2561,7 @@ impl<'tcx> TyS<'tcx> { } /// Walks `ty` and any types appearing within `ty`, invoking the - /// callback `f` on each type. If the callback returns false, then the + /// callback `f` on each type. If the callback returns `false`, then the /// children of the current type are ignored. /// /// Note: prefer `ty.walk()` where possible. 
@@ -2670,7 +2671,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.typeck_tables_of(self.hir().body_owner_def_id(body)) } - /// Returns an iterator of the def-ids for all body-owners in this + /// Returns an iterator of the `DefId`s for all body-owners in this /// crate. If you would prefer to iterate over the bodies /// themselves, you can do `self.hir().krate().body_ids.iter()`. pub fn body_owners( @@ -2917,7 +2918,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Return the possibly-auto-generated MIR of a (DefId, Subst) pair. + /// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair. pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>) -> &'gcx Mir<'gcx> { @@ -2937,7 +2938,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Get the attributes of a definition. + /// Gets the attributes of a definition. pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> { if let Some(id) = self.hir().as_local_hir_id(did) { Attributes::Borrowed(self.hir().attrs_by_hir_id(id)) @@ -2946,7 +2947,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Determine whether an item is annotated with an attribute. + /// Determines whether an item is annotated with an attribute. pub fn has_attr(self, did: DefId, attr: &str) -> bool { attr::contains_name(&self.get_attrs(did), attr) } @@ -2960,14 +2961,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.optimized_mir(def_id).generator_layout.as_ref().unwrap() } - /// Given the def-id of an impl, return the def_id of the trait it implements. - /// If it implements no trait, return `None`. + /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. + /// If it implements no trait, returns `None`. pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> { self.impl_trait_ref(def_id).map(|tr| tr.def_id) } - /// If the given defid describes a method belonging to an impl, return the - /// def-id of the impl that the method belongs to. Otherwise, return `None`. 
+ /// If the given defid describes a method belonging to an impl, returns the + /// `DefId` of the impl that the method belongs to; otherwise, returns `None`. pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> { let item = if def_id.krate != LOCAL_CRATE { if let Some(Def::Method(_)) = self.describe_def(def_id) { @@ -2998,9 +2999,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - // Hygienically compare a use-site name (`use_name`) for a field or an associated item with its - // supposed definition name (`def_name`). The method also needs `DefId` of the supposed - // definition's parent/scope to perform comparison. + /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with + /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed + /// definition's parent/scope to perform comparison. pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool { self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern() } @@ -3082,7 +3083,7 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso parent_item.node) } -/// Calculates the Sized-constraint. +/// Calculates the `Sized` constraint. /// /// In fact, there are only a few options for the types in the constraint: /// - an obviously-unsized type @@ -3135,9 +3136,9 @@ fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span { tcx.hir().span_if_local(def_id).unwrap() } -/// If the given def ID describes an item belonging to a trait, -/// return the ID of the trait that the trait item belongs to. -/// Otherwise, return `None`. +/// If the given `DefId` describes an item belonging to a trait, +/// returns the `DefId` of the trait that the trait item belongs to; +/// otherwise, returns `None`. 
fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> { tcx.opt_associated_item(def_id) .and_then(|associated_item| { @@ -3232,10 +3233,9 @@ fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } -/// If `def_id` is an issue 33140 hack impl, return its self type. Otherwise -/// return None. +/// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`. /// -/// See ImplOverlapKind::Issue33140 for more details. +/// See [`ImplOverlapKind::Issue33140`] for more details. fn issue33140_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<Ty<'tcx>> diff --git a/src/librustc/ty/query/job.rs b/src/librustc/ty/query/job.rs index 0793366e6d4..16b4af53594 100644 --- a/src/librustc/ty/query/job.rs +++ b/src/librustc/ty/query/job.rs @@ -31,37 +31,38 @@ use { rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable}, }; -/// Indicates the state of a query for a given key in a query map +/// Indicates the state of a query for a given key in a query map. pub(super) enum QueryResult<'tcx> { - /// An already executing query. The query job can be used to await for its completion + /// An already executing query. The query job can be used to await for its completion. Started(Lrc<QueryJob<'tcx>>), - /// The query panicked. Queries trying to wait on this will raise a fatal error / silently panic + /// The query panicked. Queries trying to wait on this will raise a fatal error or + /// silently panic. Poisoned, } -/// A span and a query key +/// Represents a span and a query key. #[derive(Clone, Debug)] pub struct QueryInfo<'tcx> { - /// The span for a reason this query was required + /// The span corresponding to the reason for which this query was required. pub span: Span, pub query: Query<'tcx>, } -/// A object representing an active query job. +/// Represents an active query job.
pub struct QueryJob<'tcx> { pub info: QueryInfo<'tcx>, /// The parent query job which created this job and is implicitly waiting on it. pub parent: Option<Lrc<QueryJob<'tcx>>>, - /// The latch which is used to wait on this job + /// The latch that is used to wait on this job. #[cfg(parallel_compiler)] latch: QueryLatch<'tcx>, } impl<'tcx> QueryJob<'tcx> { - /// Creates a new query job + /// Creates a new query job. pub fn new(info: QueryInfo<'tcx>, parent: Option<Lrc<QueryJob<'tcx>>>) -> Self { QueryJob { info, @@ -230,7 +231,7 @@ impl<'tcx> QueryLatch<'tcx> { } } - /// Remove a single waiter from the list of waiters. + /// Removes a single waiter from the list of waiters. /// This is used to break query cycles. fn extract_waiter( &self, diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs index d002b99f385..67a5c7d6c9a 100644 --- a/src/librustc/ty/query/mod.rs +++ b/src/librustc/ty/query/mod.rs @@ -102,12 +102,12 @@ define_queries! { <'tcx> /// Records the type of every item. [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>, - /// Maps from the def-id of an item (trait/struct/enum/fn) to its + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to its /// associated generics. [] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics, - /// Maps from the def-id of an item (trait/struct/enum/fn) to the - /// predicates (where clauses) that must be proven true in order + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the + /// predicates (where-clauses) that must be proven true in order /// to reference it. This is almost always the "predicates query" /// that you want. /// @@ -123,8 +123,8 @@ define_queries! { <'tcx> /// user.) [] fn predicates_of: PredicatesOfItem(DefId) -> Lrc<ty::GenericPredicates<'tcx>>, - /// Maps from the def-id of an item (trait/struct/enum/fn) to the - /// predicates (where clauses) directly defined on it. 
This is + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the + /// predicates (where-clauses) directly defined on it. This is /// equal to the `explicit_predicates_of` predicates plus the /// `inferred_outlives_of` predicates. [] fn predicates_defined_on: PredicatesDefinedOnItem(DefId) @@ -138,7 +138,7 @@ define_queries! { <'tcx> /// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`). [] fn inferred_outlives_of: InferredOutlivesOf(DefId) -> Lrc<Vec<ty::Predicate<'tcx>>>, - /// Maps from the def-id of a trait to the list of + /// Maps from the `DefId` of a trait to the list of /// super-predicates. This is a subset of the full list of /// predicates. We store these in a separate map because we must /// evaluate them even during type conversion, often before the @@ -216,7 +216,7 @@ define_queries! { <'tcx> }, Codegen { - /// Set of all the def-ids in this crate that have MIR associated with + /// Set of all the `DefId`s in this crate that have MIR associated with /// them. This includes all the body owners, but also things like struct /// constructors. [] fn mir_keys: mir_keys(CrateNum) -> Lrc<DefIdSet>, @@ -226,11 +226,11 @@ define_queries! { <'tcx> /// the value isn't known except to the pass itself. [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Lrc<BitSet<mir::Local>>), - /// Fetch the MIR for a given def-id right after it's built - this includes + /// Fetch the MIR for a given `DefId` right after it's built - this includes /// unreachable code. [] fn mir_built: MirBuilt(DefId) -> &'tcx Steal<mir::Mir<'tcx>>, - /// Fetch the MIR for a given def-id up till the point where it is + /// Fetch the MIR for a given `DefId` up till the point where it is /// ready for const evaluation. /// /// See the README for the `mir` module for details. @@ -244,7 +244,7 @@ define_queries! { <'tcx> }, TypeChecking { - /// The result of unsafety-checking this def-id. + /// The result of unsafety-checking this `DefId`. 
[] fn unsafety_check_result: UnsafetyCheckResult(DefId) -> mir::UnsafetyCheckResult, /// HACK: when evaluated, this reports a "unsafe derive on repr(packed)" error @@ -307,13 +307,13 @@ define_queries! { <'tcx> TypeChecking { /// Gets a complete map from all types to their inherent impls. /// Not meant to be used directly outside of coherence. - /// (Defined only for LOCAL_CRATE) + /// (Defined only for `LOCAL_CRATE`.) [] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> Lrc<CrateInherentImpls>, - /// Checks all types in the krate for overlap in their inherent impls. Reports errors. + /// Checks all types in the crate for overlap in their inherent impls. Reports errors. /// Not meant to be used directly outside of coherence. - /// (Defined only for LOCAL_CRATE) + /// (Defined only for `LOCAL_CRATE`.) [] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (), }, @@ -321,9 +321,9 @@ define_queries! { <'tcx> Other { /// Evaluate a constant without running sanity checks /// - /// DO NOT USE THIS outside const eval. Const eval uses this to break query cycles during - /// validation. Please add a comment to every use site explaining why using `const_eval` - /// isn't sufficient + /// **Do not use this** outside const eval. Const eval uses this to break query cycles + /// during validation. Please add a comment to every use site explaining why using + /// `const_eval` isn't sufficient [] fn const_eval_raw: const_eval_raw_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>) -> ConstEvalRawResult<'tcx>, @@ -344,7 +344,7 @@ define_queries! { <'tcx> Other { [] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet, - /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body; + /// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body; /// in the case of closures, this will be redirected to the enclosing function. 
[] fn region_scope_tree: RegionScopeTree(DefId) -> Lrc<region::ScopeTree>, @@ -398,7 +398,7 @@ define_queries! { <'tcx> -> Lrc<specialization_graph::Graph>, [] fn is_object_safe: ObjectSafety(DefId) -> bool, - /// Get the ParameterEnvironment for a given item; this environment + /// Gets the ParameterEnvironment for a given item; this environment /// will be in "user-facing" mode, meaning that it is suitable for /// type-checking etc, and it does not normalize specializable /// associated types. This is almost always what you want, @@ -485,7 +485,7 @@ define_queries! { <'tcx> [] fn foreign_modules: ForeignModules(CrateNum) -> Lrc<Vec<ForeignModule>>, - /// Identifies the entry-point (e.g. the `main` function) for a given + /// Identifies the entry-point (e.g., the `main` function) for a given /// crate, returning `None` if there is no entry point (such as for library crates). [] fn entry_fn: EntryFn(CrateNum) -> Option<(DefId, EntryFnType)>, [] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>, diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 9c9bc0f6aa1..f948abc7f6f 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -103,7 +103,7 @@ impl AbsoluteBytePos { } impl<'sess> OnDiskCache<'sess> { - /// Create a new OnDiskCache instance from the serialized data in `data`. + /// Creates a new OnDiskCache instance from the serialized data in `data`. pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); @@ -325,7 +325,7 @@ impl<'sess> OnDiskCache<'sess> { }) } - /// Load a diagnostic emitted during the previous compilation session. + /// Loads a diagnostic emitted during the previous compilation session.
pub fn load_diagnostics<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, dep_node_index: SerializedDepNodeIndex) @@ -339,7 +339,7 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.unwrap_or_default() } - /// Store a diagnostic emitted during the current compilation session. + /// Stores a diagnostic emitted during the current compilation session. /// Anything stored like this will be available via `load_diagnostics` in /// the next compilation session. #[inline(never)] @@ -353,7 +353,7 @@ impl<'sess> OnDiskCache<'sess> { } /// Returns the cached query result if there is something in the cache for - /// the given SerializedDepNodeIndex. Otherwise returns None. + /// the given `SerializedDepNodeIndex`; otherwise returns `None`. pub fn try_load_query_result<'tcx, T>(&self, tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node_index: SerializedDepNodeIndex) @@ -366,7 +366,7 @@ impl<'sess> OnDiskCache<'sess> { "query result") } - /// Store a diagnostic emitted during computation of an anonymous query. + /// Stores a diagnostic emitted during computation of an anonymous query. /// Since many anonymous queries can share the same `DepNode`, we aggregate /// them -- as opposed to regular queries where we assume that there is a /// 1:1 relationship between query-key and `DepNode`. diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index f63fbd79825..9b2a70a6a6d 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -1,6 +1,6 @@ -//! The implementation of the query system itself. Defines the macros -//! that generate the actual methods on tcx which find and execute the -//! provider, manage the caches, and so forth. +//! The implementation of the query system itself. This defines the macros that +//! generate the actual methods on tcx which find and execute the provider, +//! manage the caches, and so forth. 
use crate::dep_graph::{DepNodeIndex, DepNode, DepKind, SerializedDepNodeIndex}; use crate::errors::DiagnosticBuilder; @@ -1017,8 +1017,8 @@ macro_rules! define_queries_inner { } impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> { - /// Return a transparent wrapper for `TyCtxt` which ensures queries - /// are executed instead of returing their result + /// Returns a transparent wrapper for `TyCtxt`, which ensures queries + /// are executed instead of just returning their results. #[inline(always)] pub fn ensure(self) -> TyCtxtEnsure<'a, $tcx, 'lcx> { TyCtxtEnsure { @@ -1026,7 +1026,7 @@ macro_rules! define_queries_inner { } } - /// Return a transparent wrapper for `TyCtxt` which uses + /// Returns a transparent wrapper for `TyCtxt` which uses /// `span` as the location of queries performed through it. #[inline(always)] pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> { @@ -1067,7 +1067,7 @@ macro_rules! define_queries_struct { (tcx: $tcx:tt, input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => { pub struct Queries<$tcx> { - /// This provides access to the incr. comp. on-disk cache for query results. + /// This provides access to the incremental compilation on-disk cache for query results. /// Do not access this directly. It is only meant to be used by /// `DepGraph::try_mark_green()` and the query infrastructure. pub(crate) on_disk_cache: OnDiskCache<'tcx>, @@ -1123,22 +1123,22 @@ macro_rules! define_provider_struct { /// /// Now, if force_from_dep_node() would always fail, it would be pretty useless. /// Fortunately, we can use some contextual information that will allow us to -/// reconstruct query-keys for certain kinds of DepNodes. In particular, we -/// enforce by construction that the GUID/fingerprint of certain DepNodes is a -/// valid DefPathHash. Since we also always build a huge table that maps every -/// DefPathHash in the current codebase to the corresponding DefId, we have +/// reconstruct query-keys for certain kinds of `DepNode`s. 
In particular, we +/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a +/// valid `DefPathHash`. Since we also always build a huge table that maps every +/// `DefPathHash` in the current codebase to the corresponding `DefId`, we have /// everything we need to re-run the query. /// /// Take the `mir_validated` query as an example. Like many other queries, it -/// just has a single parameter: the DefId of the item it will compute the -/// validated MIR for. Now, when we call `force_from_dep_node()` on a dep-node -/// with kind `MirValidated`, we know that the GUID/fingerprint of the dep-node -/// is actually a DefPathHash, and can therefore just look up the corresponding -/// DefId in `tcx.def_path_hash_to_def_id`. +/// just has a single parameter: the `DefId` of the item it will compute the +/// validated MIR for. Now, when we call `force_from_dep_node()` on a `DepNode` +/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode` +/// is actually a `DefPathHash`, and can therefore just look up the corresponding +/// `DefId` in `tcx.def_path_hash_to_def_id`. /// /// When you implement a new query, it will likely have a corresponding new -/// DepKind, and you'll have to support it here in `force_from_dep_node()`. As -/// a rule of thumb, if your query takes a DefId or DefIndex as sole parameter, +/// `DepKind`, and you'll have to support it here in `force_from_dep_node()`. As +/// a rule of thumb, if your query takes a `DefId` or `DefIndex` as sole parameter, /// then `force_from_dep_node()` should not fail for it. Otherwise, you can just /// add it to the "We don't have enough information to reconstruct..." group in /// the match below. 
diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index 3dbd0dc1d97..db248072d9b 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -30,7 +30,7 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { /// Returns a static string we can use for printouts. fn tag(&self) -> &'static str; - /// Returns true if the value `a` is the "expected" type in the + /// Returns `true` if the value `a` is the "expected" type in the /// relation. Just affects error messages. fn a_is_expected(&self) -> bool; diff --git a/src/librustc/ty/steal.rs b/src/librustc/ty/steal.rs index 336a4c3bf22..a8f9301ba51 100644 --- a/src/librustc/ty/steal.rs +++ b/src/librustc/ty/steal.rs @@ -12,14 +12,14 @@ use rustc_data_structures::sync::{RwLock, ReadGuard, MappedReadGuard}; /// Steal<Mir<'tcx>>` (to be very specific). Now we can read from this /// as much as we want (using `borrow()`), but you can also /// `steal()`. Once you steal, any further attempt to read will panic. -/// Therefore we know that -- assuming no ICE -- nobody is observing +/// Therefore, we know that -- assuming no ICE -- nobody is observing /// the fact that the MIR was updated. /// /// Obviously, whenever you have a query that yields a `Steal` value, /// you must treat it with caution, and make sure that you know that /// -- once the value is stolen -- it will never be read from again. -/// -/// FIXME(#41710) -- what is the best way to model linear queries? +// +// FIXME(#41710): what is the best way to model linear queries? pub struct Steal<T> { value: RwLock<Option<T>> } diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index d4c18c64c99..66efd2aea15 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -47,7 +47,7 @@ pub enum BoundRegion { /// Named region parameters for functions (a in &'a T) /// - /// The def-id is needed to distinguish free regions in + /// The `DefId` is needed to distinguish free regions in /// the event of shadowing. 
BrNamed(DefId, InternedString), @@ -87,7 +87,7 @@ pub enum TyKind<'tcx> { Bool, /// The primitive character type; holds a Unicode scalar value - /// (a non-surrogate code point). Written as `char`. + /// (a non-surrogate code point). Written as `char`. Char, /// A primitive signed integer type. For example, `i32`. @@ -116,7 +116,7 @@ pub enum TyKind<'tcx> { /// An array with the given length. Written as `[T; n]`. Array(Ty<'tcx>, &'tcx ty::LazyConst<'tcx>), - /// The pointee of an array slice. Written as `[T]`. + /// The pointee of an array slice. Written as `[T]`. Slice(Ty<'tcx>), /// A raw pointer. Written as `*mut T` or `*const T` @@ -138,7 +138,7 @@ pub enum TyKind<'tcx> { /// ``` FnDef(DefId, &'tcx Substs<'tcx>), - /// A pointer to a function. Written as `fn() -> i32`. + /// A pointer to a function. Written as `fn() -> i32`. /// /// For example the type of `bar` here: /// @@ -166,10 +166,10 @@ pub enum TyKind<'tcx> { /// The never type `!` Never, - /// A tuple type. For example, `(i32, bool)`. + /// A tuple type. For example, `(i32, bool)`. Tuple(&'tcx List<Ty<'tcx>>), - /// The projection of an associated type. For example, + /// The projection of an associated type. For example, /// `<T as Trait<..>>::N`. Projection(ProjectionTy<'tcx>), @@ -278,7 +278,7 @@ static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::<TyKind<'_>>() == 24); /// /// All right, you say, but why include the type parameters from the /// original function then? The answer is that codegen may need them -/// when monomorphizing, and they may not appear in the upvars. A +/// when monomorphizing, and they may not appear in the upvars. A /// closure could capture no variables but still make use of some /// in-scope type parameter with a bound (e.g., if our example above /// had an extra `U: Default`, and the closure called `U::default()`). 
@@ -295,9 +295,9 @@ static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::<TyKind<'_>>() == 24); /// ## Generators /// /// Perhaps surprisingly, `ClosureSubsts` are also used for -/// generators. In that case, what is written above is only half-true +/// generators. In that case, what is written above is only half-true /// -- the set of type parameters is similar, but the role of CK and -/// CS are different. CK represents the "yield type" and CS +/// CS are different. CK represents the "yield type" and CS /// represents the "return type" of the generator. /// /// It'd be nice to split this struct into ClosureSubsts and @@ -442,17 +442,17 @@ impl<'tcx> GeneratorSubsts<'tcx> { self.split(def_id, tcx).return_ty } - /// Return the "generator signature", which consists of its yield + /// Returns the "generator signature", which consists of its yield /// and return types. /// - /// NB. Some bits of the code prefers to see this wrapped in a + /// N.B., some bits of the code prefer to see this wrapped in a /// binder, but it never contains bound regions. Probably this /// function should be removed. pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> { ty::Binder::dummy(self.sig(def_id, tcx)) } - /// Return the "generator signature", which consists of its yield + /// Returns the "generator signature", which consists of its yield /// and return types. pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> { ty::GenSig { @@ -520,11 +520,11 @@ impl<'tcx> UpvarSubsts<'tcx> { #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum ExistentialPredicate<'tcx> { - /// e.g., Iterator + /// E.g., `Iterator`. Trait(ExistentialTraitRef<'tcx>), - /// e.g., Iterator::Item = T + /// E.g., `Iterator::Item = T`. Projection(ExistentialProjection<'tcx>), - /// e.g., Send + /// E.g., `Send`. 
AutoTrait(DefId), } @@ -655,12 +655,12 @@ impl<'tcx> Binder<&'tcx List<ExistentialPredicate<'tcx>>> { } /// A complete reference to a trait. These take numerous guises in syntax, -/// but perhaps the most recognizable form is in a where clause: +/// but perhaps the most recognizable form is in a where-clause: /// /// T: Foo<U> /// -/// This would be represented by a trait-reference where the def-id is the -/// def-id for the trait `Foo` and the substs define `T` as parameter 0, +/// This would be represented by a trait-reference where the `DefId` is the +/// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, /// and `U` as parameter 1. /// /// Trait references also appear in object types like `Foo<U>`, but in @@ -766,9 +766,9 @@ impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> { } } - /// Object types don't have a self-type specified. Therefore, when + /// Object types don't have a self type specified. Therefore, when /// we convert the principal trait-ref into a normal trait-ref, - /// you must give *some* self-type. A common choice is `mk_err()` + /// you must give *some* self type. A common choice is `mk_err()` /// or some placeholder type. pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> { @@ -789,9 +789,9 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> { self.skip_binder().def_id } - /// Object types don't have a self-type specified. Therefore, when + /// Object types don't have a self type specified. Therefore, when /// we convert the principal trait-ref into a normal trait-ref, - /// you must give *some* self-type. A common choice is `mk_err()` + /// you must give *some* self type. A common choice is `mk_err()` /// or some placeholder type. pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>, self_ty: Ty<'tcx>) @@ -829,7 +829,7 @@ impl<T> Binder<T> { /// Skips the binder and returns the "bound" value. 
This is a /// risky thing to do because it's easy to get confused about - /// debruijn indices and the like. It is usually better to + /// De Bruijn indices and the like. It is usually better to /// discharge the binder using `no_bound_vars` or /// `replace_late_bound_regions` or something like /// that. `skip_binder` is only valid when you are either @@ -840,7 +840,7 @@ impl<T> Binder<T> { /// /// Some examples where `skip_binder` is reasonable: /// - /// - extracting the def-id from a PolyTraitRef; + /// - extracting the `DefId` from a PolyTraitRef; /// - comparing the self type of a PolyTraitRef to see if it is equal to /// a type parameter `X`, since the type `X` does not reference any regions pub fn skip_binder(&self) -> &T { @@ -884,8 +884,8 @@ impl<T> Binder<T> { } /// Given two things that have the same binder level, - /// and an operation that wraps on their contents, execute the operation - /// and then wrap its result. + /// and an operation that wraps on their contents, executes the operation + /// and then wraps its result. /// /// `f` should consider bound regions at depth 1 to be free, and /// anything it produces with bound regions at depth 1 will be @@ -896,7 +896,7 @@ impl<T> Binder<T> { Binder(f(self.0, u.0)) } - /// Split the contents into two things that share the same binder + /// Splits the contents into two things that share the same binder /// level as the original, returning two distinct binders. /// /// `f` should consider bound regions at depth 1 to be free, and @@ -1118,14 +1118,14 @@ pub type Region<'tcx> = &'tcx RegionKind; /// ## Bound Regions /// /// These are regions that are stored behind a binder and must be substituted -/// with some concrete region before being used. 
There are 2 kind of -/// bound regions: early-bound, which are bound in an item's Generics, -/// and are substituted by a Substs, and late-bound, which are part of -/// higher-ranked types (e.g., `for<'a> fn(&'a ())`) and are substituted by +/// with some concrete region before being used. There are two kinds of +/// bound regions: early-bound, which are bound in an item's `Generics`, +/// and are substituted by a `Substs`, and late-bound, which are part of +/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by /// the likes of `liberate_late_bound_regions`. The distinction exists /// because higher-ranked lifetimes aren't supported in all places. See [1][2]. /// -/// Unlike Param-s, bound regions are not supposed to exist "in the wild" +/// Unlike `Param`s, bound regions are not supposed to exist "in the wild" /// outside their binder, e.g., in types passed to type inference, and /// should first be substituted (by placeholder regions, free regions, /// or region variables). @@ -1141,7 +1141,7 @@ pub type Region<'tcx> = &'tcx RegionKind; /// To do this, we replace the bound regions with placeholder markers, /// which don't satisfy any relation not explicitly provided. /// -/// There are 2 kinds of placeholder regions in rustc: `ReFree` and +/// There are two kinds of placeholder regions in rustc: `ReFree` and /// `RePlaceholder`. When checking an item's body, `ReFree` is supposed /// to be used. These also support explicit bounds: both the internally-stored /// *scope*, which the region is assumed to outlive, as well as other @@ -1189,7 +1189,7 @@ pub enum RegionKind { /// Static data that has an "infinite" lifetime. Top in the region lattice. ReStatic, - /// A region variable. Should not exist after typeck. + /// A region variable. Should not exist after typeck. ReVar(RegionVid), /// A placeholder region - basically the higher-ranked version of ReFree. 
@@ -1346,11 +1346,11 @@ impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> { impl DebruijnIndex { /// Returns the resulting index when this value is moved into - /// `amount` number of new binders. So e.g., if you had + /// `amount` number of new binders. So, e.g., if you had /// /// for<'a> fn(&'a x) /// - /// and you wanted to change to + /// and you wanted to change it to /// /// for<'a> fn(for<'b> fn(&'a x)) /// @@ -1378,7 +1378,7 @@ impl DebruijnIndex { *self = self.shifted_out(amount); } - /// Adjusts any Debruijn Indices so as to make `to_binder` the + /// Adjusts any De Bruijn indices so as to make `to_binder` the /// innermost binder. That is, if we have something bound at `to_binder`, /// it will now be bound at INNERMOST. This is an appropriate thing to do /// when moving a region out from inside binders: @@ -1388,12 +1388,12 @@ impl DebruijnIndex { /// // Binder: D3 D2 D1 ^^ /// ``` /// - /// Here, the region `'a` would have the debruijn index D3, + /// Here, the region `'a` would have the De Bruijn index D3, /// because it is the bound 3 binders out. However, if we wanted /// to refer to that region `'a` in the second argument (the `_`), /// those two binders would not be in scope. In that case, we /// might invoke `shift_out_to_binder(D3)`. This would adjust the - /// debruijn index of `'a` to D1 (the innermost binder). + /// De Bruijn index of `'a` to D1 (the innermost binder). /// /// If we invoke `shift_out_to_binder` and the region is in fact /// bound by one of the binders we are shifting out of, that is an @@ -1444,7 +1444,7 @@ impl RegionKind { } } - /// Adjusts any Debruijn Indices so as to make `to_binder` the + /// Adjusts any De Bruijn indices so as to make `to_binder` the /// innermost binder. That is, if we have something bound at `to_binder`, /// it will now be bound at INNERMOST. 
This is an appropriate thing to do /// when moving a region out from inside binders: @@ -1454,12 +1454,12 @@ impl RegionKind { /// // Binder: D3 D2 D1 ^^ /// ``` /// - /// Here, the region `'a` would have the debruijn index D3, + /// Here, the region `'a` would have the De Bruijn index D3, /// because it is the bound 3 binders out. However, if we wanted /// to refer to that region `'a` in the second argument (the `_`), /// those two binders would not be in scope. In that case, we /// might invoke `shift_out_to_binder(D3)`. This would adjust the - /// debruijn index of `'a` to D1 (the innermost binder). + /// De Bruijn index of `'a` to D1 (the innermost binder). /// /// If we invoke `shift_out_to_binder` and the region is in fact /// bound by one of the binders we are shifting out of, that is an @@ -1528,7 +1528,7 @@ impl RegionKind { flags } - /// Given an early-bound or free region, returns the def-id where it was bound. + /// Given an early-bound or free region, returns the `DefId` where it was bound. /// For example, consider the regions in this snippet of code: /// /// ``` @@ -1543,10 +1543,10 @@ impl RegionKind { /// } /// ``` /// - /// Here, `free_region_binding_scope('a)` would return the def-id + /// Here, `free_region_binding_scope('a)` would return the `DefId` /// of the impl, and for all the other highlighted regions, it - /// would return the def-id of the function. In other cases (not shown), this - /// function might return the def-id of a closure. + /// would return the `DefId` of the function. In other cases (not shown), this + /// function might return the `DefId` of a closure. pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId { match self { ty::ReEarlyBound(br) => { @@ -1772,7 +1772,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { } } - /// Returns true if this type is a floating point type and false otherwise. + /// Returns `true` if this type is a floating point type. 
pub fn is_floating_point(&self) -> bool { match self.sty { Float(_) | diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index d7c322d0f84..7559ea90b17 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -171,7 +171,7 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// Creates a `Substs` that maps each generic parameter to a higher-ranked /// var bound at index `0`. For types, we use a `BoundVar` index equal to /// the type parameter index. For regions, we use the `BoundRegion::BrNamed` - /// variant (which has a def-id). + /// variant (which has a `DefId`). pub fn bound_vars_for_item( tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId @@ -492,7 +492,7 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { self.shift_vars_through_binders(ty) } - /// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs + /// It is sometimes necessary to adjust the De Bruijn indices during substitution. This occurs /// when we are substituting a type with escaping bound vars into a context where we have /// passed through binders. That's quite a mouthful. Let's see an example: /// @@ -511,9 +511,9 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { /// /// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the /// inner one. Therefore, that appearance will have a DebruijnIndex of 2, because we must skip - /// over the inner binder (remember that we count Debruijn indices from 1). However, in the + /// over the inner binder (remember that we count De Bruijn indices from 1). However, in the /// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a - /// debruijn index of 1. It's only during the substitution that we can see we must increase the + /// De Bruijn index of 1. It's only during the substitution that we can see we must increase the /// depth by 1 to account for the binder that we passed through. 
/// /// As a second example, consider this twist: @@ -532,7 +532,7 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { /// DebruijnIndex of 2 /// /// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the - /// first case we do not increase the Debruijn index and in the second case we do. The reason + /// first case we do not increase the De Bruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. fn shift_vars_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> { debug!("shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", @@ -565,7 +565,7 @@ pub struct UserSubsts<'tcx> { /// The substitutions for the item as given by the user. pub substs: &'tcx Substs<'tcx>, - /// The self-type, in the case of a `<T>::Item` path (when applied + /// The self type, in the case of a `<T>::Item` path (when applied /// to an inherent impl). See `UserSelfTy` below. pub user_self_ty: Option<UserSelfTy<'tcx>>, } @@ -585,8 +585,8 @@ BraceStructLiftImpl! { } } -/// Specifies the user-given self-type. In the case of a path that -/// refers to a member in an inherent impl, this self-type is +/// Specifies the user-given self type. In the case of a path that +/// refers to a member in an inherent impl, this self type is /// sometimes needed to constrain the type parameters on the impl. For /// example, in this code: /// @@ -596,11 +596,11 @@ BraceStructLiftImpl! { /// ``` /// /// when you then have a path like `<Foo<&'static u32>>::method`, -/// this struct would carry the def-id of the impl along with the -/// self-type `Foo<u32>`. Then we can instantiate the parameters of +/// this struct would carry the `DefId` of the impl along with the +/// self type `Foo<u32>`. Then we can instantiate the parameters of /// the impl (with the substs from `UserSubsts`) and apply those to -/// the self-type, giving `Foo<?A>`. 
Finally, we unify that with -/// the self-type here, which contains `?A` to be `&'static u32` +/// the self type, giving `Foo<?A>`. Finally, we unify that with +/// the self type here, which contains `?A` to be `&'static u32` #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UserSelfTy<'tcx> { pub impl_def_id: DefId, diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 5429a2504b9..9ce8bf2e60a 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -39,7 +39,7 @@ pub struct TraitDef { #[derive(Default)] pub struct TraitImpls { blanket_impls: Vec<DefId>, - /// Impls indexed by their simplified self-type, for fast lookup. + /// Impls indexed by their simplified self type, for fast lookup. non_blanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>, } @@ -84,7 +84,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } /// Iterate over every impl that could possibly match the - /// self-type `self_ty`. + /// self type `self_ty`. pub fn for_each_relevant_impl<F: FnMut(DefId)>(self, def_id: DefId, self_ty: Ty<'tcx>, @@ -134,7 +134,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Return a vector containing all impls + /// Returns a vector containing all impls pub fn all_impls(self, def_id: DefId) -> Vec<DefId> { let impls = self.trait_impls_of(def_id); diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 61544932b43..0578162f84d 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -1,4 +1,4 @@ -//! misc. type-system utilities too small to deserve their own file +//! Miscellaneous type-system utilities that are too small to deserve their own modules. 
use crate::hir::def::Def; use crate::hir::def_id::DefId; @@ -23,7 +23,7 @@ use syntax_pos::{Span, DUMMY_SP}; #[derive(Copy, Clone, Debug)] pub struct Discr<'tcx> { - /// bit representation of the discriminant, so `-128i8` is `0xFF_u128` + /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`). pub val: u128, pub ty: Ty<'tcx> } @@ -46,7 +46,7 @@ impl<'tcx> fmt::Display for Discr<'tcx> { } impl<'tcx> Discr<'tcx> { - /// Adds 1 to the value and wraps around if the maximum for the type is reached + /// Adds `1` to the value and wraps around if the maximum for the type is reached. pub fn wrap_incr<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self { self.checked_add(tcx, 1).0 } @@ -342,9 +342,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// /// Requires that trait definitions have been processed so that we can /// elaborate predicates and walk supertraits. - /// - /// FIXME callers may only have a &[Predicate], not a Vec, so that's - /// what this code should accept. + // + // FIXME: callers may only have a `&[Predicate]`, not a `Vec`, so that's + // what this code should accept. pub fn required_region_bounds(self, erased_self_ty: Ty<'tcx>, predicates: Vec<ty::Predicate<'tcx>>) @@ -417,7 +417,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Some(ty::Destructor { did: dtor_did? }) } - /// Return the set of types that are required to be alive in + /// Returns the set of types that are required to be alive in /// order to run the destructor of `def` (see RFCs 769 and /// 1238). /// @@ -507,17 +507,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { result } - /// True if `def_id` refers to a closure (e.g., `|x| x * 2`). Note - /// that closures have a def-id, but the closure *expression* also + /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). 
Note + /// that closures have a `DefId`, but the closure *expression* also /// has a `HirId` that is located within the context where the /// closure appears (and, sadly, a corresponding `NodeId`, since /// those are not yet phased out). The parent of the closure's - /// def-id will also be the context where it appears. + /// `DefId` will also be the context where it appears. pub fn is_closure(self, def_id: DefId) -> bool { self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr } - /// True if `def_id` refers to a trait (i.e., `trait Foo { ... }`). + /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`). pub fn is_trait(self, def_id: DefId) -> bool { if let DefPathData::Trait(_) = self.def_key(def_id).disambiguated_data.data { true @@ -526,7 +526,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// True if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`). + /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`), + /// and `false` otherwise. pub fn is_trait_alias(self, def_id: DefId) -> bool { if let DefPathData::TraitAlias(_) = self.def_key(def_id).disambiguated_data.data { true @@ -535,17 +536,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// True if this def-id refers to the implicit constructor for - /// a tuple struct like `struct Foo(u32)`. + /// Returns `true` if this `DefId` refers to the implicit constructor for + /// a tuple struct like `struct Foo(u32)`, and `false` otherwise. pub fn is_struct_constructor(self, def_id: DefId) -> bool { self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor } /// Given the `DefId` of a fn or closure, returns the `DefId` of /// the innermost fn item that the closure is contained within. - /// This is a significant def-id because, when we do + /// This is a significant `DefId` because, when we do /// type-checking, we type-check this fn item and all of its - /// (transitive) closures together. 
Therefore, when we fetch the + /// (transitive) closures together. Therefore, when we fetch the /// `typeck_tables_of` the closure, for example, we really wind up /// fetching the `typeck_tables_of` the enclosing fn item. pub fn closure_base_def_id(self, def_id: DefId) -> DefId { @@ -558,10 +559,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { def_id } - /// Given the def-id and substs a closure, creates the type of + /// Given the `DefId` and substs a closure, creates the type of /// `self` argument that the closure expects. For example, for a /// `Fn` closure, this would return a reference type `&T` where - /// `T=closure_ty`. + /// `T = closure_ty`. /// /// Returns `None` if this closure's kind has not yet been inferred. /// This should only be possible during type checking. @@ -585,7 +586,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Some(ty::Binder::bind(env_ty)) } - /// Given the def-id of some item that has no type parameters, make + /// Given the `DefId` of some item that has no type parameters, make /// a suitable "empty substs" for it. pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx Substs<'tcx> { Substs::for_item(self, item_def_id, |param, _| { @@ -598,7 +599,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }) } - /// Return whether the node pointed to by def_id is a static item, and its mutability + /// Returns `true` if the node pointed to by `def_id` is a static item, and its mutability. pub fn is_static(&self, def_id: DefId) -> Option<hir::Mutability> { if let Some(node) = self.hir().get_if_local(def_id) { match node { @@ -730,7 +731,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// Checks whether values of this type `T` implement the `Freeze` /// trait -- frozen types are those that do not contain a - /// `UnsafeCell` anywhere. This is a language concept used to + /// `UnsafeCell` anywhere. 
This is a language concept used to /// distinguish "true immutability", which is relevant to /// optimization as well as the rules around static values. Note /// that the `Freeze` trait is not exposed to end users and is diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 2aae953c1c4..ffb5471e34f 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -227,7 +227,7 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { } } - /// Push new obligations into `out`. Returns true if it was able + /// Pushes new obligations into `out`. Returns `true` if it was able /// to generate all the predicates needed to validate that `ty0` /// is WF. Returns false if `ty0` is an unresolved type variable, /// in which case we are not able to simplify at all. @@ -502,7 +502,7 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { } } -/// Given an object type like `SomeTrait+Send`, computes the lifetime +/// Given an object type like `SomeTrait + Send`, computes the lifetime /// bounds that must hold on the elided self type. These are derived /// from the declarations of `SomeTrait`, `Send`, and friends -- if /// they declare `trait SomeTrait : 'static`, for example, then diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index f6743ed75d9..dd635e5c946 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -63,11 +63,11 @@ pub fn install_panic_hook() { /// Parameters to the `Dump` variant of type `ProfileQueriesMsg`. #[derive(Clone,Debug)] pub struct ProfQDumpParams { - /// A base path for the files we will dump + /// A base path for the files we will dump. pub path:String, - /// To ensure that the compiler waits for us to finish our dumps + /// To ensure that the compiler waits for us to finish our dumps. pub ack:Sender<()>, - /// toggle dumping a log file with every `ProfileQueriesMsg` + /// Toggle dumping a log file with every `ProfileQueriesMsg`. 
pub dump_profq_msg_log:bool, } @@ -131,7 +131,7 @@ pub fn time_depth() -> usize { TIME_DEPTH.with(|slot| slot.get()) } -/// Set the current depth of `time()` calls. The idea is to call +/// Sets the current depth of `time()` calls. The idea is to call /// `set_time_depth()` with the result from `time_depth()` in the /// parent thread. pub fn set_time_depth(depth: usize) { diff --git a/src/librustc/util/nodemap.rs b/src/librustc/util/nodemap.rs index 6969b2f872a..63c7b76d1b6 100644 --- a/src/librustc/util/nodemap.rs +++ b/src/librustc/util/nodemap.rs @@ -1,4 +1,4 @@ -//! An efficient hash map for node IDs +//! An efficient hash map for `NodeId`s. use crate::hir::def_id::DefId; use crate::hir::{HirId, ItemLocalId}; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 4945bf83648..1cb9f47bb31 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -23,18 +23,18 @@ use crate::hir; /// The "region highlights" are used to control region printing during /// specific error messages. When a "region highlight" is enabled, it /// gives an alternate way to print specific regions. For now, we -/// always print those regions using a number, so something like `'0`. +/// always print those regions using a number, so something like "`'0`". /// /// Regions not selected by the region highlight mode are presently /// unaffected. #[derive(Copy, Clone, Default)] pub struct RegionHighlightMode { - /// If enabled, when we see the selected region, use `"'N"` + /// If enabled, when we see the selected region, use "`'N`" /// instead of the ordinary behavior. highlight_regions: [Option<(ty::RegionKind, usize)>; 3], /// If enabled, when printing a "free region" that originated from - /// the given `ty::BoundRegion`, print it as `'1`. Free regions that would ordinarily + /// the given `ty::BoundRegion`, print it as "`'1`". Free regions that would ordinarily /// have names print as normal. 
/// /// This is used when you have a signature like `fn foo(x: &u32, @@ -51,12 +51,12 @@ thread_local! { } impl RegionHighlightMode { - /// Read and return current region highlight settings (accesses thread-local state).a + /// Reads and returns the current region highlight settings (accesses thread-local state). pub fn get() -> Self { REGION_HIGHLIGHT_MODE.with(|c| c.get()) } - /// Internal helper to update current settings during the execution of `op`. + // Internal helper to update current settings during the execution of `op`. fn set<R>( old_mode: Self, new_mode: Self, @@ -70,8 +70,8 @@ impl RegionHighlightMode { }) } - /// If `region` and `number` are both `Some`, invoke - /// `highlighting_region`. Otherwise, just invoke `op` directly. + /// If `region` and `number` are both `Some`, invokes + /// `highlighting_region`; otherwise, just invokes `op` directly. pub fn maybe_highlighting_region<R>( region: Option<ty::Region<'_>>, number: Option<usize>, @@ -86,8 +86,8 @@ impl RegionHighlightMode { op() } - /// During the execution of `op`, highlight the region inference - /// vairable `vid` as `'N`. We can only highlight one region vid + /// During the execution of `op`, highlights the region inference + /// variable `vid` as `'N`. We can only highlight one region `vid` /// at a time. pub fn highlighting_region<R>( region: ty::Region<'_>, @@ -109,7 +109,7 @@ impl RegionHighlightMode { Self::set(old_mode, new_mode, op) } - /// Convenience wrapper for `highlighting_region` + /// Convenience wrapper for `highlighting_region`. pub fn highlighting_region_vid<R>( vid: ty::RegionVid, number: usize, @@ -118,7 +118,7 @@ impl RegionHighlightMode { Self::highlighting_region(&ty::ReVar(vid), number, op) } - /// Returns true if any placeholders are highlighted. + /// Returns `true` if any placeholders are highlighted, and `false` otherwise. 
fn any_region_vids_highlighted(&self) -> bool { Self::get() .highlight_regions @@ -129,8 +129,7 @@ impl RegionHighlightMode { }) } - /// Returns `Some(n)` with the number to use for the given region, - /// if any. + /// Returns `Some(n)` with the number to use for the given region, if any. fn region_highlighted(&self, region: ty::Region<'_>) -> Option<usize> { Self::get() .highlight_regions @@ -143,7 +142,7 @@ impl RegionHighlightMode { } /// During the execution of `op`, highlight the given bound - /// region. We can only highlight one bound region at a time. See + /// region. We can only highlight one bound region at a time. See /// the field `highlight_bound_region` for more detailed notes. pub fn highlighting_bound_region<R>( br: ty::BoundRegion, @@ -162,7 +161,7 @@ impl RegionHighlightMode { ) } - /// Returns true if any placeholders are highlighted. + /// Returns `true` if any placeholders are highlighted, and `false` otherwise. pub fn any_placeholders_highlighted(&self) -> bool { Self::get() .highlight_regions @@ -173,7 +172,7 @@ impl RegionHighlightMode { }) } - /// Returns `Some(N)` if the placeholder `p` is highlighted to print as `'N`. + /// Returns `Some(N)` if the placeholder `p` is highlighted to print as "`'N`". 
pub fn placeholder_highlight(&self, p: ty::PlaceholderRegion) -> Option<usize> { self.region_highlighted(&ty::RePlaceholder(p)) } diff --git a/src/librustc_apfloat/ieee.rs b/src/librustc_apfloat/ieee.rs index 58066a9cada..9f68d770b9e 100644 --- a/src/librustc_apfloat/ieee.rs +++ b/src/librustc_apfloat/ieee.rs @@ -186,7 +186,7 @@ impl Semantics for X87DoubleExtendedS { /// exponent = all 1's, integer bit 0, significand 0 ("pseudoinfinity") /// exponent = all 1's, integer bit 0, significand nonzero ("pseudoNaN") /// exponent = 0, integer bit 1 ("pseudodenormal") - /// exponent!=0 nor all 1's, integer bit 0 ("unnormal") + /// exponent != 0 nor all 1's, integer bit 0 ("unnormal") /// At the moment, the first two are treated as NaNs, the second two as Normal. fn from_bits(bits: u128) -> IeeeFloat<Self> { let sign = bits & (1 << (Self::BITS - 1)); @@ -1549,11 +1549,11 @@ impl<S: Semantics> IeeeFloat<S> { } } - /// Returns TRUE if, when truncating the current number, with BIT the + /// Returns `true` if, when truncating the current number, with `bit` the /// new LSB, with the given lost fraction and rounding mode, the result /// would need to be rounded away from zero (i.e., by increasing the - /// signficand). This routine must work for Category::Zero of both signs, and - /// Category::Normal numbers. + /// signficand). This routine must work for `Category::Zero` of both signs, and + /// `Category::Normal` numbers. fn round_away_from_zero(&self, round: Round, loss: Loss, bit: usize) -> bool { // NaNs and infinities should not have lost fractions. assert!(self.is_finite_non_zero() || self.is_zero()); @@ -2257,7 +2257,7 @@ impl Loss { more_significant } - /// Return the fraction lost were a bignum truncated losing the least + /// Returns the fraction lost were a bignum truncated losing the least /// significant `bits` bits. 
fn through_truncation(limbs: &[Limb], bits: usize) -> Loss { if bits == 0 { @@ -2320,12 +2320,12 @@ mod sig { Ordering::Equal } - /// Extract the given bit. + /// Extracts the given bit. pub(super) fn get_bit(limbs: &[Limb], bit: usize) -> bool { limbs[bit / LIMB_BITS] & (1 << (bit % LIMB_BITS)) != 0 } - /// Set the given bit. + /// Sets the given bit. pub(super) fn set_bit(limbs: &mut [Limb], bit: usize) { limbs[bit / LIMB_BITS] |= 1 << (bit % LIMB_BITS); } @@ -2335,7 +2335,7 @@ mod sig { limbs[bit / LIMB_BITS] &= !(1 << (bit % LIMB_BITS)); } - /// Shift `dst` left `bits` bits, subtract `bits` from its exponent. + /// Shifts `dst` left `bits` bits, subtract `bits` from its exponent. pub(super) fn shift_left(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) { if bits > 0 { // Our exponent should not underflow. @@ -2367,7 +2367,7 @@ mod sig { } } - /// Shift `dst` right `bits` bits noting lost fraction. + /// Shifts `dst` right `bits` bits noting lost fraction. pub(super) fn shift_right(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) -> Loss { let loss = Loss::through_truncation(dst, bits); @@ -2403,7 +2403,7 @@ mod sig { loss } - /// Copy the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB, + /// Copies the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB, /// to `dst`, such that the bit SRC_LSB becomes the least significant bit of `dst`. /// All high bits above `src_bits` in `dst` are zero-filled. 
pub(super) fn extract(dst: &mut [Limb], src: &[Limb], src_bits: usize, src_lsb: usize) { diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs index f79d448edce..18fd06960ea 100644 --- a/src/librustc_apfloat/lib.rs +++ b/src/librustc_apfloat/lib.rs @@ -374,7 +374,7 @@ pub trait Float fn from_str_r(s: &str, round: Round) -> Result<StatusAnd<Self>, ParseError>; fn to_bits(self) -> u128; - /// Convert a floating point number to an integer according to the + /// Converts a floating point number to an integer according to the /// rounding mode. In case of an invalid operation exception, /// deterministic values are returned, namely zero for NaNs and the /// minimal or maximal value respectively for underflow or overflow. @@ -387,7 +387,7 @@ pub trait Float /// /// The *is_exact output tells whether the result is exact, in the sense /// that converting it back to the original floating point type produces - /// the original value. This is almost equivalent to result==Status::OK, + /// the original value. This is almost equivalent to `result == Status::OK`, /// except for negative zeroes. fn to_i128_r(self, width: usize, round: Round, is_exact: &mut bool) -> StatusAnd<i128> { let status; @@ -457,13 +457,13 @@ pub trait Float } } - /// IEEE-754R isSignMinus: Returns true if and only if the current value is + /// IEEE-754R isSignMinus: Returns whether the current value is /// negative. /// /// This applies to zeros and NaNs as well. fn is_negative(self) -> bool; - /// IEEE-754R isNormal: Returns true if and only if the current value is normal. + /// IEEE-754R isNormal: Returns whether the current value is normal. /// /// This implies that the current value of the float is not zero, subnormal, /// infinite, or NaN following the definition of normality from IEEE-754R. 
@@ -471,7 +471,7 @@ pub trait Float !self.is_denormal() && self.is_finite_non_zero() } - /// Returns true if and only if the current value is zero, subnormal, or + /// Returns `true` if the current value is zero, subnormal, or /// normal. /// /// This means that the value is not infinite or NaN. @@ -479,26 +479,26 @@ pub trait Float !self.is_nan() && !self.is_infinite() } - /// Returns true if and only if the float is plus or minus zero. + /// Returns `true` if the float is plus or minus zero. fn is_zero(self) -> bool { self.category() == Category::Zero } - /// IEEE-754R isSubnormal(): Returns true if and only if the float is a + /// IEEE-754R isSubnormal(): Returns whether the float is a /// denormal. fn is_denormal(self) -> bool; - /// IEEE-754R isInfinite(): Returns true if and only if the float is infinity. + /// IEEE-754R isInfinite(): Returns whether the float is infinity. fn is_infinite(self) -> bool { self.category() == Category::Infinity } - /// Returns true if and only if the float is a quiet or signaling NaN. + /// Returns `true` if the float is a quiet or signaling NaN. fn is_nan(self) -> bool { self.category() == Category::NaN } - /// Returns true if and only if the float is a signaling NaN. + /// Returns `true` if the float is a signaling NaN. fn is_signaling(self) -> bool; // Simple Queries @@ -517,19 +517,19 @@ pub trait Float self.is_zero() && self.is_negative() } - /// Returns true if and only if the number has the smallest possible non-zero + /// Returns `true` if the number has the smallest possible non-zero /// magnitude in the current semantics. fn is_smallest(self) -> bool { Self::SMALLEST.copy_sign(self).bitwise_eq(self) } - /// Returns true if and only if the number has the largest possible finite + /// Returns `true` if the number has the largest possible finite /// magnitude in the current semantics. 
fn is_largest(self) -> bool { Self::largest().copy_sign(self).bitwise_eq(self) } - /// Returns true if and only if the number is an exact integer. + /// Returns `true` if the number is an exact integer. fn is_integer(self) -> bool { // This could be made more efficient; I'm going for obviously correct. if !self.is_finite() { @@ -571,11 +571,11 @@ pub trait Float } pub trait FloatConvert<T: Float>: Float { - /// Convert a value of one floating point type to another. + /// Converts a value of one floating point type to another. /// The return value corresponds to the IEEE754 exceptions. *loses_info /// records whether the transformation lost information, i.e., whether /// converting the result back to the original type will produce the - /// original value (this is almost the same as return value==Status::OK, + /// original value (this is almost the same as return `value == Status::OK`, /// but there are edge cases where this is not so). fn convert_r(self, round: Round, loses_info: &mut bool) -> StatusAnd<T>; fn convert(self, loses_info: &mut bool) -> StatusAnd<T> { diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index f675c8d38a6..b528967dd65 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -239,7 +239,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { { //! Iterates over each loan that has been issued //! on entrance to `node`, regardless of whether it is - //! actually *in scope* at that point. Sometimes loans + //! actually *in scope* at that point. Sometimes loans //! are issued for future scopes and thus they may have been //! *issued* but not yet be in effect. 
diff --git a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs index 11597455bca..ae1d49afd49 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs @@ -53,7 +53,7 @@ struct GuaranteeLifetimeContext<'a, 'tcx: 'a> { impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> { fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option<ast::NodeId>) -> R { //! Main routine. Walks down `cmt` until we find the - //! "guarantor". Reports an error if `self.loan_region` is + //! "guarantor". Reports an error if `self.loan_region` is //! larger than scope of `cmt`. debug!("guarantee_lifetime.check(cmt={:?}, loan_region={:?})", cmt, diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index c21a43bc683..1971c666312 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -285,7 +285,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { } /// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or - /// reports an error. This may entail taking out loans, which will be added to the + /// reports an error. This may entail taking out loans, which will be added to the /// `req_loan_map`. fn guarantee_valid(&mut self, borrow_id: hir::ItemLocalId, diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 4ced72cd279..85c4ca7bd37 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -296,11 +296,11 @@ pub struct Loan<'tcx> { /// gen_scope indicates where loan is introduced. Typically the /// loan is introduced at the point of the borrow, but in some /// cases, notably method arguments, the loan may be introduced - /// only later, once it comes into scope. See also + /// only later, once it comes into scope. 
See also /// `GatherLoanCtxt::compute_gen_scope`. gen_scope: region::Scope, - /// kill_scope indicates when the loan goes out of scope. This is + /// kill_scope indicates when the loan goes out of scope. This is /// either when the lifetime expires or when the local variable /// which roots the loan-path goes out of scope, whichever happens /// faster. See also `GatherLoanCtxt::compute_kill_scope`. diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index a206c37e97b..325d3559f0a 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -114,7 +114,7 @@ pub struct Move { /// Path being moved. pub path: MovePathIndex, - /// id of node that is doing the move. + /// ID of node that is doing the move. pub id: hir::ItemLocalId, /// Kind of move, for error messages. @@ -129,7 +129,7 @@ pub struct Assignment { /// Path being assigned. pub path: MovePathIndex, - /// id where assignment occurs + /// ID where assignment occurs pub id: hir::ItemLocalId, /// span of node where assignment occurs @@ -168,8 +168,8 @@ fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool { } impl<'a, 'tcx> MoveData<'tcx> { - /// return true if there are no trackable assignments or moves - /// in this move data - that means that there is nothing that + /// Returns `true` if there are no trackable assignments or moves + /// in this move data -- that means that there is nothing that /// could cause a borrow error. 
pub fn is_empty(&self) -> bool { self.moves.borrow().is_empty() && diff --git a/src/librustc_borrowck/dataflow.rs b/src/librustc_borrowck/dataflow.rs index 90f33ede62c..de2a3c4cb22 100644 --- a/src/librustc_borrowck/dataflow.rs +++ b/src/librustc_borrowck/dataflow.rs @@ -178,7 +178,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>, return index; - /// Add mappings from the ast nodes for the formal bindings to + /// Adds mappings from the ast nodes for the formal bindings to /// the entry-node in the graph. fn add_entries_from_fn_body(index: &mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>, body: &hir::Body, diff --git a/src/librustc_codegen_llvm/abi.rs b/src/librustc_codegen_llvm/abi.rs index 258d839d32e..f7d2699a27e 100644 --- a/src/librustc_codegen_llvm/abi.rs +++ b/src/librustc_codegen_llvm/abi.rs @@ -174,13 +174,13 @@ pub trait ArgTypeExt<'ll, 'tcx> { } impl ArgTypeExt<'ll, 'tcx> for ArgType<'tcx, Ty<'tcx>> { - /// Get the LLVM type for a place of the original Rust type of + /// Gets the LLVM type for a place of the original Rust type of /// this argument/return, i.e., the result of `type_of::type_of`. fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type { self.layout.llvm_type(cx) } - /// Store a direct/indirect value described by this ArgType into a + /// Stores a direct/indirect value described by this ArgType into a /// place for the original Rust type of this argument/return. /// Can be used for both storing formal arguments into Rust variables /// or results of call/invoke instructions into their destinations. 
diff --git a/src/librustc_codegen_llvm/back/archive.rs b/src/librustc_codegen_llvm/back/archive.rs index 1cf150dad60..e02f7df2efc 100644 --- a/src/librustc_codegen_llvm/back/archive.rs +++ b/src/librustc_codegen_llvm/back/archive.rs @@ -51,7 +51,7 @@ fn is_relevant_child(c: &Child) -> bool { } impl<'a> ArchiveBuilder<'a> { - /// Create a new static archive, ready for modifying the archive specified + /// Creates a new static archive, ready for modifying the archive specified /// by `config`. pub fn new(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> { ArchiveBuilder { diff --git a/src/librustc_codegen_llvm/back/link.rs b/src/librustc_codegen_llvm/back/link.rs index fc744201a33..725009e1377 100644 --- a/src/librustc_codegen_llvm/back/link.rs +++ b/src/librustc_codegen_llvm/back/link.rs @@ -42,7 +42,7 @@ pub use rustc_codegen_utils::link::{find_crate_name, filename_for_input, default out_filename, check_file_is_writeable}; -/// Perform the linkage portion of the compilation phase. This will generate all +/// Performs the linkage portion of the compilation phase. This will generate all /// of the requested outputs for this compilation session. pub(crate) fn link_binary(sess: &Session, codegen_results: &CodegenResults, diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs index 3e51078dc64..be7733bf554 100644 --- a/src/librustc_codegen_llvm/back/lto.rs +++ b/src/librustc_codegen_llvm/back/lto.rs @@ -791,7 +791,7 @@ impl ThinLTOImports { self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[]) } - /// Load the ThinLTO import map from ThinLTOData. + /// Loads the ThinLTO import map from ThinLTOData. 
unsafe fn from_thin_lto_data(data: *const llvm::ThinLTOData) -> ThinLTOImports { unsafe extern "C" fn imported_module_callback(payload: *mut libc::c_void, importing_module_name: *const libc::c_char, diff --git a/src/librustc_codegen_llvm/back/wasm.rs b/src/librustc_codegen_llvm/back/wasm.rs index 3501123a37f..b403660fa51 100644 --- a/src/librustc_codegen_llvm/back/wasm.rs +++ b/src/librustc_codegen_llvm/back/wasm.rs @@ -112,7 +112,7 @@ pub fn rewrite_imports(path: &Path, import_map: &FxHashMap<String, String>) { } } -/// Add or augment the existing `producers` section to encode information about +/// Adds or augment the existing `producers` section to encode information about /// the Rust compiler used to produce the wasm file. pub fn add_producer_section( path: &Path, diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs index d9f44ca6e45..70986a4e179 100644 --- a/src/librustc_codegen_llvm/base.rs +++ b/src/librustc_codegen_llvm/base.rs @@ -7,11 +7,11 @@ //! //! Hopefully useful general knowledge about codegen: //! -//! * There's no way to find out the Ty type of a Value. Doing so -//! would be "trying to get the eggs out of an omelette" (credit: -//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty, -//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int, -//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type. +//! * There's no way to find out the `Ty` type of a Value. Doing so +//! would be "trying to get the eggs out of an omelette" (credit: +//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`, +//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, +//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. 
use super::ModuleLlvm; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind}; diff --git a/src/librustc_codegen_llvm/callee.rs b/src/librustc_codegen_llvm/callee.rs index 0d9d6aa5aa2..9426328da32 100644 --- a/src/librustc_codegen_llvm/callee.rs +++ b/src/librustc_codegen_llvm/callee.rs @@ -1,6 +1,6 @@ //! Handles codegen of callees as well as other call-related -//! things. Callees are a superset of normal rust values and sometimes -//! have different representations. In particular, top-level fn items +//! things. Callees are a superset of normal rust values and sometimes +//! have different representations. In particular, top-level fn items //! and methods are represented as just a fn ptr and not a full //! closure. diff --git a/src/librustc_codegen_llvm/context.rs b/src/librustc_codegen_llvm/context.rs index f6795588441..a4b976dfbd9 100644 --- a/src/librustc_codegen_llvm/context.rs +++ b/src/librustc_codegen_llvm/context.rs @@ -75,7 +75,7 @@ pub struct CodegenCx<'ll, 'tcx: 'll> { pub statics_to_rauw: RefCell<Vec<(&'ll Value, &'ll Value)>>, /// Statics that will be placed in the llvm.used variable - /// See http://llvm.org/docs/LangRef.html#the-llvm-used-global-variable for details + /// See <http://llvm.org/docs/LangRef.html#the-llvm-used-global-variable> for details pub used_statics: RefCell<Vec<&'ll Value>>, pub lltypes: RefCell<FxHashMap<(Ty<'tcx>, Option<VariantIdx>), &'ll Type>>, @@ -807,7 +807,7 @@ impl CodegenCx<'b, 'tcx> { } impl<'b, 'tcx> CodegenCx<'b, 'tcx> { - /// Generate a new symbol name with the given prefix. This symbol name must + /// Generates a new symbol name with the given prefix. This symbol name must /// only be used for definitions with `internal` or `private` linkage. 
pub fn generate_local_symbol_name(&self, prefix: &str) -> String { let idx = self.local_gen_sym_counter.get(); diff --git a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs index dbd821865f9..791526c98c8 100644 --- a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs +++ b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs @@ -16,7 +16,7 @@ use rustc_data_structures::indexed_vec::{Idx, IndexVec}; use syntax_pos::BytePos; -/// Produce DIScope DIEs for each MIR Scope which has variables defined in it. +/// Produces DIScope DIEs for each MIR Scope which has variables defined in it. /// If debuginfo is disabled, the returned vector is empty. pub fn create_mir_scopes( cx: &CodegenCx<'ll, '_>, diff --git a/src/librustc_codegen_llvm/debuginfo/doc.rs b/src/librustc_codegen_llvm/debuginfo/doc.rs index a4acc58eca9..cf18b995b61 100644 --- a/src/librustc_codegen_llvm/debuginfo/doc.rs +++ b/src/librustc_codegen_llvm/debuginfo/doc.rs @@ -160,7 +160,7 @@ //! //! This algorithm also provides a stable ID for types that are defined in one //! crate but instantiated from metadata within another crate. We just have to -//! take care to always map crate and node IDs back to the original crate +//! take care to always map crate and `NodeId`s back to the original crate //! context. //! //! As a side-effect these unique type IDs also help to solve a problem arising @@ -170,7 +170,7 @@ //! with different concrete substitutions for `'a`, and thus there will be N //! `Ty` instances for the type `Struct<'a>` even though it is not generic //! otherwise. Unfortunately this means that we cannot use `ty::type_id()` as -//! cheap identifier for type metadata---we have done this in the past, but it +//! cheap identifier for type metadata -- we have done this in the past, but it //! led to unnecessary metadata duplication in the best case and LLVM //! assertions in the worst. However, the unique type ID as described above //! 
*can* be used as identifier. Since it is comparatively expensive to diff --git a/src/librustc_codegen_llvm/debuginfo/mod.rs b/src/librustc_codegen_llvm/debuginfo/mod.rs index 113b9958c7f..625f6cd45fb 100644 --- a/src/librustc_codegen_llvm/debuginfo/mod.rs +++ b/src/librustc_codegen_llvm/debuginfo/mod.rs @@ -102,7 +102,7 @@ impl<'a, 'tcx> CrateDebugContext<'a, 'tcx> { } } -/// Create any deferred debug metadata nodes +/// Creates any deferred debug metadata nodes pub fn finalize(cx: &CodegenCx) { if cx.dbg_cx.is_none() { return; diff --git a/src/librustc_codegen_llvm/debuginfo/utils.rs b/src/librustc_codegen_llvm/debuginfo/utils.rs index 8b85df79d04..f2d92eefad3 100644 --- a/src/librustc_codegen_llvm/debuginfo/utils.rs +++ b/src/librustc_codegen_llvm/debuginfo/utils.rs @@ -36,7 +36,7 @@ pub fn create_DIArray( }; } -/// Return syntax_pos::Loc corresponding to the beginning of the span +/// Returns syntax_pos::Loc corresponding to the beginning of the span pub fn span_start(cx: &CodegenCx, span: Span) -> syntax_pos::Loc { cx.sess().source_map().lookup_char_pos(span.lo()) } diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs index 58bdfc47fca..3232f4e8f51 100644 --- a/src/librustc_codegen_llvm/llvm/ffi.rs +++ b/src/librustc_codegen_llvm/llvm/ffi.rs @@ -1337,7 +1337,7 @@ extern "C" { pub fn LLVMGetSections(ObjFile: &'a ObjectFile) -> &'a mut SectionIterator<'a>; /// Destroys a section iterator. pub fn LLVMDisposeSectionIterator(SI: &'a mut SectionIterator<'a>); - /// Returns true if the section iterator is at the end of the section + /// Returns `true` if the section iterator is at the end of the section /// list: pub fn LLVMIsSectionIteratorAtEnd(ObjFile: &'a ObjectFile, SI: &SectionIterator<'a>) -> Bool; /// Moves the section iterator to point to the next section. 
diff --git a/src/librustc_codegen_llvm/type_of.rs b/src/librustc_codegen_llvm/type_of.rs index afaeb352cd9..39f48b266c2 100644 --- a/src/librustc_codegen_llvm/type_of.rs +++ b/src/librustc_codegen_llvm/type_of.rs @@ -226,7 +226,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyLayout<'tcx> { } } - /// Get the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`. + /// Gets the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`. /// The pointee type of the pointer in `PlaceRef` is always this type. /// For sized types, it is also the right LLVM type for an `alloca` /// containing a value of that type, and most immediates (except `bool`). diff --git a/src/librustc_codegen_ssa/back/linker.rs b/src/librustc_codegen_ssa/back/linker.rs index 249715a7b6e..3cbe3793f10 100644 --- a/src/librustc_codegen_ssa/back/linker.rs +++ b/src/librustc_codegen_ssa/back/linker.rs @@ -91,7 +91,7 @@ impl LinkerInfo { } } -/// Linker abstraction used by back::link to build up the command to invoke a +/// Linker abstraction used by `back::link` to build up the command to invoke a /// linker. /// /// This trait is the total list of requirements needed by `back::link` and @@ -145,7 +145,7 @@ pub struct GccLinker<'a> { impl<'a> GccLinker<'a> { /// Argument that must be passed *directly* to the linker /// - /// These arguments need to be prepended with '-Wl,' when a gcc-style linker is used + /// These arguments need to be prepended with `-Wl`, when a GCC-style linker is used. fn linker_arg<S>(&mut self, arg: S) -> &mut Self where S: AsRef<OsStr> { diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index 67d4d408bab..8f8095a96ee 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -663,7 +663,7 @@ pub enum WorkItem<B: WriteBackendMethods> { /// Copy the post-LTO artifacts from the incremental cache to the output /// directory. 
CopyPostLtoArtifacts(CachedModuleCodegen), - /// Perform (Thin)LTO on the given module. + /// Performs (Thin)LTO on the given module. LTO(lto::LtoModuleCodegen<B>), } @@ -1798,7 +1798,7 @@ impl<B: ExtraBackendMethods> OngoingCodegen<B> { drop(self.coordinator_send.send(Box::new(Message::CodegenComplete::<B>))); } - /// Consume this context indicating that codegen was entirely aborted, and + /// Consumes this context indicating that codegen was entirely aborted, and /// we need to exit as quickly as possible. /// /// This method blocks the current thread until all worker threads have diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs index 84e55ce0f22..ecac82db947 100644 --- a/src/librustc_codegen_ssa/base.rs +++ b/src/librustc_codegen_ssa/base.rs @@ -7,11 +7,11 @@ //! //! Hopefully useful general knowledge about codegen: //! -//! * There's no way to find out the Ty type of a Value. Doing so -//! would be "trying to get the eggs out of an omelette" (credit: -//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty, -//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int, -//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type. +//! * There's no way to find out the `Ty` type of a Value. Doing so +//! would be "trying to get the eggs out of an omelette" (credit: +//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`, +//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, +//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. use {ModuleCodegen, ModuleKind, CachedModuleCodegen}; @@ -156,7 +156,7 @@ pub fn compare_simd_types<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( bx.sext(cmp, ret_ty) } -/// Retrieve the information we are losing (making dynamic) in an unsizing +/// Retrieves the information we are losing (making dynamic) in an unsizing /// adjustment. /// /// The `old_info` argument is a bit funny. 
It is intended for use @@ -347,7 +347,7 @@ fn cast_shift_rhs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( } } -/// Returns whether this session's target will use SEH-based unwinding. +/// Returns `true` if this session's target will use SEH-based unwinding. /// /// This is only true for MSVC targets, and even then the 64-bit MSVC target /// currently uses SEH-ish unwinding with DWARF info tables to the side (same as @@ -436,7 +436,7 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( mir::codegen_mir::<Bx>(cx, lldecl, &mir, instance, sig); } -/// Create the `main` function which will initialize the rust runtime and call +/// Creates the `main` function which will initialize the rust runtime and call /// users main function. pub fn maybe_create_entry_wrapper<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( cx: &'a Bx::CodegenCx diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index 58b3f0434a6..1a911d94b1b 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -68,7 +68,7 @@ pub mod back; pub struct ModuleCodegen<M> { /// The name of the module. When the crate may be saved between /// compilations, incremental compilation requires that name be - /// unique amongst **all** crates. Therefore, it should contain + /// unique amongst **all** crates. Therefore, it should contain /// something unique to this crate (e.g., a module path) as well /// as the crate name and disambiguator. /// We currently generate these names via CodegenUnit::build_cgu_name(). @@ -141,7 +141,7 @@ bitflags! { } } -/// Misc info we load from metadata to persist beyond the tcx +/// Misc info we load from metadata to persist beyond the tcx. 
pub struct CrateInfo { pub panic_runtime: Option<CrateNum>, pub compiler_builtins: Option<CrateNum>, diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index aa82c853257..be2db47a533 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -884,7 +884,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } - /// Return the landingpad wrapper around the given basic block + /// Returns the landing-pad wrapper around the given basic block. /// /// No-op in MSVC SEH scheme. fn landing_pad_to( diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs index c7e2131eed5..32c3408f1cb 100644 --- a/src/librustc_codegen_ssa/mir/mod.rs +++ b/src/librustc_codegen_ssa/mir/mod.rs @@ -422,7 +422,7 @@ fn create_funclets<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( }).unzip() } -/// Produce, for each argument, a `Value` pointing at the +/// Produces, for each argument, a `Value` pointing at the /// argument's value. As arguments are places, these are always /// indirect. fn arg_local_refs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index 596f97a0388..efededb06dd 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -13,16 +13,16 @@ use super::operand::OperandValue; #[derive(Copy, Clone, Debug)] pub struct PlaceRef<'tcx, V> { - /// Pointer to the contents of the place + /// Pointer to the contents of the place. pub llval: V, - /// This place's extra data if it is unsized, or null + /// This place's extra data if it is unsized, or null. pub llextra: Option<V>, - /// Monomorphized type of this place, including variant information + /// Monomorphized type of this place, including variant information. pub layout: TyLayout<'tcx>, - /// What alignment we know for this place + /// What alignment we know for this place. 
pub align: Align, } @@ -277,7 +277,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { } } - /// Set the discriminant for a new value of the given case of the given + /// Sets the discriminant for a new value of the given case of the given /// representation. pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>( &self, diff --git a/src/librustc_codegen_ssa/traits/declare.rs b/src/librustc_codegen_ssa/traits/declare.rs index 3cd3c4e48b9..6a400a7d7a4 100644 --- a/src/librustc_codegen_ssa/traits/declare.rs +++ b/src/librustc_codegen_ssa/traits/declare.rs @@ -29,7 +29,7 @@ pub trait DeclareMethods<'tcx>: BackendTypes { /// Declare a global with an intention to define it. /// /// Use this function when you intend to define a global. This function will - /// return None if the name already has a definition associated with it. In that + /// return `None` if the name already has a definition associated with it. In that /// case an error should be reported to the user, because it usually happens due /// to user’s fault (e.g., misuse of #[no_mangle] or #[export_name] attributes). fn define_global(&self, name: &str, ty: Self::Type) -> Option<Self::Value>; @@ -53,10 +53,10 @@ pub trait DeclareMethods<'tcx>: BackendTypes { /// can happen with #[no_mangle] or #[export_name], for example. fn define_internal_fn(&self, name: &str, fn_sig: ty::PolyFnSig<'tcx>) -> Self::Value; - /// Get declared value by name. + /// Gets declared value by name. fn get_declared_value(&self, name: &str) -> Option<Self::Value>; - /// Get defined or externally defined (AvailableExternally linkage) value by + /// Gets defined or externally defined (AvailableExternally linkage) value by /// name. 
fn get_defined_value(&self, name: &str) -> Option<Self::Value>; } diff --git a/src/librustc_codegen_ssa/traits/type_.rs b/src/librustc_codegen_ssa/traits/type_.rs index 2ec0c8e5a75..2c990ed89c9 100644 --- a/src/librustc_codegen_ssa/traits/type_.rs +++ b/src/librustc_codegen_ssa/traits/type_.rs @@ -39,13 +39,13 @@ pub trait BaseTypeMethods<'tcx>: Backend<'tcx> { fn type_ptr_to(&self, ty: Self::Type) -> Self::Type; fn element_type(&self, ty: Self::Type) -> Self::Type; - /// Return the number of elements in `self` if it is a LLVM vector type. + /// Returns the number of elements in `self` if it is a LLVM vector type. fn vector_length(&self, ty: Self::Type) -> usize; fn func_params_types(&self, ty: Self::Type) -> Vec<Self::Type>; fn float_width(&self, ty: Self::Type) -> usize; - /// Retrieve the bit width of the integer type `self`. + /// Retrieves the bit width of the integer type `self`. fn int_width(&self, ty: Self::Type) -> u64; fn val_ty(&self, v: Self::Value) -> Self::Type; diff --git a/src/librustc_data_structures/base_n.rs b/src/librustc_data_structures/base_n.rs index c9c1933f25b..f1bd3f03aef 100644 --- a/src/librustc_data_structures/base_n.rs +++ b/src/librustc_data_structures/base_n.rs @@ -1,4 +1,4 @@ -/// Convert unsigned integers into a string representation with some base. +/// Converts unsigned integers into a string representation with some base. /// Bases up to and including 36 can be used for case-insensitive things. use std::str; diff --git a/src/librustc_data_structures/bit_set.rs b/src/librustc_data_structures/bit_set.rs index 05d2185ae69..ff7964646d6 100644 --- a/src/librustc_data_structures/bit_set.rs +++ b/src/librustc_data_structures/bit_set.rs @@ -27,7 +27,7 @@ pub struct BitSet<T: Idx> { } impl<T: Idx> BitSet<T> { - /// Create a new, empty bitset with a given `domain_size`. + /// Creates a new, empty bitset with a given `domain_size`. 
#[inline] pub fn new_empty(domain_size: usize) -> BitSet<T> { let num_words = num_words(domain_size); @@ -38,7 +38,7 @@ impl<T: Idx> BitSet<T> { } } - /// Create a new, filled bitset with a given `domain_size`. + /// Creates a new, filled bitset with a given `domain_size`. #[inline] pub fn new_filled(domain_size: usize) -> BitSet<T> { let num_words = num_words(domain_size); @@ -51,7 +51,7 @@ impl<T: Idx> BitSet<T> { result } - /// Get the domain size. + /// Gets the domain size. pub fn domain_size(&self) -> usize { self.domain_size } @@ -85,7 +85,7 @@ impl<T: Idx> BitSet<T> { self.words.iter().map(|e| e.count_ones() as usize).sum() } - /// True if `self` contains `elem`. + /// Returns `true` if `self` contains `elem`. #[inline] pub fn contains(&self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -106,7 +106,7 @@ impl<T: Idx> BitSet<T> { self.words.iter().all(|a| *a == 0) } - /// Insert `elem`. Returns true if the set has changed. + /// Inserts `elem`. Returns `true` if the set has changed. #[inline] pub fn insert(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -126,7 +126,7 @@ impl<T: Idx> BitSet<T> { self.clear_excess_bits(); } - /// Returns true if the set has changed. + /// Returns `true` if the set has changed. #[inline] pub fn remove(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -138,26 +138,26 @@ impl<T: Idx> BitSet<T> { new_word != word } - /// Set `self = self | other` and return true if `self` changed + /// Sets `self = self | other` and returns `true` if `self` changed /// (i.e., if new bits were added). pub fn union(&mut self, other: &impl UnionIntoBitSet<T>) -> bool { other.union_into(self) } - /// Set `self = self - other` and return true if `self` changed. + /// Sets `self = self - other` and returns `true` if `self` changed. /// (i.e., if any bits were removed). 
pub fn subtract(&mut self, other: &impl SubtractFromBitSet<T>) -> bool { other.subtract_from(self) } - /// Set `self = self & other` and return true if `self` changed. + /// Sets `self = self & other` and returns `true` if `self` changed. /// (i.e., if any bits were removed). pub fn intersect(&mut self, other: &BitSet<T>) -> bool { assert_eq!(self.domain_size, other.domain_size); bitwise(&mut self.words, &other.words, |a, b| { a & b }) } - /// Get a slice of the underlying words. + /// Gets a slice of the underlying words. pub fn words(&self) -> &[Word] { &self.words } @@ -611,7 +611,7 @@ impl<T: Idx> GrowableBitSet<T> { GrowableBitSet { bit_set: BitSet::new_empty(bits) } } - /// Returns true if the set has changed. + /// Returns `true` if the set has changed. #[inline] pub fn insert(&mut self, elem: T) -> bool { self.ensure(elem.index() + 1); @@ -645,7 +645,7 @@ pub struct BitMatrix<R: Idx, C: Idx> { } impl<R: Idx, C: Idx> BitMatrix<R, C> { - /// Create a new `rows x columns` matrix, initially empty. + /// Creates a new `rows x columns` matrix, initially empty. pub fn new(num_rows: usize, num_columns: usize) -> BitMatrix<R, C> { // For every element, we need one bit for every other // element. Round up to an even number of words. @@ -668,7 +668,7 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> { /// Sets the cell at `(row, column)` to true. Put another way, insert /// `column` to the bitset for `row`. /// - /// Returns true if this changed the matrix, and false otherwise. + /// Returns `true` if this changed the matrix. pub fn insert(&mut self, row: R, column: C) -> bool { assert!(row.index() < self.num_rows && column.index() < self.num_columns); let (start, _) = self.range(row); @@ -691,7 +691,7 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> { (self.words[start + word_index] & mask) != 0 } - /// Returns those indices that are true in rows `a` and `b`. This + /// Returns those indices that are true in rows `a` and `b`. 
This /// is an O(n) operation where `n` is the number of elements /// (somewhat independent from the actual size of the /// intersection, in particular). @@ -715,8 +715,8 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> { result } - /// Add the bits from row `read` to the bits from row `write`, - /// return true if anything changed. + /// Adds the bits from row `read` to the bits from row `write`, and + /// returns `true` if anything changed. /// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case @@ -772,7 +772,7 @@ where } impl<R: Idx, C: Idx> SparseBitMatrix<R, C> { - /// Create a new empty sparse bit matrix with no rows or columns. + /// Creates a new empty sparse bit matrix with no rows or columns. pub fn new(num_columns: usize) -> Self { Self { num_columns, @@ -793,7 +793,7 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> { /// Sets the cell at `(row, column)` to true. Put another way, insert /// `column` to the bitset for `row`. /// - /// Returns true if this changed the matrix, and false otherwise. + /// Returns `true` if this changed the matrix. pub fn insert(&mut self, row: R, column: C) -> bool { self.ensure_row(row).insert(column) } @@ -806,8 +806,8 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> { self.row(row).map_or(false, |r| r.contains(column)) } - /// Add the bits from row `read` to the bits from row `write`, - /// return true if anything changed. + /// Adds the bits from row `read` to the bits from row `write`, and + /// returns `true` if anything changed. 
/// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case diff --git a/src/librustc_data_structures/graph/implementation/mod.rs b/src/librustc_data_structures/graph/implementation/mod.rs index a8b73409406..de4b1bcd0c2 100644 --- a/src/librustc_data_structures/graph/implementation/mod.rs +++ b/src/librustc_data_structures/graph/implementation/mod.rs @@ -14,7 +14,7 @@ //! stored. The edges are stored in a central array, but they are also //! threaded onto two linked lists for each node, one for incoming edges //! and one for outgoing edges. Note that every edge is a member of some -//! incoming list and some outgoing list. Basically you can load the +//! incoming list and some outgoing list. Basically you can load the //! first index of the linked list from the node data structures (the //! field `first_edge`) and then, for each edge, load the next index from //! the field `next_edge`). Each of those fields is an array that should @@ -79,7 +79,7 @@ pub const OUTGOING: Direction = Direction { repr: 0 }; pub const INCOMING: Direction = Direction { repr: 1 }; impl NodeIndex { - /// Returns unique id (unique with respect to the graph holding associated node). + /// Returns unique ID (unique with respect to the graph holding associated node). pub fn node_id(self) -> usize { self.0 } diff --git a/src/librustc_data_structures/graph/scc/mod.rs b/src/librustc_data_structures/graph/scc/mod.rs index e3264fda262..24c5448639e 100644 --- a/src/librustc_data_structures/graph/scc/mod.rs +++ b/src/librustc_data_structures/graph/scc/mod.rs @@ -200,7 +200,7 @@ where } } - /// Visit a node during the DFS. We first examine its current + /// Visits a node during the DFS. We first examine its current /// state -- if it is not yet visited (`NotVisited`), we can push /// it onto the stack and start walking its successors. 
/// diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index 516ea7fb7d9..09aec50e4bb 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -12,7 +12,7 @@ use rustc_serialize as serialize; /// Represents some newtyped `usize` wrapper. /// -/// (purpose: avoid mixing indexes for different bitvector domains.) +/// Purpose: avoid mixing indexes for different bitvector domains. pub trait Idx: Copy + 'static + Ord + Debug + Hash { fn new(idx: usize) -> Self; @@ -144,19 +144,19 @@ macro_rules! newtype_index { unsafe { $type { private: value } } } - /// Extract value of this index as an integer. + /// Extracts the value of this index as an integer. #[inline] $v fn index(self) -> usize { self.as_usize() } - /// Extract value of this index as a usize. + /// Extracts the value of this index as a `u32`. #[inline] $v fn as_u32(self) -> u32 { self.private } - /// Extract value of this index as a u32. + /// Extracts the value of this index as a `usize`. #[inline] $v fn as_usize(self) -> usize { self.as_u32() as usize @@ -641,7 +641,7 @@ impl<I: Idx, T> IndexVec<I, T> { self.raw.get_mut(index.index()) } - /// Return mutable references to two distinct elements, a and b. Panics if a == b. + /// Returns mutable references to two distinct elements, a and b. Panics if a == b. #[inline] pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) { let (ai, bi) = (a.index(), b.index()); diff --git a/src/librustc_data_structures/obligation_forest/graphviz.rs b/src/librustc_data_structures/obligation_forest/graphviz.rs index 72551b42324..a0363e165e0 100644 --- a/src/librustc_data_structures/obligation_forest/graphviz.rs +++ b/src/librustc_data_structures/obligation_forest/graphviz.rs @@ -7,8 +7,8 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; impl<O: ForestObligation> ObligationForest<O> { - /// Create a graphviz representation of the obligation forest. 
Given a directory this will - /// create files with name of the format `<counter>_<description>.gv`. The counter is + /// Creates a graphviz representation of the obligation forest. Given a directory this will + /// create files with name of the format `<counter>_<description>.gv`. The counter is /// global and is maintained internally. /// /// Calling this will do nothing unless the environment variable diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 546bb64168e..4490e5f86d2 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -64,7 +64,7 @@ //! #### Snapshots //! //! The `ObligationForest` supports a limited form of snapshots; see -//! `start_snapshot`; `commit_snapshot`; and `rollback_snapshot`. In +//! `start_snapshot`, `commit_snapshot`, and `rollback_snapshot`. In //! particular, you can use a snapshot to roll back new root //! obligations. However, it is an error to attempt to //! `process_obligations` during a snapshot. @@ -72,7 +72,7 @@ //! ### Implementation details //! //! For the most part, comments specific to the implementation are in the -//! code. This file only contains a very high-level overview. Basically, +//! code. This file only contains a very high-level overview. Basically, //! the forest is stored in a vector. Each element of the vector is a node //! in some tree. Each node in the vector has the index of an (optional) //! parent and (for convenience) its root (which may be itself). It also @@ -163,7 +163,7 @@ pub struct ObligationForest<O: ForestObligation> { obligation_tree_id_generator: ObligationTreeIdGenerator, - /// Per tree error cache. This is used to deduplicate errors, + /// Per tree error cache. This is used to deduplicate errors, /// which is necessary to avoid trait resolution overflow in /// some cases. 
/// @@ -268,13 +268,13 @@ impl<O: ForestObligation> ObligationForest<O> { } } - /// Return the total number of nodes in the forest that have not + /// Returns the total number of nodes in the forest that have not /// yet been fully resolved. pub fn len(&self) -> usize { self.nodes.len() } - /// Registers an obligation + /// Registers an obligation. /// /// This CAN be done in a snapshot pub fn register_obligation(&mut self, obligation: O) { @@ -341,7 +341,7 @@ impl<O: ForestObligation> ObligationForest<O> { } } - /// Convert all remaining obligations to the given error. + /// Converts all remaining obligations to the given error. /// /// This cannot be done during a snapshot. pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> { @@ -380,10 +380,10 @@ impl<O: ForestObligation> ObligationForest<O> { .insert(node.obligation.as_predicate().clone()); } - /// Perform a pass through the obligation list. This must + /// Performs a pass through the obligation list. This must /// be called in a loop until `outcome.stalled` is false. /// - /// This CANNOT be unrolled (presently, at least). + /// This _cannot_ be unrolled (presently, at least). pub fn process_obligations<P>(&mut self, processor: &mut P, do_completed: DoCompleted) -> Outcome<O, P::Error> where P: ObligationProcessor<Obligation=O> @@ -461,7 +461,7 @@ impl<O: ForestObligation> ObligationForest<O> { } } - /// Mark all NodeState::Success nodes as NodeState::Done and + /// Mark all `NodeState::Success` nodes as `NodeState::Done` and /// report all cycles between them. This should be called /// after `mark_as_waiting` marks all nodes with pending /// subobligations as NodeState::Waiting. @@ -566,7 +566,7 @@ impl<O: ForestObligation> ObligationForest<O> { } } - /// Marks all nodes that depend on a pending node as NodeState::Waiting. + /// Marks all nodes that depend on a pending node as `NodeState::Waiting`. 
fn mark_as_waiting(&self) { for node in &self.nodes { if node.state.get() == NodeState::Waiting { diff --git a/src/librustc_data_structures/owning_ref/mod.rs b/src/librustc_data_structures/owning_ref/mod.rs index 30e510cc5b0..236559dcd7c 100644 --- a/src/librustc_data_structures/owning_ref/mod.rs +++ b/src/librustc_data_structures/owning_ref/mod.rs @@ -286,7 +286,7 @@ impl<T> Erased for T {} pub unsafe trait IntoErased<'a> { /// Owner with the dereference type substituted to `Erased`. type Erased; - /// Perform the type erasure. + /// Performs the type erasure. fn into_erased(self) -> Self::Erased; } @@ -296,7 +296,7 @@ pub unsafe trait IntoErased<'a> { pub unsafe trait IntoErasedSend<'a> { /// Owner with the dereference type substituted to `Erased + Send`. type Erased: Send; - /// Perform the type erasure. + /// Performs the type erasure. fn into_erased_send(self) -> Self::Erased; } @@ -306,7 +306,7 @@ pub unsafe trait IntoErasedSend<'a> { pub unsafe trait IntoErasedSendSync<'a> { /// Owner with the dereference type substituted to `Erased + Send + Sync`. type Erased: Send + Sync; - /// Perform the type erasure. + /// Performs the type erasure. fn into_erased_send_sync(self) -> Self::Erased; } @@ -844,7 +844,7 @@ pub trait ToHandleMut { impl<O, H> OwningHandle<O, H> where O: StableAddress, O::Target: ToHandle<Handle = H>, H: Deref, { - /// Create a new `OwningHandle` for a type that implements `ToHandle`. For types + /// Creates a new `OwningHandle` for a type that implements `ToHandle`. For types /// that don't implement `ToHandle`, callers may invoke `new_with_fn`, which accepts /// a callback to perform the conversion. pub fn new(o: O) -> Self { @@ -855,7 +855,7 @@ impl<O, H> OwningHandle<O, H> impl<O, H> OwningHandle<O, H> where O: StableAddress, O::Target: ToHandleMut<HandleMut = H>, H: DerefMut, { - /// Create a new mutable `OwningHandle` for a type that implements `ToHandleMut`. 
+ /// Creates a new mutable `OwningHandle` for a type that implements `ToHandleMut`. pub fn new_mut(o: O) -> Self { OwningHandle::new_with_fn(o, |x| unsafe { O::Target::to_handle_mut(x) }) } @@ -864,7 +864,7 @@ impl<O, H> OwningHandle<O, H> impl<O, H> OwningHandle<O, H> where O: StableAddress, H: Deref, { - /// Create a new OwningHandle. The provided callback will be invoked with + /// Creates a new OwningHandle. The provided callback will be invoked with /// a pointer to the object owned by `o`, and the returned value is stored /// as the object to which this `OwningHandle` will forward `Deref` and /// `DerefMut`. @@ -882,7 +882,7 @@ impl<O, H> OwningHandle<O, H> } } - /// Create a new OwningHandle. The provided callback will be invoked with + /// Creates a new OwningHandle. The provided callback will be invoked with /// a pointer to the object owned by `o`, and the returned value is stored /// as the object to which this `OwningHandle` will forward `Deref` and /// `DerefMut`. diff --git a/src/librustc_data_structures/sip128.rs b/src/librustc_data_structures/sip128.rs index 9ec9a398400..06f157f9729 100644 --- a/src/librustc_data_structures/sip128.rs +++ b/src/librustc_data_structures/sip128.rs @@ -44,7 +44,7 @@ macro_rules! compress { }); } -/// Load an integer of the desired type from a byte stream, in LE order. Uses +/// Loads an integer of the desired type from a byte stream, in LE order. Uses /// `copy_nonoverlapping` to let the compiler generate the most efficient way /// to load it from a possibly unaligned address. /// @@ -61,7 +61,7 @@ macro_rules! load_int_le { }); } -/// Load an u64 using up to 7 bytes of a byte slice. +/// Loads a u64 using up to 7 bytes of a byte slice. 
/// /// Unsafe because: unchecked indexing at start..start+len #[inline] diff --git a/src/librustc_data_structures/svh.rs b/src/librustc_data_structures/svh.rs index 3757f921098..df4f6176837 100644 --- a/src/librustc_data_structures/svh.rs +++ b/src/librustc_data_structures/svh.rs @@ -17,7 +17,7 @@ pub struct Svh { } impl Svh { - /// Create a new `Svh` given the hash. If you actually want to + /// Creates a new `Svh` given the hash. If you actually want to /// compute the SVH from some HIR, you want the `calculate_svh` /// function found in `librustc_incremental`. pub fn new(hash: u64) -> Svh { diff --git a/src/librustc_data_structures/transitive_relation.rs b/src/librustc_data_structures/transitive_relation.rs index 39aed983360..0974607fabe 100644 --- a/src/librustc_data_structures/transitive_relation.rs +++ b/src/librustc_data_structures/transitive_relation.rs @@ -82,7 +82,7 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> { } /// Applies the (partial) function to each edge and returns a new - /// relation. If `f` returns `None` for any end-point, returns + /// relation. If `f` returns `None` for any end-point, returns /// `None`. pub fn maybe_map<F, U>(&self, mut f: F) -> Option<TransitiveRelation<U>> where F: FnMut(&T) -> Option<U>, @@ -111,7 +111,7 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> { } } - /// Check whether `a < target` (transitively) + /// Checks whether `a < target` (transitively) pub fn contains(&self, a: &T, b: &T) -> bool { match (self.index(a), self.index(b)) { (Some(a), Some(b)) => self.with_closure(|closure| closure.contains(a.0, b.0)), @@ -122,7 +122,7 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> { /// Thinking of `x R y` as an edge `x -> y` in a graph, this /// returns all things reachable from `a`. 
/// - /// Really this probably ought to be `impl Iterator<Item=&T>`, but + /// Really this probably ought to be `impl Iterator<Item = &T>`, but /// I'm too lazy to make that work, and -- given the caching /// strategy -- it'd be a touch tricky anyhow. pub fn reachable_from(&self, a: &T) -> Vec<&T> { @@ -152,20 +152,20 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> { /// the query is `postdom_upper_bound(a, b)`: /// /// ```text - /// // returns Some(x), which is also LUB + /// // Returns Some(x), which is also LUB. /// a -> a1 -> x /// ^ /// | /// b -> b1 ---+ /// - /// // returns Some(x), which is not LUB (there is none) - /// // diagonal edges run left-to-right + /// // Returns `Some(x)`, which is not LUB (there is none) + /// // diagonal edges run left-to-right. /// a -> a1 -> x /// \/ ^ /// /\ | /// b -> b1 ---+ /// - /// // returns None + /// // Returns `None`. /// a -> a1 /// b -> b1 /// ``` diff --git a/src/librustc_data_structures/work_queue.rs b/src/librustc_data_structures/work_queue.rs index 06418b1051a..193025aafad 100644 --- a/src/librustc_data_structures/work_queue.rs +++ b/src/librustc_data_structures/work_queue.rs @@ -14,7 +14,7 @@ pub struct WorkQueue<T: Idx> { } impl<T: Idx> WorkQueue<T> { - /// Create a new work queue with all the elements from (0..len). + /// Creates a new work queue with all the elements from (0..len). #[inline] pub fn with_all(len: usize) -> Self { WorkQueue { @@ -23,7 +23,7 @@ impl<T: Idx> WorkQueue<T> { } } - /// Create a new work queue that starts empty, where elements range from (0..len). + /// Creates a new work queue that starts empty, where elements range from (0..len). #[inline] pub fn with_none(len: usize) -> Self { WorkQueue { @@ -54,7 +54,7 @@ impl<T: Idx> WorkQueue<T> { } } - /// True if nothing is enqueued. + /// Returns `true` if nothing is enqueued. 
#[inline] pub fn is_empty(&self) -> bool { self.deque.is_empty() } } } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 6a23cadf877..09804a706ec 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -711,7 +711,7 @@ pub struct InnerExpansionResult<'a> { pub hir_forest: hir_map::Forest, } -/// Run the "early phases" of the compiler: initial `cfg` processing, +/// Runs the "early phases" of the compiler: initial `cfg` processing, /// loading compiler plugins (including those from `addl_plugins`), /// syntax expansion, secondary `cfg` expansion, synthesis of a test /// harness if one is to be provided, injection of a dependency on the @@ -1167,7 +1167,7 @@ pub fn default_provide_extern(providers: &mut ty::query::Providers) { cstore::provide_extern(providers); } -/// Run the resolution, typechecking, region checking and other +/// Runs the resolution, type-checking, region checking and other /// miscellaneous analysis passes on the crate. Return various /// structures carrying the results of the analysis. pub fn phase_3_run_analysis_passes<'tcx, F, R>( @@ -1334,7 +1334,7 @@ where ) } -/// Run the codegen backend, after which the AST and analysis can +/// Runs the codegen backend, after which the AST and analysis can /// be discarded. pub fn phase_4_codegen<'a, 'tcx>( codegen_backend: &dyn CodegenBackend, diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index b356ae38e24..2d894bd65b2 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -114,7 +114,7 @@ pub mod target_features { use rustc::session::Session; use rustc_codegen_utils::codegen_backend::CodegenBackend; - /// Add `target_feature = "..."` cfgs for a variety of platform + /// Adds `target_feature = "..."` cfgs for a variety of platform /// specific features (SSE, NEON etc.). 
/// /// This is performed by checking whether a whitelisted set of @@ -1316,7 +1316,7 @@ fn print_flag_list<T>(cmdline_opt: &str, /// Process command line options. Emits messages as appropriate. If compilation /// should continue, returns a getopts::Matches object parsed from args, -/// otherwise returns None. +/// otherwise returns `None`. /// /// The compiler's handling of options is a little complicated as it ties into /// our stability story, and it's even *more* complicated by historical @@ -1480,7 +1480,7 @@ pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<dyn Any + Send>> in_named_rustc_thread("rustc".to_string(), f) } -/// Get a list of extra command-line flags provided by the user, as strings. +/// Gets a list of extra command-line flags provided by the user, as strings. /// /// This function is used during ICEs to show more information useful for /// debugging, since some ICEs only happens with non-default compiler flags @@ -1545,7 +1545,7 @@ impl Display for CompilationFailure { } } -/// Run a procedure which will detect panics in the compiler and print nicer +/// Runs a procedure which will detect panics in the compiler and print nicer /// error messages rather than just failing the test. /// /// The diagnostic emitter yielded to the procedure should be used for reporting diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index afcf08632a4..2ec755bd626 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -1,4 +1,4 @@ -//! # Standalone Tests for the Inference Module +//! Standalone tests for the inference module. use driver; use errors; @@ -508,8 +508,8 @@ fn subst_ty_renumber_bound() { }) } -/// Test substituting a bound region into a function, which introduces another level of binding. -/// This requires adjusting the Debruijn index. +/// Tests substituting a bound region into a function, which introduces another level of binding. +/// This requires adjusting the De Bruijn index. 
#[test] fn subst_ty_renumber_some_bounds() { test_env(EMPTY_SOURCE_STR, errors(&[]), |env| { @@ -544,7 +544,7 @@ fn subst_ty_renumber_some_bounds() { }) } -/// Test that we correctly compute whether a type has escaping regions or not. +/// Tests that we correctly compute whether a type has escaping regions or not. #[test] fn escaping() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { @@ -571,7 +571,7 @@ fn escaping() { }) } -/// Test applying a substitution where the value being substituted for an early-bound region is a +/// Tests applying a substitution where the value being substituted for an early-bound region is a /// late-bound region. #[test] fn subst_region_renumber_region() { diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index aefe296ad0f..2c410f69bfc 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -118,7 +118,7 @@ impl Diagnostic { self.level == Level::Cancelled } - /// Add a span/label to be included in the resulting snippet. + /// Adds a span/label to be included in the resulting snippet. /// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index fd4ea7f2d82..9d5e8d10b17 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -26,7 +26,7 @@ pub struct DiagnosticBuilder<'a> { /// In general, the `DiagnosticBuilder` uses deref to allow access to /// the fields and methods of the embedded `diagnostic` in a -/// transparent way. *However,* many of the methods are intended to +/// transparent way. *However,* many of the methods are intended to /// be used in a chained way, and hence ought to return `self`. 
In /// that case, we can't just naively forward to the method on the /// `diagnostic`, because the return type would be a `&Diagnostic` @@ -150,7 +150,7 @@ impl<'a> DiagnosticBuilder<'a> { self.cancel(); } - /// Add a span/label to be included in the resulting snippet. + /// Adds a span/label to be included in the resulting snippet. /// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 2821201173e..1c0c9d137e4 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -22,7 +22,7 @@ pub trait Emitter { /// Emit a structured diagnostic. fn emit(&mut self, db: &DiagnosticBuilder<'_>); - /// Check if should show explanations about "rustc --explain" + /// Checks if should show explanations about "rustc --explain" fn should_show_explain(&self) -> bool { true } @@ -868,7 +868,7 @@ impl EmitterWriter { } } - /// Add a left margin to every line but the first, given a padding length and the label being + /// Adds a left margin to every line but the first, given a padding length and the label being /// displayed, keeping the provided highlighting. fn msg_to_buffer(&self, buffer: &mut StyledBuffer, @@ -895,7 +895,7 @@ impl EmitterWriter { // `max_line_num_len` let padding = " ".repeat(padding + label.len() + 5); - /// Return whether `style`, or the override if present and the style is `NoStyle`. + /// Returns `true` if `style`, or the override if present and the style is `NoStyle`. 
fn style_or_override(style: Style, override_style: Option<Style>) -> Style { if let Some(o) = override_style { if style == Style::NoStyle { diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index ea530fa1bfb..08af62afd4c 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -389,7 +389,7 @@ impl Handler { /// Resets the diagnostic error count as well as the cached emitted diagnostics. /// - /// NOTE: DO NOT call this function from rustc. It is only meant to be called from external + /// NOTE: *do not* call this function from rustc. It is only meant to be called from external /// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as /// the overall count of emitted error diagnostics. pub fn reset_err_count(&self) { diff --git a/src/librustc_fs_util/lib.rs b/src/librustc_fs_util/lib.rs index 340681d65c3..ce63bcafd79 100644 --- a/src/librustc_fs_util/lib.rs +++ b/src/librustc_fs_util/lib.rs @@ -58,7 +58,7 @@ pub enum LinkOrCopy { Copy, } -/// Copy `p` into `q`, preferring to use hard-linking if possible. If +/// Copies `p` into `q`, preferring to use hard-linking if possible. If /// `q` already exists, it is removed first. /// The result indicates which of the two operations has been performed. pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<LinkOrCopy> { diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index b715a32cb05..fe44e0cbe61 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -12,7 +12,7 @@ //! In this code, we report errors on each `rustc_if_this_changed` //! annotation. If a path exists in all cases, then we would report //! "all path(s) exist". Otherwise, we report: "no path to `foo`" for -//! each case where no path exists. `compile-fail` tests can then be +//! each case where no path exists. `compile-fail` tests can then be //! 
used to check when paths exist or do not. //! //! The full form of the `rustc_if_this_changed` annotation is diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 51f3bcdf7a5..c13a3533032 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -5,14 +5,13 @@ //! //! - `#[rustc_clean(cfg="rev2", except="TypeckTables")]` if we are //! in `#[cfg(rev2)]`, then the fingerprints associated with -//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the def-id of the +//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the `DefId` of the //! current node). //! - `#[rustc_clean(cfg="rev2")]` same as above, except that the //! fingerprints must be the SAME (along with all other fingerprints). //! //! Errors are reported if we are in the suitable configuration but //! the required condition is not met. -//! use std::iter::FromIterator; use std::vec::Vec; @@ -84,7 +83,7 @@ const BASE_STRUCT: &[&str] = &[ label_strs::TypeOfItem, ]; -/// Trait Definition DepNodes +/// Trait definition `DepNode`s. const BASE_TRAIT_DEF: &[&str] = &[ label_strs::AssociatedItemDefIds, label_strs::GenericsOfItem, @@ -95,7 +94,7 @@ const BASE_TRAIT_DEF: &[&str] = &[ label_strs::TraitImpls, ]; -/// extra DepNodes for methods (+fn) +/// Extra `DepNode`s for functions and methods. const EXTRA_ASSOCIATED: &[&str] = &[ label_strs::AssociatedItems, ]; @@ -126,14 +125,14 @@ const LABELS_CONST_IN_TRAIT: &[&[&str]] = &[ EXTRA_TRAIT, ]; -/// Function DepNode +/// Function `DepNode`s. const LABELS_FN: &[&[&str]] = &[ BASE_HIR, BASE_MIR, BASE_FN, ]; -/// Method DepNodes +/// Method `DepNode`s. const LABELS_FN_IN_IMPL: &[&[&str]] = &[ BASE_HIR, BASE_MIR, @@ -141,7 +140,7 @@ const LABELS_FN_IN_IMPL: &[&[&str]] = &[ EXTRA_ASSOCIATED, ]; -/// Trait-Method DepNodes +/// Trait method `DepNode`s. 
const LABELS_FN_IN_TRAIT: &[&[&str]] = &[ BASE_HIR, BASE_MIR, @@ -150,24 +149,24 @@ const LABELS_FN_IN_TRAIT: &[&[&str]] = &[ EXTRA_TRAIT, ]; -/// For generic cases like inline-assembly/mod/etc +/// For generic cases like inline-assembly, modules, etc. const LABELS_HIR_ONLY: &[&[&str]] = &[ BASE_HIR, ]; -/// Impl DepNodes +/// Impl `DepNode`s. const LABELS_IMPL: &[&[&str]] = &[ BASE_HIR, BASE_IMPL, ]; -/// Abstract Data Type (Struct, Enum, Unions) DepNodes +/// Abstract data type (struct, enum, union) `DepNode`s. const LABELS_ADT: &[&[&str]] = &[ BASE_HIR, BASE_STRUCT, ]; -/// Trait Definition DepNodes +/// Trait definition `DepNode`s. #[allow(dead_code)] const LABELS_TRAIT: &[&[&str]] = &[ BASE_HIR, @@ -269,7 +268,7 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { Some(assertion) } - /// Get the "auto" assertion on pre-validated attr, along with the `except` labels + /// Gets the "auto" assertion on pre-validated attr, along with the `except` labels. fn assertion_auto(&mut self, item_id: ast::NodeId, attr: &Attribute, is_clean: bool) -> Assertion { diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs index 3b88a14f3a2..f363f718496 100644 --- a/src/librustc_incremental/persist/file_format.rs +++ b/src/librustc_incremental/persist/file_format.rs @@ -17,15 +17,15 @@ use std::env; use rustc::session::config::nightly_options; use rustc_serialize::opaque::Encoder; -/// The first few bytes of files generated by incremental compilation +/// The first few bytes of files generated by incremental compilation. const FILE_MAGIC: &[u8] = b"RSIC"; -/// Change this if the header format changes +/// Change this if the header format changes. const HEADER_FORMAT_VERSION: u16 = 0; /// A version string that hopefully is always different for compiler versions /// with different encodings of incremental compilation artifacts. Contains -/// the git commit hash. +/// the Git commit hash. 
const RUSTC_VERSION: Option<&str> = option_env!("CFG_VERSION"); pub fn write_file_header(stream: &mut Encoder) { diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs index ff8b76829eb..7dcd5c94bf2 100644 --- a/src/librustc_incremental/persist/fs.rs +++ b/src/librustc_incremental/persist/fs.rs @@ -444,7 +444,7 @@ fn copy_files(sess: &Session, Ok(files_linked > 0 || files_copied == 0) } -/// Generate unique directory path of the form: +/// Generates unique directory path of the form: /// {crate_dir}/s-{timestamp}-{random-number}-working fn generate_session_dir_path(crate_dir: &Path) -> PathBuf { let timestamp = timestamp_to_string(SystemTime::now()); @@ -509,7 +509,7 @@ fn delete_session_dir_lock_file(sess: &Session, } } -/// Find the most recent published session directory that is not in the +/// Finds the most recent published session directory that is not in the /// ignore-list. fn find_source_directory(crate_dir: &Path, source_directories_already_tried: &FxHashSet<PathBuf>) diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index cbcc7f3574d..9f1a1d5cd92 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -13,10 +13,13 @@ //! `LintPass` (also, note that such lints will need to be defined in //! `rustc::lint::builtin`, not here). //! -//! If you define a new `LintPass`, you will also need to add it to the -//! `add_builtin!` or `add_builtin_with_new!` invocation in `lib.rs`. -//! Use the former for unit-like structs and the latter for structs with -//! a `pub fn new()`. +//! If you define a new `EarlyLintPass`, you will also need to add it to the +//! `add_early_builtin!` or `add_early_builtin_with_new!` invocation in +//! `lib.rs`. Use the former for unit-like structs and the latter for structs +//! with a `pub fn new()`. +//! +//! If you define a new `LateLintPass`, you will also need to add it to the +//! `late_lint_methods!` invocation in `lib.rs`. 
use rustc::hir::def::Def; use rustc::hir::def_id::{DefId, LOCAL_CRATE}; @@ -317,8 +320,7 @@ declare_lint! { } pub struct MissingDoc { - /// Stack of whether #[doc(hidden)] is set - /// at each level which has lint attributes. + /// Stack of whether `#[doc(hidden)]` is set at each level which has lint attributes. doc_hidden_stack: Vec<bool>, /// Private traits or trait items that leaked through. Don't check their methods. @@ -670,8 +672,8 @@ declare_lint! { "detects anonymous parameters" } -/// Checks for use of anonymous parameters (RFC 1685) -#[derive(Clone)] +/// Checks for use of anonymous parameters (RFC 1685). +#[derive(Copy, Clone)] pub struct AnonymousParameters; impl LintPass for AnonymousParameters { @@ -726,7 +728,7 @@ impl EarlyLintPass for AnonymousParameters { } } -/// Checks for use of attributes which have been deprecated. +/// Check for use of attributes which have been deprecated. #[derive(Clone)] pub struct DeprecatedAttr { // This is not free to compute, so we want to keep it around, rather than @@ -1083,7 +1085,8 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnionsWithDropFields { } } -/// Lint for items marked `pub` that aren't reachable from other crates +/// Lint for items marked `pub` that aren't reachable from other crates. +#[derive(Copy, Clone)] pub struct UnreachablePub; declare_lint! { @@ -1156,7 +1159,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnreachablePub { } } -/// Lint for trait and lifetime bounds in type aliases being mostly ignored: +/// Lint for trait and lifetime bounds in type aliases being mostly ignored. /// They are relevant when using associated types, but otherwise neither checked /// at definition site nor enforced at use site. @@ -1543,8 +1546,8 @@ declare_lint! { "detects edition keywords being used as an identifier" } -/// Checks for uses of edition keywords used as an identifier -#[derive(Clone)] +/// Check for uses of edition keywords used as an identifier. 
+#[derive(Copy, Clone)] pub struct KeywordIdents; impl LintPass for KeywordIdents { diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index f6b7ccfe2ec..141ba19479d 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -479,7 +479,7 @@ fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { - /// Check if the given type is "ffi-safe" (has a stable, well-defined + /// Checks if the given type is "ffi-safe" (has a stable, well-defined /// representation which can be exported to C code). fn check_type_for_ffi(&self, cache: &mut FxHashSet<Ty<'tcx>>, diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 0b4c8a5367c..b62971ccac6 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -513,7 +513,7 @@ impl<'a> CrateLoader<'a> { } } - /// Load custom derive macros. + /// Loads custom derive macros. /// /// Note that this is intentionally similar to how we load plugins today, /// but also intentionally separate. Plugins are likely always going to be diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index a2f69bc4563..d646879b4d4 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -46,7 +46,7 @@ pub struct CrateMetadata { /// Original name of the crate. pub name: Symbol, - /// Name of the crate as imported. I.e., if imported with + /// Name of the crate as imported. I.e., if imported with /// `extern crate foo as bar;` this will be `bar`. pub imported_name: Symbol, @@ -66,9 +66,9 @@ pub struct CrateMetadata { pub root: schema::CrateRoot, - /// For each public item in this crate, we encode a key. When the + /// For each public item in this crate, we encode a key. When the /// crate is loaded, we read all the keys and put them in this - /// hashmap, which gives the reverse mapping. This allows us to + /// hashmap, which gives the reverse mapping. 
This allows us to /// quickly retrace a `DefPath`, which is needed for incremental /// compilation support. pub def_path_table: Lrc<DefPathTable>, diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 6d7907b096a..1c4e3bc6a50 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -434,7 +434,7 @@ impl<'tcx> EntryKind<'tcx> { } } -/// Create the "fake" DefPathTable for a given proc macro crate. +/// Creates the "fake" DefPathTable for a given proc macro crate. /// /// The DefPathTable is as follows: /// diff --git a/src/librustc_metadata/dynamic_lib.rs b/src/librustc_metadata/dynamic_lib.rs index b9dc4195cb2..9dd160c24c3 100644 --- a/src/librustc_metadata/dynamic_lib.rs +++ b/src/librustc_metadata/dynamic_lib.rs @@ -32,7 +32,7 @@ impl DynamicLibrary { } } - /// Load a dynamic library into the global namespace (RTLD_GLOBAL on Unix) + /// Loads a dynamic library into the global namespace (RTLD_GLOBAL on Unix) /// and do it now (don't use RTLD_LAZY on Unix). pub fn open_global_now(filename: &Path) -> Result<DynamicLibrary, String> { let maybe_library = dl::open_global_now(filename.as_os_str()); diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index 4175f7acd06..9aff1133ea9 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -3,7 +3,7 @@ //! //! ``` //! <common::data> // big list of item-like things... -//! <common::data_item> // ...for most def-ids, there is an entry. +//! <common::data_item> // ...for most `DefId`s, there is an entry. //! </common::data_item> //! </common::data> //! ``` @@ -85,7 +85,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { } } - /// Emit the data for a def-id to the metadata. The function to + /// Emit the data for a `DefId` to the metadata. The function to /// emit the data is `op`, and it will be given `data` as /// arguments. 
This `record` function will call `op` to generate /// the `Entry` (which may point to other encoded information) @@ -129,7 +129,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { } /// Trait used for data that can be passed from outside a dep-graph -/// task. The data must either be of some safe type, such as a +/// task. The data must either be of some safe type, such as a /// `DefId` index, or implement the `read` method so that it can add /// a read of whatever dep-graph nodes are appropriate. pub trait DepGraphRead { @@ -212,9 +212,9 @@ impl<T> DepGraphRead for Untracked<T> { } /// Newtype that can be used to package up misc data extracted from a -/// HIR node that doesn't carry its own id. This will allow an +/// HIR node that doesn't carry its own ID. This will allow an /// arbitrary `T` to be passed in, but register a read on the given -/// node-id. +/// `NodeId`. pub struct FromId<T>(pub ast::NodeId, pub T); impl<T> DepGraphRead for FromId<T> { diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index f120072b37c..6a1aada5ac7 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -27,7 +27,7 @@ //! //! The reason for this is that any of B's types could be composed of C's types, //! any function in B could return a type from C, etc. To be able to guarantee -//! that we can always typecheck/translate any function, we have to have +//! that we can always type-check/translate any function, we have to have //! complete knowledge of the whole ecosystem, not just our immediate //! dependencies. //! @@ -918,7 +918,7 @@ fn get_metadata_section_imp(target: &Target, } } -// A diagnostic function for dumping crate metadata to an output stream +/// A diagnostic function for dumping crate metadata to an output stream. 
pub fn list_file_metadata(target: &Target, path: &Path, loader: &dyn MetadataLoader, diff --git a/src/librustc_mir/borrow_check/borrow_set.rs b/src/librustc_mir/borrow_check/borrow_set.rs index 2788f5d4325..53e4ffc8bd6 100644 --- a/src/librustc_mir/borrow_check/borrow_set.rs +++ b/src/librustc_mir/borrow_check/borrow_set.rs @@ -26,12 +26,12 @@ crate struct BorrowSet<'tcx> { crate location_map: FxHashMap<Location, BorrowIndex>, /// Locations which activate borrows. - /// NOTE: A given location may activate more than one borrow in the future + /// NOTE: a given location may activate more than one borrow in the future /// when more general two-phase borrow support is introduced, but for now we - /// only need to store one borrow index + /// only need to store one borrow index. crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>, - /// Map from local to all the borrows on that local + /// Map from local to all the borrows on that local. crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>, crate locals_state_at_exit: LocalsStateAtExit, @@ -45,8 +45,8 @@ impl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> { } } -/// Location where a two phase borrow is activated, if a borrow -/// is in fact a two phase borrow. +/// Location where a two-phase borrow is activated, if a borrow +/// is in fact a two-phase borrow. #[derive(Copy, Clone, PartialEq, Eq, Debug)] crate enum TwoPhaseActivation { NotTwoPhase, @@ -311,7 +311,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> { - /// Returns true if the borrow represented by `kind` is + /// Returns `true` if the borrow represented by `kind` is /// allowed to be split into separate Reservation and /// Activation phases. 
fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool { diff --git a/src/librustc_mir/borrow_check/error_reporting.rs b/src/librustc_mir/borrow_check/error_reporting.rs index afb26963217..4cecb78426c 100644 --- a/src/librustc_mir/borrow_check/error_reporting.rs +++ b/src/librustc_mir/borrow_check/error_reporting.rs @@ -33,7 +33,7 @@ struct MoveSite { /// then tell us where the move occurred. moi: MoveOutIndex, - /// True if we traversed a back edge while walking from the point + /// `true` if we traversed a back edge while walking from the point /// of error to the move site. traversed_back_edge: bool } @@ -1793,7 +1793,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } } - /// Check if a place is a thread-local static. + /// Checks if a place is a thread-local static. pub fn is_place_thread_local(&self, place: &Place<'tcx>) -> bool { if let Place::Static(statik) = place { let attrs = self.infcx.tcx.get_attrs(statik.def_id); @@ -2314,7 +2314,7 @@ impl<'tcx> AnnotatedBorrowFnSignature<'tcx> { } } - /// Return the name of the provided `Ty` (that must be a reference)'s region with a + /// Returns the name of the provided `Ty` (that must be a reference)'s region with a /// synthesized lifetime name where required. fn get_region_name_for_ty(&self, ty: ty::Ty<'tcx>, counter: usize) -> String { match ty.sty { @@ -2389,7 +2389,7 @@ impl UseSpans { } } - /// Return `false` if this place is not used in a closure. + /// Returns `false` if this place is not used in a closure. fn for_closure(&self) -> bool { match *self { UseSpans::ClosureUse { is_generator, .. } => !is_generator, @@ -2397,7 +2397,7 @@ impl UseSpans { } } - /// Return `false` if this place is not used in a generator. + /// Returns `false` if this place is not used in a generator. fn for_generator(&self) -> bool { match *self { UseSpans::ClosureUse { is_generator, .. 
} => is_generator, diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index 45a8c9e8e69..f7d079c5494 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ b/src/librustc_mir/borrow_check/mod.rs @@ -420,11 +420,11 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> { access_place_error_reported: FxHashSet<(Place<'tcx>, Span)>, /// This field keeps track of when borrow conflict errors are reported /// for reservations, so that we don't report seemingly duplicate - /// errors for corresponding activations - /// - /// FIXME: Ideally this would be a set of BorrowIndex, not Places, - /// but it is currently inconvenient to track down the BorrowIndex - /// at the time we detect and report a reservation error. + /// errors for corresponding activations. + // + // FIXME: ideally this would be a set of `BorrowIndex`, not `Place`s, + // but it is currently inconvenient to track down the `BorrowIndex` + // at the time we detect and report a reservation error. reservation_error_reported: FxHashSet<Place<'tcx>>, /// This field keeps track of move errors that are to be reported for given move indicies. /// @@ -452,7 +452,7 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> { /// If the function we're checking is a closure, then we'll need to report back the list of /// mutable upvars that have been used. This field keeps track of them. used_mut_upvars: SmallVec<[Field; 8]>, - /// Non-lexical region inference context, if NLL is enabled. This + /// Non-lexical region inference context, if NLL is enabled. This /// contains the results from region inference and lets us e.g. /// find out which CFG points are contained in each borrow region. nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>, @@ -835,12 +835,12 @@ enum WriteKind { /// When checking permissions for a place access, this flag is used to indicate that an immutable /// local place can be mutated. 
-/// -/// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications: -/// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()` -/// - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and -/// `is_declared_mutable()` -/// - Take flow state into consideration in `is_assignable()` for local variables +// +// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications: +// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`. +// - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and +// `is_declared_mutable()`. +// - Take flow state into consideration in `is_assignable()` for local variables. #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum LocalMutationIsAllowed { Yes, @@ -895,7 +895,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { /// place is initialized and (b) it is not borrowed in some way that would prevent this /// access. /// - /// Returns true if an error is reported, false otherwise. + /// Returns `true` if an error is reported. fn access_place( &mut self, context: Context, @@ -1785,9 +1785,9 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } } - /// Check the permissions for the given place and read or write kind + /// Checks the permissions for the given place and read or write kind /// - /// Returns true if an error is reported, false otherwise. + /// Returns `true` if an error is reported. 
fn check_access_permissions( &mut self, (place, span): (&Place<'tcx>, Span), diff --git a/src/librustc_mir/borrow_check/mutability_errors.rs b/src/librustc_mir/borrow_check/mutability_errors.rs index dad8d903cf9..008c081aeb6 100644 --- a/src/librustc_mir/borrow_check/mutability_errors.rs +++ b/src/librustc_mir/borrow_check/mutability_errors.rs @@ -615,7 +615,7 @@ fn is_closure_or_generator(ty: ty::Ty<'_>) -> bool { ty.is_closure() || ty.is_generator() } -/// Add a suggestion to a struct definition given a field access to a local. +/// Adds a suggestion to a struct definition given a field access to a local. /// This function expects the local to be a reference to a struct in order to produce a suggestion. /// /// ```text diff --git a/src/librustc_mir/borrow_check/nll/constraints/graph.rs b/src/librustc_mir/borrow_check/nll/constraints/graph.rs index 2479dfd1c70..c4b2a5daef8 100644 --- a/src/librustc_mir/borrow_check/nll/constraints/graph.rs +++ b/src/librustc_mir/borrow_check/nll/constraints/graph.rs @@ -71,7 +71,7 @@ impl ConstraintGraphDirecton for Reverse { } impl<D: ConstraintGraphDirecton> ConstraintGraph<D> { - /// Create a "dependency graph" where each region constraint `R1: + /// Creates a "dependency graph" where each region constraint `R1: /// R2` is treated as an edge `R1 -> R2`. We use this graph to /// construct SCCs for region inference but also for error /// reporting. @@ -186,7 +186,7 @@ crate struct RegionGraph<'s, D: ConstraintGraphDirecton> { } impl<'s, D: ConstraintGraphDirecton> RegionGraph<'s, D> { - /// Create a "dependency graph" where each region constraint `R1: + /// Creates a "dependency graph" where each region constraint `R1: /// R2` is treated as an edge `R1 -> R2`. We use this graph to /// construct SCCs for region inference but also for error /// reporting. 
diff --git a/src/librustc_mir/borrow_check/nll/constraints/mod.rs b/src/librustc_mir/borrow_check/nll/constraints/mod.rs index d3f9743dfed..b1091eb5ac8 100644 --- a/src/librustc_mir/borrow_check/nll/constraints/mod.rs +++ b/src/librustc_mir/borrow_check/nll/constraints/mod.rs @@ -31,7 +31,7 @@ impl ConstraintSet { /// easy to find the constraints affecting a particular region. /// /// N.B., this graph contains a "frozen" view of the current - /// constraints. any new constraints added to the `ConstraintSet` + /// constraints. Any new constraints added to the `ConstraintSet` /// after the graph is built will not be present in the graph. crate fn graph(&self, num_region_vars: usize) -> graph::NormalConstraintGraph { graph::ConstraintGraph::new(graph::Normal, self, num_region_vars) @@ -43,7 +43,7 @@ impl ConstraintSet { graph::ConstraintGraph::new(graph::Reverse, self, num_region_vars) } - /// Compute cycles (SCCs) in the graph of regions. In particular, + /// Computes cycles (SCCs) in the graph of regions. In particular, /// find all regions R1, R2 such that R1: R2 and R2: R1 and group /// them into an SCC, and find the relationships between SCCs. crate fn compute_sccs( diff --git a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs index 8e57d107aa6..a6a6962bb15 100644 --- a/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs +++ b/src/librustc_mir/borrow_check/nll/explain_borrow/mod.rs @@ -285,7 +285,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } } - /// Check if a borrow location is within a loop. + /// Checks if a borrow location is within a loop. fn is_borrow_location_in_loop( &self, borrow_location: Location, @@ -407,7 +407,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } } - /// Check if a borrowed value was captured by a trait object. We do this by + /// Checks if a borrowed value was captured by a trait object. 
We do this by /// looking forward in the MIR from the reserve location and checking if we see /// a unsized cast to a trait object on our data. fn was_captured_by_trait_object(&self, borrow: &BorrowData<'tcx>) -> bool { diff --git a/src/librustc_mir/borrow_check/nll/facts.rs b/src/librustc_mir/borrow_check/nll/facts.rs index 9672d3e78cd..9714398d9d6 100644 --- a/src/librustc_mir/borrow_check/nll/facts.rs +++ b/src/librustc_mir/borrow_check/nll/facts.rs @@ -13,7 +13,7 @@ use std::path::Path; crate type AllFacts = PoloniusAllFacts<RegionVid, BorrowIndex, LocationIndex>; crate trait AllFactsExt { - /// Returns true if there is a need to gather `AllFacts` given the + /// Returns `true` if there is a need to gather `AllFacts` given the /// current `-Z` flags. fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool; diff --git a/src/librustc_mir/borrow_check/nll/invalidation.rs b/src/librustc_mir/borrow_check/nll/invalidation.rs index 3df6b797a44..3255899c86c 100644 --- a/src/librustc_mir/borrow_check/nll/invalidation.rs +++ b/src/librustc_mir/borrow_check/nll/invalidation.rs @@ -53,8 +53,8 @@ struct InvalidationGenerator<'cx, 'tcx: 'cx, 'gcx: 'tcx> { borrow_set: &'cx BorrowSet<'tcx>, } -/// Visits the whole MIR and generates invalidates() facts -/// Most of the code implementing this was stolen from borrow_check/mod.rs +/// Visits the whole MIR and generates `invalidates()` facts. +/// Most of the code implementing this was stolen from `borrow_check/mod.rs`. impl<'cx, 'tcx, 'gcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx, 'gcx> { fn visit_statement( &mut self, @@ -272,7 +272,7 @@ impl<'cx, 'tcx, 'gcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx, 'gcx> { } impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> { - /// Simulates mutation of a place + /// Simulates mutation of a place. 
fn mutate_place( &mut self, context: Context, @@ -288,7 +288,7 @@ impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> { ); } - /// Simulates consumption of an operand + /// Simulates consumption of an operand. fn consume_operand( &mut self, context: Context, @@ -384,7 +384,7 @@ impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> { } } - /// Simulates an access to a place + /// Simulates an access to a place. fn access_place( &mut self, context: Context, @@ -472,7 +472,7 @@ impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> { } - /// Generate a new invalidates(L, B) fact + /// Generates a new `invalidates(L, B)` fact. fn generate_invalidates(&mut self, b: BorrowIndex, l: Location) { let lidx = self.location_table.start_index(l); self.all_facts.invalidates.push((lidx, b)); diff --git a/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs b/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs index df6d187e442..419ee73b28a 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/dump_mir.rs @@ -57,7 +57,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } /// Debugging aid: Invokes the `with_msg` callback repeatedly with - /// our internal region constraints. These are dumped into the + /// our internal region constraints. These are dumped into the /// -Zdump-mir file so that we can figure out why the region /// inference resulted in the values that it did when debugging. 
fn for_each_constraint( diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs index 3498e343767..f741af0b228 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/mod.rs @@ -738,8 +738,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { } /// If `r2` represents a placeholder region, then this returns - /// true if `r1` cannot name that placeholder in its - /// value. Otherwise, returns false. + /// `true` if `r1` cannot name that placeholder in its + /// value; otherwise, returns `false`. fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool { debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2); diff --git a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs index 2c4f359f65f..ed4a8d23a28 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/error_reporting/region_name.rs @@ -173,7 +173,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { value } - /// Check for the case where `fr` maps to something that the + /// Checks for the case where `fr` maps to something that the /// *user* has a name for. In that case, we'll be able to map /// `fr` to a `Region<'tcx>`, and that region will be one of /// named variants. 
@@ -272,7 +272,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - /// Get a span of a named region to provide context for error messages that + /// Gets a span of a named region to provide context for error messages that /// mention that span, for example: /// /// ``` @@ -306,7 +306,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - /// Find an argument that contains `fr` and label it with a fully + /// Finds an argument that contains `fr` and label it with a fully /// elaborated type, returning something like `'1`. Result looks /// like: /// @@ -428,7 +428,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// to. For example, we might produce an annotation like this: /// /// ``` - /// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item=&T>> { + /// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item = &T>> { /// | - let's call the lifetime of this reference `'1` /// ``` /// @@ -437,7 +437,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// `argument_hir_ty`, a `hir::Ty` (the syntax of the type /// annotation). We are descending through the types stepwise, /// looking in to find the region `needle_fr` in the internal - /// type. Once we find that, we can use the span of the `hir::Ty` + /// type. Once we find that, we can use the span of the `hir::Ty` /// to add the highlight. /// /// This is a somewhat imperfect process, so long the way we also @@ -621,7 +621,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { None } - /// Find a closure upvar that contains `fr` and label it with a + /// Finds a closure upvar that contains `fr` and label it with a /// fully elaborated type, returning something like `'1`. Result /// looks like: /// @@ -647,7 +647,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { }) } - /// Check for arguments appearing in the (closure) return type. It + /// Checks for arguments appearing in the (closure) return type. 
It /// must be a closure since, in a free fn, such an argument would /// have to either also appear in an argument (if using elision) /// or be early bound (named, not in argument). @@ -711,7 +711,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { }) } - /// Create a synthetic region named `'1`, incrementing the + /// Creates a synthetic region named `'1`, incrementing the /// counter. fn synthesize_region_name(&self, counter: &mut usize) -> InternedString { let c = *counter; diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs index 7fe657702d7..6de05777fe8 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs @@ -35,7 +35,7 @@ use self::values::{LivenessValues, RegionValueElements, RegionValues}; use super::ToRegionVid; pub struct RegionInferenceContext<'tcx> { - /// Contains the definition for every region variable. Region + /// Contains the definition for every region variable. Region /// variables are identified by their index (`RegionVid`). The /// definition contains information about where the region came /// from as well as its final inferred value. @@ -124,7 +124,7 @@ pub(crate) enum Cause { } /// A "type test" corresponds to an outlives constraint between a type -/// and a lifetime, like `T: 'x` or `<T as Foo>::Bar: 'x`. They are +/// and a lifetime, like `T: 'x` or `<T as Foo>::Bar: 'x`. They are /// translated from the `Verify` region constraints in the ordinary /// inference context. /// @@ -137,10 +137,10 @@ pub(crate) enum Cause { /// /// In some cases, however, there are outlives relationships that are /// not converted into a region constraint, but rather into one of -/// these "type tests". The distinction is that a type test does not +/// these "type tests". 
The distinction is that a type test does not /// influence the inference result, but instead just examines the /// values that we ultimately inferred for each region variable and -/// checks that they meet certain extra criteria. If not, an error +/// checks that they meet certain extra criteria. If not, an error /// can be issued. /// /// One reason for this is that these type tests typically boil down @@ -286,7 +286,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// Initializes the region variables for each universally /// quantified region (lifetime parameter). The first N variables /// always correspond to the regions appearing in the function - /// signature (both named and anonymous) and where clauses. This + /// signature (both named and anonymous) and where-clauses. This /// function iterates over those regions and initializes them with /// minimum values. /// @@ -368,12 +368,12 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.universal_regions.to_region_vid(r) } - /// Add annotations for `#[rustc_regions]`; see `UniversalRegions::annotate`. + /// Adds annotations for `#[rustc_regions]`; see `UniversalRegions::annotate`. crate fn annotate(&self, tcx: TyCtxt<'_, '_, 'tcx>, err: &mut DiagnosticBuilder<'_>) { self.universal_regions.annotate(tcx, err) } - /// Returns true if the region `r` contains the point `p`. + /// Returns `true` if the region `r` contains the point `p`. /// /// Panics if called before `solve()` executes, crate fn region_contains(&self, r: impl ToRegionVid, p: impl ToElementIndex) -> bool { @@ -393,7 +393,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { self.scc_universes[scc] } - /// Perform region inference and report errors if we see any + /// Performs region inference and report errors if we see any /// unsatisfiable constraints. If this is a closure, returns the /// region requirements to propagate to our creator, if any. 
pub(super) fn solve<'gcx>( @@ -533,7 +533,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { ); } - /// True if all the elements in the value of `scc_b` are nameable + /// Returns `true` if all the elements in the value of `scc_b` are nameable /// in `scc_a`. Used during constraint propagation, and only once /// the value of `scc_b` has been computed. fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccIndex) -> bool { @@ -928,8 +928,8 @@ impl<'tcx> RegionInferenceContext<'tcx> { lub } - /// Test if `test` is true when applied to `lower_bound` at - /// `point`, and returns true or false. + /// Tests if `test` is true when applied to `lower_bound` at + /// `point`. fn eval_verify_bound( &self, tcx: TyCtxt<'_, '_, 'tcx>, @@ -990,7 +990,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// different results. (For example, there might be two regions /// with the same value that are not in the same SCC). /// - /// NB. This is not an ideal approach and I would like to revisit + /// N.B., this is not an ideal approach and I would like to revisit /// it. However, it works pretty well in practice. In particular, /// this is needed to deal with projection outlives bounds like /// @@ -998,7 +998,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// /// In particular, this routine winds up being important when /// there are bounds like `where <T as Foo<'a>>::Item: 'b` in the - /// environment. In this case, if we can show that `'0 == 'a`, + /// environment. In this case, if we can show that `'0 == 'a`, /// and that `'b: '1`, then we know that the clause is /// satisfied. In such cases, particularly due to limitations of /// the trait solver =), we usually wind up with a where-clause like @@ -1077,7 +1077,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { /// Once regions have been propagated, this method is used to see /// whether any of the constraints were too strong. 
In particular, /// we want to check for a case where a universally quantified - /// region exceeded its bounds. Consider: + /// region exceeded its bounds. Consider: /// /// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x } /// @@ -1126,7 +1126,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { } } - /// Check the final value for the free region `fr` to see if it + /// Checks the final value for the free region `fr` to see if it /// grew too large. In particular, examine what `end(X)` points /// wound up in `fr`'s final value; for each `end(X)` where `X != /// fr`, we want to check that `fr: X`. If not, that's either an diff --git a/src/librustc_mir/borrow_check/nll/region_infer/values.rs b/src/librustc_mir/borrow_check/nll/region_infer/values.rs index 88e8310db68..ef27fdbde38 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/values.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/values.rs @@ -166,7 +166,7 @@ impl<N: Idx> LivenessValues<N> { self.points.rows() } - /// Adds the given element to the value for the given region. Returns true if + /// Adds the given element to the value for the given region. Returns whether /// the element is newly added (i.e., was not already present). crate fn add_element(&mut self, row: N, location: Location) -> bool { debug!("LivenessValues::add(r={:?}, location={:?})", row, location); @@ -175,7 +175,7 @@ impl<N: Idx> LivenessValues<N> { } /// Adds all the elements in the given bit array into the given - /// region. Returns true if any of them are newly added. + /// region. Returns whether any of them are newly added. crate fn add_elements(&mut self, row: N, locations: &HybridBitSet<PointIndex>) -> bool { debug!( "LivenessValues::add_elements(row={:?}, locations={:?})", @@ -189,7 +189,7 @@ impl<N: Idx> LivenessValues<N> { self.points.insert_all_into_row(row); } - /// True if the region `r` contains the given element. + /// Returns `true` if the region `r` contains the given element. 
crate fn contains(&self, row: N, location: Location) -> bool { let index = self.elements.point_from_location(location); self.points.contains(row, index) @@ -291,7 +291,7 @@ impl<N: Idx> RegionValues<N> { } } - /// Adds the given element to the value for the given region. Returns true if + /// Adds the given element to the value for the given region. Returns whether /// the element is newly added (i.e., was not already present). crate fn add_element(&mut self, r: N, elem: impl ToElementIndex) -> bool { debug!("add(r={:?}, elem={:?})", r, elem); @@ -303,7 +303,7 @@ impl<N: Idx> RegionValues<N> { self.points.insert_all_into_row(r); } - /// Add all elements in `r_from` to `r_to` (because e.g., `r_to: + /// Adds all elements in `r_from` to `r_to` (because e.g., `r_to: /// r_from`). crate fn add_region(&mut self, r_to: N, r_from: N) -> bool { self.points.union_rows(r_from, r_to) @@ -311,7 +311,7 @@ impl<N: Idx> RegionValues<N> { | self.placeholders.union_rows(r_from, r_to) } - /// True if the region `r` contains the given element. + /// Returns `true` if the region `r` contains the given element. crate fn contains(&self, r: N, elem: impl ToElementIndex) -> bool { elem.contained_in_row(self, r) } @@ -325,7 +325,7 @@ impl<N: Idx> RegionValues<N> { } } - /// True if `sup_region` contains all the CFG points that + /// Returns `true` if `sup_region` contains all the CFG points that /// `sub_region` contains. Ignores universal regions. 
crate fn contains_points(&self, sup_region: N, sub_region: N) -> bool { if let Some(sub_row) = self.points.row(sub_region) { diff --git a/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs b/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs index f549aea81f6..7ddfd55dbbb 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/free_region_relations.rs @@ -19,7 +19,7 @@ crate struct UniversalRegionRelations<'tcx> { universal_regions: Rc<UniversalRegions<'tcx>>, /// Stores the outlives relations that are known to hold from the - /// implied bounds, in-scope where clauses, and that sort of + /// implied bounds, in-scope where-clauses, and that sort of /// thing. outlives: TransitiveRelation<RegionVid>, @@ -35,7 +35,7 @@ crate struct UniversalRegionRelations<'tcx> { /// added via implicit bounds. /// /// Each region here is guaranteed to be a key in the `indices` -/// map. We use the "original" regions (i.e., the keys from the +/// map. We use the "original" regions (i.e., the keys from the /// map, and not the values) because the code in /// `process_registered_region_obligations` has some special-cased /// logic expecting to see (e.g.) `ReStatic`, and if we supplied @@ -44,7 +44,7 @@ type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>; /// As part of computing the free region relations, we also have to /// normalize the input-output types, which we then need later. So we -/// return those. This vector consists of first the input types and +/// return those. This vector consists of first the input types and /// then the output type as the last element. type NormalizedInputsAndOutput<'tcx> = Vec<Ty<'tcx>>; @@ -129,8 +129,8 @@ impl UniversalRegionRelations<'tcx> { } /// Helper for `non_local_upper_bound` and - /// `non_local_lower_bound`. Repeatedly invokes `postdom_parent` - /// until we find something that is not local. 
Returns None if we + /// `non_local_lower_bound`. Repeatedly invokes `postdom_parent` + /// until we find something that is not local. Returns `None` if we /// never do so. fn non_local_bound( &self, @@ -177,7 +177,7 @@ impl UniversalRegionRelations<'tcx> { }) } - /// True if fr1 is known to outlive fr2. + /// Returns `true` if fr1 is known to outlive fr2. /// /// This will only ever be true for universally quantified regions. crate fn outlives(&self, fr1: RegionVid, fr2: RegionVid) -> bool { diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/liveness_map.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/liveness_map.rs index 5e2e4407cbe..b9f9d83161b 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/liveness_map.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/liveness_map.rs @@ -79,7 +79,7 @@ impl NllLivenessMap { } } - /// True if there are no local variables that need liveness computation. + /// Returns `true` if there are no local variables that need liveness computation. crate fn is_empty(&self) -> bool { self.to_local.is_empty() } diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs index a5510ba6936..28a8cad8ca2 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/mod.rs @@ -23,7 +23,7 @@ mod trace; /// that indicate which types must be live at which point in the CFG. /// This vector is consumed by `constraint_generation`. /// -/// NB. 
This computation requires normalization; therefore, it must be +/// N.B., this computation requires normalization; therefore, it must be /// performed before pub(super) fn generate<'gcx, 'tcx>( typeck: &mut TypeChecker<'_, 'gcx, 'tcx>, @@ -46,7 +46,7 @@ pub(super) fn generate<'gcx, 'tcx>( trace::trace(typeck, mir, elements, flow_inits, move_data, &liveness_map, location_table); } -/// Compute all regions that are (currently) known to outlive free +/// Computes all regions that are (currently) known to outlive free /// regions. For these regions, we do not need to compute /// liveness, since the outlives constraints will ensure that they /// are live over the whole fn body anyhow. diff --git a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs index d058be03f55..4a0b4b7c205 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/liveness/trace.rs @@ -192,7 +192,7 @@ impl LivenessResults<'me, 'typeck, 'flow, 'gcx, 'tcx> { } } - /// Compute all points where local is "use live" -- meaning its + /// Computes all points where local is "use live" -- meaning its /// current value may be used later (except by a drop). This is /// done by walking backwards from each use of `live_local` until we /// find a `def` of local. @@ -215,7 +215,7 @@ impl LivenessResults<'me, 'typeck, 'flow, 'gcx, 'tcx> { } } - /// Compute all points where local is "drop live" -- meaning its + /// Computes all points where local is "drop live" -- meaning its /// current value may be dropped later (but not used). This is /// done by iterating over the drops of `local` where `local` (or /// some subpart of `local`) is initialized. 
For each such drop, @@ -407,7 +407,7 @@ impl LivenessResults<'me, 'typeck, 'flow, 'gcx, 'tcx> { } impl LivenessContext<'_, '_, '_, '_, 'tcx> { - /// True if the local variable (or some part of it) is initialized in + /// Returns `true` if the local variable (or some part of it) is initialized in /// the terminator of `block`. We need to check this to determine if a /// DROP of some local variable will have an effect -- note that /// drops, as they may unwind, are always terminators. @@ -429,7 +429,7 @@ impl LivenessContext<'_, '_, '_, '_, 'tcx> { self.flow_inits.has_any_child_of(mpi).is_some() } - /// True if the path `mpi` (or some part of it) is initialized at + /// Returns `true` if the path `mpi` (or some part of it) is initialized at /// the exit of `block`. /// /// **Warning:** Does not account for the result of `Call` @@ -439,7 +439,7 @@ impl LivenessContext<'_, '_, '_, '_, 'tcx> { self.flow_inits.has_any_child_of(mpi).is_some() } - /// Store the result that all regions in `value` are live for the + /// Stores the result that all regions in `value` are live for the /// points `live_at`. fn add_use_live_facts_for( &mut self, diff --git a/src/librustc_mir/borrow_check/nll/type_check/mod.rs b/src/librustc_mir/borrow_check/nll/type_check/mod.rs index add07b1ddfe..49f90eb90aa 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/mod.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/mod.rs @@ -839,7 +839,7 @@ pub enum Locations { /// older NLL analysis, we required this only at the entry point /// to the function. By the nature of the constraints, this wound /// up propagating to all points reachable from start (because - /// `'1` -- as a universal region -- is live everywhere). In the + /// `'1` -- as a universal region -- is live everywhere). In the /// newer analysis, though, this doesn't work: `_0` is considered /// dead at the start (it has no usable value) and hence this type /// equality is basically a no-op. 
Then, later on, when we do `_0 @@ -2079,7 +2079,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { } } - /// Add the constraints that arise from a borrow expression `&'a P` at the location `L`. + /// Adds the constraints that arise from a borrow expression `&'a P` at the location `L`. /// /// # Parameters /// diff --git a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs index 1748e300890..28835b959d7 100644 --- a/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs +++ b/src/librustc_mir/borrow_check/nll/type_check/relate_tys.rs @@ -14,7 +14,7 @@ use rustc::ty::{self, Ty}; /// - "Invariant" `a == b` /// - "Contravariant" `a :> b` /// -/// NB. The type `a` is permitted to have unresolved inference +/// N.B., the type `a` is permitted to have unresolved inference /// variables, but not the type `b`. pub(super) fn relate_types<'tcx>( infcx: &InferCtxt<'_, '_, 'tcx>, diff --git a/src/librustc_mir/borrow_check/nll/universal_regions.rs b/src/librustc_mir/borrow_check/nll/universal_regions.rs index 0a214e60bdd..76f0132bec5 100644 --- a/src/librustc_mir/borrow_check/nll/universal_regions.rs +++ b/src/librustc_mir/borrow_check/nll/universal_regions.rs @@ -35,15 +35,15 @@ pub struct UniversalRegions<'tcx> { pub fr_static: RegionVid, /// A special region vid created to represent the current MIR fn - /// body. It will outlive the entire CFG but it will not outlive + /// body. It will outlive the entire CFG but it will not outlive /// any other universal regions. pub fr_fn_body: RegionVid, /// We create region variables such that they are ordered by their /// `RegionClassification`. The first block are globals, then - /// externals, then locals. So things from: - /// - `FIRST_GLOBAL_INDEX..first_extern_index` are global; - /// - `first_extern_index..first_local_index` are external; and + /// externals, then locals. 
So, things from: + /// - `FIRST_GLOBAL_INDEX..first_extern_index` are global, + /// - `first_extern_index..first_local_index` are external, /// - `first_local_index..num_universals` are local. first_extern_index: usize, @@ -54,21 +54,21 @@ pub struct UniversalRegions<'tcx> { num_universals: usize, /// The "defining" type for this function, with all universal - /// regions instantiated. For a closure or generator, this is the + /// regions instantiated. For a closure or generator, this is the /// closure type, but for a top-level function it's the `FnDef`. pub defining_ty: DefiningTy<'tcx>, /// The return type of this function, with all regions replaced by /// their universal `RegionVid` equivalents. /// - /// NB. Associated types in this type have not been normalized, + /// N.B., associated types in this type have not been normalized, /// as the name suggests. =) pub unnormalized_output_ty: Ty<'tcx>, /// The fully liberated input types of this function, with all /// regions replaced by their universal `RegionVid` equivalents. /// - /// NB. Associated types in these types have not been normalized, + /// N.B., associated types in these types have not been normalized, /// as the name suggests. =) pub unnormalized_input_tys: &'tcx [Ty<'tcx>], @@ -92,7 +92,7 @@ pub enum DefiningTy<'tcx> { /// `ClosureSubsts::generator_return_ty`. Generator(DefId, ty::GeneratorSubsts<'tcx>, hir::GeneratorMovability), - /// The MIR is a fn item with the given def-id and substs. The signature + /// The MIR is a fn item with the given `DefId` and substs. The signature /// of the function can be bound then with the `fn_sig` query. FnDef(DefId, &'tcx Substs<'tcx>), @@ -174,13 +174,13 @@ pub enum RegionClassification { /// A **local** lifetime is one about which we know the full set /// of relevant constraints (that is, relationships to other named - /// regions). For a closure, this includes any region bound in - /// the closure's signature. For a fn item, this includes all + /// regions). 
For a closure, this includes any region bound in + /// the closure's signature. For a fn item, this includes all /// regions other than global ones. /// /// Continuing with the example from `External`, if we were /// analyzing the closure, then `'x` would be local (and `'a` and - /// `'b` are external). If we are analyzing the function item + /// `'b` are external). If we are analyzing the function item /// `foo`, then `'a` and `'b` are local (and `'x` is not in /// scope). Local, @@ -245,7 +245,7 @@ impl<'tcx> UniversalRegions<'tcx> { region_mapping } - /// True if `r` is a member of this set of universal regions. + /// Returns `true` if `r` is a member of this set of universal regions. pub fn is_universal_region(&self, r: RegionVid) -> bool { (FIRST_GLOBAL_INDEX..self.num_universals).contains(&r.index()) } @@ -271,7 +271,7 @@ impl<'tcx> UniversalRegions<'tcx> { (FIRST_GLOBAL_INDEX..self.num_universals).map(RegionVid::new) } - /// True if `r` is classified as an local region. + /// Returns `true` if `r` is classified as an local region. pub fn is_local_free_region(&self, r: RegionVid) -> bool { self.region_classification(r) == Some(RegionClassification::Local) } @@ -290,7 +290,7 @@ impl<'tcx> UniversalRegions<'tcx> { self.first_local_index } - /// Get an iterator over all the early-bound regions that have names. + /// Gets an iterator over all the early-bound regions that have names. pub fn named_universal_regions<'s>( &'s self, ) -> impl Iterator<Item = (ty::Region<'tcx>, ty::RegionVid)> + 's { @@ -692,7 +692,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'gcx, 'tcx> { /// indices vector. Typically, we identify late-bound regions as we process the inputs and /// outputs of the closure/function. However, sometimes there are late-bound regions which do /// not appear in the fn parameters but which are nonetheless in scope. The simplest case of - /// this are unused functions, like fn foo<'a>() { } (see eg., #51351). 
Despite not being used, + /// this are unused functions, like fn foo<'a>() { } (see e.g., #51351). Despite not being used, /// users can still reference these regions (e.g., let x: &'a u32 = &22;), so we need to create /// entries for them and store them in the indices map. This code iterates over the complete /// set of late-bound regions and checks for any that we have not yet seen, adding them to the @@ -746,7 +746,7 @@ impl<'tcx> UniversalRegionIndices<'tcx> { } } - /// Replace all free regions in `value` with region vids, as + /// Replaces all free regions in `value` with region vids, as /// returned by `to_region_vid`. pub fn fold_to_region_vids<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value: &T) -> T where diff --git a/src/librustc_mir/borrow_check/path_utils.rs b/src/librustc_mir/borrow_check/path_utils.rs index 1cea9f662d3..9073ae6bed5 100644 --- a/src/librustc_mir/borrow_check/path_utils.rs +++ b/src/librustc_mir/borrow_check/path_utils.rs @@ -8,7 +8,7 @@ use rustc::mir::{ProjectionElem, BorrowKind}; use rustc::ty::TyCtxt; use rustc_data_structures::graph::dominators::Dominators; -/// Returns true if the borrow represented by `kind` is +/// Returns `true` if the borrow represented by `kind` is /// allowed to be split into separate Reservation and /// Activation phases. pub(super) fn allow_two_phase_borrow<'a, 'tcx, 'gcx: 'tcx>( diff --git a/src/librustc_mir/borrow_check/place_ext.rs b/src/librustc_mir/borrow_check/place_ext.rs index bad236a6f52..d6d2861b557 100644 --- a/src/librustc_mir/borrow_check/place_ext.rs +++ b/src/librustc_mir/borrow_check/place_ext.rs @@ -6,7 +6,7 @@ use crate::borrow_check::borrow_set::LocalsStateAtExit; /// Extension methods for the `Place` type. crate trait PlaceExt<'tcx> { - /// Returns true if we can safely ignore borrows of this place. + /// Returns `true` if we can safely ignore borrows of this place. /// This is true whenever there is no action that the user can do /// to the place `self` that would invalidate the borrow. 
This is true /// for borrows of raw pointer dereferents as well as shared references. diff --git a/src/librustc_mir/borrow_check/places_conflict.rs b/src/librustc_mir/borrow_check/places_conflict.rs index cd33f22bf3c..b5175cf41dd 100644 --- a/src/librustc_mir/borrow_check/places_conflict.rs +++ b/src/librustc_mir/borrow_check/places_conflict.rs @@ -275,10 +275,10 @@ fn place_components_conflict<'gcx, 'tcx>( /// A linked list of places running up the stack; begins with the /// innermost place and extends to projections (e.g., `a.b` would have -/// the place `a` with a "next" pointer to `a.b`). Created by +/// the place `a` with a "next" pointer to `a.b`). Created by /// `unroll_place`. /// -/// N.B., this particular impl strategy is not the most obvious. It was +/// N.B., this particular impl strategy is not the most obvious. It was /// chosen because it makes a measurable difference to NLL /// performance, as this code (`borrow_conflicts_with_place`) is somewhat hot. struct PlaceComponents<'p, 'tcx: 'p> { diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index cf051ba2e0f..cb71ff27ceb 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -727,7 +727,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// /// The return value is a list of "otherwise" blocks. These are /// points in execution where we found that *NONE* of the - /// candidates apply. In principle, this means that the input + /// candidates apply. In principle, this means that the input /// list was not exhaustive, though at present we sometimes are /// not smart enough to recognize all exhaustive inputs. /// @@ -874,7 +874,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } } - /// This is the most subtle part of the matching algorithm. At + /// This is the most subtle part of the matching algorithm. 
At /// this point, the input candidates have been fully simplified, /// and so we know that all remaining match-pairs require some /// sort of test. To decide what test to do, we take the highest @@ -894,10 +894,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// 4. etc. /// /// Once we know what sort of test we are going to perform, this - /// test may also help us with other candidates. So we walk over + /// test may also help us with other candidates. So we walk over /// the candidates (from high to low priority) and check. This /// gives us, for each outcome of the test, a transformed list of - /// candidates. For example, if we are testing the current + /// candidates. For example, if we are testing the current /// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1 /// @ 22}`, then we would have a resulting candidate of `{(x.0 as /// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now @@ -1093,7 +1093,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// for the case where the guard fails. /// /// Note: we check earlier that if there is a guard, there cannot - /// be move bindings. This isn't really important for the + /// be move bindings. This isn't really important for the /// self-consistency of this fn, but the reason for it should be /// clear: after we've done the assignments, if there were move /// bindings, further tests would be a use-after-move (which would diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index efac4457b8e..75189777aa3 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -443,7 +443,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// appropriate. 
/// /// So, for example, if this candidate is `x @ Some(P0)` and the - /// test is a variant test, then we would add `(x as Option).0 @ + /// test is a variant test, then we would add `(x as Option).0 @ /// P0` to the `resulting_candidates` entry corresponding to the /// variant `Some`. /// diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs index 096020b0f73..900f7f1744a 100644 --- a/src/librustc_mir/build/misc.rs +++ b/src/librustc_mir/build/misc.rs @@ -9,7 +9,7 @@ use rustc::mir::*; use syntax_pos::{Span, DUMMY_SP}; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { - /// Add a new temporary value of type `ty` storing the result of + /// Adds a new temporary value of type `ty` storing the result of /// evaluating `expr`. /// /// N.B., **No cleanup is scheduled for this temporary.** You should diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index a52b032aeb5..3a58c95dd84 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -26,7 +26,7 @@ use syntax_pos::Span; use super::lints; -/// Construct the MIR for a given def-id. +/// Construct the MIR for a given `DefId`. pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'tcx> { let id = tcx.hir().as_local_node_id(def_id).unwrap(); @@ -173,9 +173,9 @@ pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'t }) } -/// A pass to lift all the types and substitutions in a Mir +/// A pass to lift all the types and substitutions in a MIR /// to the global tcx. Sadly, we don't have a "folder" that -/// can change 'tcx so we have to transmute afterwards. +/// can change `'tcx` so we have to transmute afterwards. 
struct GlobalizeMir<'a, 'gcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'gcx>, span: Span @@ -335,47 +335,47 @@ struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { fn_span: Span, arg_count: usize, - /// the current set of scopes, updated as we traverse; - /// see the `scope` module for more details + /// The current set of scopes, updated as we traverse; + /// see the `scope` module for more details. scopes: Vec<scope::Scope<'tcx>>, - /// the block-context: each time we build the code within an hair::Block, + /// The block-context: each time we build the code within an hair::Block, /// we push a frame here tracking whether we are building a statement or /// if we are pushing the tail expression of the block. This is used to /// embed information in generated temps about whether they were created /// for a block tail expression or not. /// /// It would be great if we could fold this into `self.scopes` - /// somehow; but right now I think that is very tightly tied to + /// somehow, but right now I think that is very tightly tied to /// the code generation in ways that we cannot (or should not) /// start just throwing new entries onto that vector in order to /// distinguish the context of EXPR1 from the context of EXPR2 in - /// `{ STMTS; EXPR1 } + EXPR2` + /// `{ STMTS; EXPR1 } + EXPR2`. block_context: BlockContext, /// The current unsafe block in scope, even if it is hidden by - /// a PushUnsafeBlock + /// a `PushUnsafeBlock`. unpushed_unsafe: Safety, - /// The number of `push_unsafe_block` levels in scope + /// The number of `push_unsafe_block` levels in scope. push_unsafe_count: usize, - /// the current set of breakables; see the `scope` module for more - /// details + /// The current set of breakables; see the `scope` module for more + /// details. 
breakable_scopes: Vec<scope::BreakableScope<'tcx>>, - /// the vector of all scopes that we have created thus far; - /// we track this for debuginfo later + /// The vector of all scopes that we have created thus far; + /// we track this for debuginfo later. source_scopes: IndexVec<SourceScope, SourceScopeData>, source_scope_local_data: IndexVec<SourceScope, SourceScopeLocalData>, source_scope: SourceScope, - /// the guard-context: each time we build the guard expression for + /// The guard-context: each time we build the guard expression for /// a match arm, we push onto this stack, and then pop when we /// finish building it. guard_context: Vec<GuardFrame>, - /// Maps node ids of variable bindings to the `Local`s created for them. + /// Maps `NodeId`s of variable bindings to the `Local`s created for them. /// (A match binding can have two locals; the 2nd is for the arm's guard.) var_indices: NodeMap<LocalsForNode>, local_decls: IndexVec<Local, LocalDecl<'tcx>>, @@ -383,12 +383,12 @@ struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { upvar_decls: Vec<UpvarDecl>, unit_temp: Option<Place<'tcx>>, - /// cached block with the RESUME terminator; this is created + /// Cached block with the `RESUME` terminator; this is created /// when first set of cleanups are built. cached_resume_block: Option<BasicBlock>, - /// cached block with the RETURN terminator + /// Cached block with the `RETURN` terminator. cached_return_block: Option<BasicBlock>, - /// cached block with the UNREACHABLE terminator + /// Cached block with the `UNREACHABLE` terminator. cached_unreachable_block: Option<BasicBlock>, } @@ -407,7 +407,7 @@ impl BlockContext { fn push(&mut self, bf: BlockFrame) { self.0.push(bf); } fn pop(&mut self) -> Option<BlockFrame> { self.0.pop() } - /// Traverses the frames on the BlockContext, searching for either + /// Traverses the frames on the `BlockContext`, searching for either /// the first block-tail expression frame with no intervening /// statement frame. 
/// @@ -453,13 +453,13 @@ impl BlockContext { #[derive(Debug)] enum LocalsForNode { - /// In the usual case, a node-id for an identifier maps to at most - /// one Local declaration. + /// In the usual case, a `NodeId` for an identifier maps to at most + /// one `Local` declaration. One(Local), /// The exceptional case is identifiers in a match arm's pattern /// that are referenced in a guard of that match arm. For these, - /// we can have `2+k` Locals, where `k` is the number of candidate + /// we can have `2 + k` Locals, where `k` is the number of candidate /// patterns (separated by `|`) in the arm. /// /// * `for_arm_body` is the Local used in the arm body (which is @@ -505,11 +505,11 @@ struct GuardFrame { /// P1(id1) if (... (match E2 { P2(id2) if ... => B2 })) => B1, /// } /// - /// here, when building for FIXME + /// here, when building for FIXME. locals: Vec<GuardFrameLocal>, } -/// ForGuard indicates whether we are talking about: +/// `ForGuard` indicates whether we are talking about: /// 1. the temp for a local binding used solely within guard expressions, /// 2. the temp that holds reference to (1.), which is actually what the /// guard expressions see, or diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 3872f5db262..3392495f7a1 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -210,7 +210,7 @@ impl DropKind { } impl<'tcx> Scope<'tcx> { - /// Invalidate all the cached blocks in the scope. + /// Invalidates all the cached blocks in the scope. /// /// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a /// larger extent of code. @@ -390,7 +390,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Branch out of `block` to `target`, exiting all scopes up to - /// and including `region_scope`. This will insert whatever drops are + /// and including `region_scope`. This will insert whatever drops are /// needed. See module comment for details. 
pub fn exit_scope(&mut self, span: Span, @@ -846,7 +846,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { next_target.unit() } - /// Create an Assert terminator and return the success block. + /// Creates an Assert terminator and return the success block. /// If the boolean condition operand is not the expected value, /// a runtime panic will be caused with the given message. pub fn assert(&mut self, block: BasicBlock, diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs index d1b4486dd93..fb0c19f764c 100644 --- a/src/librustc_mir/const_eval.rs +++ b/src/librustc_mir/const_eval.rs @@ -38,7 +38,7 @@ const DETECTOR_SNAPSHOT_PERIOD: isize = 256; /// `simd_shuffle` and const patterns in match arms. /// /// The function containing the `match` that is currently being analyzed may have generic bounds -/// that inform us about the generic bounds of the constant. E.g. using an associated constant +/// that inform us about the generic bounds of the constant. E.g., using an associated constant /// of a function's generic parameter will require knowledge about the bounds on the generic /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument. pub(crate) fn mk_eval_cx<'a, 'mir, 'tcx>( @@ -464,7 +464,7 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx> Ok(()) } - /// Called immediately before a stack frame gets popped + /// Called immediately before a stack frame gets popped. #[inline(always)] fn stack_pop( _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, @@ -474,7 +474,7 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx> } } -/// Project to a field of a (variant of a) const +/// Projects to a field of a (variant of a) const. 
pub fn const_field<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -542,10 +542,10 @@ fn validate_and_turn_into_const<'a, 'tcx>( op, path, Some(&mut ref_tracking), - /* const_mode */ true, + true, // const mode )?; } - // Now that we validated, turn this into a proper constant + // Now that we validated, turn this into a proper constant. let def_id = cid.instance.def.def_id(); let normalize = tcx.is_static(def_id).is_none() && cid.promoted.is_none(); op_to_const(&ecx, op, normalize) diff --git a/src/librustc_mir/dataflow/at_location.rs b/src/librustc_mir/dataflow/at_location.rs index d0b9fbc99f0..d43fa4257e0 100644 --- a/src/librustc_mir/dataflow/at_location.rs +++ b/src/librustc_mir/dataflow/at_location.rs @@ -26,14 +26,14 @@ pub trait FlowsAtLocation { /// effects don't apply to the unwind edge). fn reset_to_exit_of(&mut self, bb: BasicBlock); - /// Build gen + kill sets for statement at `loc`. + /// Builds gen and kill sets for statement at `loc`. /// /// Note that invoking this method alone does not change the /// `curr_state` -- you must invoke `apply_local_effect` /// afterwards. fn reconstruct_statement_effect(&mut self, loc: Location); - /// Build gen + kill sets for terminator for `loc`. + /// Builds gen and kill sets for terminator for `loc`. /// /// Note that invoking this method alone does not change the /// `curr_state` -- you must invoke `apply_local_effect` diff --git a/src/librustc_mir/dataflow/drop_flag_effects.rs b/src/librustc_mir/dataflow/drop_flag_effects.rs index 49499cf928d..151a004dce9 100644 --- a/src/librustc_mir/dataflow/drop_flag_effects.rs +++ b/src/librustc_mir/dataflow/drop_flag_effects.rs @@ -44,8 +44,8 @@ pub fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>, /// In both cases, the contents can only be accessed if and only if /// their parents are initialized. This implies for example that there /// is no need to maintain separate drop flags to track such state. 
-/// -/// FIXME: we have to do something for moving slice patterns. +// +// FIXME: we have to do something for moving slice patterns. fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, mir: &Mir<'tcx>, place: &mir::Place<'tcx>) -> bool { diff --git a/src/librustc_mir/dataflow/graphviz.rs b/src/librustc_mir/dataflow/graphviz.rs index 9d9f18d4b0d..c7f6983be61 100644 --- a/src/librustc_mir/dataflow/graphviz.rs +++ b/src/librustc_mir/dataflow/graphviz.rs @@ -143,7 +143,7 @@ where MWF: MirWithFlowState<'tcx>, Ok(()) } - /// Build the verbose row: full MIR data, and detailed gen/kill/entry sets + /// Builds the verbose row: full MIR data, and detailed gen/kill/entry sets. fn node_label_verbose_row<W: io::Write>(&self, n: &Node, w: &mut W, @@ -193,7 +193,7 @@ where MWF: MirWithFlowState<'tcx>, Ok(()) } - /// Build the summary row: terminator, gen/kill/entry bit sets + /// Builds the summary row: terminator, gen/kill/entry bit sets. fn node_label_final_row<W: io::Write>(&self, n: &Node, w: &mut W, diff --git a/src/librustc_mir/dataflow/impls/mod.rs b/src/librustc_mir/dataflow/impls/mod.rs index c8965b9f7f4..cc92ebfab89 100644 --- a/src/librustc_mir/dataflow/impls/mod.rs +++ b/src/librustc_mir/dataflow/impls/mod.rs @@ -143,13 +143,6 @@ impl<'a, 'gcx, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'gcx, 't /// initialized upon reaching a particular point in the control flow /// for a function. /// -/// FIXME: Note that once flow-analysis is complete, this should be -/// the set-complement of MaybeUninitializedPlaces; thus we can get rid -/// of one or the other of these two. I'm inclined to get rid of -/// MaybeUninitializedPlaces, simply because the sets will tend to be -/// smaller in this analysis and thus easier for humans to process -/// when debugging. -/// /// For example, in code like the following, we have corresponding /// dataflow information shown in the right-hand comments. 
/// diff --git a/src/librustc_mir/dataflow/mod.rs b/src/librustc_mir/dataflow/mod.rs index 1853b60efd7..c24a776605c 100644 --- a/src/librustc_mir/dataflow/mod.rs +++ b/src/librustc_mir/dataflow/mod.rs @@ -574,21 +574,21 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// the block's start, not necessarily the state immediately prior /// to the statement/terminator under analysis. /// - /// In either case, the passed reference is mutable; but this is a + /// In either case, the passed reference is mutable, but this is a /// wart from using the `BlockSets` type in the API; the intention /// is that the `statement_effect` and `terminator_effect` methods /// mutate only the gen/kill sets. - /// - /// FIXME: We should consider enforcing the intention described in - /// the previous paragraph by passing the three sets in separate - /// parameters to encode their distinct mutabilities. + // + // FIXME: we should consider enforcing the intention described in + // the previous paragraph by passing the three sets in separate + // parameters to encode their distinct mutabilities. fn accumulates_intrablock_state() -> bool { false } /// A name describing the dataflow analysis that this - /// BitDenotation is supporting. The name should be something - /// suitable for plugging in as part of a filename e.g., avoid + /// `BitDenotation` is supporting. The name should be something + /// suitable for plugging in as part of a filename (i.e., avoid /// space-characters or other things that tend to look bad on a - /// file system, like slashes or periods. It is also better for + /// file system, like slashes or periods). It is also better for /// the name to be reasonably short, again because it will be /// plugged into a filename. 
fn name() -> &'static str; @@ -676,11 +676,11 @@ pub trait BitDenotation<'tcx>: BitSetOperator { /// flow-dependent, the current MIR cannot encode them via just /// GEN and KILL sets attached to the block, and so instead we add /// this extra machinery to represent the flow-dependent effect. - /// - /// FIXME: Right now this is a bit of a wart in the API. It might - /// be better to represent this as an additional gen- and - /// kill-sets associated with each edge coming out of the basic - /// block. + // + // FIXME: right now this is a bit of a wart in the API. It might + // be better to represent this as an additional gen- and + // kill-sets associated with each edge coming out of the basic + // block. fn propagate_call_return( &self, in_out: &mut BitSet<Self::Idx>, diff --git a/src/librustc_mir/dataflow/move_paths/abs_domain.rs b/src/librustc_mir/dataflow/move_paths/abs_domain.rs index ff594fd3bed..6dcc0325ec1 100644 --- a/src/librustc_mir/dataflow/move_paths/abs_domain.rs +++ b/src/librustc_mir/dataflow/move_paths/abs_domain.rs @@ -1,12 +1,12 @@ //! The move-analysis portion of borrowck needs to work in an abstract -//! domain of lifted Places. Most of the Place variants fall into a +//! domain of lifted `Place`s. Most of the `Place` variants fall into a //! one-to-one mapping between the concrete and abstract (e.g., a -//! field-deref on a local-variable, `x.field`, has the same meaning -//! in both domains). Indexed-Projections are the exception: `a[x]` +//! field-deref on a local variable, `x.field`, has the same meaning +//! in both domains). Indexed projections are the exception: `a[x]` //! needs to be treated as mapping to the same move path as `a[y]` as -//! well as `a[13]`, et cetera. +//! well as `a[13]`, etc. //! -//! (In theory the analysis could be extended to work with sets of +//! (In theory, the analysis could be extended to work with sets of //! paths, so that `a[0]` and `a[13]` could be kept distinct, while //! 
`a[x]` would still overlap them both. But that is not what this //! representation does today.) diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 47be8223e8a..cd937d702fd 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -1,8 +1,6 @@ -//! This module contains the code to convert from the wacky tcx data -//! structures into the hair. The `builder` is generally ignorant of -//! the tcx etc, and instead goes through the `Cx` for most of its -//! work. -//! +//! This module contains the functionality to convert from the wacky tcx data +//! structures into the HAIR. The `builder` is generally ignorant of the tcx, +//! etc., and instead goes through the `Cx` for most of its work. use crate::hair::*; use crate::hair::util::UserAnnotatedTyHelpers; @@ -44,10 +42,10 @@ pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { /// What kind of body is being compiled. pub body_owner_kind: hir::BodyOwnerKind, - /// True if this constant/function needs overflow checks. + /// Whether this constant/function needs overflow checks. check_overflow: bool, - /// See field with the same name on `Mir` + /// See field with the same name on `Mir`. control_flow_destroyed: Vec<(Span, String)>, } @@ -100,7 +98,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { - /// Normalizes `ast` into the appropriate `mirror` type. + /// Normalizes `ast` into the appropriate "mirror" type.
pub fn mirror<M: Mirror<'tcx>>(&mut self, ast: M) -> M::Output { ast.make_mirror(self) } diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index f0f8acb31df..e615b009cf3 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -358,7 +358,7 @@ impl<'tcx> ExprRef<'tcx> { /// Mirroring is gradual: when you mirror an outer expression like `e1 /// + e2`, the references to the inner expressions `e1` and `e2` are /// `ExprRef<'tcx>` instances, and they may or may not be eagerly -/// mirrored. This allows a single AST node from the compiler to +/// mirrored. This allows a single AST node from the compiler to /// expand into one or more Hair nodes, which lets the Hair nodes be /// simpler. pub trait Mirror<'tcx> { diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index 5779a032acc..c52a57e17c6 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -6,8 +6,8 @@ /// /// The algorithm implemented here is a modified version of the one described in: /// http://moscova.inria.fr/~maranget/papers/warn/index.html -/// However, to save future implementors from reading the original paper, I'm going -/// to summarise the algorithm here to hopefully save time and be a little clearer +/// However, to save future implementors from reading the original paper, we +/// summarise the algorithm here to hopefully save time and be a little clearer /// (without being so rigorous). /// /// The core of the algorithm revolves about a "usefulness" check. In particular, we @@ -351,7 +351,7 @@ pub struct MatchCheckCtxt<'a, 'tcx: 'a> { /// The module in which the match occurs. This is necessary for /// checking inhabited-ness of types because whether a type is (visibly) /// inhabited can depend on whether it was defined in the current module or - /// not. eg. `struct Foo { _private: ! }` cannot be seen to be empty + /// not. E.g., `struct Foo { _private: ! 
}` cannot be seen to be empty /// outside it's module and should not be matchable with an empty match /// statement. pub module: DefId, @@ -896,7 +896,7 @@ impl<'tcx> IntRange<'tcx> { } } - /// Convert a `RangeInclusive` to a `ConstantValue` or inclusive `ConstantRange`. + /// Converts a `RangeInclusive` to a `ConstantValue` or inclusive `ConstantRange`. fn range_to_ctor( tcx: TyCtxt<'_, 'tcx, 'tcx>, ty: Ty<'tcx>, @@ -912,7 +912,7 @@ impl<'tcx> IntRange<'tcx> { } } - /// Return a collection of ranges that spans the values covered by `ranges`, subtracted + /// Returns a collection of ranges that spans the values covered by `ranges`, subtracted /// by the values covered by `self`: i.e., `ranges \ self` (in set notation). fn subtract_from(self, tcx: TyCtxt<'_, 'tcx, 'tcx>, @@ -1033,13 +1033,13 @@ fn compute_missing_ctors<'a, 'tcx: 'a>( } } -/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html +/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html. /// The algorithm from the paper has been modified to correctly handle empty /// types. The changes are: /// (0) We don't exit early if the pattern matrix has zero rows. We just /// continue to recurse over columns. /// (1) all_constructors will only return constructors that are statically -/// possible. eg. it will only return Ok for Result<T, !> +/// possible. E.g., it will only return `Ok` for `Result<T, !>`. /// /// This finds whether a (row) vector `v` of patterns is 'useful' in relation /// to a set of such vectors `m` - this is defined as there being a set of @@ -1047,8 +1047,8 @@ fn compute_missing_ctors<'a, 'tcx: 'a>( /// /// All the patterns at each column of the `matrix ++ v` matrix must /// have the same type, except that wildcard (PatternKind::Wild) patterns -/// with type TyErr are also allowed, even if the "type of the column" -/// is not TyErr. 
That is used to represent private fields, as using their +/// with type `TyErr` are also allowed, even if the "type of the column" +/// is not `TyErr`. That is used to represent private fields, as using their /// real type would assert that they are inhabited. /// /// This is used both for reachability checking (if a pattern isn't useful in @@ -1299,7 +1299,7 @@ fn is_useful_specialized<'p, 'a: 'p, 'tcx: 'a>( /// Slice patterns, however, can match slices of different lengths. For instance, /// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on. /// -/// Returns None in case of a catch-all, which can't be specialized. +/// Returns `None` in case of a catch-all, which can't be specialized. fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>, pat: &Pattern<'tcx>, pcx: PatternContext<'_>) @@ -1614,7 +1614,7 @@ fn split_grouped_constructors<'p, 'a: 'p, 'tcx: 'a>( split_ctors } -/// Check whether there exists any shared value in either `ctor` or `pat` by intersecting them. +/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them. fn constructor_intersects_pattern<'p, 'a: 'p, 'tcx: 'a>( tcx: TyCtxt<'a, 'tcx, 'tcx>, ctor: &Constructor<'tcx>, diff --git a/src/librustc_mir/hair/pattern/check_match.rs b/src/librustc_mir/hair/pattern/check_match.rs index 978051aab59..6addfa8589b 100644 --- a/src/librustc_mir/hair/pattern/check_match.rs +++ b/src/librustc_mir/hair/pattern/check_match.rs @@ -537,10 +537,9 @@ fn check_legality_of_move_bindings(cx: &MatchVisitor<'_, '_>, } } -/// Ensures that a pattern guard doesn't borrow by mutable reference or -/// assign. -/// -/// FIXME: this should be done by borrowck. +/// Ensures that a pattern guard doesn't borrow by mutable reference or assign. +// +// FIXME: this should be done by borrowck. 
fn check_for_mutation_in_guard(cx: &MatchVisitor<'_, '_>, guard: &hir::Guard) { let mut checker = MutationChecker { cx, diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index 84d8f32954c..dd12cd7781b 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -1,4 +1,4 @@ -//! Code to validate patterns/matches +//! Validation of patterns/matches. mod _match; mod check_match; @@ -116,7 +116,7 @@ pub enum PatternKind<'tcx> { user_ty_span: Span, }, - /// x, ref x, x @ P, etc + /// `x`, `ref x`, `x @ P`, etc. Binding { mutability: Mutability, name: ast::Name, @@ -126,7 +126,8 @@ pub enum PatternKind<'tcx> { subpattern: Option<Pattern<'tcx>>, }, - /// Foo(...) or Foo{...} or Foo, where `Foo` is a variant name from an adt with >1 variants + /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with + /// multiple variants. Variant { adt_def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>, @@ -134,12 +135,13 @@ pub enum PatternKind<'tcx> { subpatterns: Vec<FieldPattern<'tcx>>, }, - /// (...), Foo(...), Foo{...}, or Foo, where `Foo` is a variant name from an adt with 1 variant + /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with + /// a single variant. Leaf { subpatterns: Vec<FieldPattern<'tcx>>, }, - /// box P, &P, &mut P, etc + /// `box P`, `&P`, `&mut P`, etc. Deref { subpattern: Pattern<'tcx>, }, @@ -150,7 +152,7 @@ pub enum PatternKind<'tcx> { Range(PatternRange<'tcx>), - /// matches against a slice, checking the length and extracting elements. + /// Matches against a slice, checking the length and extracting elements. /// irrefutable when there is a slice pattern and both `prefix` and `suffix` are empty. /// e.g., `&[ref xs..]`. Slice { @@ -159,7 +161,7 @@ pub enum PatternKind<'tcx> { suffix: Vec<Pattern<'tcx>>, }, - /// fixed match against an array, irrefutable + /// Fixed match against an array; irrefutable. 
Array { prefix: Vec<Pattern<'tcx>>, slice: Option<Pattern<'tcx>>, @@ -767,7 +769,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { /// Takes a HIR Path. If the path is a constant, evaluates it and feeds /// it to `const_to_pat`. Any other path (like enum variants without fields) - /// is converted to the corresponding pattern via `lower_variant_or_leaf` + /// is converted to the corresponding pattern via `lower_variant_or_leaf`. fn lower_path(&mut self, qpath: &hir::QPath, id: hir::HirId, @@ -851,8 +853,8 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { } /// Converts literals, paths and negation of literals to patterns. - /// The special case for negation exists to allow things like -128i8 - /// which would overflow if we tried to evaluate 128i8 and then negate + /// The special case for negation exists to allow things like `-128_i8` + /// which would overflow if we tried to evaluate `128_i8` and then negate /// afterwards. fn lower_lit(&mut self, expr: &'tcx hir::Expr) -> PatternKind<'tcx> { match expr.node { @@ -901,7 +903,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { /// Converts an evaluated constant to a pattern (if possible). /// This means aggregate values (like structs and enums) are converted - /// to a pattern that matches the value (as if you'd compare via eq). + /// to a pattern that matches the value (as if you'd compared via equality). fn const_to_pat( &self, instance: ty::Instance<'tcx>, diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs index c87338fb0ce..0c1b5d65b8b 100644 --- a/src/librustc_mir/interpret/eval_context.rs +++ b/src/librustc_mir/interpret/eval_context.rs @@ -55,7 +55,7 @@ pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> { /// The MIR for the function called on this frame. pub mir: &'mir mir::Mir<'tcx>, - /// The def_id and substs of the current function + /// The def_id and substs of the current function. pub instance: ty::Instance<'tcx>, /// The span of the call site. 
@@ -64,7 +64,7 @@ pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> { //////////////////////////////////////////////////////////////////////////////// // Return place and locals //////////////////////////////////////////////////////////////////////////////// - /// Work to perform when returning from this function + /// Work to perform when returning from this function. pub return_to_block: StackPopCleanup, /// The location where the result of the current stack frame should be written to, @@ -88,7 +88,7 @@ pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> { /// The index of the currently evaluated statement. pub stmt: usize, - /// Extra data for the machine + /// Extra data for the machine. pub extra: Extra, } @@ -99,7 +99,7 @@ pub enum StackPopCleanup { /// we can validate it at that layout. Goto(Option<mir::BasicBlock>), /// Just do nohing: Used by Main and for the box_alloc hook in miri. - /// `cleanup` says whether locals are deallocated. Static computation + /// `cleanup` says whether locals are deallocated. Static computation /// wants them leaked to intern what they need (and just throw away /// the entire `ecx` when it is done). None { cleanup: bool }, @@ -339,7 +339,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tc Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self)) } - /// Return the actual dynamic size and alignment of the place at the given type. + /// Returns the actual dynamic size and alignment of the place at the given type. /// Only the "meta" (metadata) part of the place matters. /// This can fail to provide an answer for extern types. pub(super) fn size_and_align_of( diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs index d8778dfeef7..1d482b7b648 100644 --- a/src/librustc_mir/interpret/intrinsics.rs +++ b/src/librustc_mir/interpret/intrinsics.rs @@ -1,5 +1,5 @@ //! Intrinsics and other functions that the miri engine executes without -//! 
looking at their MIR. Intrinsics/functions supported here are shared by CTFE +//! looking at their MIR. Intrinsics/functions supported here are shared by CTFE //! and miri. use syntax::symbol::Symbol; @@ -37,7 +37,7 @@ fn numeric_intrinsic<'tcx, Tag>( } impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { - /// Returns whether emulation happened. + /// Returns `true` if emulation happened. pub fn emulate_intrinsic( &mut self, instance: ty::Instance<'tcx>, @@ -169,7 +169,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> } /// "Intercept" a function call because we have something special to do for it. - /// Returns whether an intercept happened. + /// Returns `true` if an intercept happened. pub fn hook_fn( &mut self, instance: ty::Instance<'tcx>, diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs index 26d526a6f5f..8f34b832f0b 100644 --- a/src/librustc_mir/interpret/machine.rs +++ b/src/librustc_mir/interpret/machine.rs @@ -21,23 +21,23 @@ pub trait MayLeak: Copy { /// The functionality needed by memory to manage its allocations pub trait AllocMap<K: Hash + Eq, V> { - /// Test if the map contains the given key. + /// Tests if the map contains the given key. /// Deliberately takes `&mut` because that is sufficient, and some implementations /// can be more efficient then (using `RefCell::get_mut`). fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool where K: Borrow<Q>; - /// Insert new entry into the map. + /// Inserts a new entry into the map. fn insert(&mut self, k: K, v: V) -> Option<V>; - /// Remove entry from the map. + /// Removes an entry from the map. fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> where K: Borrow<Q>; - /// Return data based the keys and values in the map. + /// Returns data based the keys and values in the map. 
fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>; - /// Return a reference to entry `k`. If no such entry exists, call + /// Returns a reference to entry `k`. If no such entry exists, call /// `vacant` and either forward its error, or add its result to the map /// and return a reference to *that*. fn get_or<E>( @@ -46,7 +46,7 @@ pub trait AllocMap<K: Hash + Eq, V> { vacant: impl FnOnce() -> Result<V, E> ) -> Result<&V, E>; - /// Return a mutable reference to entry `k`. If no such entry exists, call + /// Returns a mutable reference to entry `k`. If no such entry exists, call /// `vacant` and either forward its error, or add its result to the map /// and return a reference to *that*. fn get_mut_or<E>( @@ -62,7 +62,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { /// Additional memory kinds a machine wishes to distinguish from the builtin ones type MemoryKinds: ::std::fmt::Debug + MayLeak + Eq + 'static; - /// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows" + /// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows" /// <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>. /// The `default()` is used for pointers to consts, statics, vtables and functions. type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static; @@ -70,7 +70,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { /// Extra data stored in every call frame. type FrameExtra; - /// Extra data stored in memory. A reference to this is available when `AllocExtra` + /// Extra data stored in memory. A reference to this is available when `AllocExtra` /// gets initialized, so you can e.g., have an `Rc` here if there is global state you /// need access to in the `AllocExtra` hooks. 
type MemoryExtra: Default; @@ -105,7 +105,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { /// /// Returns either the mir to use for the call, or `None` if execution should /// just proceed (which usually means this hook did all the work that the - /// called function should usually have done). In the latter case, it is + /// called function should usually have done). In the latter case, it is /// this hook's responsibility to call `goto_block(ret)` to advance the instruction pointer! /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR /// nor just jump to `ret`, but instead push their own stack frame.) @@ -170,7 +170,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { dest: PlaceTy<'tcx, Self::PointerTag>, ) -> EvalResult<'tcx>; - /// Add the tag for a newly allocated pointer. + /// Adds the tag for a newly allocated pointer. fn tag_new_allocation( ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, ptr: Pointer, @@ -178,7 +178,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { ) -> Pointer<Self::PointerTag>; /// Executed when evaluating the `*` operator: Following a reference. - /// This has the chance to adjust the tag. It should not change anything else! + /// This has the chance to adjust the tag. It should not change anything else! /// `mutability` can be `None` in case a raw ptr is being dereferenced. #[inline] fn tag_dereference( @@ -189,7 +189,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized { Ok(place.ptr) } - /// Execute a retagging operation + /// Executes a retagging operation #[inline] fn retag( _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>, diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs index 3832d7e8d62..88b936afaa4 100644 --- a/src/librustc_mir/interpret/memory.rs +++ b/src/librustc_mir/interpret/memory.rs @@ -3,7 +3,7 @@ //! Generally, we use `Pointer` to denote memory addresses. However, some operations //! have a "size"-like parameter, and they take `Scalar` for the address because //! 
if the size is 0, then the pointer can also be a (properly aligned, non-NULL) -//! integer. It is crucial that these operations call `check_align` *before* +//! integer. It is crucial that these operations call `check_align` *before* //! short-circuiting the empty case! use std::collections::VecDeque; @@ -47,10 +47,10 @@ impl<T: MayLeak> MayLeak for MemoryKind<T> { // `Memory` has to depend on the `Machine` because some of its operations // (e.g., `get`) call a `Machine` hook. pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> { - /// Allocations local to this instance of the miri engine. The kind + /// Allocations local to this instance of the miri engine. The kind /// helps ensure that the same mechanism is used for allocation and - /// deallocation. When an allocation is not found here, it is a - /// static and looked up in the `tcx` for read access. Some machines may + /// deallocation. When an allocation is not found here, it is a + /// static and looked up in the `tcx` for read access. Some machines may /// have to mutate this map even on a read-only access to a static (because /// they do pointer provenance tracking and the allocations in `tcx` have /// the wrong type), so we let the machine override this type. @@ -240,7 +240,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> { Ok(()) } - /// Check that the pointer is aligned AND non-NULL. This supports ZSTs in two ways: + /// Checks that the pointer is aligned AND non-NULL. This supports ZSTs in two ways: /// You can pass a scalar, and a `Pointer` does not have to actually still be allocated. pub fn check_align( &self, @@ -284,7 +284,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> { } } - /// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end + /// Checks if the pointer is "in-bounds". 
Notice that a pointer pointing at the end /// of an allocation (i.e., at the first *inaccessible* location) *is* considered /// in-bounds! This follows C's/LLVM's rules. /// If you want to check bounds before doing a memory access, better first obtain @@ -659,7 +659,7 @@ where } } -/// Reading and writing +/// Reading and writing. impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> { pub fn copy( &mut self, diff --git a/src/librustc_mir/interpret/operand.rs b/src/librustc_mir/interpret/operand.rs index 37e421c2e73..c0b26442dd9 100644 --- a/src/librustc_mir/interpret/operand.rs +++ b/src/librustc_mir/interpret/operand.rs @@ -87,7 +87,7 @@ impl<'tcx, Tag> Immediate<Tag> { } } - /// Convert the immediate into a pointer (or a pointer-sized integer). + /// Converts the immediate into a pointer (or a pointer-sized integer). /// Throws away the second half of a ScalarPair! #[inline] pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> { @@ -97,7 +97,7 @@ impl<'tcx, Tag> Immediate<Tag> { } } - /// Convert the value into its metadata. + /// Converts the value into its metadata. /// Throws away the first half of a ScalarPair! #[inline] pub fn to_meta(self) -> EvalResult<'tcx, Option<Scalar<Tag>>> { @@ -125,7 +125,7 @@ impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> { } /// An `Operand` is the result of computing a `mir::Operand`. It can be immediate, -/// or still in memory. The latter is an optimization, to delay reading that chunk of +/// or still in memory. The latter is an optimization, to delay reading that chunk of /// memory and to avoid having to store arbitrary-sized data here. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum Operand<Tag=(), Id=AllocId> { @@ -247,7 +247,7 @@ pub(super) fn from_known_layout<'tcx>( impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { /// Try reading an immediate in memory; this is interesting particularly for ScalarPair. 
- /// Return None if the layout does not permit loading this as a value. + /// Returns `None` if the layout does not permit loading this as a value. pub(super) fn try_read_immediate_from_mplace( &self, mplace: MPlaceTy<'tcx, M::PointerTag>, diff --git a/src/librustc_mir/interpret/place.rs b/src/librustc_mir/interpret/place.rs index 9ca7f9d8e27..f147e6c639c 100644 --- a/src/librustc_mir/interpret/place.rs +++ b/src/librustc_mir/interpret/place.rs @@ -24,7 +24,7 @@ pub struct MemPlace<Tag=(), Id=AllocId> { /// However, it may never be undef. pub ptr: Scalar<Tag, Id>, pub align: Align, - /// Metadata for unsized places. Interpretation is up to the type. + /// Metadata for unsized places. Interpretation is up to the type. /// Must not be present for sized types, but can be missing for unsized types /// (e.g., `extern type`). pub meta: Option<Scalar<Tag, Id>>, @@ -516,7 +516,7 @@ where }) } - /// Get the place of a field inside the place, and also the field's type. + /// Gets the place of a field inside the place, and also the field's type. /// Just a convenience function, but used quite a bit. /// This is the only projection that might have a side-effect: We cannot project /// into the field of a local `ScalarPair`, we have to first allocate it. @@ -547,7 +547,7 @@ where }) } - /// Project into a place + /// Projects into a place. pub fn place_projection( &mut self, base: PlaceTy<'tcx, M::PointerTag>, @@ -567,7 +567,7 @@ where }) } - /// Evaluate statics and promoteds to an `MPlace`. Used to share some code between + /// Evaluate statics and promoteds to an `MPlace`. Used to share some code between /// `eval_place` and `eval_place_to_op`. pub(super) fn eval_place_to_mplace( &self, @@ -610,7 +610,7 @@ where }) } - /// Compute a place. You should only use this if you intend to write into this + /// Computes a place. You should only use this if you intend to write into this /// place; for reading, a more efficient alternative is `eval_place_for_read`. 
pub fn eval_place( &mut self, @@ -785,7 +785,7 @@ where } } - /// Copy the data from an operand to a place. This does not support transmuting! + /// Copies the data from an operand to a place. This does not support transmuting! /// Use `copy_op_transmute` if the layouts could disagree. #[inline(always)] pub fn copy_op( @@ -803,7 +803,7 @@ where Ok(()) } - /// Copy the data from an operand to a place. This does not support transmuting! + /// Copies the data from an operand to a place. This does not support transmuting! /// Use `copy_op_transmute` if the layouts could disagree. /// Also, if you use this you are responsible for validating that things git copied at the /// right type. @@ -842,7 +842,7 @@ where Ok(()) } - /// Copy the data from an operand to a place. The layouts may disagree, but they must + /// Copies the data from an operand to a place. The layouts may disagree, but they must /// have the same size. pub fn copy_op_transmute( &mut self, @@ -881,7 +881,7 @@ where Ok(()) } - /// Make sure that a place is in memory, and return where it is. + /// Ensures that a place is in memory, and returns where it is. /// If the place currently refers to a local that doesn't yet have a matching allocation, /// create such an allocation. /// This is essentially `force_to_memplace`. diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs index 25f3e4c1f77..0c988eb6810 100644 --- a/src/librustc_mir/interpret/step.rs +++ b/src/librustc_mir/interpret/step.rs @@ -41,7 +41,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> Ok(()) } - /// Returns true as long as there are more things to do. + /// Returns `true` as long as there are more things to do. 
/// /// This is used by [priroda](https://github.com/oli-obk/priroda) pub fn step(&mut self) -> EvalResult<'tcx, bool> { diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs index 63253bae907..65d7060b544 100644 --- a/src/librustc_mir/interpret/traits.rs +++ b/src/librustc_mir/interpret/traits.rs @@ -87,7 +87,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> Ok(vtable) } - /// Return the drop fn instance as well as the actual dynamic type + /// Returns the drop fn instance as well as the actual dynamic type pub fn read_drop_type_from_vtable( &self, vtable: Pointer<M::PointerTag>, diff --git a/src/librustc_mir/interpret/validity.rs b/src/librustc_mir/interpret/validity.rs index 8f5a5bf8ee3..8b97d9ded74 100644 --- a/src/librustc_mir/interpret/validity.rs +++ b/src/librustc_mir/interpret/validity.rs @@ -587,7 +587,7 @@ impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> } impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> { - /// This function checks the data at `op`. `op` is assumed to cover valid memory if it + /// This function checks the data at `op`. `op` is assumed to cover valid memory if it /// is an indirect operand. /// It will error if the bits at the destination do not match the ones described by the layout. /// diff --git a/src/librustc_mir/interpret/visitor.rs b/src/librustc_mir/interpret/visitor.rs index 930bcb44374..24fbb56dc52 100644 --- a/src/librustc_mir/interpret/visitor.rs +++ b/src/librustc_mir/interpret/visitor.rs @@ -16,26 +16,26 @@ use super::{ // that's just more convenient to work with (avoids repeating all the `Machine` bounds). pub trait Value<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>: Copy { - /// Get this value's layout. + /// Gets this value's layout. fn layout(&self) -> TyLayout<'tcx>; - /// Make this into an `OpTy`. + /// Makes this into an `OpTy`. 
fn to_op( self, ecx: &EvalContext<'a, 'mir, 'tcx, M>, ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>>; - /// Create this from an `MPlaceTy`. + /// Creates this from an `MPlaceTy`. fn from_mem_place(mplace: MPlaceTy<'tcx, M::PointerTag>) -> Self; - /// Project to the given enum variant. + /// Projects to the given enum variant. fn project_downcast( self, ecx: &EvalContext<'a, 'mir, 'tcx, M>, variant: VariantIdx, ) -> EvalResult<'tcx, Self>; - /// Project to the n-th field. + /// Projects to the n-th field. fn project_field( self, ecx: &EvalContext<'a, 'mir, 'tcx, M>, @@ -135,19 +135,19 @@ macro_rules! make_value_visitor { -> &$($mutability)* EvalContext<'a, 'mir, 'tcx, M>; // Recursive actions, ready to be overloaded. - /// Visit the given value, dispatching as appropriate to more specialized visitors. + /// Visits the given value, dispatching as appropriate to more specialized visitors. #[inline(always)] fn visit_value(&mut self, v: Self::V) -> EvalResult<'tcx> { self.walk_value(v) } - /// Visit the given value as a union. No automatic recursion can happen here. + /// Visits the given value as a union. No automatic recursion can happen here. #[inline(always)] fn visit_union(&mut self, _v: Self::V) -> EvalResult<'tcx> { Ok(()) } - /// Visit this vale as an aggregate, you are even getting an iterator yielding + /// Visits this value as an aggregate, you are even getting an iterator yielding /// all the fields (still in an `EvalResult`, you have to do error handling yourself). /// Recurses into the fields. #[inline(always)] @@ -173,7 +173,7 @@ macro_rules! make_value_visitor { self.visit_value(new_val) } - /// Called for recursing into the field of a generator. These are not known to be + /// Called for recursing into the field of a generator. These are not known to be /// initialized, so we treat them like unions. #[inline(always)] fn visit_generator_field( @@ -215,8 +215,8 @@ macro_rules!
make_value_visitor { fn visit_scalar(&mut self, _v: Self::V, _layout: &layout::Scalar) -> EvalResult<'tcx> { Ok(()) } - /// Called whenever we reach a value of primitive type. There can be no recursion - /// below such a value. This is the leaf function. + /// Called whenever we reach a value of primitive type. There can be no recursion + /// below such a value. This is the leaf function. /// We do *not* provide an `ImmTy` here because some implementations might want /// to write to the place this primitive lives in. #[inline(always)] diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs index d3381f463f4..6e639c3a117 100644 --- a/src/librustc_mir/monomorphize/item.rs +++ b/src/librustc_mir/monomorphize/item.rs @@ -122,7 +122,7 @@ pub trait MonoItemExt<'a, 'tcx>: fmt::Debug { codegen_fn_attrs.linkage } - /// Returns whether this instance is instantiable - whether it has no unsatisfied + /// Returns `true` if this instance is instantiable - whether it has no unsatisfied /// predicates. /// /// In order to codegen an item, all of its predicates must hold, because diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index d4c7ebefe17..f342017603e 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -118,7 +118,7 @@ use crate::monomorphize::item::{MonoItemExt, InstantiationMode}; pub use rustc::mir::mono::CodegenUnit; pub enum PartitioningStrategy { - /// Generate one codegen unit per source-level module. + /// Generates one codegen unit per source-level module. PerModule, /// Partition the whole crate into a fixed number of codegen units. 
diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index 09b81651502..d4145b8e47e 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -306,7 +306,7 @@ impl<'a, 'tcx> DropElaborator<'a, 'tcx> for DropShimElaborator<'a, 'tcx> { } } -/// Build a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`. +/// Builds a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`. fn build_clone_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, self_ty: Ty<'tcx>) @@ -691,7 +691,7 @@ impl<'a, 'tcx> CloneShimBuilder<'a, 'tcx> { } } -/// Build a "call" shim for `def_id`. The shim calls the +/// Builds a "call" shim for `def_id`. The shim calls the /// function specified by `call_kind`, first adjusting its first /// argument according to `rcvr_adjustment`. /// diff --git a/src/librustc_mir/transform/check_unsafety.rs b/src/librustc_mir/transform/check_unsafety.rs index b2e1afc519e..66529e57983 100644 --- a/src/librustc_mir/transform/check_unsafety.rs +++ b/src/librustc_mir/transform/check_unsafety.rs @@ -28,7 +28,7 @@ pub struct UnsafetyChecker<'a, 'tcx: 'a> { source_info: SourceInfo, tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, - /// mark an `unsafe` block as used, so we don't lint it + /// Mark an `unsafe` block as used, so we don't lint it. used_unsafe: FxHashSet<ast::NodeId>, inherited_blocks: Vec<(ast::NodeId, bool)>, } @@ -574,7 +574,7 @@ fn unsafe_derive_on_repr_packed<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: D &message); } -/// Return the NodeId for an enclosing scope that is also `unsafe` +/// Returns the `NodeId` for an enclosing scope that is also `unsafe`. 
fn is_enclosed(tcx: TyCtxt<'_, '_, '_>, used_unsafe: &FxHashSet<ast::NodeId>, id: ast::NodeId) -> Option<(String, ast::NodeId)> { diff --git a/src/librustc_mir/transform/elaborate_drops.rs b/src/librustc_mir/transform/elaborate_drops.rs index c56acbaeb26..0f8db5f7334 100644 --- a/src/librustc_mir/transform/elaborate_drops.rs +++ b/src/librustc_mir/transform/elaborate_drops.rs @@ -74,7 +74,7 @@ impl MirPass for ElaborateDrops { } } -/// Return the set of basic blocks whose unwind edges are known +/// Returns the set of basic blocks whose unwind edges are known /// to not be reachable, because they are `drop` terminators /// that can't drop anything. fn find_dead_unwinds<'a, 'tcx>( diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs index d59bb3ec5b1..84f209f8776 100644 --- a/src/librustc_mir/transform/erase_regions.rs +++ b/src/librustc_mir/transform/erase_regions.rs @@ -1,8 +1,8 @@ //! This pass erases all early-bound regions from the types occurring in the MIR. //! We want to do this once just before codegen, so codegen does not have to take //! care erasing regions all over the place. -//! NOTE: We do NOT erase regions of statements that are relevant for -//! "types-as-contracts"-validation, namely, AcquireValid, ReleaseValid +//! N.B., we do _not_ erase regions of statements that are relevant for +//! "types-as-contracts"-validation, namely, `AcquireValid` and `ReleaseValid`. 
use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TyCtxt}; diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index 44061e689b3..28b9e082851 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -56,7 +56,7 @@ fn is_mir_available<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> boo tcx.mir_keys(def_id.krate).contains(&def_id) } -/// Finds the full set of def-ids within the current crate that have +/// Finds the full set of `DefId`s within the current crate that have /// MIR associated with them. fn mir_keys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum) -> Lrc<DefIdSet> { diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index d1dc5cfec99..a6726718e2d 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -182,7 +182,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { }); } - /// Copy the initialization of this temp to the + /// Copies the initialization of this temp to the /// promoted MIR, recursing through temps. fn promote_temp(&mut self, temp: Local) -> Local { let old_keep_original = self.keep_original; diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index 76b8b83031a..6c69114593b 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -176,18 +176,18 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } } - /// Add the given qualification to self.qualif. + /// Adds the given qualification to `self.qualif`. fn add(&mut self, qualif: Qualif) { self.qualif = self.qualif | qualif; } - /// Add the given type's qualification to self.qualif. + /// Adds the given type's qualification to `self.qualif`. 
fn add_type(&mut self, ty: Ty<'tcx>) { self.add(Qualif::MUTABLE_INTERIOR | Qualif::NEEDS_DROP); self.qualif.restrict(ty, self.tcx, self.param_env); } - /// Within the provided closure, self.qualif will start + /// Within the provided closure, `self.qualif` will start /// out empty, and its value after the closure returns will /// be combined with the value before the call to nest. fn nest<F: FnOnce(&mut Self)>(&mut self, f: F) { diff --git a/src/librustc_mir/transform/qualify_min_const_fn.rs b/src/librustc_mir/transform/qualify_min_const_fn.rs index 85bf1e70ebf..bfbb9c18257 100644 --- a/src/librustc_mir/transform/qualify_min_const_fn.rs +++ b/src/librustc_mir/transform/qualify_min_const_fn.rs @@ -358,7 +358,7 @@ fn check_terminator( } } -/// Returns true if the `def_id` refers to an intrisic which we've whitelisted +/// Returns `true` if the `def_id` refers to an intrinsic which we've whitelisted /// for being called from stable `const fn`s (`min_const_fn`). /// /// Adding more intrinsics requires sign-off from @rust-lang/lang. diff --git a/src/librustc_mir/transform/remove_noop_landing_pads.rs b/src/librustc_mir/transform/remove_noop_landing_pads.rs index 68832b73ccd..0ad33bff9d6 100644 --- a/src/librustc_mir/transform/remove_noop_landing_pads.rs +++ b/src/librustc_mir/transform/remove_noop_landing_pads.rs @@ -4,7 +4,7 @@ use rustc_data_structures::bit_set::BitSet; use crate::transform::{MirPass, MirSource}; use crate::util::patch::MirPatch; -/// A pass that removes no-op landing pads and replaces jumps to them with +/// A pass that removes noop landing pads and replaces jumps to them with /// `None`. This is important because otherwise LLVM generates terrible /// code for these.
pub struct RemoveNoopLandingPads; diff --git a/src/librustc_mir/util/alignment.rs b/src/librustc_mir/util/alignment.rs index 659b5beb305..7be34d001df 100644 --- a/src/librustc_mir/util/alignment.rs +++ b/src/librustc_mir/util/alignment.rs @@ -1,7 +1,7 @@ use rustc::ty::{self, TyCtxt}; use rustc::mir::*; -/// Return `true` if this place is allowed to be less aligned +/// Returns `true` if this place is allowed to be less aligned /// than its containing struct (because it is within a packed /// struct). pub fn is_disaligned<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>, diff --git a/src/librustc_mir/util/def_use.rs b/src/librustc_mir/util/def_use.rs index 3b9d7c3612a..2e41c6e493b 100644 --- a/src/librustc_mir/util/def_use.rs +++ b/src/librustc_mir/util/def_use.rs @@ -61,7 +61,7 @@ impl<'tcx> DefUseAnalysis<'tcx> { } } - /// FIXME(pcwalton): This should update the def-use chains. + // FIXME(pcwalton): this should update the def-use chains. pub fn replace_all_defs_and_uses_with(&self, local: Local, mir: &mut Mir<'tcx>, diff --git a/src/librustc_mir/util/elaborate_drops.rs b/src/librustc_mir/util/elaborate_drops.rs index 1bc956e5ffe..e86ece13830 100644 --- a/src/librustc_mir/util/elaborate_drops.rs +++ b/src/librustc_mir/util/elaborate_drops.rs @@ -144,9 +144,9 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> /// joined together under the `rest` subpath. They are all controlled /// by the primary drop flag, but only the last rest-field dropped /// should clear it (and it must also not clear anything else). - /// - /// FIXME: I think we should just control the flags externally - /// and then we do not need this machinery. + // + // FIXME: I think we should just control the flags externally, + // and then we do not need this machinery. 
pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) { debug!("elaborate_drop({:?})", self); let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep); @@ -183,7 +183,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> } } - /// Return the place and move path for each field of `variant`, + /// Returns the place and move path for each field of `variant`, /// (the move path is `None` if the field is a rest field). fn move_paths_for_fields(&self, base_place: &Place<'tcx>, @@ -234,7 +234,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> } } - /// Create one-half of the drop ladder for a list of fields, and return + /// Creates one-half of the drop ladder for a list of fields, and return /// the list of steps in it in reverse order, with the first step /// dropping 0 fields and so on. /// @@ -268,7 +268,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> ) } - /// Create a full drop ladder, consisting of 2 connected half-drop-ladders + /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders /// /// For example, with 3 fields, the drop ladder is /// @@ -818,7 +818,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D> } } - /// Return a basic block that drop a place using the context + /// Returns a basic block that drops a place using the context /// and path in `c`. If `mode` is something, also clear `c` /// according to it. /// diff --git a/src/librustc_mir/util/liveness.rs b/src/librustc_mir/util/liveness.rs index 847699cc500..dcbd9aa9af2 100644 --- a/src/librustc_mir/util/liveness.rs +++ b/src/librustc_mir/util/liveness.rs @@ -1,21 +1,22 @@ -//! Liveness analysis which computes liveness of MIR local variables at the boundary of basic blocks +//! Liveness analysis which computes liveness of MIR local variables at the boundary of basic +//! blocks. //! //! This analysis considers references as being used only at the point of the //! borrow. This means that this does not track uses because of references that //! already exist: //! -//!
```Rust -//! fn foo() { -//! x = 0; -//! // `x` is live here -//! GLOBAL = &x: *const u32; -//! // but not here, even while it can be accessed through `GLOBAL`. -//! foo(); -//! x = 1; -//! // `x` is live again here, because it is assigned to `OTHER_GLOBAL` -//! OTHER_GLOBAL = &x: *const u32; -//! // ... -//! } +//! ```rust +//! fn foo() { +//! x = 0; +//! // `x` is live here ... +//! GLOBAL = &x: *const u32; +//! // ... but not here, even while it can be accessed through `GLOBAL`. +//! foo(); +//! x = 1; +//! // `x` is live again here, because it is assigned to `OTHER_GLOBAL`. +//! OTHER_GLOBAL = &x: *const u32; +//! // ... +//! } //! ``` //! //! This means that users of this analysis still have to check whether @@ -91,7 +92,7 @@ impl<'a, 'tcx> LiveVariableMap for IdentityMap<'a, 'tcx> { } } -/// Compute which local variables are live within the given function +/// Computes which local variables are live within the given function /// `mir`. The liveness mode `mode` determines what sorts of uses are /// considered to make a variable live (e.g., do drops count?). pub fn liveness_of_locals<'tcx, V: Idx>( diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index a391a316312..bf5514c5335 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -181,21 +181,21 @@ impl<'a> AstValidator<'a> { } } - /// matches '-' lit | lit (cf. parser::Parser::parse_literal_maybe_minus), - /// or path for ranges. - /// - /// FIXME: do we want to allow expr -> pattern conversion to create path expressions? - /// That means making this work: - /// - /// ```rust,ignore (FIXME) - /// struct S; - /// macro_rules! m { - /// ($a:expr) => { - /// let $a = S; - /// } - /// } - /// m!(S); - /// ``` + /// Matches `'-' lit | lit (cf. parser::Parser::parse_literal_maybe_minus)`, + /// or paths for ranges. + // + // FIXME: do we want to allow `expr -> pattern` conversion to create path expressions? 
+ // That means making this work: + // + // ```rust,ignore (FIXME) + // struct S; + // macro_rules! m { + // ($a:expr) => { + // let $a = S; + // } + // } + // m!(S); + // ``` fn check_expr_within_pat(&self, expr: &Expr, allow_paths: bool) { match expr.node { ExprKind::Lit(..) => {} diff --git a/src/librustc_passes/rvalue_promotion.rs b/src/librustc_passes/rvalue_promotion.rs index 8d33fef5303..50e80eb5a29 100644 --- a/src/librustc_passes/rvalue_promotion.rs +++ b/src/librustc_passes/rvalue_promotion.rs @@ -160,11 +160,11 @@ impl<'a, 'gcx> CheckCrateVisitor<'a, 'gcx> { } /// While the `ExprUseVisitor` walks, we will identify which - /// expressions are borrowed, and insert their ids into this + /// expressions are borrowed, and insert their IDs into this /// table. Actually, we insert the "borrow-id", which is normally - /// the id of the expression being borrowed: but in the case of + /// the ID of the expression being borrowed: but in the case of /// `ref mut` borrows, the `id` of the pattern is - /// inserted. Therefore later we remove that entry from the table + /// inserted. Therefore, later we remove that entry from the table /// and transfer it over to the value being matched. This will /// then prevent said value from being promoted. fn remove_mut_rvalue_borrow(&mut self, pat: &hir::Pat) -> bool { @@ -588,7 +588,7 @@ fn check_expr_kind<'a, 'tcx>( ty_result & node_result } -/// Check the adjustments of an expression +/// Checks the adjustments of an expression. fn check_adjustments<'a, 'tcx>( v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr) -> Promotability { diff --git a/src/librustc_plugin/build.rs b/src/librustc_plugin/build.rs index 46c452668c3..c1ba4d7b3d8 100644 --- a/src/librustc_plugin/build.rs +++ b/src/librustc_plugin/build.rs @@ -30,7 +30,7 @@ impl<'v> ItemLikeVisitor<'v> for RegistrarFinder { } } -/// Find the function marked with `#[plugin_registrar]`, if any. +/// Finds the function marked with `#[plugin_registrar]`, if any. 
pub fn find_plugin_registrar<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> Option<DefId> { tcx.plugin_registrar_fn(LOCAL_CRATE) } diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs index 32e003ff107..667b43c2ad0 100644 --- a/src/librustc_plugin/lib.rs +++ b/src/librustc_plugin/lib.rs @@ -4,7 +4,7 @@ //! in various ways. //! //! Plugin authors will use the `Registry` type re-exported by -//! this module, along with its methods. The rest of the module +//! this module, along with its methods. The rest of the module //! is for use by `rustc` itself. //! //! To define a plugin, build a dylib crate with a diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index b53d956a9c0..c6f3dd36ad9 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -68,7 +68,7 @@ impl<'a> Registry<'a> { } } - /// Get the plugin's arguments, if any. + /// Gets the plugin's arguments, if any. /// /// These are specified inside the `plugin` crate attribute as /// diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 14a0922c477..e037ff961e2 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -40,9 +40,9 @@ mod diagnostics; /// Implemented to visit all `DefId`s in a type. /// Visiting `DefId`s is useful because visibilities and reachabilities are attached to them. /// The idea is to visit "all components of a type", as documented in -/// https://github.com/rust-lang/rfcs/blob/master/text/2145-type-privacy.md#how-to-determine-visibility-of-a-type -/// Default type visitor (`TypeVisitor`) does most of the job, but it has some shortcomings. -/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait def-ids +/// https://github.com/rust-lang/rfcs/blob/master/text/2145-type-privacy.md#how-to-determine-visibility-of-a-type. +/// The default type visitor (`TypeVisitor`) does most of the job, but it has some shortcomings. 
+/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait `DefId`s /// manually. Second, it doesn't visit some type components like signatures of fn types, or traits /// in `impl Trait`, see individual comments in `DefIdVisitorSkeleton::visit_ty`. trait DefIdVisitor<'a, 'tcx: 'a> { @@ -386,7 +386,7 @@ impl VisibilityLike for Option<AccessLevel> { } //////////////////////////////////////////////////////////////////////////////// -/// The embargo visitor, used to determine the exports of the ast +/// The embargo visitor, used to determine the exports of the AST. //////////////////////////////////////////////////////////////////////////////// struct EmbargoVisitor<'a, 'tcx: 'a> { diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 750eb35a988..45cb2b6d5a8 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -1,4 +1,4 @@ -//! Reduced graph building +//! Reduced graph building. //! //! Here we build the "reduced graph": the graph of the module tree without //! any imports resolved. @@ -790,7 +790,7 @@ impl<'a> Resolver<'a> { } } - // This returns true if we should consider the underlying `extern crate` to be used. + /// Returns `true` if we should consider the underlying `extern crate` to be used. fn process_legacy_macro_imports(&mut self, item: &Item, module: Module<'a>, parent_scope: &ParentScope<'a>) -> bool { let mut import_all = None; @@ -876,7 +876,7 @@ impl<'a> Resolver<'a> { import_all.is_some() || !single_imports.is_empty() } - // does this attribute list contain "macro_use"? + /// Returns `true` if this attribute list contains `macro_use`. 
fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool { for attr in attrs { if attr.check_name("macro_escape") { diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index ecbfcec3c5e..74ca99a0f90 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -139,46 +139,46 @@ impl Ord for BindingError { } enum ResolutionError<'a> { - /// error E0401: can't use type or const parameters from outer function + /// Error E0401: can't use type or const parameters from outer function. GenericParamsFromOuterFunction(Def), - /// error E0403: the name is already used for a type/const parameter in this list of - /// generic parameters + /// Error E0403: the name is already used for a type or const parameter in this generic + /// parameter list. NameAlreadyUsedInParameterList(Name, &'a Span), - /// error E0407: method is not a member of trait + /// Error E0407: method is not a member of trait. MethodNotMemberOfTrait(Name, &'a str), - /// error E0437: type is not a member of trait + /// Error E0437: type is not a member of trait. TypeNotMemberOfTrait(Name, &'a str), - /// error E0438: const is not a member of trait + /// Error E0438: const is not a member of trait. ConstNotMemberOfTrait(Name, &'a str), - /// error E0408: variable `{}` is not bound in all patterns + /// Error E0408: variable `{}` is not bound in all patterns. VariableNotBoundInPattern(&'a BindingError), - /// error E0409: variable `{}` is bound in inconsistent ways within the same match arm + /// Error E0409: variable `{}` is bound in inconsistent ways within the same match arm. VariableBoundWithDifferentMode(Name, Span), - /// error E0415: identifier is bound more than once in this parameter list + /// Error E0415: identifier is bound more than once in this parameter list. 
IdentifierBoundMoreThanOnceInParameterList(&'a str), - /// error E0416: identifier is bound more than once in the same pattern + /// Error E0416: identifier is bound more than once in the same pattern. IdentifierBoundMoreThanOnceInSamePattern(&'a str), - /// error E0426: use of undeclared label + /// Error E0426: use of undeclared label. UndeclaredLabel(&'a str, Option<Name>), - /// error E0429: `self` imports are only allowed within a { } list + /// Error E0429: `self` imports are only allowed within a `{ }` list. SelfImportsOnlyAllowedWithin, - /// error E0430: `self` import can only appear once in the list + /// Error E0430: `self` import can only appear once in the list. SelfImportCanOnlyAppearOnceInTheList, - /// error E0431: `self` import can only appear in an import list with a non-empty prefix + /// Error E0431: `self` import can only appear in an import list with a non-empty prefix. SelfImportOnlyInImportListWithNonEmptyPrefix, - /// error E0433: failed to resolve + /// Error E0433: failed to resolve. FailedToResolve(&'a str), - /// error E0434: can't capture dynamic environment in a fn item + /// Error E0434: can't capture dynamic environment in a fn item. CannotCaptureDynamicEnvironmentInFnItem, - /// error E0435: attempt to use a non-constant value in a constant + /// Error E0435: attempt to use a non-constant value in a constant. AttemptToUseNonConstantValueInConstant, - /// error E0530: X bindings cannot shadow Ys + /// Error E0530: `X` bindings cannot shadow `Y`s. BindingShadowsSomethingUnacceptable(&'a str, Name, &'a NameBinding<'a>), - /// error E0128: type parameters with a default cannot use forward declared identifiers + /// Error E0128: type parameters with a default cannot use forward-declared identifiers. ForwardDeclaredTyParam, // FIXME(const_generics:defaults) } -/// Combines an error with provided span and emits it +/// Combines an error with provided span and emits it. 
/// /// This takes the error provided, combines it with the span and any additional spans inside the /// error and emits it. @@ -426,11 +426,11 @@ fn resolve_struct_error<'sess, 'a>(resolver: &'sess Resolver<'_>, /// Adjust the impl span so that just the `impl` keyword is taken by removing /// everything after `<` (`"impl<T> Iterator for A<T> {}" -> "impl"`) and -/// everything after the first whitespace (`"impl Iterator for A" -> "impl"`) +/// everything after the first whitespace (`"impl Iterator for A" -> "impl"`). /// -/// Attention: The method used is very fragile since it essentially duplicates the work of the +/// *Attention*: the method used is very fragile since it essentially duplicates the work of the /// parser. If you need to use this function or something similar, please consider updating the -/// source_map functions and this function to something more robust. +/// `source_map` functions and this function to something more robust. fn reduce_impl_span_to_impl_keyword(cm: &SourceMap, impl_span: Span) -> Span { let impl_span = cm.span_until_char(impl_span, '<'); let impl_span = cm.span_until_whitespace(impl_span); @@ -740,7 +740,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { } } -/// This thing walks the whole crate in DFS manner, visiting each item, resolving names as it goes. +/// Walks the whole crate in DFS order, visiting each item, resolving names as it goes. impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> { fn visit_item(&mut self, item: &'tcx Item) { self.resolve_item(item); @@ -933,7 +933,7 @@ enum RibKind<'a> { /// No translation needs to be applied. NormalRibKind, - /// We passed through a closure scope at the given node ID. + /// We passed through a closure scope at the given `NodeId`. /// Translate upvars as appropriate. ClosureRibKind(NodeId /* func id */), @@ -961,7 +961,7 @@ enum RibKind<'a> { ForwardTyParamBanRibKind, } -/// One local scope. +/// A single local scope. /// /// A rib represents a scope names can live in. 
Note that these appear in many places, not just /// around braces. At any place where the list of accessible names (of the given namespace) @@ -1055,7 +1055,7 @@ enum PathResult<'a> { } enum ModuleKind { - /// An anonymous module, eg. just a block. + /// An anonymous module; e.g., just a block. /// /// ``` /// fn main() { @@ -1250,11 +1250,11 @@ struct PrivacyError<'a>(Span, Ident, &'a NameBinding<'a>); struct UseError<'a> { err: DiagnosticBuilder<'a>, - /// Attach `use` statements for these candidates + /// Attach `use` statements for these candidates. candidates: Vec<ImportSuggestion>, - /// The node id of the module to place the use statements in + /// The `NodeId` of the module to place the use-statements in. node_id: NodeId, - /// Whether the diagnostic should state that it's "better" + /// Whether the diagnostic should state that it's "better". better: bool, } @@ -1496,7 +1496,7 @@ pub struct Resolver<'a> { prelude: Option<Module<'a>>, pub extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>, - /// n.b. This is used only for better diagnostics, not name resolution itself. + /// N.B., this is used only for better diagnostics, not name resolution itself. has_self: FxHashSet<DefId>, /// Names of fields of an item `DefId` accessible with dot syntax. @@ -1575,13 +1575,13 @@ pub struct Resolver<'a> { /// they are used (in a `break` or `continue` statement) pub unused_labels: FxHashMap<NodeId, Span>, - /// privacy errors are delayed until the end in order to deduplicate them + /// Privacy errors are delayed until the end in order to deduplicate them. privacy_errors: Vec<PrivacyError<'a>>, - /// ambiguity errors are delayed for deduplication + /// Ambiguity errors are delayed for deduplication. ambiguity_errors: Vec<AmbiguityError<'a>>, - /// `use` injections are delayed for better placement and deduplication + /// `use` injections are delayed for better placement and deduplication. 
use_injections: Vec<UseError<'a>>, - /// crate-local macro expanded `macro_export` referred to by a module-relative path + /// Crate-local macro expanded `macro_export` referred to by a module-relative path. macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>, arenas: &'a ResolverArenas<'a>, @@ -1608,17 +1608,17 @@ pub struct Resolver<'a> { potentially_unused_imports: Vec<&'a ImportDirective<'a>>, - /// This table maps struct IDs into struct constructor IDs, + /// Table for mapping struct IDs into struct constructor IDs, /// it's not used during normal resolution, only for better error reporting. struct_constructors: DefIdMap<(Def, ty::Visibility)>, - /// Only used for better errors on `fn(): fn()` + /// Only used for better errors on `fn(): fn()`. current_type_ascription: Vec<Span>, injected_crate: Option<Module<'a>>, } -/// Nothing really interesting here, it just provides memory for the rest of the crate. +/// Nothing really interesting here; it just provides memory for the rest of the crate. #[derive(Default)] pub struct ResolverArenas<'a> { modules: arena::TypedArena<ModuleData<'a>>, @@ -1718,7 +1718,7 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> { } impl<'a> Resolver<'a> { - /// Rustdoc uses this to resolve things in a recoverable way. ResolutionError<'a> + /// Rustdoc uses this to resolve things in a recoverable way. `ResolutionError<'a>` /// isn't something that can be returned because it can't be made to live that long, /// and also it's a private type. Fortunately rustdoc doesn't need to know the error, /// just that an error occurred. @@ -2346,7 +2346,7 @@ impl<'a> Resolver<'a> { } } - /// Searches the current set of local scopes for labels. Returns the first non-None label that + /// Searches the current set of local scopes for labels. Returns the first non-`None` label that /// is returned by the given predicate function /// /// Stops after meeting a closure. 
@@ -2653,7 +2653,7 @@ impl<'a> Resolver<'a> { result } - /// This is called to resolve a trait reference from an `impl` (i.e., `impl Trait for Foo`) + /// This is called to resolve a trait reference from an `impl` (i.e., `impl Trait for Foo`). fn with_optional_trait_ref<T, F>(&mut self, opt_trait_ref: Option<&TraitRef>, f: F) -> T where F: FnOnce(&mut Resolver<'_>, Option<DefId>) -> T { @@ -3154,7 +3154,7 @@ impl<'a> Resolver<'a> { /// sometimes needed for the lint that recommends rewriting /// absolute paths to `crate`, so that it knows how to frame the /// suggestion. If you are just resolving a path like `foo::bar` - /// that appears...somewhere, though, then you just want + /// that appears in an arbitrary location, then you just want /// `CrateLint::SimplePath`, which is what `smart_resolve_path` /// already provides. fn smart_resolve_path_with_crate_lint( @@ -4451,9 +4451,9 @@ impl<'a> Resolver<'a> { /// When name resolution fails, this method can be used to look up candidate /// entities with the expected name. It allows filtering them using the /// supplied predicate (which should be used to only accept the types of - /// definitions expected e.g., traits). The lookup spans across all crates. + /// definitions expected, e.g., traits). The lookup spans across all crates. /// - /// NOTE: The method does not look into imports, but this is not a problem, + /// N.B., the method does not look into imports, but this is not a problem, /// since we report the definitions (thus, the de-aliased imports). fn lookup_import_candidates<FilterFn>(&mut self, lookup_ident: Ident, @@ -5133,7 +5133,7 @@ fn path_names_to_string(path: &Path) -> String { .collect::<Vec<_>>()) } -/// Get the stringified path for an enum from an `ImportSuggestion` for an enum variant. +/// Gets the stringified path for an enum from an `ImportSuggestion` for an enum variant. 
fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, String) { let variant_path = &suggestion.path; let variant_path_string = path_names_to_string(variant_path); @@ -5231,11 +5231,11 @@ fn err_path_resolution() -> PathResolution { #[derive(Copy, Clone, Debug)] enum CrateLint { - /// Do not issue the lint + /// Do not issue the lint. No, - /// This lint applies to some random path like `impl ::foo::Bar` - /// or whatever. In this case, we can take the span of that path. + /// This lint applies to some arbitrary path; e.g., `impl ::foo::Bar`. + /// In this case, we can take the span of that path. SimplePath(NodeId), /// This lint comes from a `use` statement. In this case, what we diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index c8c0847a28e..55b5cf90eb0 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -36,12 +36,12 @@ use rustc_data_structures::sync::Lrc; #[derive(Clone, Debug)] pub struct InvocationData<'a> { def_index: DefIndex, - /// Module in which the macro was invoked. + /// The module in which the macro was invoked. crate module: Cell<Module<'a>>, - /// Legacy scope in which the macro was invoked. + /// The legacy scope in which the macro was invoked. /// The invocation path is resolved in this scope. crate parent_legacy_scope: Cell<LegacyScope<'a>>, - /// Legacy scope *produced* by expanding this macro invocation, + /// The legacy scope *produced* by expanding this macro invocation, /// includes all the macro_rules items, other invocations, etc generated by it. /// `None` if the macro is not expanded yet. crate output_legacy_scope: Cell<Option<LegacyScope<'a>>>, @@ -68,21 +68,21 @@ pub struct LegacyBinding<'a> { ident: Ident, } -/// Scope introduced by a `macro_rules!` macro. -/// Starts at the macro's definition and ends at the end of the macro's parent module -/// (named or unnamed), or even further if it escapes with `#[macro_use]`. 
+/// The scope introduced by a `macro_rules!` macro. +/// This starts at the macro's definition and ends at the end of the macro's parent +/// module (named or unnamed), or even further if it escapes with `#[macro_use]`. /// Some macro invocations need to introduce legacy scopes too because they -/// potentially can expand into macro definitions. +/// can potentially expand into macro definitions. #[derive(Copy, Clone, Debug)] pub enum LegacyScope<'a> { - /// Created when invocation data is allocated in the arena, + /// Created when invocation data is allocated in the arena; /// must be replaced with a proper scope later. Uninitialized, /// Empty "root" scope at the crate start containing no names. Empty, - /// Scope introduced by a `macro_rules!` macro definition. + /// The scope introduced by a `macro_rules!` macro definition. Binding(&'a LegacyBinding<'a>), - /// Scope introduced by a macro invocation that can potentially + /// The scope introduced by a macro invocation that can potentially /// create a `macro_rules!` macro definition. Invocation(&'a InvocationData<'a>), } diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 712871408fa..b930c30c511 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -64,23 +64,23 @@ pub enum ImportDirectiveSubclass<'a> { /// One import directive. #[derive(Debug,Clone)] crate struct ImportDirective<'a> { - /// The id of the `extern crate`, `UseTree` etc that imported this `ImportDirective`. + /// The ID of the `extern crate`, `UseTree` etc that imported this `ImportDirective`. /// /// In the case where the `ImportDirective` was expanded from a "nested" use tree, - /// this id is the id of the leaf tree. For example: + /// this id is the ID of the leaf tree. 
For example: /// /// ```ignore (pacify the mercilous tidy) /// use foo::bar::{a, b} /// ``` /// - /// If this is the import directive for `foo::bar::a`, we would have the id of the `UseTree` + /// If this is the import directive for `foo::bar::a`, we would have the ID of the `UseTree` /// for `a` in this field. pub id: NodeId, /// The `id` of the "root" use-kind -- this is always the same as /// `id` except in the case of "nested" use trees, in which case /// it will be the `id` of the root use tree. e.g., in the example - /// from `id`, this would be the id of the `use foo::bar` + /// from `id`, this would be the ID of the `use foo::bar` /// `UseTree` node. pub root_id: NodeId, diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 1a49056bc7f..ee16d78f3b0 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -1,9 +1,9 @@ //! Write the output of rustc's analysis to an implementor of Dump. //! //! Dumping the analysis is implemented by walking the AST and getting a bunch of -//! info out from all over the place. We use Def IDs to identify objects. The +//! info out from all over the place. We use `DefId`s to identify objects. The //! tricky part is getting syntactic (span, source text) and semantic (reference -//! Def IDs) information for parts of expressions which the compiler has discarded. +//! `DefId`s) information for parts of expressions which the compiler has discarded. //! E.g., in a path `foo::bar::baz`, the compiler only keeps a span for the whole //! path and a reference to `baz`, but we want spans and references for all three //! idents. 
@@ -1021,7 +1021,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { } } - /// Extract macro use and definition information from the AST node defined + /// Extracts macro use and definition information from the AST node defined /// by the given NodeId, using the expansion information from the node's /// span. /// @@ -1184,7 +1184,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { /// /// A use tree is an import that may contain nested braces (RFC 2128). The `use_tree` parameter /// is the current use tree under scrutiny, while `id` and `prefix` are its corresponding node - /// id and path. `root_item` is the topmost use tree in the hierarchy. + /// ID and path. `root_item` is the topmost use tree in the hierarchy. /// /// If `use_tree` is a simple or glob import, it is dumped into the analysis data. Otherwise, /// each child use tree is dumped recursively. diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index c4a2ebeba65..dada06a1934 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -825,7 +825,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { /// Attempt to return MacroRef for any AST node. /// /// For a given piece of AST defined by the supplied Span and NodeId, - /// returns None if the node is not macro-generated or the span is malformed, + /// returns `None` if the node is not macro-generated or the span is malformed, /// else uses the expansion callsite and callee to return some MacroRef. 
pub fn get_macro_use_data(&self, span: Span) -> Option<MacroRef> { if !generated_code(span) { diff --git a/src/librustc_target/abi/call/mod.rs b/src/librustc_target/abi/call/mod.rs index 839c9a857e6..411eb192d90 100644 --- a/src/librustc_target/abi/call/mod.rs +++ b/src/librustc_target/abi/call/mod.rs @@ -160,11 +160,11 @@ pub struct Uniform { pub unit: Reg, /// The total size of the argument, which can be: - /// * equal to `unit.size` (one scalar/vector) - /// * a multiple of `unit.size` (an array of scalar/vectors) + /// * equal to `unit.size` (one scalar/vector), + /// * a multiple of `unit.size` (an array of scalar/vectors), /// * if `unit.kind` is `Integer`, the last element /// can be shorter, i.e., `{ i64, i64, i32 }` for - /// 64-bit integers with a total size of 20 bytes + /// 64-bit integers with a total size of 20 bytes. pub total: Size, } @@ -228,7 +228,7 @@ impl CastTarget { } } -/// Return value from the `homogeneous_aggregate` test function. +/// The result of the `homogeneous_aggregate` test function. #[derive(Copy, Clone, Debug)] pub enum HomogeneousAggregate { /// Yes, all the "leaf fields" of this struct are passed in the @@ -266,12 +266,12 @@ impl<'a, Ty> TyLayout<'a, Ty> { } } - /// True if this layout is an aggregate containing fields of only + /// Returns `true` if this layout is an aggregate containing fields of only /// a single type (e.g., `(u32, u32)`). Such aggregates are often /// special-cased in ABIs. /// /// Note: We generally ignore fields of zero-sized type when computing - /// this value (cc #56877). + /// this value (see #56877).
/// /// This is public so that it can be used in unit tests, but /// should generally only be relevant to the ABI details of diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs index bb194d5bb12..8b96a8c1658 100644 --- a/src/librustc_target/abi/mod.rs +++ b/src/librustc_target/abi/mod.rs @@ -174,7 +174,7 @@ impl TargetDataLayout { Ok(dl) } - /// Return exclusive upper bound on object size. + /// Returns exclusive upper bound on object size. /// /// The theoretical maximum object size is defined as the maximum positive `isize` value. /// This ensures that the `offset` semantics remain well-defined by allowing it to correctly @@ -396,7 +396,7 @@ impl Align { self.bytes() * 8 } - /// Compute the best alignment possible for the given offset + /// Computes the best alignment possible for the given offset /// (the largest power of two that the offset is a multiple of). /// /// N.B., for an offset of `0`, this happens to return `2^64`. @@ -476,7 +476,7 @@ impl Integer { } } - /// Find the smallest Integer type which can represent the signed value. + /// Finds the smallest Integer type which can represent the signed value. pub fn fit_signed(x: i128) -> Integer { match x { -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8, @@ -487,7 +487,7 @@ impl Integer { } } - /// Find the smallest Integer type which can represent the unsigned value. + /// Finds the smallest Integer type which can represent the unsigned value. pub fn fit_unsigned(x: u128) -> Integer { match x { 0..=0x0000_0000_0000_00ff => I8, @@ -498,7 +498,7 @@ impl Integer { } } - /// Find the smallest integer with the given alignment. + /// Finds the smallest integer with the given alignment. pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> { let dl = cx.data_layout(); @@ -734,7 +734,7 @@ impl FieldPlacement { } } - /// Get source indices of the fields by increasing offsets. + /// Gets source indices of the fields by increasing offsets. 
#[inline] pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item=usize>+'a { let mut inverse_small = [0u8; 64]; @@ -786,7 +786,7 @@ pub enum Abi { } impl Abi { - /// Returns true if the layout corresponds to an unsized type. + /// Returns `true` if the layout corresponds to an unsized type. pub fn is_unsized(&self) -> bool { match *self { Abi::Uninhabited | @@ -797,7 +797,7 @@ impl Abi { } } - /// Returns true if this is a single signed integer scalar + /// Returns `true` if this is a single signed integer scalar pub fn is_signed(&self) -> bool { match *self { Abi::Scalar(ref scal) => match scal.value { @@ -808,7 +808,7 @@ impl Abi { } } - /// Returns true if this is an uninhabited type + /// Returns `true` if this is an uninhabited type pub fn is_uninhabited(&self) -> bool { match *self { Abi::Uninhabited => true, @@ -924,12 +924,12 @@ impl<'a, Ty> TyLayout<'a, Ty> { } impl<'a, Ty> TyLayout<'a, Ty> { - /// Returns true if the layout corresponds to an unsized type. + /// Returns `true` if the layout corresponds to an unsized type. pub fn is_unsized(&self) -> bool { self.abi.is_unsized() } - /// Returns true if the type is a ZST and not unsized. + /// Returns `true` if the type is a ZST and not unsized. pub fn is_zst(&self) -> bool { match self.abi { Abi::Scalar(_) | diff --git a/src/librustc_target/lib.rs b/src/librustc_target/lib.rs index e831eb41410..efffb198572 100644 --- a/src/librustc_target/lib.rs +++ b/src/librustc_target/lib.rs @@ -4,7 +4,7 @@ //! compiler 'backend', though LLVM is rustc's backend, so rustc_target //! is really just odds-and-ends relating to code gen and linking. //! This crate mostly exists to make rustc smaller, so we might put -//! more 'stuff' here in the future. It does not have a dependency on +//! more 'stuff' here in the future. It does not have a dependency on //! LLVM. 
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] diff --git a/src/librustc_target/spec/mod.rs b/src/librustc_target/spec/mod.rs index 107583e4fc0..1df09c8f8fa 100644 --- a/src/librustc_target/spec/mod.rs +++ b/src/librustc_target/spec/mod.rs @@ -525,7 +525,7 @@ pub struct TargetOptions { pub pre_link_objects_exe_crt: Vec<String>, // ... when linking an executable with a bundled crt pub pre_link_objects_dll: Vec<String>, // ... when linking a dylib /// Linker arguments that are unconditionally passed after any - /// user-defined but before post_link_objects. Standard platform + /// user-defined but before post_link_objects. Standard platform /// libraries that should be always be linked to, usually go here. pub late_link_args: LinkArgs, /// Objects to link after all others, always found within the @@ -641,7 +641,7 @@ pub struct TargetOptions { pub allow_asm: bool, /// Whether the target uses a custom unwind resumption routine. /// By default LLVM lowers `resume` instructions into calls to `_Unwind_Resume` - /// defined in libgcc. If this option is enabled, the target must provide + /// defined in libgcc. If this option is enabled, the target must provide /// `eh_unwind_resume` lang item. pub custom_unwind_resume: bool, @@ -705,7 +705,7 @@ pub struct TargetOptions { /// for this target unconditionally. pub no_builtins: bool, - /// Whether to lower 128-bit operations to compiler_builtins calls. Use if + /// Whether to lower 128-bit operations to compiler_builtins calls. Use if /// your backend only supports 64-bit and smaller math. pub i128_lowering: bool, @@ -747,7 +747,7 @@ pub struct TargetOptions { } impl Default for TargetOptions { - /// Create a set of "sane defaults" for any target. This is still + /// Creates a set of "sane defaults" for any target. This is still /// incomplete, and if used for compilation, will certainly not work. 
fn default() -> TargetOptions { TargetOptions { @@ -872,7 +872,7 @@ impl Target { abi.generic() || !self.options.abi_blacklist.contains(&abi) } - /// Load a target descriptor from a JSON object. + /// Loads a target descriptor from a JSON object. pub fn from_json(obj: Json) -> TargetResult { // While ugly, this code must remain this way to retain // compatibility with existing JSON fields and the internal diff --git a/src/librustc_traits/chalk_context/mod.rs b/src/librustc_traits/chalk_context/mod.rs index ffa696c9080..a326d84725a 100644 --- a/src/librustc_traits/chalk_context/mod.rs +++ b/src/librustc_traits/chalk_context/mod.rs @@ -177,7 +177,7 @@ impl context::AggregateOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> { } impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> { - /// True if this is a coinductive goal: basically proving that an auto trait + /// Returns `true` if this is a coinductive goal: basically proving that an auto trait /// is implemented or proving that a trait reference is well-formed. fn is_coinductive( &self, @@ -202,7 +202,7 @@ impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> { } } - /// Create an inference table for processing a new goal and instantiate that goal + /// Creates an inference table for processing a new goal and instantiate that goal /// in that context, returning "all the pieces". /// /// More specifically: given a u-canonical goal `arg`, creates a @@ -211,9 +211,9 @@ impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> { /// each bound variable in `arg` to a fresh inference variable /// from T. Returns: /// - /// - the table `T` - /// - the substitution `S` - /// - the environment and goal found by substitution `S` into `arg` + /// - the table `T`, + /// - the substitution `S`, + /// - the environment and goal found by substitution `S` into `arg`. 
fn instantiate_ucanonical_goal<R>( &self, arg: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>, @@ -241,7 +241,7 @@ impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> { }) } - /// True if this solution has no region constraints. + /// Returns `true` if this solution has no region constraints. fn empty_constraints(ccs: &Canonical<'gcx, ConstrainedSubst<'gcx>>) -> bool { ccs.value.constraints.is_empty() } diff --git a/src/librustc_traits/dropck_outlives.rs b/src/librustc_traits/dropck_outlives.rs index 45b19e1dc06..7185c4ce446 100644 --- a/src/librustc_traits/dropck_outlives.rs +++ b/src/librustc_traits/dropck_outlives.rs @@ -145,7 +145,7 @@ fn dropck_outlives<'tcx>( ) } -/// Return a set of constraints that needs to be satisfied in +/// Returns a set of constraints that needs to be satisfied in /// order for `ty` to be valid for destruction. fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>( tcx: TyCtxt<'a, 'gcx, 'tcx>, diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 757385aeb3e..31a76cf4107 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -224,7 +224,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { impl_trait } - /// Check that the correct number of generic arguments have been provided. + /// Checks that the correct number of generic arguments have been provided. /// Used specifically for function calls. pub fn check_generic_arg_count_for_call( tcx: TyCtxt, @@ -256,7 +256,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { ).0 } - /// Check that the correct number of generic arguments have been provided. + /// Checks that the correct number of generic arguments have been provided. /// This is used both for datatypes and function calls. 
fn check_generic_arg_count( tcx: TyCtxt, @@ -400,8 +400,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { /// Creates the relevant generic argument substitutions /// corresponding to a set of generic parameters. This is a - /// rather complex little function. Let me try to explain the - /// role of each of its parameters: + /// rather complex function. Let us try to explain the role + /// of each of its parameters: /// /// To start, we are given the `def_id` of the thing we are /// creating the substitutions for, and a partial set of @@ -417,9 +417,9 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { /// we can append those and move on. Otherwise, it invokes the /// three callback functions: /// - /// - `args_for_def_id`: given the def-id `P`, supplies back the + /// - `args_for_def_id`: given the `DefId` `P`, supplies back the /// generic arguments that were given to that parent from within - /// the path; so e.g., if you have `<T as Foo>::Bar`, the def-id + /// the path; so e.g., if you have `<T as Foo>::Bar`, the `DefId` /// might refer to the trait `Foo`, and the arguments might be /// `[T]`. The boolean value indicates whether to infer values /// for arguments whose values were not explicitly provided. @@ -680,7 +680,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> dyn AstConv<'gcx, 'tcx> + 'o { /// bound to a valid trait type. Returns the def_id for the defining trait. /// The type _cannot_ be a type other than a trait type. /// - /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T=X>` + /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T = X>` /// are disallowed. Otherwise, they are pushed onto the vector given. 
pub fn instantiate_mono_trait_ref(&self, trait_ref: &hir::TraitRef, diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 0afc1697d31..1bbb93b4e46 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -15,9 +15,9 @@ use syntax_pos::Span; use rustc::hir; -/// Check that it is legal to call methods of the trait corresponding +/// Checks that it is legal to call methods of the trait corresponding /// to `trait_id` (this only cares about the trait, not the specific -/// method that is called) +/// method that is called). pub fn check_legal_trait_for_method_call(tcx: TyCtxt, span: Span, trait_id: DefId) { if tcx.lang_items().drop_trait() == Some(trait_id) { struct_span_err!(tcx.sess, span, E0040, "explicit use of destructor method") @@ -29,7 +29,7 @@ pub fn check_legal_trait_for_method_call(tcx: TyCtxt, span: Span, trait_id: DefI enum CallStep<'tcx> { Builtin(Ty<'tcx>), DeferredClosure(ty::FnSig<'tcx>), - /// e.g., enum variant constructors + /// E.g., enum variant constructors. Overloaded(MethodCallee<'tcx>), } diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 85cae17fd85..21c21b3fcc0 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -140,7 +140,7 @@ enum CastError { CastToBool, CastToChar, DifferingKinds, - /// Cast of thin to fat raw ptr (eg. `*const () as *const [u8]`) + /// Cast of thin to fat raw ptr (e.g., `*const () as *const [u8]`). SizedUnsizedCast, IllegalCast, NeedDeref, @@ -441,7 +441,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { } } - /// Check a cast, and report an error if one exists. In some cases, this + /// Checks a cast, and report an error if one exists. In some cases, this /// can return Ok and create type errors in the fcx rather than returning /// directly. coercion-cast is handled in check instead of here. 
fn do_check(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Result<CastKind, CastError> { diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index df83c92fde5..df043fd6c46 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -365,7 +365,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { /// /// # Arguments /// - /// - `expr_def_id`: the def-id of the closure expression + /// - `expr_def_id`: the `DefId` of the closure expression /// - `decl`: the HIR declaration of the closure /// - `body`: the body of the closure /// - `expected_sig`: the expected signature (if any). Note that @@ -458,7 +458,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.closure_sigs(expr_def_id, body, error_sig) } - /// Enforce the user's types against the expectation. See + /// Enforce the user's types against the expectation. See /// `sig_of_closure_with_expectation` for details on the overall /// strategy. fn check_supplied_sig_against_expectation( diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index d1dfe9469fb..332f97a9ae7 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -1,7 +1,7 @@ //! # Type Coercion //! //! Under certain circumstances we will coerce from one type to another, -//! for example by auto-borrowing. This occurs in situations where the +//! for example by auto-borrowing. This occurs in situations where the //! compiler has a firm 'expected type' that was supplied from the user, //! and where the actual type is similar to that expected type in purpose //! but not in representation (so actual subtyping is inappropriate). @@ -9,24 +9,24 @@ //! ## Reborrowing //! //! Note that if we are expecting a reference, we will *reborrow* -//! even if the argument provided was already a reference. This is +//! even if the argument provided was already a reference. This is //! 
useful for freezing mut/const things (that is, when the expected is &T //! but you have &const T or &mut T) and also for avoiding the linearity -//! of mut things (when the expected is &mut T and you have &mut T). See +//! of mut things (when the expected is &mut T and you have &mut T). See //! the various `src/test/run-pass/coerce-reborrow-*.rs` tests for //! examples of where this is useful. //! //! ## Subtle note //! //! When deciding what type coercions to consider, we do not attempt to -//! resolve any type variables we may encounter. This is because `b` +//! resolve any type variables we may encounter. This is because `b` //! represents the expected type "as the user wrote it", meaning that if //! the user defined a generic function like //! //! fn foo<A>(a: A, b: A) { ... } //! //! and then we wrote `foo(&1, @2)`, we will not auto-borrow -//! either argument. In older code we went to some lengths to +//! either argument. In older code we went to some lengths to //! resolve the `b` variable, which could mean that we'd //! auto-borrow later arguments but not earlier ones, which //! seems very confusing. @@ -39,15 +39,15 @@ //! foo::<&int>(@1, @2) //! //! then we *will* auto-borrow, because we can't distinguish this from a -//! function that declared `&int`. This is inconsistent but it's easiest +//! function that declared `&int`. This is inconsistent but it's easiest //! at the moment. The right thing to do, I think, is to consider the //! *unsubstituted* type when deciding whether to auto-borrow, but the //! *substituted* type when considering the bounds and so forth. But most //! of our methods don't give access to the unsubstituted type, and -//! rightly so because they'd be error-prone. So maybe the thing to do is +//! rightly so because they'd be error-prone. So maybe the thing to do is //! to actually determine the kind of coercions that should occur -//! separately and pass them in. Or maybe it's ok as is. Anyway, it's -//! 
sort of a minor point so I've opted to leave it for later---after all +//! separately and pass them in. Or maybe it's ok as is. Anyway, it's +//! sort of a minor point so I've opted to leave it for later -- after all, //! we may want to adjust precisely when coercions occur. use check::{FnCtxt, Needs}; @@ -1031,8 +1031,8 @@ impl<'gcx, 'tcx, 'exprs, E> CoerceMany<'gcx, 'tcx, 'exprs, E> } } - /// Return the "expected type" with which this coercion was - /// constructed. This represents the "downward propagated" type + /// Returns the "expected type" with which this coercion was + /// constructed. This represents the "downward propagated" type /// that was given to us at the start of typing whatever construct /// we are typing (e.g., the match expression). /// diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 0eb8d7d06b1..d9b1b50ced8 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -17,10 +17,10 @@ use super::{Inherited, FnCtxt, potentially_plural_count}; /// /// # Parameters /// -/// - impl_m: type of the method we are checking -/// - impl_m_span: span to use for reporting errors -/// - trait_m: the method in the trait -/// - impl_trait_ref: the TraitRef corresponding to the trait implementation +/// - `impl_m`: type of the method we are checking +/// - `impl_m_span`: span to use for reporting errors +/// - `trait_m`: the method in the trait +/// - `impl_trait_ref`: the TraitRef corresponding to the trait implementation pub fn compare_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_m: &ty::AssociatedItem, diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index 60b5db0d12c..09cd4f7cbe2 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -12,7 +12,7 @@ use util::common::ErrorReported; use syntax::ast; use syntax_pos::Span; -/// check_drop_impl confirms that the Drop 
implementation identified by +/// This function confirms that the `Drop` implementation identified by /// `drop_impl_did` is not any more specialized than the type it is /// attached to (Issue #8142). /// @@ -21,7 +21,7 @@ use syntax_pos::Span; /// 1. The self type must be nominal (this is already checked during /// coherence), /// -/// 2. The generic region/type parameters of the impl's self-type must +/// 2. The generic region/type parameters of the impl's self type must /// all be parameters of the Drop impl itself (i.e., no /// specialization like `impl Drop for Foo<i32>`), and, /// @@ -236,9 +236,9 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( result } -/// check_safety_of_destructor_if_necessary confirms that the type +/// This function confirms that the type /// expression `typ` conforms to the "Drop Check Rule" from the Sound -/// Generic Drop (RFC 769). +/// Generic Drop RFC (#769). /// /// ---- /// @@ -276,7 +276,7 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( /// expected to break the needed parametricity property beyond /// repair.) /// -/// Therefore we have scaled back Drop-Check to a more conservative +/// Therefore, we have scaled back Drop-Check to a more conservative /// rule that does not attempt to deduce whether a `Drop` /// implementation could not possible access data of a given lifetime; /// instead Drop-Check now simply assumes that if a destructor has @@ -287,7 +287,6 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( /// this conservative assumption (and thus assume the obligation of /// ensuring that they do not access data nor invoke methods of /// values that have been previously dropped). 
-/// pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>( rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>, diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 82d4300d996..d9c4391ffb5 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -62,7 +62,7 @@ fn equate_intrinsic_type<'a, 'tcx>( require_same_types(tcx, &cause, tcx.mk_fn_ptr(tcx.fn_sig(def_id)), fty); } -/// Returns whether the given intrinsic is unsafe to call or not. +/// Returns `true` if the given intrinsic is unsafe to call or not. pub fn intrisic_operation_unsafety(intrinsic: &str) -> hir::Unsafety { match intrinsic { "size_of" | "min_align_of" | "needs_drop" | diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index b7d015729b4..a7c6dafb9d9 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -124,7 +124,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } - /// Add a suggestion to call the given method to the provided diagnostic. + /// Adds a suggestion to call the given method to the provided diagnostic. crate fn suggest_method_call( &self, err: &mut DiagnosticBuilder<'a>, @@ -261,12 +261,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { /// `lookup_method_in_trait` is used for overloaded operators. /// It does a very narrow slice of what the normal probe/confirm path does. /// In particular, it doesn't really do any probing: it simply constructs - /// an obligation for a particular trait with the given self-type and checks + /// an obligation for a particular trait with the given self type and checks /// whether that trait is implemented. - /// - /// FIXME(#18741): it seems likely that we can consolidate some of this - /// code with the other method-lookup code. In particular, the second half - /// of this method is basically the same as confirmation. 
+ // + // FIXME(#18741): it seems likely that we can consolidate some of this + // code with the other method-lookup code. In particular, the second half + // of this method is basically the same as confirmation. pub fn lookup_method_in_trait(&self, span: Span, m_name: ast::Ident, @@ -440,7 +440,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Ok(def) } - /// Find item with name `item_name` defined in impl/trait `def_id` + /// Finds item with name `item_name` defined in impl/trait `def_id` /// and return it, or `None`, if no such item was defined there. pub fn associated_item(&self, def_id: DefId, item_name: ast::Ident, ns: Namespace) -> Option<ty::AssociatedItem> { diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index ada4a95ed7a..2d4846eebc8 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -1537,7 +1537,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } } - /// Get the type of an impl and generate substitutions with placeholders. + /// Gets the type of an impl and generate substitutions with placeholders. fn impl_ty_and_substs(&self, impl_def_id: DefId) -> (Ty<'tcx>, &'tcx Substs<'tcx>) { (self.tcx.type_of(impl_def_id), self.fresh_item_substs(impl_def_id)) } @@ -1554,7 +1554,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { }) } - /// Replace late-bound-regions bound by `value` with `'static` using + /// Replaces late-bound-regions bound by `value` with `'static` using /// `ty::erase_late_bound_regions`. /// /// This is only a reasonable thing to do during the *probe* phase, not the *confirm* phase, of @@ -1578,7 +1578,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.tcx.erase_late_bound_regions(value) } - /// Find the method with the appropriate name (or return type, as the case may be). If + /// Finds the method with the appropriate name (or return type, as the case may be). 
If /// `allow_similar_names` is set, find methods with close-matching names. fn impl_or_trait_item(&self, def_id: DefId) -> Vec<ty::AssociatedItem> { if let Some(name) = self.method_name { diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 55b6e8f099e..129712ce137 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -709,12 +709,12 @@ impl Ord for TraitInfo { } } -/// Retrieve all traits in this crate and any dependent crates. +/// Retrieves all traits in this crate and any dependent crates. pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<TraitInfo> { tcx.all_traits(LOCAL_CRATE).iter().map(|&def_id| TraitInfo { def_id }).collect() } -/// Compute all traits in this crate and any dependent crates. +/// Computes all traits in this crate and any dependent crates. fn compute_all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<DefId> { use hir::itemlikevisit; diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index fb8f6088121..8801f393a5a 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -1,6 +1,6 @@ /*! -# check.rs +# typeck: check phase Within the check phase of type check, we check each item one at a time (bodies of function expressions are checked as part of the containing @@ -154,7 +154,7 @@ pub struct LocalTy<'tcx> { revealed_ty: Ty<'tcx> } -/// A wrapper for InferCtxt's `in_progress_tables` field. +/// A wrapper for `InferCtxt`'s `in_progress_tables` field. #[derive(Copy, Clone)] struct MaybeInProgressTables<'a, 'tcx: 'a> { maybe_tables: Option<&'a RefCell<ty::TypeckTables<'tcx>>>, @@ -180,7 +180,7 @@ impl<'a, 'tcx> MaybeInProgressTables<'a, 'tcx> { } } -/// closures defined within the function. For example: +/// Closures defined within the function. For example: /// /// fn foo() { /// bar(move|| { ... 
}) @@ -249,10 +249,10 @@ pub enum Expectation<'tcx> { /// This expression is an `if` condition, it must resolve to `bool`. ExpectIfCondition, - /// This expression should have the type given (or some subtype) + /// This expression should have the type given (or some subtype). ExpectHasType(Ty<'tcx>), - /// This expression will be cast to the `Ty` + /// This expression will be cast to the `Ty`. ExpectCastableToType(Ty<'tcx>), /// This rvalue expression will be wrapped in `&` or `Box` and coerced @@ -294,7 +294,7 @@ impl<'a, 'gcx, 'tcx> Expectation<'tcx> { } } - /// Provide an expectation for an rvalue expression given an *optional* + /// Provides an expectation for an rvalue expression given an *optional* /// hint, which is not required for type safety (the resulting type might /// be checked higher up, as is the case with `&expr` and `box expr`), but /// is useful in determining the concrete type. @@ -449,7 +449,7 @@ pub enum Diverges { Always, /// Same as `Always` but with a reachability - /// warning already emitted + /// warning already emitted. WarnedAlways } @@ -534,16 +534,16 @@ pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { ps: RefCell<UnsafetyState>, /// Whether the last checked node generates a divergence (e.g., - /// `return` will set this to Always). In general, when entering + /// `return` will set this to `Always`). In general, when entering /// an expression or other node in the tree, the initial value /// indicates whether prior parts of the containing expression may /// have diverged. It is then typically set to `Maybe` (and the /// old value remembered) for processing the subparts of the /// current expression. As each subpart is processed, they may set - /// the flag to `Always` etc. Finally, at the end, we take the + /// the flag to `Always`, etc. 
Finally, at the end, we take the /// result and "union" it with the original value, so that when we /// return the flag indicates if any subpart of the parent - /// expression (up to and including this part) has diverged. So, + /// expression (up to and including this part) has diverged. So, /// if you read it after evaluating a subexpression `X`, the value /// you get indicates whether any subexpression that was /// evaluating up to and including `X` diverged. @@ -562,7 +562,7 @@ pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { /// foo();}` or `{return; 22}`, where we would warn on the /// `foo()` or `22`. /// - /// An expression represents dead-code if, after checking it, + /// An expression represents dead code if, after checking it, /// the diverges flag is set to something other than `Maybe`. diverges: Cell<Diverges>, @@ -581,9 +581,9 @@ impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> { } } -/// Helper type of a temporary returned by Inherited::build(...). +/// Helper type of a temporary returned by `Inherited::build(...)`. /// Necessary because we can't write the following bound: -/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>). +/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>)`. pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>, def_id: DefId, @@ -760,13 +760,13 @@ fn adt_destructor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx.calculate_dtor(def_id, &mut dropck::check_drop_impl) } -/// If this def-id is a "primary tables entry", returns `Some((body_id, decl))` +/// If this `DefId` is a "primary tables entry", returns `Some((body_id, decl))` /// with information about it's body-id and fn-decl (if any). Otherwise, /// returns `None`. /// /// If this function returns "some", then `typeck_tables(def_id)` will /// succeed; if it returns `None`, then `typeck_tables(def_id)` may or -/// may not succeed. 
In some cases where this function returns `None` +/// may not succeed. In some cases where this function returns `None` /// (notably closures), `typeck_tables(def_id)` would wind up /// redirecting to the owning function. fn primary_body_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -1037,7 +1037,7 @@ struct GeneratorTypes<'tcx> { /// Types that are captured (see `GeneratorInterior` for more). interior: ty::Ty<'tcx>, - /// Indicates if the generator is movable or static (immovable) + /// Indicates if the generator is movable or static (immovable). movability: hir::GeneratorMovability, } @@ -2051,7 +2051,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.tcx.sess.err_count() - self.err_count_on_creation } - /// Produce warning on the given node, if the current point in the + /// Produces warning on the given node, if the current point in the /// function is unreachable, and there hasn't been another warning. fn warn_if_unreachable(&self, id: ast::NodeId, span: Span, kind: &str) { if self.diverges.get() == Diverges::Always { @@ -2336,7 +2336,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { result } - /// Replace the opaque types from the given value with type variables, + /// Replaces the opaque types from the given value with type variables, /// and records the `OpaqueTypeMap` for later use during writeback. See /// `InferCtxt::instantiate_opaque_types` for more details. fn instantiate_opaque_types_from_value<T: TypeFoldable<'tcx>>( @@ -4742,8 +4742,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } - // Resolve associated value path into a base type and associated constant or method definition. - // The newly resolved definition is written into `type_dependent_defs`. + /// Resolves associated value path into a base type and associated constant or method + /// definition. The newly resolved definition is written into `type_dependent_defs`. 
pub fn resolve_ty_and_def_ufcs<'b>(&self, qpath: &'b QPath, node_id: ast::NodeId, @@ -5032,7 +5032,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { None } - /// Given a function block's `NodeId`, return its `FnDecl` if it exists, or `None` otherwise. + /// Given a function block's `NodeId`, returns its `FnDecl` if it exists, or `None` otherwise. fn get_parent_fn_decl(&self, blk_id: ast::NodeId) -> Option<(hir::FnDecl, ast::Ident)> { let parent = self.tcx.hir().get(self.tcx.hir().get_parent(blk_id)); self.get_node_fn_decl(parent).map(|(fn_decl, ident, _)| (fn_decl, ident)) @@ -5074,11 +5074,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }) } - /// On implicit return expressions with mismatched types, provide the following suggestions: + /// On implicit return expressions with mismatched types, provides the following suggestions: /// - /// - Point out the method's return type as the reason for the expected type - /// - Possible missing semicolon - /// - Possible missing return type if the return type is the default, and not `fn main()` + /// - Points out the method's return type as the reason for the expected type. + /// - Possible missing semicolon. + /// - Possible missing return type if the return type is the default, and not `fn main()`. pub fn suggest_mismatched_types_on_tail( &self, err: &mut DiagnosticBuilder<'tcx>, @@ -5144,7 +5144,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } - /// A common error is to forget to add a semicolon at the end of a block: + /// A common error is to forget to add a semicolon at the end of a block, e.g., /// /// ``` /// fn foo() { @@ -5650,7 +5650,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { query_result) } - /// Returns whether an expression is contained inside the LHS of an assignment expression. + /// Returns `true` if an expression is contained inside the LHS of an assignment expression. 
fn expr_in_place(&self, mut expr_id: ast::NodeId) -> bool { let mut contained_in_place = false; diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index 5efa9f08404..9b1a656b1bc 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -12,7 +12,7 @@ use syntax::ast::Ident; use rustc::hir; impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { - /// Check a `a <op>= b` + /// Checks a `a <op>= b` pub fn check_binop_assign(&self, expr: &'gcx hir::Expr, op: hir::BinOp, @@ -42,7 +42,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { ty } - /// Check a potentially overloaded binary operator. + /// Checks a potentially overloaded binary operator. pub fn check_binop(&self, expr: &'gcx hir::Expr, op: hir::BinOp, @@ -672,7 +672,7 @@ enum Op { Unary(hir::UnOp, Span), } -/// Returns true if this is a built-in arithmetic operation (e.g., u32 +/// Returns `true` if this is a built-in arithmetic operation (e.g., u32 /// + u32, i16x4 == i16x4) and false if these types would have to be /// overloaded to be legal. There are two reasons that we distinguish /// builtin operations from overloaded ones (vs trying to drive @@ -681,7 +681,7 @@ enum Op { /// /// 1. Builtin operations can trivially be evaluated in constants. /// 2. For comparison operators applied to SIMD types the result is -/// not of type `bool`. For example, `i16x4==i16x4` yields a +/// not of type `bool`. For example, `i16x4 == i16x4` yields a /// type like `i16x4`. This means that the overloaded trait /// `PartialEq` is not applicable. /// diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index b90c18eb41c..c85fde11455 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -1,6 +1,6 @@ //! The region check is a final pass that runs over the AST after we have //! inferred the type constraints but before we have actually finalized -//! the types. 
Its purpose is to embed a variety of region constraints. +//! the types. Its purpose is to embed a variety of region constraints. //! Inserting these constraints as a separate pass is good because (1) it //! localizes the code that has to do with region inference and (2) often //! we cannot know what constraints are needed until the basic types have @@ -34,17 +34,17 @@ //! #### Reborrows //! //! Generally speaking, `regionck` does NOT try to ensure that the data -//! `data` will outlive the pointer `x`. That is the job of borrowck. The +//! `data` will outlive the pointer `x`. That is the job of borrowck. The //! one exception is when "re-borrowing" the contents of another borrowed //! pointer. For example, imagine you have a borrowed pointer `b` with -//! lifetime L1 and you have an expression `&*b`. The result of this -//! expression will be another borrowed pointer with lifetime L2 (which is +//! lifetime `L1` and you have an expression `&*b`. The result of this +//! expression will be another borrowed pointer with lifetime `L2` (which is //! an inference variable). The borrow checker is going to enforce the -//! constraint that L2 < L1, because otherwise you are re-borrowing data -//! for a lifetime larger than the original loan. However, without the +//! constraint that `L2 < L1`, because otherwise you are re-borrowing data +//! for a lifetime larger than the original loan. However, without the //! routines in this module, the region inferencer would not know of this -//! dependency and thus it might infer the lifetime of L2 to be greater -//! than L1 (issue #3148). +//! dependency and thus it might infer the lifetime of `L2` to be greater +//! than `L1` (issue #3148). //! //! There are a number of troublesome scenarios in the tests //! `region-dependent-*.rs`, but here is one example: @@ -62,13 +62,13 @@ //! //! The key point here is that when you are borrowing a value that //! is "guaranteed" by a borrowed pointer, you must link the -//! 
lifetime of that borrowed pointer (L1, here) to the lifetime of -//! the borrow itself (L2). What do I mean by "guaranteed" by a +//! lifetime of that borrowed pointer (`L1`, here) to the lifetime of +//! the borrow itself (`L2`). What do I mean by "guaranteed" by a //! borrowed pointer? I mean any data that is reached by first //! dereferencing a borrowed pointer and then either traversing -//! interior offsets or boxes. We say that the guarantor +//! interior offsets or boxes. We say that the guarantor //! of such data is the region of the borrowed pointer that was -//! traversed. This is essentially the same as the ownership +//! traversed. This is essentially the same as the ownership //! relation, except that a borrowed pointer never owns its //! contents. @@ -248,11 +248,11 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { mem::replace(&mut self.repeating_scope, scope) } - /// Try to resolve the type for the given node, returning t_err if an error results. Note that + /// Try to resolve the type for the given node, returning `t_err` if an error results. Note that /// we never care about the details of the error, the same error will be detected and reported /// in the writeback phase. /// - /// Note one important point: we do not attempt to resolve *region variables* here. This is + /// Note one important point: we do not attempt to resolve *region variables* here. This is /// because regionck is essentially adding constraints to those region variables and so may yet /// influence how they are resolved. /// @@ -266,9 +266,9 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { /// } /// ``` /// - /// Here, the region of `b` will be `<R0>`. `<R0>` is constrained to be some subregion of the - /// block B and some superregion of the call. If we forced it now, we'd choose the smaller - /// region (the call). But that would make the *b illegal. Since we don't resolve, the type + /// Here, the region of `b` will be `<R0>`. 
`<R0>` is constrained to be some subregion of the + /// block B and some superregion of the call. If we forced it now, we'd choose the smaller + /// region (the call). But that would make the *b illegal. Since we don't resolve, the type /// of b will be `&<R0>.i32` and then `*b` will require that `<R0>` be bigger than the let and /// the `*b` expression, so we will effectively resolve `<R0>` to be the block B. pub fn resolve_type(&self, unresolved_ty: Ty<'tcx>) -> Ty<'tcx> { @@ -826,7 +826,7 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { } } - /// Create a temporary `MemCategorizationContext` and pass it to the closure. + /// Creates a temporary `MemCategorizationContext` and pass it to the closure. fn with_mc<F, R>(&self, f: F) -> R where F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'gcx, 'tcx>) -> R, diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index 97881708b0a..a72836cc796 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -408,7 +408,7 @@ fn check_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, }); } -/// Checks where clauses and inline bounds that are declared on def_id. +/// Checks where-clauses and inline bounds that are declared on `def_id`. fn check_where_clauses<'a, 'gcx, 'fcx, 'tcx>( tcx: TyCtxt<'a, 'gcx, 'gcx>, fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, @@ -790,7 +790,7 @@ fn check_method_receiver<'fcx, 'gcx, 'tcx>(fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, /// through a `*const/mut T` raw pointer. If the feature is not enabled, the requirements are more /// strict: `receiver_ty` must implement `Receiver` and directly implement `Deref<Target=self_ty>`. /// -/// NB: there are cases this function returns `true` but causes an error to be emitted, +/// N.B., there are cases this function returns `true` but causes an error to be emitted, /// particularly when `receiver_ty` derefs to a type that is the same as `self_ty` but has the /// wrong lifetime. 
Be careful of this if you are calling this function speculatively. fn receiver_is_valid<'fcx, 'tcx, 'gcx>( @@ -963,7 +963,7 @@ fn reject_shadowing_parameters(tcx: TyCtxt, def_id: DefId) { } } -/// Feature gates RFC 2056 - trivial bounds, checking for global bounds that +/// Feature gates RFC 2056 -- trivial bounds, checking for global bounds that /// aren't true. fn check_false_global_bounds<'a, 'gcx, 'tcx>( fcx: &FnCtxt<'a, 'gcx, 'tcx>, diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs index a7e19fc4237..9fa146771ec 100644 --- a/src/librustc_typeck/check_unused.rs +++ b/src/librustc_typeck/check_unused.rs @@ -194,7 +194,7 @@ struct CollectExternCrateVisitor<'a, 'tcx: 'a> { } struct ExternCrateToLint { - /// def-id of the extern crate + /// `DefId` of the extern crate def_id: DefId, /// span from the item diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 853c4c85d3f..77300690f19 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -151,7 +151,7 @@ pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.ensure().crate_inherent_impls_overlap_check(LOCAL_CRATE); } -/// Overlap: No two impls for the same trait are implemented for the +/// Overlap: no two impls for the same trait are implemented for the /// same type. Likewise, no two inherent impls for a given type /// constructor provide a method with the same name. fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) { diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index 2df137c3f50..b776a980b7c 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -17,7 +17,7 @@ struct OrphanChecker<'cx, 'tcx: 'cx> { impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> { /// Checks exactly one impl for orphan rules and other such - /// restrictions. 
In this fn, it can happen that multiple errors + /// restrictions. In this fn, it can happen that multiple errors /// apply to a specific impl, so just return after reporting one /// to prevent inundating the user with a bunch of similar error /// reports. diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 9dc74c5d63a..f2ee842d888 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -92,7 +92,7 @@ pub fn provide(providers: &mut Providers) { /////////////////////////////////////////////////////////////////////////// /// Context specific to some particular item. This is what implements -/// AstConv. It has information about the predicates that are defined +/// `AstConv`. It has information about the predicates that are defined /// on the trait. Unfortunately, this predicate information is /// available in various different forms at various points in the /// process. So we can't just store a pointer to e.g., the AST or the @@ -325,7 +325,7 @@ fn type_param_predicates<'a, 'tcx>( } impl<'a, 'tcx> ItemCtxt<'a, 'tcx> { - /// Find bounds from `hir::Generics`. This requires scanning through the + /// Finds bounds from `hir::Generics`. This requires scanning through the /// AST. We do this to avoid having to convert *all* the bounds, which /// would create artificial cycles. Instead we can only convert the /// bounds for a type parameter `X` if `X::Foo` is used. @@ -371,7 +371,7 @@ impl<'a, 'tcx> ItemCtxt<'a, 'tcx> { } /// Tests whether this is the AST for a reference to the type -/// parameter with id `param_id`. We use this so as to avoid running +/// parameter with ID `param_id`. We use this so as to avoid running /// `ast_ty_to_ty`, because we want to avoid triggering an all-out /// conversion of the type to avoid inducing unnecessary cycles. 
fn is_param<'a, 'tcx>( @@ -680,7 +680,7 @@ fn adt_def<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> &'tcx ty::Ad tcx.alloc_adt_def(def_id, kind, variants, repr) } -/// Ensures that the super-predicates of the trait with def-id +/// Ensures that the super-predicates of the trait with `DefId` /// trait_def_id are converted and stored. This also ensures that /// the transitive super-predicates are converted; fn super_predicates_of<'a, 'tcx>( @@ -1581,7 +1581,7 @@ fn is_unsized<'gcx: 'tcx, 'tcx>( } /// Returns the early-bound lifetimes declared in this generics -/// listing. For anything other than fns/methods, this is just all +/// listing. For anything other than fns/methods, this is just all /// the lifetimes that are declared. For fns or methods, we have to /// screen out those that do not appear in any where-clauses etc using /// `resolve_lifetime::early_bound_lifetimes`. @@ -1601,6 +1601,9 @@ fn early_bound_lifetimes_from_generics<'a, 'tcx>( }) } +/// Returns a list of type predicates for the definition with ID `def_id`, including inferred +/// lifetime constraints. This includes all predicates returned by `explicit_predicates_of`, plus +/// inferred constraints concerning which regions outlive other regions. fn predicates_defined_on<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, @@ -1628,6 +1631,9 @@ fn predicates_defined_on<'a, 'tcx>( result } +/// Returns a list of all type predicates (explicit and implicit) for the definition with +/// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus +/// `Self: Trait` predicates for traits. fn predicates_of<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, @@ -1656,6 +1662,8 @@ fn predicates_of<'a, 'tcx>( result } +/// Returns a list of user-specified type predicates for the definition with ID `def_id`. +/// N.B., this does not include any implied/inferred constraints. 
fn explicit_predicates_of<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, @@ -2051,9 +2059,9 @@ pub fn compute_bounds<'gcx: 'tcx, 'tcx>( } /// Converts a specific `GenericBound` from the AST into a set of -/// predicates that apply to the self-type. A vector is returned -/// because this can be anywhere from zero predicates (`T : ?Sized` adds no -/// predicates) to one (`T : Foo`) to many (`T : Bar<X=i32>` adds `T : Bar` +/// predicates that apply to the self type. A vector is returned +/// because this can be anywhere from zero predicates (`T: ?Sized` adds no +/// predicates) to one (`T: Foo`) to many (`T: Bar<X=i32>` adds `T: Bar` /// and `<T as Bar>::X == i32`). fn predicates_from_bound<'tcx>( astconv: &dyn AstConv<'tcx, 'tcx>, diff --git a/src/librustc_typeck/constrained_type_params.rs b/src/librustc_typeck/constrained_type_params.rs index 199ea315896..3de9380fd27 100644 --- a/src/librustc_typeck/constrained_type_params.rs +++ b/src/librustc_typeck/constrained_type_params.rs @@ -14,7 +14,7 @@ impl From<ty::EarlyBoundRegion> for Parameter { fn from(param: ty::EarlyBoundRegion) -> Self { Parameter(param.index) } } -/// Return the set of parameters constrained by the impl header. +/// Returns the set of parameters constrained by the impl header. pub fn parameters_for_impl<'tcx>(impl_self_ty: Ty<'tcx>, impl_trait_ref: Option<ty::TraitRef<'tcx>>) -> FxHashSet<Parameter> @@ -89,7 +89,7 @@ pub fn identify_constrained_type_params<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, /// parameters so constrained to `input_parameters`. For example, /// imagine the following impl: /// -/// impl<T: Debug, U: Iterator<Item=T>> Trait for U +/// impl<T: Debug, U: Iterator<Item = T>> Trait for U /// /// The impl's predicates are collected from left to right. 
Ignoring /// the implicit `Sized` bounds, these are @@ -112,10 +112,10 @@ pub fn identify_constrained_type_params<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, /// We *do* have to be somewhat careful when projection targets contain /// projections themselves, for example in /// impl<S,U,V,W> Trait for U where -/// /* 0 */ S: Iterator<Item=U>, +/// /* 0 */ S: Iterator<Item = U>, /// /* - */ U: Iterator, /// /* 1 */ <U as Iterator>::Item: ToOwned<Owned=(W,<V as Iterator>::Item)> -/// /* 2 */ W: Iterator<Item=V> +/// /* 2 */ W: Iterator<Item = V> /// /* 3 */ V: Debug /// we have to evaluate the projections in the order I wrote them: /// `V: Debug` requires `V` to be evaluated. The only projection that diff --git a/src/librustc_typeck/impl_wf_check.rs b/src/librustc_typeck/impl_wf_check.rs index 07f5fca6fe6..0928d4a1236 100644 --- a/src/librustc_typeck/impl_wf_check.rs +++ b/src/librustc_typeck/impl_wf_check.rs @@ -20,14 +20,14 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax_pos::Span; /// Checks that all the type/lifetime parameters on an impl also -/// appear in the trait ref or self-type (or are constrained by a +/// appear in the trait ref or self type (or are constrained by a /// where-clause). These rules are needed to ensure that, given a /// trait ref like `<T as Trait<U>>`, we can derive the values of all /// parameters on the impl (which is needed to make specialization /// possible). /// /// However, in the case of lifetimes, we only enforce these rules if -/// the lifetime parameter is used in an associated type. This is a +/// the lifetime parameter is used in an associated type. This is a /// concession to backwards compatibility; see comment at the end of /// the fn for details. /// @@ -40,7 +40,7 @@ use syntax_pos::Span; /// impl<T> Trait<Foo<T>> for Bar { ... } /// // ^ T appears in `Foo<T>`, ok. /// -/// impl<T> Trait<Foo> for Bar where Bar: Iterator<Item=T> { ... } +/// impl<T> Trait<Foo> for Bar where Bar: Iterator<Item = T> { ... 
} /// // ^ T is bound to `<Bar as Iterator>::Item`, ok. /// /// impl<'a> Trait<Foo> for Bar { } diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 8d77310f3d4..c72424b1306 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -1,6 +1,6 @@ /*! -# typeck.rs +# typeck The type checker is responsible for: diff --git a/src/librustc_typeck/outlives/implicit_infer.rs b/src/librustc_typeck/outlives/implicit_infer.rs index e388a3e0d0c..7500d247d43 100644 --- a/src/librustc_typeck/outlives/implicit_infer.rs +++ b/src/librustc_typeck/outlives/implicit_infer.rs @@ -12,7 +12,7 @@ use super::utils::*; /// Infer predicates for the items in the crate. /// -/// global_inferred_outlives: this is initially the empty map that +/// `global_inferred_outlives`: this is initially the empty map that /// was generated by walking the items in the crate. This will /// now be filled with inferred predicates. pub fn infer_predicates<'tcx>( diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index 868c1132e44..1d407870ee7 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -41,7 +41,7 @@ pub struct Constraint<'a> { /// } /// /// then while we are visiting `Bar<T>`, the `CurrentItem` would have -/// the def-id and the start of `Foo`'s inferreds. +/// the `DefId` and the start of `Foo`'s inferreds. pub struct CurrentItem { inferred_start: InferredIndex, } diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index c74a561e5a0..adfac98df86 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -1,4 +1,4 @@ -//! Representation of a `#[doc(cfg(...))]` attribute. +//! The representation of a `#[doc(cfg(...))]` attribute. // FIXME: Once the portability lint RFC is implemented (see tracking issue #41619), // switch to use those structures instead. 
@@ -24,7 +24,7 @@ pub enum Cfg { False, /// A generic configuration option, e.g., `test` or `target_os = "linux"`. Cfg(Symbol, Option<Symbol>), - /// Negate a configuration requirement, i.e., `not(x)`. + /// Negates a configuration requirement, i.e., `not(x)`. Not(Box<Cfg>), /// Union of a list of configuration requirements, i.e., `any(...)`. Any(Vec<Cfg>), diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index bd0525583f2..6743f9ea91f 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -685,7 +685,7 @@ impl AttributesExt for [ast::Attribute] { } pub trait NestedAttributesExt { - /// Returns whether the attribute list contains a specific `Word` + /// Returns `true` if the attribute list contains a specific `Word` fn has_word(self, word: &str) -> bool; } @@ -937,7 +937,7 @@ impl Attributes { } } - /// Get links as a vector + /// Gets links as a vector /// /// Cache must be populated before call pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> { @@ -2147,12 +2147,12 @@ pub struct PolyTrait { /// it does not preserve mutability or boxes. #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum Type { - /// structs/enums/traits (most that'd be an hir::TyKind::Path) + /// Structs/enums/traits (most that'd be an `hir::TyKind::Path`). ResolvedPath { path: Path, typarams: Option<Vec<GenericBound>>, did: DefId, - /// true if is a `T::Name` path for associated types + /// `true` if is a `T::Name` path for associated types. 
is_generic: bool, }, /// For parameterized types, so the consumer of the JSON don't go @@ -3955,7 +3955,7 @@ impl Clean<Deprecation> for attr::Deprecation { } } -/// An equality constraint on an associated type, e.g., `A=Bar` in `Foo<A=Bar>` +/// An equality constraint on an associated type, e.g., `A = Bar` in `Foo<A = Bar>` #[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)] pub struct TypeBinding { pub name: String, diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs index 31e842b3389..376cf3a2dee 100644 --- a/src/librustdoc/clean/simplify.rs +++ b/src/librustdoc/clean/simplify.rs @@ -1,11 +1,11 @@ -//! Simplification of where clauses and parameter bounds into a prettier and +//! Simplification of where-clauses and parameter bounds into a prettier and //! more canonical form. //! //! Currently all cross-crate-inlined function use `rustc::ty` to reconstruct //! the AST (e.g., see all of `clean::inline`), but this is not always a -//! non-lossy transformation. The current format of storage for where clauses +//! non-lossy transformation. The current format of storage for where-clauses //! for functions and such is simply a list of predicates. One example of this -//! is that the AST predicate of: `where T: Trait<Foo=Bar>` is encoded as: +//! is that the AST predicate of: `where T: Trait<Foo = Bar>` is encoded as: //! `where T: Trait, <T as Trait>::Foo = Bar`. //! //! This module attempts to reconstruct the original where and/or parameter diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs index 635d071b8e0..ec4ee2d66a5 100644 --- a/src/librustdoc/config.rs +++ b/src/librustdoc/config.rs @@ -150,9 +150,9 @@ pub struct RenderOptions { pub playground_url: Option<String>, /// Whether to sort modules alphabetically on a module page instead of using declaration order. /// `true` by default. 
- /// - /// FIXME(misdreavus): the flag name is `--sort-modules-by-appearance` but the meaning is - /// inverted once read + // + // FIXME(misdreavus): the flag name is `--sort-modules-by-appearance` but the meaning is + // inverted once read. pub sort_modules_alphabetically: bool, /// List of themes to extend the docs with. Original argument name is included to assist in /// displaying errors if it fails a theme check. @@ -165,9 +165,9 @@ pub struct RenderOptions { pub resource_suffix: String, /// Whether to run the static CSS/JavaScript through a minifier when outputting them. `true` by /// default. - /// - /// FIXME(misdreavus): the flag name is `--disable-minification` but the meaning is inverted - /// once read + // + // FIXME(misdreavus): the flag name is `--disable-minification` but the meaning is inverted + // once read. pub enable_minification: bool, /// Whether to create an index page in the root of the output directory. If this is true but /// `enable_index_page` is None, generate a static listing of crates instead. @@ -484,7 +484,7 @@ impl Options { }) } - /// Returns whether the file given as `self.input` is a Markdown file. + /// Returns `true` if the file given as `self.input` is a Markdown file. 
pub fn markdown_input(&self) -> bool { self.input.extension() .map_or(false, |e| e == "md" || e == "markdown") diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 2cff6bb3924..e90127ca162 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -63,7 +63,7 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { /// Table type parameter definition -> substituted type pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>, - /// Table node id of lifetime parameter definition -> substituted lifetime + /// Table `NodeId` of lifetime parameter definition -> substituted lifetime pub lt_substs: RefCell<FxHashMap<DefId, clean::Lifetime>>, /// Table DefId of `impl Trait` in argument position -> bounds pub impl_trait_bounds: RefCell<FxHashMap<DefId, Vec<clean::GenericBound>>>, diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs index 690bcd8c070..35858d84150 100644 --- a/src/librustdoc/html/escape.rs +++ b/src/librustdoc/html/escape.rs @@ -1,6 +1,6 @@ -//! HTML Escaping +//! HTML escaping. //! -//! This module contains one unit-struct which can be used to HTML-escape a +//! This module contains one unit struct, which can be used to HTML-escape a //! string of text (for use in a format string). use std::fmt; diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 5a3e6984859..830e1402b96 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -54,13 +54,13 @@ pub struct Method<'a> { pub indent: usize, } -/// Wrapper struct for emitting a where clause from Generics. +/// Wrapper struct for emitting a where-clause from Generics. pub struct WhereClause<'a>{ - /// The Generics from which to emit a where clause. + /// The Generics from which to emit a where-clause. pub gens: &'a clean::Generics, /// The number of spaces to indent each line with. pub indent: usize, - /// Whether the where clause needs to add a comma and newline after the last bound. 
+ /// Whether the where-clause needs to add a comma and newline after the last bound. pub end_newline: bool, } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index e43251b7d5f..66b1b5b4d75 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -124,7 +124,7 @@ trait Writer { /// Called at the end of a span of highlighted text. fn exit_span(&mut self) -> io::Result<()>; - /// Called for a span of text. If the text should be highlighted differently from the + /// Called for a span of text. If the text should be highlighted differently from the /// surrounding text, then the `Class` argument will be a value other than `None`. /// /// The following sequences of callbacks are equivalent: diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 6b7f54044ca..386480313dc 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -1,8 +1,8 @@ -//! Markdown formatting for rustdoc +//! Markdown formatting for rustdoc. //! //! This module implements markdown formatting through the pulldown-cmark //! rust-library. This module exposes all of the -//! functionality through a unit-struct, `Markdown`, which has an implementation +//! functionality through a unit struct, `Markdown`, which has an implementation //! of `fmt::Display`. Example usage: //! //! ``` @@ -139,7 +139,7 @@ thread_local!(pub static PLAYGROUND: RefCell<Option<(Option<String>, String)>> = RefCell::new(None) }); -/// Adds syntax highlighting and playground Run buttons to rust code blocks. +/// Adds syntax highlighting and playground Run buttons to Rust code blocks. struct CodeBlocks<'a, I: Iterator<Item = Event<'a>>> { inner: I, check_error_codes: ErrorCodes, @@ -277,7 +277,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'a, I> { } } -/// Make headings links with anchor ids and build up TOC. +/// Make headings links with anchor IDs and build up TOC. 
struct LinkReplacer<'a, 'b, I: Iterator<Item = Event<'a>>> { inner: I, links: &'b [(String, String)], @@ -310,7 +310,7 @@ impl<'a, 'b, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, 'b, I> } } -/// Make headings links with anchor ids and build up TOC. +/// Make headings links with anchor IDs and build up TOC. struct HeadingLinks<'a, 'b, 'ids, I: Iterator<Item = Event<'a>>> { inner: I, toc: Option<&'b mut TocBuilder>, diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index a85ac19286a..2a45829f51b 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -1,4 +1,4 @@ -//! Rustdoc's HTML Rendering module +//! Rustdoc's HTML rendering module. //! //! This modules contains the bulk of the logic necessary for rendering a //! rustdoc `clean::Crate` instance to a set of static HTML pages. This @@ -163,7 +163,7 @@ impl SharedContext { } impl SharedContext { - /// Returns whether the `collapse-docs` pass was run on this crate. + /// Returns `true` if the `collapse-docs` pass was run on this crate. pub fn was_collapsed(&self) -> bool { self.passes.contains("collapse-docs") } @@ -267,11 +267,11 @@ macro_rules! try_err { #[derive(Default)] pub struct Cache { /// Mapping of typaram ids to the name of the type parameter. This is used - /// when pretty-printing a type (so pretty printing doesn't have to + /// when pretty-printing a type (so pretty-printing doesn't have to /// painfully maintain a context like this) pub typarams: FxHashMap<DefId, String>, - /// Maps a type id to all known implementations for that type. This is only + /// Maps a type ID to all known implementations for that type. This is only /// recognized for intra-crate `ResolvedPath` types, and is used to print /// out extra documentation on the page of an enum/struct. /// @@ -279,7 +279,7 @@ pub struct Cache { /// found on that implementation. 
pub impls: FxHashMap<DefId, Vec<Impl>>, - /// Maintains a mapping of local crate node ids to the fully qualified name + /// Maintains a mapping of local crate `NodeId`s to the fully qualified name /// and "short type description" of that node. This is used when generating /// URLs when a type is being linked to. External paths are not located in /// this map because the `External` type itself has all the information @@ -290,7 +290,7 @@ pub struct Cache { /// generating explicit hyperlinks to other crates. pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>, - /// Maps local def ids of exported types to fully qualified paths. + /// Maps local `DefId`s of exported types to fully qualified paths. /// Unlike 'paths', this mapping ignores any renames that occur /// due to 'use' statements. /// @@ -692,7 +692,7 @@ pub fn run(mut krate: clean::Crate, cx.krate(krate) } -/// Build the search index from the collected metadata +/// Builds the search index from the collected metadata fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { let mut nodeid_to_pathid = FxHashMap::default(); let mut crate_items = Vec::with_capacity(cache.search_index.len()); @@ -2293,7 +2293,7 @@ impl Context { } impl<'a> Item<'a> { - /// Generate a url appropriate for an `href` attribute back to the source of + /// Generates a url appropriate for an `href` attribute back to the source of /// this item. /// /// The url generated, when clicked, will redirect the browser back to the diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs index a1a11bc8128..b3da23060dc 100644 --- a/src/librustdoc/html/toc.rs +++ b/src/librustdoc/html/toc.rs @@ -58,7 +58,7 @@ impl TocBuilder { } - /// Convert into a true `Toc` struct. + /// Converts into a true `Toc` struct. pub fn into_toc(mut self) -> Toc { // we know all levels are >= 1. 
self.fold_until(0); diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs index 65a96e9001b..18729146749 100644 --- a/src/librustdoc/markdown.rs +++ b/src/librustdoc/markdown.rs @@ -127,7 +127,7 @@ pub fn render(input: PathBuf, options: RenderOptions, diag: &errors::Handler) -> } } -/// Run any tests/code examples in the markdown file `input`. +/// Runs any tests/code examples in the markdown file `input`. pub fn test(mut options: Options, diag: &errors::Handler) -> isize { let input_str = match load_string(&options.input, diag) { Ok(s) => s, diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 13ad05101e4..62b79646f6b 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -59,7 +59,7 @@ impl<'a, 'tcx, 'rcx> LinkCollector<'a, 'tcx, 'rcx> { } } - /// Resolve a given string as a path, along with whether or not it is + /// Resolves a given string as a path, along with whether or not it is /// in the value namespace. Also returns an optional URL fragment in the case /// of variants and methods. fn resolve(&self, @@ -422,7 +422,7 @@ impl<'a, 'tcx, 'rcx> DocFolder for LinkCollector<'a, 'tcx, 'rcx> { } } -/// Resolve a string as a macro. +/// Resolves a string as a macro. fn macro_resolve(cx: &DocContext, path_str: &str) -> Option<Def> { use syntax::ext::base::{MacroKind, SyntaxExtension}; let segment = ast::PathSegment::from_ident(Ident::from_str(path_str)); diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs index c9a3a2c003f..5e4eb88eae7 100644 --- a/src/librustdoc/passes/mod.rs +++ b/src/librustdoc/passes/mod.rs @@ -403,7 +403,7 @@ pub fn look_for_tests<'a, 'tcx: 'a, 'rcx: 'a>( } } -/// Return a span encompassing all the given attributes. +/// Returns a span encompassing all the given attributes. 
crate fn span_of_attrs(attrs: &clean::Attributes) -> Span { if attrs.doc_strings.is_empty() { return DUMMY_SP; diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index b8eb777a54b..f53dae8d230 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -259,7 +259,7 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { /// Cross-crate inlining occurs later on during crate cleaning /// and follows different rules. /// - /// Returns true if the target has been inlined. + /// Returns `true` if the target has been inlined. fn maybe_inline_local(&mut self, id: ast::NodeId, def: Def, diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs index 6127440a6d7..903e81f1427 100644 --- a/src/libserialize/hex.rs +++ b/src/libserialize/hex.rs @@ -80,7 +80,7 @@ impl error::Error for FromHexError { impl FromHex for str { - /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) + /// Converts any hexadecimal encoded string (literal, `@`, `&`, or `~`) /// to the byte values it encodes. /// /// You can use the `String::from_utf8` function to turn a diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 362b4574ee2..a34714c3280 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -717,7 +717,7 @@ impl<'a> PrettyEncoder<'a> { } } - /// Set the number of spaces to indent for each level. + /// Sets the number of spaces to indent for each level. /// This is safe to set during encoding. pub fn set_indent(&mut self, indent: usize) { // self.indent very well could be 0 so we need to use checked division. 
@@ -1011,13 +1011,13 @@ impl Encodable for Json { } } -/// Create an `AsJson` wrapper which can be used to print a value as JSON +/// Creates an `AsJson` wrapper which can be used to print a value as JSON /// on-the-fly via `write!` pub fn as_json<T>(t: &T) -> AsJson<T> { AsJson { inner: t } } -/// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON +/// Creates an `AsPrettyJson` wrapper which can be used to print a value as JSON /// on-the-fly via `write!` pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<T> { AsPrettyJson { inner: t, indent: None } @@ -1040,7 +1040,7 @@ impl Json { } /// Attempts to get a nested Json Object for each key in `keys`. - /// If any key is found not to exist, find_path will return None. + /// If any key is found not to exist, `find_path` will return `None`. /// Otherwise, it will return the Json value associated with the final key. pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ let mut target = self; @@ -1052,7 +1052,7 @@ impl Json { /// If the Json value is an Object, performs a depth-first search until /// a value associated with the provided key is found. If no value is found - /// or the Json value is not an Object, returns None. + /// or the Json value is not an Object, returns `None`. pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> { match self { &Json::Object(ref map) => { @@ -1073,13 +1073,13 @@ impl Json { } } - /// Returns true if the Json value is an Object. Returns false otherwise. + /// Returns `true` if the Json value is an `Object`. pub fn is_object(&self) -> bool { self.as_object().is_some() } - /// If the Json value is an Object, returns the associated BTreeMap. - /// Returns None otherwise. + /// If the Json value is an `Object`, returns the associated `BTreeMap`; + /// returns `None` otherwise. 
pub fn as_object(&self) -> Option<&Object> { match *self { Json::Object(ref map) => Some(map), @@ -1087,13 +1087,13 @@ impl Json { } } - /// Returns true if the Json value is an Array. Returns false otherwise. + /// Returns `true` if the Json value is an `Array`. pub fn is_array(&self) -> bool { self.as_array().is_some() } - /// If the Json value is an Array, returns the associated vector. - /// Returns None otherwise. + /// If the Json value is an `Array`, returns the associated vector; + /// returns `None` otherwise. pub fn as_array(&self) -> Option<&Array> { match *self { Json::Array(ref array) => Some(&*array), @@ -1101,13 +1101,13 @@ impl Json { } } - /// Returns true if the Json value is a String. Returns false otherwise. + /// Returns `true` if the Json value is a `String`. pub fn is_string(&self) -> bool { self.as_string().is_some() } - /// If the Json value is a String, returns the associated str. - /// Returns None otherwise. + /// If the Json value is a `String`, returns the associated `str`; + /// returns `None` otherwise. pub fn as_string(&self) -> Option<&str> { match *self { Json::String(ref s) => Some(&s[..]), @@ -1115,7 +1115,7 @@ impl Json { } } - /// Returns true if the Json value is a Number. Returns false otherwise. + /// Returns `true` if the Json value is a `Number`. pub fn is_number(&self) -> bool { match *self { Json::I64(_) | Json::U64(_) | Json::F64(_) => true, @@ -1123,7 +1123,7 @@ impl Json { } } - /// Returns true if the Json value is a i64. Returns false otherwise. + /// Returns `true` if the Json value is a `i64`. pub fn is_i64(&self) -> bool { match *self { Json::I64(_) => true, @@ -1131,7 +1131,7 @@ impl Json { } } - /// Returns true if the Json value is a u64. Returns false otherwise. + /// Returns `true` if the Json value is a `u64`. pub fn is_u64(&self) -> bool { match *self { Json::U64(_) => true, @@ -1139,7 +1139,7 @@ impl Json { } } - /// Returns true if the Json value is a f64. Returns false otherwise. 
+ /// Returns `true` if the Json value is a `f64`. pub fn is_f64(&self) -> bool { match *self { Json::F64(_) => true, @@ -1147,8 +1147,8 @@ impl Json { } } - /// If the Json value is a number, return or cast it to a i64. - /// Returns None otherwise. + /// If the Json value is a number, returns or cast it to a `i64`; + /// returns `None` otherwise. pub fn as_i64(&self) -> Option<i64> { match *self { Json::I64(n) => Some(n), @@ -1157,8 +1157,8 @@ impl Json { } } - /// If the Json value is a number, return or cast it to a u64. - /// Returns None otherwise. + /// If the Json value is a number, returns or cast it to a `u64`; + /// returns `None` otherwise. pub fn as_u64(&self) -> Option<u64> { match *self { Json::I64(n) => Some(n as u64), @@ -1167,8 +1167,8 @@ impl Json { } } - /// If the Json value is a number, return or cast it to a f64. - /// Returns None otherwise. + /// If the Json value is a number, returns or cast it to a `f64`; + /// returns `None` otherwise. pub fn as_f64(&self) -> Option<f64> { match *self { Json::I64(n) => Some(n as f64), @@ -1178,13 +1178,13 @@ impl Json { } } - /// Returns true if the Json value is a Boolean. Returns false otherwise. + /// Returns `true` if the Json value is a `Boolean`. pub fn is_boolean(&self) -> bool { self.as_boolean().is_some() } - /// If the Json value is a Boolean, returns the associated bool. - /// Returns None otherwise. + /// If the Json value is a `Boolean`, returns the associated `bool`; + /// returns `None` otherwise. pub fn as_boolean(&self) -> Option<bool> { match *self { Json::Boolean(b) => Some(b), @@ -1192,13 +1192,13 @@ impl Json { } } - /// Returns true if the Json value is a Null. Returns false otherwise. + /// Returns `true` if the Json value is a `Null`. pub fn is_null(&self) -> bool { self.as_null().is_some() } - /// If the Json value is a Null, returns (). - /// Returns None otherwise. + /// If the Json value is a `Null`, returns `()`; + /// returns `None` otherwise. 
pub fn as_null(&self) -> Option<()> { match *self { Json::Null => Some(()), @@ -1294,7 +1294,7 @@ impl Stack { /// Returns The number of elements in the Stack. pub fn len(&self) -> usize { self.stack.len() } - /// Returns true if the stack is empty. + /// Returns `true` if the stack is empty. pub fn is_empty(&self) -> bool { self.stack.is_empty() } /// Provides access to the StackElement at a given index. @@ -1320,7 +1320,7 @@ impl Stack { true } - /// Returns true if the bottom-most elements of this stack are the same as + /// Returns `true` if the bottom-most elements of this stack are the same as /// the ones passed as parameter. pub fn starts_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } @@ -1330,7 +1330,7 @@ impl Stack { true } - /// Returns true if the top-most elements of this stack are the same as + /// Returns `true` if the top-most elements of this stack are the same as /// the ones passed as parameter. pub fn ends_with(&self, rhs: &[StackElement]) -> bool { if self.stack.len() < rhs.len() { return false; } @@ -1955,7 +1955,7 @@ pub struct Builder<T> { } impl<T: Iterator<Item=char>> Builder<T> { - /// Create a JSON Builder. + /// Creates a JSON Builder. 
pub fn new(src: T) -> Builder<T> { Builder { parser: Parser::new(src), token: None, } } @@ -2553,7 +2553,7 @@ impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> { } impl<'a, T> AsPrettyJson<'a, T> { - /// Set the indentation level for the emitted JSON + /// Sets the indentation level for the emitted JSON pub fn indent(mut self, indent: usize) -> AsPrettyJson<'a, T> { self.indent = Some(indent); self diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 03844b387ac..8d1be7501b6 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -824,7 +824,7 @@ impl<T:Decodable> Decodable for Arc<T> { /// Implement this trait on your `{Encodable,Decodable}::Error` types /// to override the default panic behavior for missing specializations. pub trait SpecializationError { - /// Create an error for a missing method specialization. + /// Creates an error for a missing method specialization. /// Defaults to panicking with type, trait & method names. /// `S` is the encoder/decoder state type, /// `T` is the type being encoded/decoded, and diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 681d8eeaa0d..ab62dd2bc9b 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -129,14 +129,14 @@ impl PathSegment { } } -/// Arguments of a path segment. +/// The arguments of a path segment. /// /// E.g., `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum GenericArgs { - /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>` + /// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`. AngleBracketed(AngleBracketedArgs), - /// The `(A,B)` and `C` in `Foo(A,B) -> C` + /// The `(A, B)` and `C` in `Foo(A, B) -> C`. Parenthesized(ParenthesizedArgs), } @@ -180,16 +180,15 @@ impl GenericArg { } } -/// A path like `Foo<'a, T>` +/// A path like `Foo<'a, T>`. 
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)] pub struct AngleBracketedArgs { - /// Overall span + /// The overall span. pub span: Span, /// The arguments for this path segment. pub args: Vec<GenericArg>, /// Bindings (equality constraints) on associated types, if present. - /// - /// E.g., `Foo<A=Bar>`. + /// E.g., `Foo<A = Bar>`. pub bindings: Vec<TypeBinding>, } @@ -205,7 +204,7 @@ impl Into<Option<P<GenericArgs>>> for ParenthesizedArgs { } } -/// A path like `Foo(A,B) -> C` +/// A path like `Foo(A, B) -> C`. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct ParenthesizedArgs { /// Overall span @@ -270,7 +269,7 @@ impl serialize::UseSpecializedDecodable for NodeId { } } -/// Node id used to represent the root of the crate. +/// `NodeId` used to represent the root of the crate. pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0); /// When parsing and doing expansions, we initially give all AST nodes this AST @@ -369,7 +368,7 @@ impl Default for Generics { } } -/// A `where` clause in a definition +/// A where-clause in a definition. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereClause { pub id: NodeId, @@ -377,7 +376,7 @@ pub struct WhereClause { pub span: Span, } -/// A single predicate in a `where` clause +/// A single predicate in a where-clause. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum WherePredicate { /// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`). @@ -1592,7 +1591,7 @@ pub struct BareFnTy { pub decl: P<FnDecl>, } -/// The different kinds of types recognized by the compiler. +/// The various kinds of type recognized by the compiler. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum TyKind { /// A variable-length slice (`[T]`). @@ -1894,7 +1893,7 @@ impl fmt::Debug for ImplPolarity { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum FunctionRetTy { - /// Return type is not specified. + /// Returns type is not specified. 
/// /// Functions default to `()` and closures default to inference. /// Span points to where return type would be inserted. @@ -2036,10 +2035,10 @@ pub struct Attribute { /// `TraitRef`s appear in impls. /// -/// Resolve maps each `TraitRef`'s `ref_id` to its defining trait; that's all +/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all /// that the `ref_id` is for. The `impl_id` maps to the "self type" of this impl. /// If this impl is an `ItemKind::Impl`, the `impl_id` is redundant (it could be the -/// same as the impl's node-id). +/// same as the impl's `NodeId`). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct TraitRef { pub path: Path, diff --git a/src/libsyntax/attr/builtin.rs b/src/libsyntax/attr/builtin.rs index 6f7761b54fc..520984b8091 100644 --- a/src/libsyntax/attr/builtin.rs +++ b/src/libsyntax/attr/builtin.rs @@ -163,7 +163,7 @@ pub struct RustcDeprecation { pub suggestion: Option<Symbol>, } -/// Check if `attrs` contains an attribute like `#![feature(feature_name)]`. +/// Checks if `attrs` contains an attribute like `#![feature(feature_name)]`. /// This will not perform any "sanity checks" on the form of the attributes. pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool { attrs.iter().any(|item| { @@ -177,7 +177,7 @@ pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool { }) } -/// Find the first stability attribute. `None` if none exists. +/// Finds the first stability attribute. `None` if none exists. pub fn find_stability(sess: &ParseSess, attrs: &[Attribute], item_sp: Span) -> Option<Stability> { find_stability_generic(sess, attrs.iter(), item_sp) @@ -580,7 +580,7 @@ pub struct Deprecation { pub note: Option<Symbol>, } -/// Find the deprecation attribute. `None` if none exists. +/// Finds the deprecation attribute. `None` if none exists. 
pub fn find_deprecation(sess: &ParseSess, attrs: &[Attribute], item_sp: Span) -> Option<Deprecation> { find_deprecation_generic(sess, attrs.iter(), item_sp) diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 0c3aedae715..a4f5449ec54 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -85,7 +85,7 @@ impl NestedMetaItem { self.span } - /// Returns true if this list item is a MetaItem with a name of `name`. + /// Returns `true` if this list item is a MetaItem with a name of `name`. pub fn check_name(&self, name: &str) -> bool { self.meta_item().map_or(false, |meta_item| meta_item.check_name(name)) } @@ -272,7 +272,7 @@ impl MetaItem { } impl Attribute { - /// Extract the MetaItem from inside this Attribute. + /// Extracts the MetaItem from inside this Attribute. pub fn meta(&self) -> Option<MetaItem> { let mut tokens = self.tokens.trees().peekable(); Some(MetaItem { @@ -328,7 +328,7 @@ impl Attribute { }) } - /// Convert self to a normal #[doc="foo"] comment, if it is a + /// Converts self to a normal #[doc="foo"] comment, if it is a /// comment like `///` or `/** */`. (Returns self unchanged for /// non-sugared doc attributes.) pub fn with_desugared_doc<T, F>(&self, f: F) -> T where diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index bfc4457f054..5bab9e4e2c9 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -88,7 +88,7 @@ impl<'a> StripUnconfigured<'a> { /// /// Gives a compiler warning when the `cfg_attr` contains no attributes and /// is in the original source file. Gives a compiler error if the syntax of - /// the attribute is incorrect + /// the attribute is incorrect. fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> { if !attr.check_name("cfg_attr") { return vec![attr]; @@ -146,7 +146,7 @@ impl<'a> StripUnconfigured<'a> { } } - /// Determine if a node with the given attributes should be included in this configuration. 
+ /// Determines if a node with the given attributes should be included in this configuration. pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { attrs.iter().all(|attr| { if !is_cfg(attr) { @@ -282,8 +282,8 @@ impl<'a> StripUnconfigured<'a> { } } - // deny #[cfg] on generic parameters until we decide what to do with it. - // see issue #51279. + /// Denies `#[cfg]` on generic parameters until we decide what to do with it. + /// See issue #51279. pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) { for attr in param.attrs() { let offending_attr = if attr.check_name("cfg") { diff --git a/src/libsyntax/diagnostics/metadata.rs b/src/libsyntax/diagnostics/metadata.rs index 3abb820a678..704135fe1d5 100644 --- a/src/libsyntax/diagnostics/metadata.rs +++ b/src/libsyntax/diagnostics/metadata.rs @@ -34,7 +34,7 @@ pub struct ErrorLocation { } impl ErrorLocation { - /// Create an error location from a span. + /// Creates an error location from a span. pub fn from_span(ecx: &ExtCtxt<'_>, sp: Span) -> ErrorLocation { let loc = ecx.source_map().lookup_char_pos_adj(sp.lo()); ErrorLocation { @@ -44,7 +44,7 @@ impl ErrorLocation { } } -/// Get the directory where metadata for a given `prefix` should be stored. +/// Gets the directory where metadata for a given `prefix` should be stored. /// /// See `output_metadata`. pub fn get_metadata_dir(prefix: &str) -> PathBuf { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 465b53184dc..fcb349205e3 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -327,34 +327,34 @@ macro_rules! make_stmts_default { /// The result of a macro expansion. The return values of the various /// methods are spliced into the AST at the callsite of the macro. pub trait MacResult { - /// Create an expression. + /// Creates an expression. fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> { None } - /// Create zero or more items. + /// Creates zero or more items. 
fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> { None } - /// Create zero or more impl items. + /// Creates zero or more impl items. fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> { None } - /// Create zero or more trait items. + /// Creates zero or more trait items. fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> { None } - /// Create zero or more items in an `extern {}` block + /// Creates zero or more items in an `extern {}` block fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None } - /// Create a pattern. + /// Creates a pattern. fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> { None } - /// Create zero or more statements. + /// Creates zero or more statements. /// /// By default this attempts to create an expression statement, /// returning None if that fails. @@ -461,7 +461,7 @@ pub struct DummyResult { } impl DummyResult { - /// Create a default MacResult that can be anything. + /// Creates a default MacResult that can be anything. /// /// Use this as a return value after hitting any errors and /// calling `span_err`. @@ -474,7 +474,7 @@ impl DummyResult { Box::new(DummyResult { expr_only: false, is_error: false, span }) } - /// Create a default MacResult that can only be an expression. + /// Creates a default MacResult that can only be an expression. /// /// Use this for macros that must expand to an expression, so even /// if an error is encountered internally, the user will receive @@ -677,7 +677,7 @@ pub enum SyntaxExtension { } impl SyntaxExtension { - /// Return which kind of macro calls this syntax extension. + /// Returns which kind of macro calls this syntax extension. pub fn kind(&self) -> MacroKind { match *self { SyntaxExtension::DeclMacro { .. 
} | @@ -835,8 +835,8 @@ impl<'a> ExtCtxt<'a> { expand::MacroExpander::new(self, false) } - /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node. - /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified. + /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node. + /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified. pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> { expand::MacroExpander::new(self, true) } @@ -976,9 +976,9 @@ impl<'a> ExtCtxt<'a> { } } -/// Extract a string literal from the macro expanded version of `expr`, +/// Extracts a string literal from the macro expanded version of `expr`, /// emitting `err_msg` if `expr` is not a string literal. This does not stop -/// compilation on error, merely emits a non-fatal error and returns None. +/// compilation on error, merely emits a non-fatal error and returns `None`. pub fn expr_to_spanned_string<'a>( cx: &'a mut ExtCtxt<'_>, mut expr: P<ast::Expr>, @@ -1022,7 +1022,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>, } /// Interpreting `tts` as a comma-separated sequence of expressions, -/// expect exactly one string literal, or emit an error and return None. +/// expect exactly one string literal, or emit an error and return `None`. pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree], @@ -1044,8 +1044,8 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>, }) } -/// Extract comma-separated expressions from `tts`. If there is a -/// parsing error, emit a non-fatal error and return None. +/// Extracts comma-separated expressions from `tts`. If there is a +/// parsing error, emit a non-fatal error and return `None`. 
pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 6708e3c12a0..48f6e4c0c82 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -347,7 +347,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { /// Constructs a qualified path. /// - /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`. + /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`. fn qpath_all(&self, self_type: P<ast::Ty>, trait_path: ast::Path, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 89d59478a5d..3b97242daa1 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -444,7 +444,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } } - /// Collect all macro invocations reachable at this time in this AST fragment, and replace + /// Collects all macro invocations reachable at this time in this AST fragment, and replace /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s. /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and /// prepares data for resolving paths of macro invocations. diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index a9000b89fb4..d4ea3b81a60 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -1,4 +1,4 @@ -//! This is an NFA-based parser, which calls out to the main rust parser for named nonterminals +//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals //! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads //! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in //! 
pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier @@ -22,7 +22,7 @@ //! //! As it processes them, it fills up `eof_items` with threads that would be valid if //! the macro invocation is now over, `bb_items` with threads that are waiting on -//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting +//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting //! on a particular token. Most of the logic concerns moving the · through the //! repetitions indicated by Kleene stars. The rules for moving the · without //! consuming any input are called epsilon transitions. It only advances or calls @@ -216,7 +216,7 @@ struct MatcherPos<'root, 'tt: 'root> { } impl<'root, 'tt> MatcherPos<'root, 'tt> { - /// Add `m` as a named match for the `idx`-th metavar. + /// Adds `m` as a named match for the `idx`-th metavar. fn push_match(&mut self, idx: usize, m: NamedMatch) { let matches = Rc::make_mut(&mut self.matches[idx]); matches.push(m); @@ -304,7 +304,7 @@ fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> { }.into_boxed_slice() } -/// Generate the top-level matcher position in which the "dot" is before the first token of the +/// Generates the top-level matcher position in which the "dot" is before the first token of the /// matcher `ms` and we are going to start matching at the span `open` in the source. fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> { let match_idx_hi = count_names(ms); @@ -337,7 +337,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP /// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`: /// so it is associated with a single ident in a parse, and all -/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type +/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type /// (expr, item, etc). 
Each leaf in a single `NamedMatch` corresponds to a /// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it. /// @@ -414,7 +414,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>( Success(ret_val) } -/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For +/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For /// other tokens, this is "unexpected token...". pub fn parse_failure_msg(tok: Token) -> String { match tok { @@ -426,7 +426,7 @@ pub fn parse_failure_msg(tok: Token) -> String { } } -/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison) +/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) fn token_name_eq(t1: &Token, t2: &Token) -> bool { if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { id1.name == id2.name && is_raw1 == is_raw2 @@ -880,7 +880,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool { } } -/// A call to the "black-box" parser to parse some rust nonterminal. +/// A call to the "black-box" parser to parse some Rust non-terminal. /// /// # Parameters /// @@ -891,7 +891,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool { /// /// # Returns /// -/// The parsed nonterminal. +/// The parsed non-terminal. fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { if name == "tt" { return token::NtTT(p.parse_token_tree()); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 33ea675f9d1..897113ba885 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -432,7 +432,7 @@ fn check_lhs_nt_follows(sess: &ParseSess, // after parsing/expansion. we can report every error in every macro this way. 
} -/// Check that the lhs contains no repetition which could match an empty token +/// Checks that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use quoted::TokenTree; @@ -960,8 +960,8 @@ fn token_can_be_followed_by_any(tok: "ed::TokenTree) -> bool { } } -/// True if a fragment of type `frag` can be followed by any sort of -/// token. We use this (among other things) as a useful approximation +/// Returns `true` if a fragment of type `frag` can be followed by any sort of +/// token. We use this (among other things) as a useful approximation /// for when `frag` can be followed by a repetition like `$(...)*` or /// `$(...)+`. In general, these can be a bit tricky to reason about, /// so we adopt a conservative position that says that any fragment @@ -990,7 +990,7 @@ enum IsInFollow { Invalid(String, &'static str), } -/// True if `frag` can legally be followed by the token `tok`. For +/// Returns `true` if `frag` can legally be followed by the token `tok`. For /// fragments that can consume an unbounded number of tokens, `tok` /// must be within a well-defined follow set. This is intended to /// guarantee future compatibility: for example, without this rule, if diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 6c3cf3e6312..255795f28c7 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -22,17 +22,17 @@ pub struct Delimited { } impl Delimited { - /// Return the opening delimiter (possibly `NoDelim`). + /// Returns the opening delimiter (possibly `NoDelim`). pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } - /// Return the closing delimiter (possibly `NoDelim`). + /// Returns the closing delimiter (possibly `NoDelim`). 
pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } - /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter. + /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span.is_dummy() { span @@ -42,7 +42,7 @@ impl Delimited { TokenTree::Token(open_span, self.open_token()) } - /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter. + /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span.is_dummy() { span @@ -107,7 +107,7 @@ impl TokenTree { } } - /// Returns true if the given token tree contains no other tokens. This is vacuously true for + /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for /// single tokens or metavar/decls, but may be false for delimited trees or sequences. pub fn is_empty(&self) -> bool { match *self { @@ -120,7 +120,7 @@ impl TokenTree { } } - /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. + /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { @@ -140,7 +140,7 @@ impl TokenTree { } } - /// Retrieve the `TokenTree`'s span. + /// Retrieves the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) @@ -411,8 +411,8 @@ where /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an /// error with the appropriate span is emitted to `sess` and a dummy value is returned. /// -/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. 
In 2018, -/// `?` is a Kleene op and not a separator. +/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator. +/// In the 2018 edition however, `?` is a Kleene operator, and not a separator. fn parse_sep_and_kleene_op<I>( input: &mut Peekable<I>, span: Span, diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 0853b4399d2..826149267e9 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -737,7 +737,7 @@ pub struct AttributeTemplate { } impl AttributeTemplate { - /// Check that the given meta-item is compatible with this template. + /// Checks that the given meta-item is compatible with this template. fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool { match meta_item_kind { ast::MetaItemKind::Word => self.word, @@ -749,7 +749,7 @@ impl AttributeTemplate { } /// A convenience macro for constructing attribute templates. -/// E.g. `template!(Word, List: "description")` means that the attribute +/// E.g., `template!(Word, List: "description")` means that the attribute /// supports forms `#[attr]` and `#[attr(description)]`. macro_rules! template { (Word) => { template!(@ true, None, None) }; @@ -2145,8 +2145,7 @@ pub fn check_crate(krate: &ast::Crate, #[derive(Clone, Copy, Hash)] pub enum UnstableFeatures { - /// Hard errors for unstable features are active, as on - /// beta/stable channels. + /// Hard errors for unstable features are active, as on beta/stable channels. Disallow, /// Allow features to be activated, as on nightly. 
Allow, diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index 2953b35298e..af785050532 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -342,7 +342,7 @@ impl DiagnosticSpanLine { } } - /// Create a list of DiagnosticSpanLines from span - each line with any part + /// Creates a list of DiagnosticSpanLines from span - each line with any part /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the /// `span` within the line. fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> { diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 4632d814d5c..74fff3324ea 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -197,9 +197,9 @@ fn read_line_comments(rdr: &mut StringReader<'_>, } } -/// Returns None if the first col chars of s contain a non-whitespace char. -/// Otherwise returns Some(k) where k is first char offset after that leading -/// whitespace. Note k may be outside bounds of s. +/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char. +/// Otherwise returns `Some(k)` where `k` is first char offset after that leading +/// whitespace. Note that `k` may be outside bounds of `s`. fn all_whitespace(s: &str, col: CharPos) -> Option<usize> { let mut idx = 0; for (i, ch) in s.char_indices().take(col.to_usize()) { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index d3fc1c03634..9168d4b61c1 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -112,7 +112,7 @@ impl<'a> StringReader<'a> { self.unwrap_or_abort(res) } - /// Return the next token. EFFECT: advances the string_reader. + /// Returns the next token. EFFECT: advances the string_reader. 
pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> { assert!(self.fatal_errs.is_empty()); let ret_val = TokenAndSpan { @@ -425,7 +425,7 @@ impl<'a> StringReader<'a> { self.with_str_from_to(start, self.pos, f) } - /// Create a Name from a given offset to the current offset, each + /// Creates a Name from a given offset to the current offset, each /// adjusted 1 towards each other (assumes that on either side there is a /// single-byte delimiter). fn name_from(&self, start: BytePos) -> ast::Name { @@ -670,7 +670,7 @@ impl<'a> StringReader<'a> { } /// If there is whitespace, shebang, or a comment, scan it. Otherwise, - /// return None. + /// return `None`. fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> { match self.ch.unwrap_or('\0') { // # to handle shebang at start of file -- this is the entry point @@ -920,7 +920,7 @@ impl<'a> StringReader<'a> { /// in a byte, (non-raw) byte string, char, or (non-raw) string literal. /// `start` is the position of `first_source_char`, which is already consumed. /// - /// Returns true if there was a valid char/byte, false otherwise. + /// Returns `true` if there was a valid char/byte. fn scan_char_or_byte(&mut self, start: BytePos, first_source_char: char, @@ -1152,7 +1152,7 @@ impl<'a> StringReader<'a> { } } - /// Check that a base is valid for a floating literal, emitting a nice + /// Checks that a base is valid for a floating literal, emitting a nice /// error if it isn't. 
fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) { match base { @@ -1185,7 +1185,7 @@ impl<'a> StringReader<'a> { } } - /// Return the next token from the string, advances the input past that + /// Returns the next token from the string, advances the input past that /// token, and updates the interner fn next_token_inner(&mut self) -> Result<token::Token, ()> { let c = self.ch; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 317d6933207..69940ae621c 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -1,4 +1,4 @@ -//! The main parser interface +//! The main parser interface. use crate::ast::{self, CrateConfig, NodeId}; use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId}; @@ -38,12 +38,11 @@ pub struct ParseSess { pub unstable_features: UnstableFeatures, pub config: CrateConfig, pub missing_fragment_specifiers: Lock<FxHashSet<Span>>, - /// Places where raw identifiers were used. This is used for feature gating - /// raw identifiers + /// Places where raw identifiers were used. This is used for feature-gating raw identifiers. pub raw_identifier_spans: Lock<Vec<Span>>, - /// The registered diagnostics codes + /// The registered diagnostics codes. crate registered_diagnostics: Lock<ErrorMap>, - /// Used to determine and report recursive mod inclusions + /// Used to determine and report recursive module inclusions. included_mod_stack: Lock<Vec<PathBuf>>, source_map: Lrc<SourceMap>, pub buffered_lints: Lock<Vec<BufferedEarlyLint>>, @@ -146,12 +145,12 @@ pub fn parse_stream_from_source_str( source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) } -/// Create a new parser from a source string +/// Creates a new parser from a source string. 
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source)) } -/// Create a new parser from a source string. Returns any buffered errors from lexing the initial +/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial /// token stream. pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Result<Parser<'_>, Vec<Diagnostic>> @@ -162,13 +161,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source Ok(parser) } -/// Create a new parser, handling errors as appropriate +/// Creates a new parser, handling errors as appropriate /// if the file doesn't exist pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> { source_file_to_parser(sess, file_to_source_file(sess, path, None)) } -/// Create a new parser, returning buffered diagnostics if the file doesn't +/// Creates a new parser, returning buffered diagnostics if the file doesn't /// exist or from lexing the initial token stream. pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Result<Parser<'a>, Vec<Diagnostic>> { @@ -239,7 +238,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) } /// Given a session and a path and an optional span (for error reporting), -/// add the path to the session's source_map and return the new source_file. +/// add the path to the session's `source_map` and return the new `source_file`. fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> { match try_file_to_source_file(sess, path, spanopt) { @@ -251,7 +250,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) } } -/// Given a source_file, produce a sequence of token-trees +/// Given a source_file, produces a sequence of token trees. 
pub fn source_file_to_stream( sess: &ParseSess, source_file: Lrc<SourceFile>, @@ -260,7 +259,7 @@ pub fn source_file_to_stream( panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span)) } -/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from +/// Given a source file, produces a sequence of token trees. Returns any buffered errors from /// parsing the token tream. pub fn maybe_file_to_stream( sess: &ParseSess, @@ -295,12 +294,12 @@ pub fn maybe_file_to_stream( } } -/// Given stream and the `ParseSess`, produce a parser +/// Given stream and the `ParseSess`, produces a parser. pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> { Parser::new(sess, stream, None, true, false) } -/// Parse a string representing a character literal into its final form. +/// Parses a string representing a character literal into its final form. /// Rather than just accepting/rejecting a given literal, unescapes it as /// well. Can take any slice prefixed by a character escape. Returns the /// character and the number of characters consumed. @@ -359,15 +358,14 @@ fn char_lit(lit: &str, diag: Option<(Span, &Handler)>) -> (char, isize) { } } -/// Parse a string representing a string literal into its final form. Does -/// unescaping. +/// Parses a string representing a string literal into its final form. Does unescaping. pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String { debug!("str_lit: given {}", lit.escape_default()); let mut res = String::with_capacity(lit.len()); let error = |i| format!("lexer should have rejected {} at {}", lit, i); - /// Eat everything up to a non-whitespace + /// Eat everything up to a non-whitespace. 
fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) { loop { match it.peek().map(|x| x.1) { @@ -428,7 +426,7 @@ pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String { res } -/// Parse a string representing a raw string literal into its final form. The +/// Parses a string representing a raw string literal into its final form. The /// only operation this does is convert embedded CRLF into a single LF. fn raw_str_lit(lit: &str) -> String { debug!("raw_str_lit: given {}", lit.escape_default()); @@ -554,7 +552,7 @@ fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>) filtered_float_lit(Symbol::intern(s), suffix, diag) } -/// Parse a string representing a byte literal into its final form. Similar to `char_lit` +/// Parses a string representing a byte literal into its final form. Similar to `char_lit`. fn byte_lit(lit: &str) -> (u8, usize) { let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i); @@ -591,7 +589,7 @@ fn byte_str_lit(lit: &str) -> Lrc<Vec<u8>> { let error = |i| panic!("lexer should have rejected {} at {}", lit, i); - /// Eat everything up to a non-whitespace + /// Eat everything up to a non-whitespace. fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) { loop { match it.peek().map(|x| x.1) { @@ -758,10 +756,11 @@ fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>) }) } -/// `SeqSep` : a sequence separator (token) -/// and whether a trailing separator is allowed. +/// A sequence separator. pub struct SeqSep { + /// The separator token. pub sep: Option<token::Token>, + /// `true` if a trailing separator is allowed. pub trailing_sep_allowed: bool, } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 69d6407d506..67154305735 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -75,7 +75,7 @@ bitflags::bitflags!
{ type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>); -/// How to parse a path. +/// Specifies how to parse a path. #[derive(Copy, Clone, PartialEq)] pub enum PathStyle { /// In some contexts, notably in expressions, paths with generic arguments are ambiguous @@ -111,7 +111,7 @@ enum BlockMode { Ignore, } -/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression +/// Possibly accepts an `token::Interpolated` expression (a pre-parsed expression /// dropped into the token stream, which happens while parsing the result of /// macro expansion). Placement of these is not as complex as I feared it would /// be. The important thing is to make sure that lookahead doesn't balk at @@ -420,11 +420,11 @@ impl TokenType { } } -/// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`, +/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`, /// `IDENT<<u8 as Trait>::AssocTy>`. /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes -/// that IDENT is not the ident of a fn trait +/// that `IDENT` is not the ident of a fn trait. fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) @@ -525,7 +525,7 @@ impl From<P<Expr>> for LhsExpr { } } -/// Create a placeholder argument. +/// Creates a placeholder argument. fn dummy_arg(span: Span) -> Arg { let ident = Ident::new(keywords::Invalid.name(), span); let pat = P(Pat { @@ -614,7 +614,7 @@ impl<'a> Parser<'a> { next } - /// Convert the current token to a string using self's reader + /// Converts the current token to a string using `self`'s reader. pub fn this_token_to_string(&self) -> String { pprust::token_to_string(&self.token) } @@ -649,8 +649,7 @@ impl<'a> Parser<'a> { } } - /// Expect and consume the token t. Signal an error if - /// the next token is not t. + /// Expects and consumes the token `t`. 
Signals an error if the next token is not `t`. pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { @@ -867,7 +866,7 @@ impl<'a> Parser<'a> { } } - /// returns the span of expr, if it was not interpolated or the span of the interpolated token + /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. fn interpolated_or_expr_span(&self, expr: PResult<'a, P<Expr>>) -> PResult<'a, (Span, P<Expr>)> { @@ -941,7 +940,7 @@ impl<'a> Parser<'a> { } } - /// Check if the next token is `tok`, and return `true` if so. + /// Checks if the next token is `tok`, and returns `true` if so. /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. @@ -951,8 +950,7 @@ impl<'a> Parser<'a> { is_present } - /// Consume token 'tok' if it exists. Returns true if the given - /// token was present, false otherwise. + /// Consumes a token 'tok' if it exists. Returns whether the given token was present. pub fn eat(&mut self, tok: &token::Token) -> bool { let is_present = self.check(tok); if is_present { self.bump() } @@ -964,8 +962,8 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw) } - /// If the next token is the given keyword, eat it and return - /// true. Otherwise, return false. + /// If the next token is the given keyword, eats it and returns + /// `true`. Otherwise, returns `false`. pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool { if self.check_keyword(kw) { self.bump(); @@ -984,9 +982,9 @@ impl<'a> Parser<'a> { } } - /// If the given word is not a keyword, signal an error. - /// If the next token is not the given word, signal an error. - /// Otherwise, eat it. + /// If the given word is not a keyword, signals an error. + /// If the next token is not the given word, signals an error. + /// Otherwise, eats it. 
fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> { if !self.eat_keyword(kw) { self.unexpected() @@ -1031,11 +1029,11 @@ impl<'a> Parser<'a> { } } - /// Expect and consume a `+`. if `+=` is seen, replace it with a `=` - /// and continue. If a `+` is not seen, return false. + /// Expects and consumes a `+`. if `+=` is seen, replaces it with a `=` + /// and continues. If a `+` is not seen, returns `false`. /// - /// This is using when token splitting += into +. - /// See issue 47856 for an example of when this may occur. + /// This is used when token-splitting `+=` into `+`. + /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); match self.token { @@ -1054,7 +1052,7 @@ impl<'a> Parser<'a> { /// Checks to see if the next token is either `+` or `+=`. - /// Otherwise returns false. + /// Otherwise returns `false`. fn check_plus(&mut self) -> bool { if self.token.is_like_plus() { true @@ -1065,8 +1063,8 @@ impl<'a> Parser<'a> { } } - /// Expect and consume an `&`. If `&&` is seen, replace it with a single - /// `&` and continue. If an `&` is not seen, signal an error. + /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single + /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); match self.token { @@ -1082,8 +1080,8 @@ impl<'a> Parser<'a> { } } - /// Expect and consume an `|`. If `||` is seen, replace it with a single - /// `|` and continue. If an `|` is not seen, signal an error. + /// Expects and consumes an `|`. If `||` is seen, replaces it with a single + /// `|` and continues. If an `|` is not seen, signals an error. 
fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); match self.token { @@ -1115,9 +1113,9 @@ impl<'a> Parser<'a> { } } - /// Attempt to consume a `<`. If `<<` is seen, replace it with a single - /// `<` and continue. If `<-` is seen, replace it with a single `<` - /// and continue. If a `<` is not seen, return false. + /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single + /// `<` and continue. If `<-` is seen, replaces it with a single `<` + /// and continue. If a `<` is not seen, returns false. /// /// This is meant to be used when parsing generics on a path to get the /// starting token. @@ -1159,9 +1157,8 @@ impl<'a> Parser<'a> { } } - /// Expect and consume a GT. if a >> is seen, replace it - /// with a single > and continue. If a GT is not seen, - /// signal an error. + /// Expects and consumes a single `>` token. if a `>>` is seen, replaces it + /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); let ate = match self.token { @@ -1196,7 +1193,7 @@ impl<'a> Parser<'a> { } } - /// Eat and discard tokens until one of `kets` is encountered. Respects token trees, + /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. fn eat_to_tokens(&mut self, kets: &[&token::Token]) { let handler = self.diagnostic(); @@ -1209,8 +1206,8 @@ impl<'a> Parser<'a> { } } - /// Parse a sequence, including the closing delimiter. The function - /// f must consume tokens until reaching the next separator or + /// Parses a sequence, including the closing delimiter. The function + /// `f` must consume tokens until reaching the next separator or /// closing bracket. 
pub fn parse_seq_to_end<T, F>(&mut self, ket: &token::Token, @@ -1226,8 +1223,8 @@ impl<'a> Parser<'a> { Ok(val) } - /// Parse a sequence, not including the closing delimiter. The function - /// f must consume tokens until reaching the next separator or + /// Parses a sequence, not including the closing delimiter. The function + /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_before_end<T, F>( &mut self, @@ -1311,8 +1308,8 @@ impl<'a> Parser<'a> { Ok((v, recovered)) } - /// Parse a sequence, including the closing delimiter. The function - /// f must consume tokens until reaching the next separator or + /// Parses a sequence, including the closing delimiter. The function + /// `f` must consume tokens until reaching the next separator or /// closing bracket. fn parse_unspanned_seq<T, F>( &mut self, @@ -1429,15 +1426,14 @@ impl<'a> Parser<'a> { &self.sess.span_diagnostic } - /// Is the current token one of the keywords that signals a bare function - /// type? + /// Is the current token one of the keywords that signals a bare function type? fn token_is_bare_fn_keyword(&mut self) -> bool { self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) || self.check_keyword(keywords::Extern) } - /// parse a `TyKind::BareFn` type: + /// Parses a `TyKind::BareFn` type. fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> { /* @@ -1474,7 +1470,7 @@ impl<'a> Parser<'a> { }))) } - /// Parse asyncness: `async` or nothing + /// Parses asyncness: `async` or nothing. fn parse_asyncness(&mut self) -> IsAsync { if self.eat_keyword(keywords::Async) { IsAsync::Async { @@ -1486,7 +1482,7 @@ impl<'a> Parser<'a> { } } - /// Parse unsafety: `unsafe` or nothing. + /// Parses unsafety: `unsafe` or nothing. 
fn parse_unsafety(&mut self) -> Unsafety { if self.eat_keyword(keywords::Unsafe) { Unsafety::Unsafe @@ -1495,7 +1491,7 @@ impl<'a> Parser<'a> { } } - /// Parse the items in a trait declaration + /// Parses the items in a trait declaration. pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> { maybe_whole!(self, NtTraitItem, |x| x); let attrs = self.parse_outer_attributes()?; @@ -1612,7 +1608,7 @@ impl<'a> Parser<'a> { }) } - /// Parse optional return type [ -> TY ] in function decl + /// Parses an optional return type `[ -> TY ]` in a function declaration. fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?)) @@ -1621,12 +1617,13 @@ impl<'a> Parser<'a> { } } - // Parse a type + /// Parses a type. pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> { self.parse_ty_common(true, true) } - /// Parse a type in restricted contexts where `+` is not permitted. + /// Parses a type in restricted contexts where `+` is not permitted. + /// /// Example 1: `&'a TYPE` /// `+` is prohibited to maintain operator priority (P(+) < P(&)). /// Example 2: `value1 as TYPE + value2` @@ -1929,7 +1926,8 @@ impl<'a> Parser<'a> { self.look_ahead(offset + 1, |t| t == &token::Colon) } - /// Skip unexpected attributes and doc comments in this position and emit an appropriate error. + /// Skips unexpected attributes and doc comments in this position and emits an appropriate + /// error. fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { if let token::DocComment(_) = self.token { let mut err = self.diagnostic().struct_span_err( @@ -1958,8 +1956,7 @@ impl<'a> Parser<'a> { } } - /// This version of parse arg doesn't necessarily require - /// identifier names. + /// This version of parse arg doesn't necessarily require identifier names. 
fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> { maybe_whole!(self, NtArg, |x| x); @@ -2067,12 +2064,12 @@ impl<'a> Parser<'a> { Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID }) } - /// Parse a single function argument + /// Parses a single function argument. crate fn parse_arg(&mut self) -> PResult<'a, Arg> { self.parse_arg_general(true, false) } - /// Parse an argument in a lambda header e.g., |arg, arg| + /// Parses an argument in a lambda header (e.g., `|arg, arg|`). fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> { let pat = self.parse_pat(Some("argument name"))?; let t = if self.eat(&token::Colon) { @@ -2099,7 +2096,7 @@ impl<'a> Parser<'a> { } } - /// Matches token_lit = LIT_INTEGER | ... + /// Matches `token_lit = LIT_INTEGER | ...`. fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { token::Interpolated(ref nt) => match nt.0 { @@ -2165,7 +2162,7 @@ impl<'a> Parser<'a> { Ok(out) } - /// Matches lit = true | false | token_lit + /// Matches `lit = true | false | token_lit`. crate fn parse_lit(&mut self) -> PResult<'a, Lit> { let lo = self.span; let lit = if self.eat_keyword(keywords::True) { @@ -2179,7 +2176,7 @@ impl<'a> Parser<'a> { Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) }) } - /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat) + /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); @@ -2221,7 +2218,7 @@ impl<'a> Parser<'a> { } } - /// Parses qualified path. + /// Parses a qualified path. /// Assumes that the leading `<` has been parsed already. 
/// /// `qualified_path = <type [as trait_ref]>::path` @@ -2297,8 +2294,9 @@ impl<'a> Parser<'a> { Ok(ast::Path { segments, span: lo.to(self.prev_span) }) } - /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat. - /// This is used when parsing derive macro paths in `#[derive]` attributes. + /// Like `parse_path`, but also supports parsing `Word` meta items into paths for + /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` + /// attributes. pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { let meta_ident = match self.token { token::Interpolated(ref nt) => match nt.0 { @@ -2423,7 +2421,7 @@ impl<'a> Parser<'a> { self.token.is_lifetime() } - /// Parse single lifetime 'a or panic. + /// Parses a single lifetime `'a` or panics. crate fn expect_lifetime(&mut self) -> Lifetime { if let Some(ident) = self.token.lifetime() { let span = self.span; @@ -2444,7 +2442,7 @@ impl<'a> Parser<'a> { } } - /// Parse mutability (`mut` or nothing). + /// Parses mutability (`mut` or nothing). fn parse_mutability(&mut self) -> Mutability { if self.eat_keyword(keywords::Mut) { Mutability::Mutable @@ -2575,12 +2573,10 @@ impl<'a> Parser<'a> { } /// At the bottom (top?) of the precedence hierarchy, - /// parse things like parenthesized exprs, - /// macros, return, etc. + /// Parses things like parenthesized exprs, macros, `return`, etc. /// - /// N.B., this does not parse outer attributes, - /// and is private because it only works - /// correctly if called from parse_dot_or_call_expr(). + /// N.B., this does not parse outer attributes, and is private because it only works + /// correctly if called from `parse_dot_or_call_expr()`. fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); @@ -2965,7 +2961,7 @@ impl<'a> Parser<'a> { } } - /// Parse a block or unsafe block + /// Parses a block or unsafe block. 
fn parse_block_expr(&mut self, opt_label: Option<Label>, lo: Span, blk_mode: BlockCheckMode, outer_attrs: ThinVec<Attribute>) @@ -2979,7 +2975,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs)); } - /// parse a.b or a(13) or a[4] or just a + /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. fn parse_dot_or_call_expr(&mut self, already_parsed_attrs: Option<ThinVec<Attribute>>) -> PResult<'a, P<Expr>> { @@ -3287,7 +3283,7 @@ impl<'a> Parser<'a> { self.span = span; } - /// parse a single token tree from the input. + /// Parses a single token tree from the input. crate fn parse_token_tree(&mut self) -> TokenTree { match self.token { token::OpenDelim(..) => { @@ -3447,7 +3443,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(lo.to(hi), ex, attrs)); } - /// Parse an associative expression + /// Parses an associative expression. /// /// This parses an expression accounting for associativity and precedence of the operators in /// the expression. @@ -3458,7 +3454,7 @@ impl<'a> Parser<'a> { self.parse_assoc_expr_with(0, already_parsed_attrs.into()) } - /// Parse an associative expression with operators of at least `min_prec` precedence + /// Parses an associative expression with operators of at least `min_prec` precedence. fn parse_assoc_expr_with(&mut self, min_prec: usize, lhs: LhsExpr) @@ -3793,7 +3789,7 @@ impl<'a> Parser<'a> { } } - /// Parse an 'if' or 'if let' expression ('if' token already eaten) + /// Parses an `if` or `if let` expression (`if` token already eaten). fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); @@ -3829,7 +3825,7 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs)) } - /// Parse an 'if let' expression ('if' token already eaten) + /// Parses an `if let` expression (`if` token already eaten). 
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { let lo = self.prev_span; @@ -3847,7 +3843,7 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs)) } - // `move |args| expr` + /// Parses `move |args| expr`. fn parse_lambda_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> @@ -3943,7 +3939,7 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs)) } - /// Parse a 'while' or 'while let' expression ('while' token already eaten) + /// Parses a `while` or `while let` expression (`while` token already eaten). fn parse_while_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { @@ -3957,7 +3953,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs)); } - /// Parse a 'while let' expression ('while' token already eaten) + /// Parses a `while let` expression (`while` token already eaten). fn parse_while_let_expr(&mut self, opt_label: Option<Label>, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { @@ -3981,7 +3977,7 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs)) } - /// Parse an `async move {...}` expression + /// Parses an `async move {...}` expression. pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { @@ -3999,7 +3995,7 @@ impl<'a> Parser<'a> { ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs)) } - /// Parse a `try {...}` expression (`try` token already eaten) + /// Parses a `try {...}` expression (`try` token already eaten). fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { @@ -4117,15 +4113,15 @@ impl<'a> Parser<'a> { }) } - /// Parse an expression + /// Parses an expression. 
#[inline] pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> { self.parse_expr_res(Restrictions::empty(), None) } - /// Evaluate the closure with restrictions in place. + /// Evaluates the closure with restrictions in place. /// - /// After the closure is evaluated, restrictions are reset. + /// After the closure is evaluated, restrictions are reset. fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T where F: FnOnce(&mut Self) -> T { @@ -4137,7 +4133,7 @@ impl<'a> Parser<'a> { } - /// Parse an expression, subject to the given restrictions + /// Parses an expression, subject to the given restrictions. #[inline] fn parse_expr_res(&mut self, r: Restrictions, already_parsed_attrs: Option<ThinVec<Attribute>>) -> PResult<'a, P<Expr>> { self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs)) } - /// Parse the RHS of a local variable declaration (e.g., '= 14;') + /// Parses the RHS of a local variable declaration (e.g., '= 14;'). fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> { if self.eat(&token::Eq) { Ok(Some(self.parse_expr()?)) @@ -4156,7 +4152,7 @@ impl<'a> Parser<'a> { } } - /// Parse patterns, separated by '|' s + /// Parses patterns, separated by '|' s. fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> { // Allow a '|' before the pats (RFC 1925 + RFC 2530) self.eat(&token::BinOp(token::Or)); @@ -4346,7 +4342,7 @@ impl<'a> Parser<'a> { }) } - /// Parse the fields of a struct-like pattern + /// Parses the fields of a struct-like pattern. fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> { let mut fields = Vec::new(); let mut etc = false; @@ -4538,13 +4534,13 @@ impl<'a> Parser<'a> { Ok(pat) } - /// Parse a pattern. + /// Parses a pattern. 
pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> { self.parse_pat_with_range_pat(true, expected) } - /// Parse a pattern, with a setting whether modern range patterns e.g., `a..=b`, `a..b` are - /// allowed. + /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b`) are + /// allowed. fn parse_pat_with_range_pat( &mut self, allow_range_pat: bool, @@ -4754,9 +4750,9 @@ impl<'a> Parser<'a> { Ok(P(pat)) } - /// Parse ident or ident @ pat + /// Parses `ident` or `ident @ pat`. /// used by the copy foo and ref foo patterns to give a good - /// error message when parsing mistakes like ref foo(a,b) + /// error message when parsing mistakes like `ref foo(a, b)`. fn parse_pat_ident(&mut self, binding_mode: ast::BindingMode) -> PResult<'a, PatKind> { @@ -4782,7 +4778,7 @@ impl<'a> Parser<'a> { Ok(PatKind::Ident(binding_mode, ident, sub)) } - /// Parse a local variable declaration + /// Parses a local variable declaration. fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> { let lo = self.prev_span; let pat = self.parse_top_level_pat()?; @@ -4855,7 +4851,7 @@ impl<'a> Parser<'a> { })) } - /// Parse a structure field + /// Parses a structure field. fn parse_name_and_ty(&mut self, lo: Span, vis: Visibility, @@ -4874,7 +4870,7 @@ impl<'a> Parser<'a> { }) } - /// Emit an expected item after attributes error. + /// Emits an expected-item-after-attributes error. fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> { let message = match attrs.last() { Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment", @@ -5297,13 +5293,13 @@ impl<'a> Parser<'a> { })) } - /// Is this expression a successfully-parsed statement? + /// Checks if this expression is a successfully parsed statement. 
fn expr_is_complete(&mut self, e: &Expr) -> bool { self.restrictions.contains(Restrictions::STMT_EXPR) && !classify::expr_requires_semi_to_be_stmt(e) } - /// Parse a block. No inner attrs are allowed. + /// Parses a block. No inner attributes are allowed. pub fn parse_block(&mut self) -> PResult<'a, P<Block>> { maybe_whole!(self, NtBlock, |x| x); @@ -5381,7 +5377,7 @@ impl<'a> Parser<'a> { self.parse_block_tail(lo, BlockCheckMode::Default) } - /// Parse a block. Inner attrs are allowed. + /// Parses a block. Inner attributes are allowed. fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); @@ -5391,7 +5387,7 @@ impl<'a> Parser<'a> { self.parse_block_tail(lo, BlockCheckMode::Default)?)) } - /// Parse the rest of a block expression or function body + /// Parses the rest of a block expression or function body. /// Precondition: already parsed the '{'. fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> { let mut stmts = vec![]; @@ -5425,7 +5421,7 @@ impl<'a> Parser<'a> { })) } - /// Parse a statement, including the trailing semicolon. + /// Parses a statement, including the trailing semicolon. crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { // skip looking for a trailing semicolon when we have an interpolated statement maybe_whole!(self, NtStmt, |x| Some(x)); @@ -5487,11 +5483,14 @@ impl<'a> Parser<'a> { ).emit(); } - // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. - // BOUND = TY_BOUND | LT_BOUND - // LT_BOUND = LIFETIME (e.g., `'a`) - // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) - // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`) + /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`. 
+ /// + /// ``` + /// BOUND = TY_BOUND | LT_BOUND + /// LT_BOUND = LIFETIME (e.g., `'a`) + /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN) + /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`) + /// ``` fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> { let mut bounds = Vec::new(); loop { @@ -5545,8 +5544,11 @@ impl<'a> Parser<'a> { self.parse_generic_bounds_common(true) } - // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. - // BOUND = LT_BOUND (e.g., `'a`) + /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. + /// + /// ``` + /// BOUND = LT_BOUND (e.g., `'a`) + /// ``` fn parse_lt_param_bounds(&mut self) -> GenericBounds { let mut lifetimes = Vec::new(); while self.check_lifetime() { @@ -5559,7 +5561,7 @@ impl<'a> Parser<'a> { lifetimes } - /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )? + /// Matches `typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?`. fn parse_ty_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> { @@ -5590,6 +5592,7 @@ impl<'a> Parser<'a> { } /// Parses the following grammar: + /// /// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty] fn parse_trait_item_assoc_ty(&mut self) -> PResult<'a, (Ident, TraitItemKind, ast::Generics)> { @@ -5631,8 +5634,8 @@ impl<'a> Parser<'a> { }) } - /// Parses (possibly empty) list of lifetime and type parameters, possibly including - /// trailing comma and erroneous trailing attributes. + /// Parses a (possibly empty) list of lifetime and type parameters, possibly including + /// a trailing comma and erroneous trailing attributes. 
crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { let mut params = Vec::new(); loop { @@ -5690,7 +5693,7 @@ impl<'a> Parser<'a> { Ok(params) } - /// Parse a set of optional generic type parameter declarations. Where + /// Parses a set of optional generic type parameter declarations. Where /// clauses are not parsed here, and must be added later via /// `parse_where_clause()`. /// @@ -5718,7 +5721,7 @@ impl<'a> Parser<'a> { } } - /// Parse generic args (within a path segment) with recovery for extra leading angle brackets. + /// Parses generic args (within a path segment) with recovery for extra leading angle brackets. /// For the purposes of understanding the parsing logic of generic arguments, this function /// can be thought of being the same as just calling `self.parse_generic_args()` if the source /// had the correct amount of leading angle brackets. @@ -5952,7 +5955,7 @@ impl<'a> Parser<'a> { Ok((args, bindings)) } - /// Parses an optional `where` clause and places it in `generics`. + /// Parses an optional where-clause and places it in `generics`. /// /// ```ignore (only-for-syntax-highlight) /// where T : Trait<U, V> + 'b, 'a : 'b @@ -6116,7 +6119,7 @@ impl<'a> Parser<'a> { Ok((args, variadic)) } - /// Parse the argument list and result type of a function declaration + /// Parses the argument list and result type of a function declaration. fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> { let (args, variadic) = self.parse_fn_args(true, allow_variadic)?; @@ -6238,7 +6241,7 @@ impl<'a> Parser<'a> { Ok(Some(Arg::from_self(eself, eself_ident))) } - /// Parse the parameter list and result type of a function that may have a `self` parameter. + /// Parses the parameter list and result type of a function that may have a `self` parameter. 
fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>> where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>, { @@ -6276,7 +6279,7 @@ impl<'a> Parser<'a> { })) } - // parse the |arg, arg| header on a lambda + /// Parses the `|arg, arg|` header of a closure. fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> { let inputs_captures = { if self.eat(&token::OrOr) { @@ -6302,7 +6305,7 @@ impl<'a> Parser<'a> { })) } - /// Parse the name and optional generic types of a function header. + /// Parses the name and optional generic types of a function header. fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> { let id = self.parse_ident()?; let generics = self.parse_generics()?; @@ -6322,7 +6325,7 @@ impl<'a> Parser<'a> { }) } - /// Parse an item-position function declaration. + /// Parses an item-position function declaration. fn parse_item_fn(&mut self, unsafety: Unsafety, asyncness: IsAsync, @@ -6337,21 +6340,22 @@ impl<'a> Parser<'a> { Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs))) } - /// true if we are looking at `const ID`, false for things like `const fn` etc + /// Returns `true` if we are looking at `const ID` + /// (returns `false` for things like `const fn`, etc.). fn is_const_item(&mut self) -> bool { self.token.is_keyword(keywords::Const) && !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) && !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) } - /// parses all the "front matter" for a `fn` declaration, up to + /// Parses all the "front matter" for a `fn` declaration, up to /// and including the `fn` keyword: /// /// - `const fn` /// - `unsafe fn` /// - `const unsafe fn` /// - `extern fn` - /// - etc + /// - etc. fn parse_fn_front_matter(&mut self) -> PResult<'a, ( Spanned<Constness>, @@ -6378,7 +6382,7 @@ impl<'a> Parser<'a> { Ok((constness, unsafety, asyncness, abi)) } - /// Parse an impl item. + /// Parses an impl item. 
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> { maybe_whole!(self, NtImplItem, |x| x); let attrs = self.parse_outer_attributes()?; @@ -6517,7 +6521,7 @@ impl<'a> Parser<'a> { } } - /// Parse `trait Foo { ... }` or `trait Foo = Bar;` + /// Parses `trait Foo { ... }` or `trait Foo = Bar;`. fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> { let ident = self.parse_ident()?; let mut tps = self.parse_generics()?; @@ -6608,9 +6612,11 @@ impl<'a> Parser<'a> { } /// Parses an implementation item, `impl` keyword is already parsed. + /// /// impl<'a, T> TYPE { /* impl items */ } /// impl<'a, T> TRAIT for TYPE { /* impl items */ } /// impl<'a, T> !TRAIT for TYPE { /* impl items */ } + /// /// We actually parse slightly more relaxed grammar for better error reporting and recovery. /// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}` /// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}` @@ -6702,7 +6708,7 @@ impl<'a> Parser<'a> { } } - /// Parse struct Foo { ... } + /// Parses `struct Foo { ... }`. fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> { let class_name = self.parse_ident()?; @@ -6756,7 +6762,7 @@ impl<'a> Parser<'a> { Ok((class_name, ItemKind::Struct(vdata, generics), None)) } - /// Parse union Foo { ... } + /// Parses `union Foo { ... }`. fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> { let class_name = self.parse_ident()?; @@ -6850,7 +6856,7 @@ impl<'a> Parser<'a> { Ok(fields) } - /// Parse a structure field declaration + /// Parses a structure field declaration. fn parse_single_struct_field(&mut self, lo: Span, vis: Visibility, @@ -6912,7 +6918,7 @@ impl<'a> Parser<'a> { Ok(a_var) } - /// Parse an element of a struct definition + /// Parses an element of a struct declaration. 
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { let attrs = self.parse_outer_attributes()?; let lo = self.span; @@ -6920,11 +6926,11 @@ impl<'a> Parser<'a> { self.parse_single_struct_field(lo, vis, attrs) } - /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, + /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. - /// If the following element can't be a tuple (i.e., it's a function definition, - /// it's not a tuple struct field) and the contents within the parens - /// isn't valid, emit a proper diagnostic. + /// If the following element can't be a tuple (i.e., it's a function definition), then + /// it's not a tuple struct field), and the contents within the parentheses isn't valid, + /// so emit a proper diagnostic. pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> { maybe_whole!(self, NtVis, |x| x); @@ -7005,7 +7011,7 @@ impl<'a> Parser<'a> { Ok(respan(lo, VisibilityKind::Public)) } - /// Parse defaultness: `default` or nothing. + /// Parses defaultness (i.e., `default` or nothing). fn parse_defaultness(&mut self) -> Defaultness { // `pub` is included for better error messages if self.check_keyword(keywords::Default) && @@ -7054,7 +7060,7 @@ impl<'a> Parser<'a> { } } - /// Given a termination token, parse all of the items in a module + /// Given a termination token, parses all of the items in a module. fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? { @@ -7191,7 +7197,7 @@ impl<'a> Parser<'a> { } } - /// Returns either a path to a module, or . + /// Returns a path to a module. pub fn default_submod_path( id: ast::Ident, relative: Option<ast::Ident>, @@ -7334,7 +7340,7 @@ impl<'a> Parser<'a> { } } - /// Read a module from a source file. 
+ /// Reads a module from a source file. fn eval_src_mod(&mut self, path: PathBuf, directory_ownership: DirectoryOwnership, @@ -7366,7 +7372,7 @@ impl<'a> Parser<'a> { Ok((m0, mod_attrs)) } - /// Parse a function declaration from a foreign module + /// Parses a function declaration from a foreign module. fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Fn)?; @@ -7386,7 +7392,7 @@ impl<'a> Parser<'a> { }) } - /// Parse a static item from a foreign module. + /// Parses a static item from a foreign module. /// Assumes that the `static` keyword is already parsed. fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { @@ -7406,7 +7412,7 @@ impl<'a> Parser<'a> { }) } - /// Parse a type from a foreign module + /// Parses a type from a foreign module. fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, ForeignItem> { self.expect_keyword(keywords::Type)?; @@ -7465,12 +7471,14 @@ impl<'a> Parser<'a> { Ok(ident) } - /// Parse extern crate links + /// Parses `extern crate` links. /// /// # Examples /// + /// ``` /// extern crate foo; /// extern crate bar as foo; + /// ``` fn parse_item_extern_crate(&mut self, lo: Span, visibility: Visibility, @@ -7489,16 +7497,17 @@ impl<'a> Parser<'a> { Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs)) } - /// Parse `extern` for foreign ABIs - /// modules. + /// Parses `extern` for foreign ABIs modules. /// /// `extern` is expected to have been - /// consumed before calling this method + /// consumed before calling this method. 
/// - /// # Examples: + /// # Examples /// + /// ```ignore (only-for-syntax-highlight) /// extern "C" {} /// extern {} + /// ``` fn parse_item_foreign_mod(&mut self, lo: Span, opt_abi: Option<Abi>, @@ -7525,11 +7534,12 @@ impl<'a> Parser<'a> { Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs)) } - /// Parse `type Foo = Bar;` + /// Parses `type Foo = Bar;` /// or /// `existential type Foo: Bar;` /// or - /// `return None` without modifying the parser state + /// `return `None`` + /// without modifying the parser state. fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> { // This parses the grammar: // Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";" @@ -7544,7 +7554,7 @@ impl<'a> Parser<'a> { } } - /// Parse type alias or existential type + /// Parses a type alias or existential type. fn parse_existential_or_alias( &mut self, existential: bool, @@ -7565,7 +7575,7 @@ impl<'a> Parser<'a> { Ok((ident, alias, tps)) } - /// Parse the part of an "enum" decl following the '{' + /// Parses the part of an enum declaration following the `{`. fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> { let mut variants = Vec::new(); let mut all_nullary = true; @@ -7624,7 +7634,7 @@ impl<'a> Parser<'a> { Ok(ast::EnumDef { variants }) } - /// Parse an "enum" declaration + /// Parses an enum declaration. fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { let id = self.parse_ident()?; let mut generics = self.parse_generics()?; @@ -7720,7 +7730,7 @@ impl<'a> Parser<'a> { })) } - /// Parse one of the items allowed by the flags. + /// Parses one of the items allowed by the flags. fn parse_item_implementation( &mut self, attrs: Vec<Attribute>, @@ -8145,7 +8155,7 @@ impl<'a> Parser<'a> { self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility) } - /// Parse a foreign item. + /// Parses a foreign item. 
crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> { maybe_whole!(self, NtForeignItem, |ni| ni); @@ -8261,7 +8271,7 @@ impl<'a> Parser<'a> { Ok(None) } - /// Parse a macro invocation inside a `trait`, `impl` or `extern` block + /// Parses a macro invocation inside a `trait`, `impl` or `extern` block. fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>, at_end: &mut bool) -> PResult<'a, Option<Mac>> { @@ -8364,13 +8374,15 @@ impl<'a> Parser<'a> { *t == token::BinOp(token::Star)) } - /// Parse UseTree + /// Parses a `UseTree`. /// + /// ``` /// USE_TREE = [`::`] `*` | /// [`::`] `{` USE_TREE_LIST `}` | /// PATH `::` `*` | /// PATH `::` `{` USE_TREE_LIST `}` | /// PATH [`as` IDENT] + /// ``` fn parse_use_tree(&mut self) -> PResult<'a, UseTree> { let lo = self.span; @@ -8409,9 +8421,11 @@ impl<'a> Parser<'a> { Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) }) } - /// Parse UseTreeKind::Nested(list) + /// Parses a `UseTreeKind::Nested(list)`. /// + /// ``` /// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`] + /// ``` fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> { self.parse_unspanned_seq(&token::OpenDelim(token::Brace), &token::CloseDelim(token::Brace), @@ -8428,8 +8442,7 @@ impl<'a> Parser<'a> { } } - /// Parses a source module as a crate. This is the main - /// entry point for the parser. + /// Parses a source module as a crate. This is the main entry point for the parser. pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { let lo = self.span; let krate = Ok(ast::Crate { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 09924e304cf..ff7f3e0bfae 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -38,16 +38,16 @@ pub enum BinOpToken { Shr, } -/// A delimiter token +/// A delimiter token. 
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum DelimToken { - /// A round parenthesis: `(` or `)` + /// A round parenthesis (i.e., `(` or `)`). Paren, - /// A square bracket: `[` or `]` + /// A square bracket (i.e., `[` or `]`). Bracket, - /// A curly brace: `{` or `}` + /// A curly brace (i.e., `{` or `}`). Brace, - /// An empty delimiter + /// An empty delimiter. NoDelim, } @@ -172,9 +172,9 @@ pub enum Token { Question, /// Used by proc macros for representing lifetimes, not generated by lexer right now. SingleQuote, - /// An opening delimiter, eg. `{` + /// An opening delimiter (e.g., `{`). OpenDelim(DelimToken), - /// A closing delimiter, eg. `}` + /// A closing delimiter (e.g., `}`). CloseDelim(DelimToken), /* Literals */ @@ -188,16 +188,16 @@ pub enum Token { // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc. Interpolated(Lrc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. - /// Doc comment + /// A doc comment. DocComment(ast::Name), // Junk. These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for // them. Instead, users could extract that from the associated span. - /// Whitespace + /// Whitespace. Whitespace, - /// Comment + /// A comment. Comment, Shebang(ast::Name), diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 2d837cb565b..d8a8cbb655b 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -1,10 +1,10 @@ //! This pretty-printer is a direct reimplementation of Philip Karlton's //! Mesa pretty-printer, as described in appendix A of //! -//! ````text +//! ```text //! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen. //! Stanford Department of Computer Science, 1979. -//! ```` +//! ``` //! //! The algorithm's aim is to break a stream into as few lines as possible //! 
while respecting the indentation-consistency requirements of the enclosing diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs index 0ec83447d52..bc43630ae59 100644 --- a/src/libsyntax/ptr.rs +++ b/src/libsyntax/ptr.rs @@ -1,4 +1,4 @@ -//! The AST pointer +//! The AST pointer. //! //! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in //! the AST. diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 552a3d30261..1784bad0362 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -26,7 +26,7 @@ use log::debug; use crate::errors::SourceMapper; -/// Return the span itself if it doesn't come from a macro expansion, +/// Returns the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { @@ -62,7 +62,7 @@ pub trait FileLoader { /// Query the existence of a file. fn file_exists(&self, path: &Path) -> bool; - /// Return an absolute path to a file, if possible. + /// Returns an absolute path to a file, if possible. fn abs_path(&self, path: &Path) -> Option<PathBuf>; /// Read the contents of an UTF-8 file into memory. @@ -398,7 +398,7 @@ impl SourceMap { } } - /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If + /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If /// there are gaps between lhs and rhs, the resulting union will cross these gaps. /// For this to work, the spans have to be: /// @@ -511,7 +511,7 @@ impl SourceMap { Ok(FileLines {file: lo.file, lines: lines}) } - /// Extract the source surrounding the given `Span` using the `extract_source` function. The + /// Extracts the source surrounding the given `Span` using the `extract_source` function. 
The /// extract function takes three arguments: a string slice containing the source, an index in /// the slice for the beginning of the span and an index in the slice for the end of the span. fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError> @@ -561,7 +561,7 @@ impl SourceMap { } } - /// Return the source snippet as `String` corresponding to the given `Span` + /// Returns the source snippet as `String` corresponding to the given `Span` pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> { self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index] .to_string()) @@ -576,7 +576,7 @@ impl SourceMap { } } - /// Return the source snippet as `String` before the given `Span` + /// Returns the source snippet as `String` before the given `Span` pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> { self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string()) } @@ -1123,7 +1123,7 @@ mod tests { /// Given a string like " ~~~~~~~~~~~~ ", produces a span /// converting that range. The idea is that the string has the same - /// length as the input, and we uncover the byte positions. Note + /// length as the input, and we uncover the byte positions. Note /// that this can span lines and so on. fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); @@ -1132,7 +1132,7 @@ mod tests { Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) } - /// Test span_to_snippet and span_to_lines for a span converting 3 + /// Tests span_to_snippet and span_to_lines for a span converting 3 /// lines in the middle of a file. 
#[test] fn span_to_snippet_and_lines_spanning_multiple_lines() { @@ -1175,7 +1175,7 @@ mod tests { assert_eq!(sstr, "blork.rs:2:1: 2:12"); } - /// Test failing to merge two spans on different lines + /// Tests failing to merge two spans on different lines #[test] fn span_merging_fail() { let sm = SourceMap::new(FilePathMapping::empty()); diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index b6e4d4cd976..c4f2cffb097 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -5,6 +5,7 @@ //! which are themselves a single `Token` or a `Delimited` subsequence of tokens. //! //! ## Ownership +//! //! `TokenStreams` are persistent data structures constructed as ropes with reference //! counted-children. In general, this means that calling an operation on a `TokenStream` //! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to @@ -59,7 +60,7 @@ impl TokenTree { macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true) } - /// Check if this TokenTree is equal to the other, regardless of span information. + /// Checks if this TokenTree is equal to the other, regardless of span information. pub fn eq_unspanned(&self, other: &TokenTree) -> bool { match (self, other) { (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2, @@ -89,7 +90,7 @@ impl TokenTree { } } - /// Retrieve the TokenTree's span. + /// Retrieves the TokenTree's span. 
pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) => sp, diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs index c989fc7a5b8..2f150d22159 100644 --- a/src/libsyntax/util/lev_distance.rs +++ b/src/libsyntax/util/lev_distance.rs @@ -1,7 +1,7 @@ use std::cmp; use crate::symbol::Symbol; -/// Find the Levenshtein distance between two strings +/// Finds the Levenshtein distance between two strings pub fn lev_distance(a: &str, b: &str) -> usize { // cases which don't require further computation if a.is_empty() { @@ -32,7 +32,7 @@ pub fn lev_distance(a: &str, b: &str) -> usize { dcol[t_last + 1] } -/// Find the best match for a given word in the given iterator +/// Finds the best match for a given word in the given iterator /// /// As a loose rule to avoid the obviously incorrect suggestions, it takes /// an optional limit for the maximum allowable edit distance, which defaults diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs index 61729a08060..5f15ede7b0b 100644 --- a/src/libsyntax/util/parser.rs +++ b/src/libsyntax/util/parser.rs @@ -70,7 +70,7 @@ pub enum Fixity { } impl AssocOp { - /// Create a new AssocOP from a token + /// Creates a new AssocOP from a token pub fn from_token(t: &Token) -> Option<AssocOp> { use AssocOp::*; match *t { @@ -105,7 +105,7 @@ impl AssocOp { } } - /// Create a new AssocOp from ast::BinOpKind. + /// Creates a new AssocOp from ast::BinOpKind. 
pub fn from_ast_binop(op: BinOpKind) -> Self { use AssocOp::*; match op { diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index bcf1da66c04..733c4f83e37 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -66,7 +66,7 @@ pub fn string_to_pat(source_str: String) -> P<ast::Pat> { }) } -/// Convert a vector of strings to a vector of Ident's +/// Converts a vector of strings to a vector of Ident's pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> { ids.iter().map(|u| Ident::from_str(*u)).collect() } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index bb3b0ea7359..a002394c710 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -6,7 +6,7 @@ //! Note: it is an important invariant that the default visitor walks the body //! of a function in "execution order" (more concretely, reverse post-order //! with respect to the CFG implied by the AST), meaning that if AST node A may -//! execute before AST node B, then A is visited first. The borrow checker in +//! execute before AST node B, then A is visited first. The borrow checker in //! particular relies on this property. //! //! Note: walking an AST before macro expansion is probably a bad idea. For @@ -32,12 +32,12 @@ pub enum FnKind<'a> { } /// Each method of the Visitor trait is a hook to be potentially -/// overridden. Each method's default implementation recursively visits +/// overridden. Each method's default implementation recursively visits /// the substructure of the input via the corresponding `walk` method; /// e.g., the `visit_mod` method by default calls `visit::walk_mod`. /// /// If you want to ensure that your code handles every variant -/// explicitly, you need to override each method. (And you also need +/// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) 
pub trait Visitor<'ast>: Sized { diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index b082351d5f6..d773f3ff7bc 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -165,7 +165,7 @@ fn decodable_substructure(cx: &mut ExtCtxt<'_>, }; } -/// Create a decoder for a single enum variant/struct: +/// Creates a decoder for a single enum variant/struct: /// - `outer_pat_path` is the path to this enum variant/struct /// - `getarg` should retrieve the `usize`-th field with name `@str`. fn decode_static_fields<F>(cx: &mut ExtCtxt<'_>, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index dd5646342b3..faaedba3e77 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -1,5 +1,5 @@ //! The compiler code necessary to implement the `#[derive(Encodable)]` -//! (and `Decodable`, in decodable.rs) extension. The idea here is that +//! (and `Decodable`, in `decodable.rs`) extension. The idea here is that //! type-defining items may be tagged with `#[derive(Encodable, Decodable)]`. //! //! For example, a type like: @@ -37,7 +37,7 @@ //! ``` //! //! Other interesting scenarios are when the item has type parameters or -//! references other non-built-in types. A type definition like: +//! references other non-built-in types. A type definition like: //! //! ``` //! 
# #[derive(Encodable, Decodable)] struct Span; diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 4678c752045..b8f96c5bc0e 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -243,7 +243,7 @@ pub struct MethodDef<'a> { /// Arguments other than the self argument pub args: Vec<(Ty<'a>, &'a str)>, - /// Return type + /// Returns type pub ret_ty: Ty<'a>, pub attributes: Vec<ast::Attribute>, @@ -303,7 +303,7 @@ pub enum SubstructureFields<'a> { EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo<'a>>), /// Non-matching variants of the enum, but with all state hidden from - /// the consequent code. The first component holds `Ident`s for all of + /// the consequent code. The first component holds `Ident`s for all of /// the `Self` arguments; the second component is a slice of all of the /// variants for the enum itself, and the third component is a list of /// `Ident`s bound to the variant index values for each of the actual @@ -323,10 +323,10 @@ pub enum SubstructureFields<'a> { pub type CombineSubstructureFunc<'a> = Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>; -/// Deal with non-matching enum variants. The tuple is a list of +/// Deal with non-matching enum variants. The tuple is a list of /// identifiers (one for each `Self` argument, which could be any of the /// variants since they have been collapsed together) and the identifiers -/// holding the variant index value for each of the `Self` arguments. The +/// holding the variant index value for each of the `Self` arguments. The /// last argument is all the non-`Self` args of the method being derived. 
pub type EnumNonMatchCollapsedFunc<'a> = Box<dyn FnMut(&mut ExtCtxt<'_>, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>; @@ -497,7 +497,7 @@ impl<'a> TraitDef<'a> { /// create an impl like: /// /// ```ignore (only-for-syntax-highlight) - /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where + /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where /// C: WhereTrait, /// A: DerivedTrait + B1 + ... + BN, /// B: DerivedTrait + B1 + ... + BN, @@ -1126,7 +1126,7 @@ impl<'a> MethodDef<'a> { /// /// (Of course `__self_vi` and `__arg_1_vi` are unused for /// `PartialEq`, and those subcomputations will hopefully be removed - /// as their results are unused. The point of `__self_vi` and + /// as their results are unused. The point of `__self_vi` and /// `__arg_1_vi` is for `PartialOrd`; see #15503.) fn expand_enum_method_body<'b>(&self, cx: &mut ExtCtxt<'_>, @@ -1724,7 +1724,7 @@ pub fn cs_fold<F>(use_foldl: bool, /// Function to fold over fields, with three cases, to generate more efficient and concise code. /// When the `substructure` has grouped fields, there are two cases: -/// Zero fields: call the base case function with None (like the usual base case of `cs_fold`). +/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`). /// One or more fields: call the base case function on the first value (which depends on /// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the /// fields. 
@@ -1815,7 +1815,7 @@ pub fn cs_same_method<F>(f: F, } } -/// Return true if the type has no value fields +/// Returns `true` if the type has no value fields /// (for an enum, no variant has any fields) pub fn is_type_without_fields(item: &Annotatable) -> bool { if let Annotatable::Item(ref item) = *item { diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 6bb7ee1d5dd..798951406a5 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -424,7 +424,7 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.expr_str(sp, s) } - /// Build a static `rt::Argument` from a `parse::Piece` or append + /// Builds a static `rt::Argument` from a `parse::Piece` or append /// to the `literal` string. fn build_piece(&mut self, piece: &parse::Piece<'_>, @@ -545,7 +545,7 @@ impl<'a, 'b> Context<'a, 'b> { } /// Actually builds the expression which the format_args! block will be - /// expanded to + /// expanded to. fn into_expr(self) -> P<ast::Expr> { let mut locals = Vec::with_capacity( (0..self.args.len()).map(|i| self.arg_unique_types[i].len()).sum() @@ -788,7 +788,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt<'_>, }, }; - /// Find the indices of all characters that have been processed and differ between the actual + /// Finds the indices of all characters that have been processed and differ between the actual /// written code (code snippet) and the `InternedString` that get's processed in the `Parser` /// in order to properly synthethise the intra-string `Span`s for error diagnostics. fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> { diff --git a/src/libsyntax_ext/format_foreign.rs b/src/libsyntax_ext/format_foreign.rs index 381325b2963..261b2f373ce 100644 --- a/src/libsyntax_ext/format_foreign.rs +++ b/src/libsyntax_ext/format_foreign.rs @@ -718,7 +718,7 @@ pub mod printf { ); } - /// Check that the translations are what we expect. + /// Checks that the translations are what we expect. 
#[test] fn test_translation() { assert_eq_pnsat!("%c", Some("{}")); diff --git a/src/libsyntax_pos/analyze_source_file.rs b/src/libsyntax_pos/analyze_source_file.rs index 18387bd5a09..353b4e4ab36 100644 --- a/src/libsyntax_pos/analyze_source_file.rs +++ b/src/libsyntax_pos/analyze_source_file.rs @@ -1,7 +1,7 @@ use unicode_width::UnicodeWidthChar; use super::*; -/// Find all newlines, multi-byte characters, and non-narrow characters in a +/// Finds all newlines, multi-byte characters, and non-narrow characters in a /// SourceFile. /// /// This function will use an SSE2 enhanced implementation if hardware support @@ -62,7 +62,7 @@ cfg_if::cfg_if! { } } - /// Check 16 byte chunks of text at a time. If the chunk contains + /// Checks 16 byte chunks of text at a time. If the chunk contains /// something other than printable ASCII characters and newlines, the /// function falls back to the generic implementation. Otherwise it uses /// SSE2 intrinsics to quickly find all newlines. diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index 0c645fc678c..6736a083b2f 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -31,7 +31,7 @@ struct SyntaxContextData { dollar_crate_name: Symbol, } -/// A mark is a unique id associated with a macro expansion. +/// A mark is a unique ID associated with a macro expansion. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct Mark(u32); @@ -590,7 +590,7 @@ pub enum CompilerDesugaringKind { QuestionMark, TryBlock, /// Desugaring of an `impl Trait` in return type position - /// to an `existential type Foo: Trait;` + replacing the + /// to an `existential type Foo: Trait;` and replacing the /// `impl Trait` with `Foo`. 
ExistentialReturnType, Async, diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 70c45f7f9a7..bffc686b816 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -67,7 +67,7 @@ scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals); #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)] pub enum FileName { Real(PathBuf), - /// A macro. This includes the full name of the macro, so that there are no clashes. + /// A macro. This includes the full name of the macro, so that there are no clashes. Macros(String), /// Call to `quote!`. QuoteExpansion(u64), @@ -304,21 +304,21 @@ impl Span { if self.is_dummy() { other } else { self } } - /// Return `true` if `self` fully encloses `other`. + /// Returns `true` if `self` fully encloses `other`. pub fn contains(self, other: Span) -> bool { let span = self.data(); let other = other.data(); span.lo <= other.lo && other.hi <= span.hi } - /// Return `true` if `self` touches `other`. + /// Returns `true` if `self` touches `other`. pub fn overlaps(self, other: Span) -> bool { let span = self.data(); let other = other.data(); span.lo < other.hi && other.lo < span.hi } - /// Return true if the spans are equal with regards to the source text. + /// Returns `true` if the spans are equal with regards to the source text. /// /// Use this instead of `==` when either span could be generated code, /// and you only care that they point to the same bytes of source text. @@ -339,7 +339,7 @@ impl Span { } } - /// Return the source span -- this is either the supplied span, or the span for + /// Returns the source span -- this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(self) -> Span { self.ctxt().outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) @@ -367,7 +367,7 @@ impl Span { self.edition() >= edition::Edition::Edition2018 } - /// Return the source callee. 
+ /// Returns the source callee. /// /// Returns `None` if the supplied span has no expansion trace, /// else returns the `ExpnInfo` for the macro definition @@ -382,7 +382,7 @@ impl Span { self.ctxt().outer().expn_info().map(source_callee) } - /// Check if a span is "internal" to a macro in which `#[unstable]` + /// Checks if a span is "internal" to a macro in which `#[unstable]` /// items can be used (that is, a macro marked with /// `#[allow_internal_unstable]`). pub fn allows_unstable(&self) -> bool { @@ -392,7 +392,7 @@ impl Span { } } - /// Check if this span arises from a compiler desugaring of kind `kind`. + /// Checks if this span arises from a compiler desugaring of kind `kind`. pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool { match self.ctxt().outer().expn_info() { Some(info) => match info.format { @@ -403,7 +403,7 @@ impl Span { } } - /// Return the compiler desugaring that created this span, or `None` + /// Returns the compiler desugaring that created this span, or `None` /// if this span is not from a desugaring. pub fn compiler_desugaring_kind(&self) -> Option<CompilerDesugaringKind> { match self.ctxt().outer().expn_info() { @@ -415,7 +415,7 @@ impl Span { } } - /// Check if a span is "internal" to a macro in which `unsafe` + /// Checks if a span is "internal" to a macro in which `unsafe` /// can be used without triggering the `unsafe_code` lint // (that is, a macro marked with `#[allow_internal_unsafe]`). pub fn allows_unsafe(&self) -> bool { @@ -449,7 +449,7 @@ impl Span { result } - /// Return a `Span` that would enclose both `self` and `end`. + /// Returns a `Span` that would enclose both `self` and `end`. pub fn to(self, end: Span) -> Span { let span_data = self.data(); let end_data = end.data(); @@ -472,7 +472,7 @@ impl Span { ) } - /// Return a `Span` between the end of `self` to the beginning of `end`. + /// Returns a `Span` between the end of `self` to the beginning of `end`. 
pub fn between(self, end: Span) -> Span { let span = self.data(); let end = end.data(); @@ -483,7 +483,7 @@ impl Span { ) } - /// Return a `Span` between the beginning of `self` to the beginning of `end`. + /// Returns a `Span` between the beginning of `self` to the beginning of `end`. pub fn until(self, end: Span) -> Span { let span = self.data(); let end = end.data(); @@ -655,7 +655,7 @@ impl MultiSpan { &self.primary_spans } - /// Returns whether any of the primary spans is displayable. + /// Returns `true` if any of the primary spans are displayable. pub fn has_primary_spans(&self) -> bool { self.primary_spans.iter().any(|sp| !sp.is_dummy()) } @@ -672,7 +672,7 @@ impl MultiSpan { } /// Replaces all occurrences of one Span with another. Used to move `Span`s in areas that don't - /// display well (like std macros). Returns true if replacements occurred. + /// display well (like std macros). Returns whether replacements occurred. pub fn replace(&mut self, before: Span, after: Span) -> bool { let mut replacements_occurred = false; for primary_span in &mut self.primary_spans { @@ -719,7 +719,7 @@ impl MultiSpan { span_labels } - /// Returns whether any of the span labels is displayable. + /// Returns `true` if any of the span labels is displayable. pub fn has_span_labels(&self) -> bool { self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) } @@ -848,7 +848,7 @@ pub struct SourceFile { /// originate from files has names between angle brackets by convention /// (e.g., `<anon>`). pub name: FileName, - /// True if the `name` field above has been modified by `--remap-path-prefix`. + /// `true` if the `name` field above has been modified by `--remap-path-prefix`. pub name_was_remapped: bool, /// The unmapped path of the file that the source came from. /// Set to `None` if the `SourceFile` was imported from an external crate. @@ -1057,7 +1057,7 @@ impl SourceFile { } } - /// Return the `BytePos` of the beginning of the current line. 
+ /// Returns the `BytePos` of the beginning of the current line. pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { let line_index = self.lookup_line(pos).unwrap(); self.lines[line_index] @@ -1096,7 +1096,7 @@ impl SourceFile { } } - /// Get a line from the list of pre-computed line-beginnings. + /// Gets a line from the list of pre-computed line-beginnings. /// The line number here is 0-based. pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> { fn get_until_newline(src: &str, begin: usize) -> &str { @@ -1144,7 +1144,7 @@ impl SourceFile { self.lines.len() } - /// Find the line containing the given position. The return value is the + /// Finds the line containing the given position. The return value is the /// index into the `lines` array of this `SourceFile`, not the 1-based line /// number. If the source_file is empty or the position is located before the /// first line, `None` is returned. @@ -1181,7 +1181,7 @@ impl SourceFile { } } -/// Remove utf-8 BOM if any. +/// Removes UTF-8 BOM, if any. fn remove_bom(src: &mut String) { if src.starts_with("\u{feff}") { src.drain(..3); diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 0eecdbfa976..c5301f9f174 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -43,7 +43,7 @@ impl Ident { Ident::with_empty_ctxt(Symbol::intern(string)) } - /// Replace `lo` and `hi` with those from `span`, but keep hygiene context. + /// Replaces `lo` and `hi` with those from `span`, but keep hygiene context. pub fn with_span_pos(self, span: Span) -> Ident { Ident::new(self.name, span.with_ctxt(self.span.ctxt())) } @@ -135,12 +135,12 @@ impl Decodable for Ident { } } -/// A symbol is an interned or gensymed string. The use of newtype_index! means -/// that Option<Symbol> only takes up 4 bytes, because newtype_index! reserves +/// A symbol is an interned or gensymed string. 
The use of `newtype_index!` means +/// that `Option<Symbol>` only takes up 4 bytes, because `newtype_index! reserves /// the last 256 values for tagging purposes. /// -/// Note that Symbol cannot be a newtype_index! directly because it implements -/// fmt::Debug, Encodable, and Decodable in special ways. +/// Note that `Symbol` cannot directly be a `newtype_index!` because it implements +/// `fmt::Debug`, `Encodable`, and `Decodable` in special ways. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct Symbol(SymbolIndex); @@ -170,7 +170,7 @@ impl Symbol { with_interner(|interner| interner.interned(self)) } - /// Gensyms a new usize, using the current interner. + /// Gensyms a new `usize`, using the current interner. pub fn gensym(string: &str) -> Self { with_interner(|interner| interner.gensym(string)) } diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs index 4d3126212dc..5132a459f95 100644 --- a/src/libterm/lib.rs +++ b/src/libterm/lib.rs @@ -60,14 +60,14 @@ pub type StdoutTerminal = dyn Terminal<Output = Stdout> + Send; pub type StderrTerminal = dyn Terminal<Output = Stderr> + Send; #[cfg(not(windows))] -/// Return a Terminal wrapping stdout, or None if a terminal couldn't be +/// Returns a Terminal wrapping stdout, or None if a terminal couldn't be /// opened. pub fn stdout() -> Option<Box<StdoutTerminal>> { TerminfoTerminal::new(io::stdout()).map(|t| Box::new(t) as Box<StdoutTerminal>) } #[cfg(windows)] -/// Return a Terminal wrapping stdout, or None if a terminal couldn't be +/// Returns a Terminal wrapping stdout, or None if a terminal couldn't be /// opened. pub fn stdout() -> Option<Box<StdoutTerminal>> { TerminfoTerminal::new(io::stdout()) @@ -76,14 +76,14 @@ pub fn stdout() -> Option<Box<StdoutTerminal>> { } #[cfg(not(windows))] -/// Return a Terminal wrapping stderr, or None if a terminal couldn't be +/// Returns a Terminal wrapping stderr, or None if a terminal couldn't be /// opened. 
pub fn stderr() -> Option<Box<StderrTerminal>> { TerminfoTerminal::new(io::stderr()).map(|t| Box::new(t) as Box<StderrTerminal>) } #[cfg(windows)] -/// Return a Terminal wrapping stderr, or None if a terminal couldn't be +/// Returns a Terminal wrapping stderr, or None if a terminal couldn't be /// opened. pub fn stderr() -> Option<Box<StderrTerminal>> { TerminfoTerminal::new(io::stderr()) @@ -170,12 +170,12 @@ pub trait Terminal: Write { /// if there was an I/O error. fn bg(&mut self, color: color::Color) -> io::Result<bool>; - /// Sets the given terminal attribute, if supported. Returns `Ok(true)` + /// Sets the given terminal attribute, if supported. Returns `Ok(true)` /// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if /// there was an I/O error. fn attr(&mut self, attr: Attr) -> io::Result<bool>; - /// Returns whether the given terminal attribute is supported. + /// Returns `true` if the given terminal attribute is supported. fn supports_attr(&self, attr: Attr) -> bool; /// Resets all terminal attributes and colors to their defaults. diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index eaa96df3f3b..0dbaa3d3dfd 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -69,7 +69,7 @@ impl fmt::Display for Error { } impl TermInfo { - /// Create a TermInfo based on current environment. + /// Creates a TermInfo based on current environment. pub fn from_env() -> Result<TermInfo, Error> { let term = match env::var("TERM") { Ok(name) => TermInfo::from_name(&name), @@ -84,7 +84,7 @@ impl TermInfo { } } - /// Create a TermInfo for the named terminal. + /// Creates a TermInfo for the named terminal. pub fn from_name(name: &str) -> Result<TermInfo, Error> { get_dbpath_for_term(name) .ok_or_else(|| { @@ -211,7 +211,7 @@ impl<T: Write + Send> Terminal for TerminfoTerminal<T> { } impl<T: Write + Send> TerminfoTerminal<T> { - /// Create a new TerminfoTerminal with the given TermInfo and Write. 
+ /// Creates a new TerminfoTerminal with the given TermInfo and Write. pub fn new_with_terminfo(out: T, terminfo: TermInfo) -> TerminfoTerminal<T> { let nc = if terminfo.strings.contains_key("setaf") && terminfo.strings.contains_key("setab") { @@ -227,7 +227,7 @@ impl<T: Write + Send> TerminfoTerminal<T> { } } - /// Create a new TerminfoTerminal for the current environment with the given Write. + /// Creates a new TerminfoTerminal for the current environment with the given Write. /// /// Returns `None` when the terminfo cannot be found or parsed. pub fn new(out: T) -> Option<TerminfoTerminal<T>> { diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs index 434dd4a9fbc..e55307dcdf0 100644 --- a/src/libterm/terminfo/parm.rs +++ b/src/libterm/terminfo/parm.rs @@ -46,7 +46,7 @@ pub struct Variables { } impl Variables { - /// Return a new zero-initialized Variables + /// Returns a new zero-initialized Variables pub fn new() -> Variables { Variables { sta: [Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), Number(0), diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs index 63d01831269..47124f5df69 100644 --- a/src/libterm/terminfo/parser/compiled.rs +++ b/src/libterm/terminfo/parser/compiled.rs @@ -313,7 +313,7 @@ pub fn parse(file: &mut dyn io::Read, longnames: bool) -> Result<TermInfo, Strin }) } -/// Create a dummy TermInfo struct for msys terminals +/// Creates a dummy TermInfo struct for msys terminals pub fn msys_terminfo() -> TermInfo { let mut strings = HashMap::new(); strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec()); diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index a9e2626a57e..0b17ed36fc8 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -1,4 +1,4 @@ -//! ncurses-compatible database discovery +//! ncurses-compatible database discovery. //! //! Does not support hashed database, only filesystem! 
diff --git a/src/libterm/win.rs b/src/libterm/win.rs index 25b03ba59ac..a09a5f6da21 100644 --- a/src/libterm/win.rs +++ b/src/libterm/win.rs @@ -11,7 +11,7 @@ use Attr; use color; use Terminal; -/// A Terminal implementation which uses the Win32 Console API. +/// A Terminal implementation that uses the Win32 Console API. pub struct WinConsole<T> { buf: T, def_foreground: color::Color, @@ -103,8 +103,7 @@ impl<T: Write + Send + 'static> WinConsole<T> { } } - /// Returns `None` whenever the terminal cannot be created for some - /// reason. + /// Returns `None` whenever the terminal cannot be created for some reason. pub fn new(out: T) -> io::Result<WinConsole<T>> { let fg; let bg; |
