From a06baa56b95674fc626b3c3fd680d6a65357fe60 Mon Sep 17 00:00:00 2001 From: Mark Rousskov Date: Sun, 22 Dec 2019 17:42:04 -0500 Subject: Format the world --- src/tools/cargotest/main.rs | 44 +- src/tools/compiletest/src/common.rs | 2 +- src/tools/compiletest/src/errors.rs | 29 +- src/tools/compiletest/src/header.rs | 167 +++--- src/tools/compiletest/src/json.rs | 66 ++- src/tools/compiletest/src/main.rs | 344 +++-------- src/tools/compiletest/src/raise_fd_limit.rs | 15 +- src/tools/compiletest/src/read2.rs | 5 +- src/tools/compiletest/src/runtest.rs | 851 ++++++++++------------------ src/tools/compiletest/src/runtest/tests.rs | 19 +- src/tools/compiletest/src/util.rs | 10 +- src/tools/error_index_generator/build.rs | 13 +- src/tools/error_index_generator/main.rs | 111 ++-- src/tools/linkchecker/main.rs | 168 +++--- src/tools/remote-test-client/src/main.rs | 162 +++--- src/tools/remote-test-server/src/main.rs | 49 +- src/tools/rustdoc-themes/main.rs | 14 +- src/tools/rustdoc/main.rs | 4 +- src/tools/tidy/src/bins.rs | 58 +- src/tools/tidy/src/cargo.rs | 23 +- src/tools/tidy/src/debug_artifacts.rs | 3 +- src/tools/tidy/src/deps.rs | 20 +- src/tools/tidy/src/error_codes_check.rs | 64 +-- src/tools/tidy/src/errors.rs | 89 ++- src/tools/tidy/src/extdeps.rs | 6 +- src/tools/tidy/src/features.rs | 408 ++++++------- src/tools/tidy/src/features/version.rs | 9 +- src/tools/tidy/src/lib.rs | 42 +- src/tools/tidy/src/main.rs | 4 +- src/tools/tidy/src/pal.rs | 72 ++- src/tools/tidy/src/style.rs | 63 +- src/tools/tidy/src/ui_tests.rs | 6 +- src/tools/tidy/src/unit_tests.rs | 58 +- src/tools/tidy/src/unstable_book.rs | 62 +- src/tools/unstable-book-gen/src/main.rs | 92 ++- 35 files changed, 1330 insertions(+), 1822 deletions(-) (limited to 'src/tools') diff --git a/src/tools/cargotest/main.rs b/src/tools/cargotest/main.rs index bdbc544d4fc..cf00cb1ab8a 100644 --- a/src/tools/cargotest/main.rs +++ b/src/tools/cargotest/main.rs @@ -1,9 +1,9 @@ #![deny(warnings)] use std::env; -use 
std::process::Command; -use std::path::{Path, PathBuf}; use std::fs; +use std::path::{Path, PathBuf}; +use std::process::Command; struct Test { repo: &'static str, @@ -93,11 +93,7 @@ fn clone_repo(test: &Test, out_dir: &Path) -> PathBuf { let out_dir = out_dir.join(test.name); if !out_dir.join(".git").is_dir() { - let status = Command::new("git") - .arg("init") - .arg(&out_dir) - .status() - .expect(""); + let status = Command::new("git").arg("init").arg(&out_dir).status().expect(""); assert!(status.success()); } @@ -106,23 +102,23 @@ fn clone_repo(test: &Test, out_dir: &Path) -> PathBuf { for depth in &[0, 1, 10, 100, 1000, 100000] { if *depth > 0 { let status = Command::new("git") - .arg("fetch") - .arg(test.repo) - .arg("master") - .arg(&format!("--depth={}", depth)) - .current_dir(&out_dir) - .status() - .expect(""); + .arg("fetch") + .arg(test.repo) + .arg("master") + .arg(&format!("--depth={}", depth)) + .current_dir(&out_dir) + .status() + .expect(""); assert!(status.success()); } let status = Command::new("git") - .arg("reset") - .arg(test.sha) - .arg("--hard") - .current_dir(&out_dir) - .status() - .expect(""); + .arg("reset") + .arg(test.sha) + .arg("--hard") + .current_dir(&out_dir) + .status() + .expect(""); if status.success() { found = true; @@ -133,12 +129,8 @@ fn clone_repo(test: &Test, out_dir: &Path) -> PathBuf { if !found { panic!("unable to find commit {}", test.sha) } - let status = Command::new("git") - .arg("clean") - .arg("-fdx") - .current_dir(&out_dir) - .status() - .unwrap(); + let status = + Command::new("git").arg("clean").arg("-fdx").current_dir(&out_dir).status().unwrap(); assert!(status.success()); out_dir diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 09733ffbe6a..01001ff708c 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -5,8 +5,8 @@ use std::fmt; use std::path::{Path, PathBuf}; use std::str::FromStr; -use test::ColorConfig; use 
crate::util::PathBufExt; +use test::ColorConfig; #[derive(Clone, Copy, PartialEq, Debug)] pub enum Mode { diff --git a/src/tools/compiletest/src/errors.rs b/src/tools/compiletest/src/errors.rs index 0ec2738181e..c3d699b3e23 100644 --- a/src/tools/compiletest/src/errors.rs +++ b/src/tools/compiletest/src/errors.rs @@ -126,8 +126,7 @@ fn parse_expected( match (cfg, captures.name("cfgs")) { // Only error messages that contain our `cfg` betweeen the square brackets apply to us. - (Some(cfg), Some(filter)) if !filter.as_str().split(',').any(|s| s == cfg) - => return None, + (Some(cfg), Some(filter)) if !filter.as_str().split(',').any(|s| s == cfg) => return None, (Some(_), Some(_)) => {} (None, Some(_)) => panic!("Only tests with revisions should use `//[X]~`"), @@ -145,10 +144,7 @@ fn parse_expected( let whole_match = captures.get(0).unwrap(); let (_, mut msg) = line.split_at(whole_match.end()); - let first_word = msg - .split_whitespace() - .next() - .expect("Encountered unexpected empty comment"); + let first_word = msg.split_whitespace().next().expect("Encountered unexpected empty comment"); // If we find `//~ ERROR foo` or something like that, skip the first word. 
let kind = first_word.parse::().ok(); @@ -166,25 +162,18 @@ fn parse_expected( ); (FollowPrevious(line_num), line_num) } else { - let which = if adjusts > 0 { - AdjustBackward(adjusts) - } else { - ThisLine - }; + let which = if adjusts > 0 { AdjustBackward(adjusts) } else { ThisLine }; let line_num = line_num - adjusts; (which, line_num) }; debug!( "line={} tag={:?} which={:?} kind={:?} msg={:?}", - line_num, whole_match.as_str(), which, kind, msg - ); - Some(( + line_num, + whole_match.as_str(), which, - Error { - line_num, - kind, - msg, - }, - )) + kind, + msg + ); + Some((which, Error { line_num, kind, msg })) } diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index a9be7ba5f96..ab43fb01a4f 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -6,9 +6,9 @@ use std::path::{Path, PathBuf}; use log::*; -use crate::common::{self, CompareMode, Config, Mode, PassMode, FailMode}; -use crate::util; +use crate::common::{self, CompareMode, Config, FailMode, Mode, PassMode}; use crate::extract_gdb_version; +use crate::util; #[cfg(test)] mod tests; @@ -125,18 +125,17 @@ impl EarlyProps { props.ignore = Ignore::Ignore; } - if config.run_clang_based_tests_with.is_none() && - config.parse_needs_matching_clang(ln) { + if config.run_clang_based_tests_with.is_none() + && config.parse_needs_matching_clang(ln) + { props.ignore = Ignore::Ignore; } - if !rustc_has_profiler_support && - config.parse_needs_profiler_support(ln) { + if !rustc_has_profiler_support && config.parse_needs_profiler_support(ln) { props.ignore = Ignore::Ignore; } - if !rustc_has_sanitizer_support && - config.parse_needs_sanitizer_support(ln) { + if !rustc_has_sanitizer_support && config.parse_needs_sanitizer_support(ln) { props.ignore = Ignore::Ignore; } @@ -145,13 +144,17 @@ impl EarlyProps { } } - if (config.mode == common::DebugInfoGdb || config.mode == common::DebugInfoGdbLldb) && - props.ignore.can_run_gdb() && 
ignore_gdb(config, ln) { + if (config.mode == common::DebugInfoGdb || config.mode == common::DebugInfoGdbLldb) + && props.ignore.can_run_gdb() + && ignore_gdb(config, ln) + { props.ignore = props.ignore.no_gdb(); } - if (config.mode == common::DebugInfoLldb || config.mode == common::DebugInfoGdbLldb) && - props.ignore.can_run_lldb() && ignore_lldb(config, ln) { + if (config.mode == common::DebugInfoLldb || config.mode == common::DebugInfoGdbLldb) + && props.ignore.can_run_lldb() + && ignore_lldb(config, ln) + { props.ignore = props.ignore.no_lldb(); } @@ -207,12 +210,13 @@ impl EarlyProps { fn extract_gdb_version_range(line: &str) -> (u32, u32) { const ERROR_MESSAGE: &'static str = "Malformed GDB version directive"; - let range_components = line.split(&[' ', '-'][..]) - .filter(|word| !word.is_empty()) - .map(extract_gdb_version) - .skip_while(Option::is_none) - .take(3) // 3 or more = invalid, so take at most 3. - .collect::>>(); + let range_components = line + .split(&[' ', '-'][..]) + .filter(|word| !word.is_empty()) + .map(extract_gdb_version) + .skip_while(Option::is_none) + .take(3) // 3 or more = invalid, so take at most 3. + .collect::>>(); match range_components.len() { 1 => { @@ -231,7 +235,8 @@ impl EarlyProps { fn ignore_lldb(config: &Config, line: &str) -> bool { if let Some(ref actual_version) = config.lldb_version { if line.starts_with("min-lldb-version") { - let min_version = line.trim_end() + let min_version = line + .trim_end() .rsplit(' ') .next() .expect("Malformed lldb version directive"); @@ -254,7 +259,8 @@ impl EarlyProps { } if let Some(ref actual_version) = config.llvm_version { if line.starts_with("min-llvm-version") { - let min_version = line.trim_end() + let min_version = line + .trim_end() .rsplit(' ') .next() .expect("Malformed llvm version directive"); @@ -262,7 +268,8 @@ impl EarlyProps { // version &actual_version[..] 
< min_version } else if line.starts_with("min-system-llvm-version") { - let min_version = line.trim_end() + let min_version = line + .trim_end() .rsplit(' ') .next() .expect("Malformed llvm version directive"); @@ -271,16 +278,15 @@ impl EarlyProps { config.system_llvm && &actual_version[..] < min_version } else if line.starts_with("ignore-llvm-version") { // Syntax is: "ignore-llvm-version [- ]" - let range_components = line.split(' ') + let range_components = line + .split(' ') .skip(1) // Skip the directive. .map(|s| s.trim()) .filter(|word| !word.is_empty() && word != &"-") .take(3) // 3 or more = invalid, so take at most 3. .collect::>(); match range_components.len() { - 1 => { - &actual_version[..] == range_components[0] - } + 1 => &actual_version[..] == range_components[0], 2 => { let v_min = range_components[0]; let v_max = range_components[1]; @@ -461,8 +467,7 @@ impl TestProps { } if let Some(flags) = config.parse_compile_flags(ln) { - self.compile_flags - .extend(flags.split_whitespace().map(|s| s.to_owned())); + self.compile_flags.extend(flags.split_whitespace().map(|s| s.to_owned())); } if let Some(edition) = config.parse_edition(ln) { @@ -614,8 +619,10 @@ impl TestProps { } fn update_fail_mode(&mut self, ln: &str, config: &Config) { - let check_ui = |mode: &str| if config.mode != Mode::Ui { - panic!("`{}-fail` header is only supported in UI tests", mode); + let check_ui = |mode: &str| { + if config.mode != Mode::Ui { + panic!("`{}-fail` header is only supported in UI tests", mode); + } }; let fail_mode = if config.parse_name_directive(ln, "check-fail") { check_ui("check"); @@ -641,9 +648,10 @@ impl TestProps { if config.mode != Mode::Ui && config.mode != Mode::Incremental { panic!("`{}` header is only supported in UI and incremental tests", s); } - if config.mode == Mode::Incremental && - !revision.map_or(false, |r| r.starts_with("cfail")) && - !self.revisions.iter().all(|r| r.starts_with("cfail")) { + if config.mode == Mode::Incremental + && 
!revision.map_or(false, |r| r.starts_with("cfail")) + && !self.revisions.iter().all(|r| r.starts_with("cfail")) + { panic!("`{}` header is only supported in `cfail` incremental tests", s); } }; @@ -688,11 +696,7 @@ fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut dyn FnMut(&str)) { return; } - let comment = if testfile.to_string_lossy().ends_with(".rs") { - "//" - } else { - "#" - }; + let comment = if testfile.to_string_lossy().ends_with(".rs") { "//" } else { "#" }; // FIXME: would be nice to allow some whitespace between comment and brace :) // It took me like 2 days to debug why compile-flags weren’t taken into account for my test :) @@ -711,7 +715,7 @@ fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut dyn FnMut(&str)) { // A comment like `//[foo]` is specific to revision `foo` if let Some(close_brace) = ln.find(']') { let open_brace = ln.find('[').unwrap(); - let lncfg = &ln[open_brace + 1 .. close_brace]; + let lncfg = &ln[open_brace + 1..close_brace]; let matches = match cfg { Some(s) => s == &lncfg[..], None => false, @@ -720,11 +724,13 @@ fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut dyn FnMut(&str)) { it(ln[(close_brace + 1)..].trim_start()); } } else { - panic!("malformed condition directive: expected `{}foo]`, found `{}`", - comment_with_brace, ln) + panic!( + "malformed condition directive: expected `{}foo]`, found `{}`", + comment_with_brace, ln + ) } } else if ln.starts_with(comment) { - it(ln[comment.len() ..].trim_start()); + it(ln[comment.len()..].trim_start()); } } return; @@ -743,8 +749,7 @@ impl Config { } fn parse_aux_build(&self, line: &str) -> Option { - self.parse_name_value_directive(line, "aux-build") - .map(|r| r.trim().to_string()) + self.parse_name_value_directive(line, "aux-build").map(|r| r.trim().to_string()) } fn parse_aux_crate(&self, line: &str) -> Option<(String, String)> { @@ -834,8 +839,7 @@ impl Config { } fn parse_assembly_output(&self, line: &str) -> Option { - 
self.parse_name_value_directive(line, "assembly-output") - .map(|r| r.trim().to_string()) + self.parse_name_value_directive(line, "assembly-output").map(|r| r.trim().to_string()) } fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> { @@ -890,10 +894,7 @@ impl Config { /// or `normalize-stderr-32bit`. fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> ParsedNameDirective { if line.starts_with(prefix) && line.as_bytes().get(prefix.len()) == Some(&b'-') { - let name = line[prefix.len() + 1..] - .split(&[':', ' '][..]) - .next() - .unwrap(); + let name = line[prefix.len() + 1..].split(&[':', ' '][..]).next().unwrap(); if name == "test" || util::matches_os(&self.target, name) || // target @@ -907,7 +908,8 @@ impl Config { Some(CompareMode::Polonius) => name == "compare-mode-polonius", None => false, } || - (cfg!(debug_assertions) && name == "debug") { + (cfg!(debug_assertions) && name == "debug") + { ParsedNameDirective::Match } else { match self.mode { @@ -919,27 +921,35 @@ impl Config { } else { ParsedNameDirective::NoMatch } - }, - common::DebugInfoCdb => if name == "cdb" { - ParsedNameDirective::Match - } else { - ParsedNameDirective::NoMatch - }, - common::DebugInfoGdb => if name == "gdb" { - ParsedNameDirective::Match - } else { - ParsedNameDirective::NoMatch - }, - common::DebugInfoLldb => if name == "lldb" { - ParsedNameDirective::Match - } else { - ParsedNameDirective::NoMatch - }, - common::Pretty => if name == "pretty" { - ParsedNameDirective::Match - } else { - ParsedNameDirective::NoMatch - }, + } + common::DebugInfoCdb => { + if name == "cdb" { + ParsedNameDirective::Match + } else { + ParsedNameDirective::NoMatch + } + } + common::DebugInfoGdb => { + if name == "gdb" { + ParsedNameDirective::Match + } else { + ParsedNameDirective::NoMatch + } + } + common::DebugInfoLldb => { + if name == "lldb" { + ParsedNameDirective::Match + } else { + ParsedNameDirective::NoMatch + } + } + common::Pretty => { + if name == "pretty" { + 
ParsedNameDirective::Match + } else { + ParsedNameDirective::NoMatch + } + } _ => ParsedNameDirective::NoMatch, } } @@ -958,10 +968,11 @@ impl Config { fn parse_name_directive(&self, line: &str, directive: &str) -> bool { // Ensure the directive is a whole word. Do not match "ignore-x86" when // the line says "ignore-x86_64". - line.starts_with(directive) && match line.as_bytes().get(directive.len()) { - None | Some(&b' ') | Some(&b':') => true, - _ => false, - } + line.starts_with(directive) + && match line.as_bytes().get(directive.len()) { + None | Some(&b' ') | Some(&b':') => true, + _ => false, + } } pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option { @@ -1002,10 +1013,8 @@ impl Config { } pub fn lldb_version_to_int(version_string: &str) -> isize { - let error_string = format!( - "Encountered LLDB version string with unexpected format: {}", - version_string - ); + let error_string = + format!("Encountered LLDB version string with unexpected format: {}", version_string); version_string.parse().expect(&error_string) } diff --git a/src/tools/compiletest/src/json.rs b/src/tools/compiletest/src/json.rs index 7930d1249e7..52d0cbd4bfd 100644 --- a/src/tools/compiletest/src/json.rs +++ b/src/tools/compiletest/src/json.rs @@ -95,10 +95,7 @@ pub fn extract_rendered(output: &str) -> String { } pub fn parse_output(file_name: &str, output: &str, proc_res: &ProcRes) -> Vec { - output - .lines() - .flat_map(|line| parse_line(file_name, line, output, proc_res)) - .collect() + output.lines().flat_map(|line| parse_line(file_name, line, output, proc_res)).collect() } fn parse_line(file_name: &str, line: &str, output: &str, proc_res: &ProcRes) -> Vec { @@ -138,11 +135,10 @@ fn push_expected_errors( .filter(|(_, span)| Path::new(&span.file_name) == Path::new(&file_name)) .collect(); - let spans_in_this_file: Vec<_> = spans_info_in_this_file.iter() - .map(|(_, span)| span) - .collect(); + let spans_in_this_file: Vec<_> = 
spans_info_in_this_file.iter().map(|(_, span)| span).collect(); - let primary_spans: Vec<_> = spans_info_in_this_file.iter() + let primary_spans: Vec<_> = spans_info_in_this_file + .iter() .filter(|(is_primary, _)| *is_primary) .map(|(_, span)| span) .take(1) // sometimes we have more than one showing up in the json; pick first @@ -166,24 +162,33 @@ fn push_expected_errors( let with_code = |span: &DiagnosticSpan, text: &str| { match diagnostic.code { Some(ref code) => - // FIXME(#33000) -- it'd be better to use a dedicated - // UI harness than to include the line/col number like - // this, but some current tests rely on it. - // - // Note: Do NOT include the filename. These can easily - // cause false matches where the expected message - // appears in the filename, and hence the message - // changes but the test still passes. - format!("{}:{}: {}:{}: {} [{}]", - span.line_start, span.column_start, - span.line_end, span.column_end, - text, code.code.clone()), + // FIXME(#33000) -- it'd be better to use a dedicated + // UI harness than to include the line/col number like + // this, but some current tests rely on it. + // + // Note: Do NOT include the filename. These can easily + // cause false matches where the expected message + // appears in the filename, and hence the message + // changes but the test still passes. 
+ { + format!( + "{}:{}: {}:{}: {} [{}]", + span.line_start, + span.column_start, + span.line_end, + span.column_end, + text, + code.code.clone() + ) + } None => - // FIXME(#33000) -- it'd be better to use a dedicated UI harness - format!("{}:{}: {}:{}: {}", - span.line_start, span.column_start, - span.line_end, span.column_end, - text), + // FIXME(#33000) -- it'd be better to use a dedicated UI harness + { + format!( + "{}:{}: {}:{}: {}", + span.line_start, span.column_start, span.line_end, span.column_end, text + ) + } } }; @@ -195,11 +200,7 @@ fn push_expected_errors( for span in primary_spans { let msg = with_code(span, first_line); let kind = ErrorKind::from_str(&diagnostic.level).ok(); - expected_errors.push(Error { - line_num: span.line_start, - kind, - msg, - }); + expected_errors.push(Error { line_num: span.line_start, kind, msg }); } } for next_line in message_lines { @@ -233,10 +234,7 @@ fn push_expected_errors( } // Add notes for any labels that appear in the message. - for span in spans_in_this_file - .iter() - .filter(|span| span.label.is_some()) - { + for span in spans_in_this_file.iter().filter(|span| span.label.is_some()) { expected_errors.push(Error { line_num: span.line_start, kind: Some(ErrorKind::Note), diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs index 32965bbb292..487c1d5fb93 100644 --- a/src/tools/compiletest/src/main.rs +++ b/src/tools/compiletest/src/main.rs @@ -5,11 +5,15 @@ extern crate test; -use crate::common::{CompareMode, PassMode}; use crate::common::{expected_output_path, output_base_dir, output_relative_path, UI_EXTENSIONS}; +use crate::common::{CompareMode, PassMode}; use crate::common::{Config, TestPaths}; -use crate::common::{DebugInfoCdb, DebugInfoGdbLldb, DebugInfoGdb, DebugInfoLldb, Mode, Pretty}; +use crate::common::{DebugInfoCdb, DebugInfoGdb, DebugInfoGdbLldb, DebugInfoLldb, Mode, Pretty}; +use crate::util::logv; +use env_logger; +use getopts; use getopts::Options; +use log::*; use 
std::env; use std::ffi::OsString; use std::fs; @@ -18,11 +22,7 @@ use std::path::{Path, PathBuf}; use std::process::Command; use std::time::SystemTime; use test::ColorConfig; -use crate::util::logv; use walkdir::WalkDir; -use env_logger; -use getopts; -use log::*; use self::header::{EarlyProps, Ignore}; @@ -53,77 +53,19 @@ fn main() { pub fn parse_config(args: Vec) -> Config { let mut opts = Options::new(); - opts.reqopt( - "", - "compile-lib-path", - "path to host shared libraries", - "PATH", - ).reqopt( - "", - "run-lib-path", - "path to target shared libraries", - "PATH", - ) - .reqopt( - "", - "rustc-path", - "path to rustc to use for compiling", - "PATH", - ) - .optopt( - "", - "rustdoc-path", - "path to rustdoc to use for compiling", - "PATH", - ) - .reqopt( - "", - "lldb-python", - "path to python to use for doc tests", - "PATH", - ) - .reqopt( - "", - "docck-python", - "path to python to use for doc tests", - "PATH", - ) - .optopt( - "", - "valgrind-path", - "path to Valgrind executable for Valgrind tests", - "PROGRAM", - ) - .optflag( - "", - "force-valgrind", - "fail if Valgrind tests cannot be run under Valgrind", - ) - .optopt( - "", - "run-clang-based-tests-with", - "path to Clang executable", - "PATH", - ) - .optopt( - "", - "llvm-filecheck", - "path to LLVM's FileCheck binary", - "DIR", - ) + opts.reqopt("", "compile-lib-path", "path to host shared libraries", "PATH") + .reqopt("", "run-lib-path", "path to target shared libraries", "PATH") + .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH") + .optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH") + .reqopt("", "lldb-python", "path to python to use for doc tests", "PATH") + .reqopt("", "docck-python", "path to python to use for doc tests", "PATH") + .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM") + .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind") + .optopt("", 
"run-clang-based-tests-with", "path to Clang executable", "PATH") + .optopt("", "llvm-filecheck", "path to LLVM's FileCheck binary", "DIR") .reqopt("", "src-base", "directory to scan for test files", "PATH") - .reqopt( - "", - "build-base", - "directory to deposit test outputs", - "PATH", - ) - .reqopt( - "", - "stage-id", - "the target-stage identifier", - "stageN-TARGET", - ) + .reqopt("", "build-base", "directory to deposit test outputs", "PATH") + .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET") .reqopt( "", "mode", @@ -134,7 +76,7 @@ pub fn parse_config(args: Vec) -> Config { "", "pass", "force {check,build,run}-pass tests to this mode.", - "check | build | run" + "check | build | run", ) .optflag("", "ignored", "run tests marked as ignored") .optflag("", "exact", "filters match exactly") @@ -145,97 +87,38 @@ pub fn parse_config(args: Vec) -> Config { (eg. emulator, valgrind)", "PROGRAM", ) - .optopt( - "", - "host-rustcflags", - "flags to pass to rustc for host", - "FLAGS", - ) - .optopt( - "", - "target-rustcflags", - "flags to pass to rustc for target", - "FLAGS", - ) + .optopt("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS") + .optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS") .optflag("", "verbose", "run tests verbosely, showing all output") .optflag( "", "bless", "overwrite stderr/stdout files instead of complaining about a mismatch", ) - .optflag( - "", - "quiet", - "print one character per test instead of one line", - ) + .optflag("", "quiet", "print one character per test instead of one line") .optopt("", "color", "coloring: auto, always, never", "WHEN") .optopt("", "logfile", "file to log test execution to", "FILE") .optopt("", "target", "the target to build for", "TARGET") .optopt("", "host", "the host to build for", "HOST") - .optopt( - "", - "cdb", - "path to CDB to use for CDB debuginfo tests", - "PATH", - ) - .optopt( - "", - "gdb", - "path to GDB to use for GDB debuginfo 
tests", - "PATH", - ) - .optopt( - "", - "lldb-version", - "the version of LLDB used", - "VERSION STRING", - ) - .optopt( - "", - "llvm-version", - "the version of LLVM used", - "VERSION STRING", - ) + .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH") + .optopt("", "gdb", "path to GDB to use for GDB debuginfo tests", "PATH") + .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING") + .optopt("", "llvm-version", "the version of LLVM used", "VERSION STRING") .optflag("", "system-llvm", "is LLVM the system LLVM") - .optopt( - "", - "android-cross-path", - "Android NDK standalone path", - "PATH", - ) + .optopt("", "android-cross-path", "Android NDK standalone path", "PATH") .optopt("", "adb-path", "path to the android debugger", "PATH") - .optopt( - "", - "adb-test-dir", - "path to tests for the android debugger", - "PATH", - ) - .optopt( - "", - "lldb-python-dir", - "directory containing LLDB's python module", - "PATH", - ) + .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH") + .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH") .reqopt("", "cc", "path to a C compiler", "PATH") .reqopt("", "cxx", "path to a C++ compiler", "PATH") .reqopt("", "cflags", "flags for the C compiler", "FLAGS") .optopt("", "ar", "path to an archiver", "PATH") .optopt("", "linker", "path to a linker", "PATH") - .reqopt( - "", - "llvm-components", - "list of LLVM components built in", - "LIST", - ) + .reqopt("", "llvm-components", "list of LLVM components built in", "LIST") .reqopt("", "llvm-cxxflags", "C++ flags for LLVM", "FLAGS") .optopt("", "llvm-bin-dir", "Path to LLVM's `bin` directory", "PATH") .optopt("", "nodejs", "the name of nodejs", "PATH") - .optopt( - "", - "remote-test-client", - "path to the remote test client", - "PATH", - ) + .optopt("", "remote-test-client", "path to the remote test client", "PATH") .optopt( "", "compare-mode", @@ -278,28 +161,21 @@ pub fn parse_config(args: 
Vec) -> Config { } fn make_absolute(path: PathBuf) -> PathBuf { - if path.is_relative() { - env::current_dir().unwrap().join(path) - } else { - path - } + if path.is_relative() { env::current_dir().unwrap().join(path) } else { path } } let target = opt_str2(matches.opt_str("target")); let android_cross_path = opt_path(matches, "android-cross-path"); let cdb = analyze_cdb(matches.opt_str("cdb"), &target); - let (gdb, gdb_version, gdb_native_rust) = analyze_gdb(matches.opt_str("gdb"), &target, - &android_cross_path); + let (gdb, gdb_version, gdb_native_rust) = + analyze_gdb(matches.opt_str("gdb"), &target, &android_cross_path); let (lldb_version, lldb_native_rust) = extract_lldb_version(matches.opt_str("lldb-version")); let color = match matches.opt_str("color").as_ref().map(|x| &**x) { Some("auto") | None => ColorConfig::AutoColor, Some("always") => ColorConfig::AlwaysColor, Some("never") => ColorConfig::NeverColor, - Some(x) => panic!( - "argument for --color must be auto, always, or never, but found `{}`", - x - ), + Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x), }; let src_base = opt_path(matches, "src-base"); @@ -320,18 +196,14 @@ pub fn parse_config(args: Vec) -> Config { src_base, build_base: opt_path(matches, "build-base"), stage_id: matches.opt_str("stage-id").unwrap(), - mode: matches - .opt_str("mode") - .unwrap() - .parse() - .expect("invalid mode"), + mode: matches.opt_str("mode").unwrap().parse().expect("invalid mode"), run_ignored, filter: matches.free.first().cloned(), filter_exact: matches.opt_present("exact"), - force_pass_mode: matches.opt_str("pass").map(|mode| + force_pass_mode: matches.opt_str("pass").map(|mode| { mode.parse::() .unwrap_or_else(|_| panic!("unknown `--pass` option `{}` given", mode)) - ), + }), logfile: matches.opt_str("logfile").map(|s| PathBuf::from(&s)), runtool: matches.opt_str("runtool"), host_rustcflags: matches.opt_str("host-rustcflags"), @@ -374,10 +246,7 @@ pub fn 
parse_config(args: Vec) -> Config { pub fn log_config(config: &Config) { let c = config; logv(c, "configuration:".to_string()); - logv( - c, - format!("compile_lib_path: {:?}", config.compile_lib_path), - ); + logv(c, format!("compile_lib_path: {:?}", config.compile_lib_path)); logv(c, format!("run_lib_path: {:?}", config.run_lib_path)); logv(c, format!("rustc_path: {:?}", config.rustc_path.display())); logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path)); @@ -386,42 +255,21 @@ pub fn log_config(config: &Config) { logv(c, format!("stage_id: {}", config.stage_id)); logv(c, format!("mode: {}", config.mode)); logv(c, format!("run_ignored: {}", config.run_ignored)); - logv( - c, - format!( - "filter: {}", - opt_str(&config.filter.as_ref().map(|re| re.to_owned())) - ), - ); + logv(c, format!("filter: {}", opt_str(&config.filter.as_ref().map(|re| re.to_owned())))); logv(c, format!("filter_exact: {}", config.filter_exact)); - logv(c, format!( - "force_pass_mode: {}", - opt_str(&config.force_pass_mode.map(|m| format!("{}", m))), - )); - logv(c, format!("runtool: {}", opt_str(&config.runtool))); logv( c, - format!("host-rustcflags: {}", opt_str(&config.host_rustcflags)), - ); - logv( - c, - format!("target-rustcflags: {}", opt_str(&config.target_rustcflags)), + format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),), ); + logv(c, format!("runtool: {}", opt_str(&config.runtool))); + logv(c, format!("host-rustcflags: {}", opt_str(&config.host_rustcflags))); + logv(c, format!("target-rustcflags: {}", opt_str(&config.target_rustcflags))); logv(c, format!("target: {}", config.target)); logv(c, format!("host: {}", config.host)); - logv( - c, - format!( - "android-cross-path: {:?}", - config.android_cross_path.display() - ), - ); + logv(c, format!("android-cross-path: {:?}", config.android_cross_path.display())); logv(c, format!("adb_path: {:?}", config.adb_path)); logv(c, format!("adb_test_dir: {:?}", config.adb_test_dir)); - logv( - c, 
- format!("adb_device_status: {}", config.adb_device_status), - ); + logv(c, format!("adb_device_status: {}", config.adb_device_status)); logv(c, format!("ar: {}", config.ar)); logv(c, format!("linker: {:?}", config.linker)); logv(c, format!("verbose: {}", config.verbose)); @@ -546,16 +394,8 @@ pub fn test_opts(config: &Config) -> test::TestOpts { exclude_should_panic: false, filter: config.filter.clone(), filter_exact: config.filter_exact, - run_ignored: if config.run_ignored { - test::RunIgnored::Yes - } else { - test::RunIgnored::No - }, - format: if config.quiet { - test::OutputFormat::Terse - } else { - test::OutputFormat::Pretty - }, + run_ignored: if config.run_ignored { test::RunIgnored::Yes } else { test::RunIgnored::No }, + format: if config.quiet { test::OutputFormat::Terse } else { test::OutputFormat::Pretty }, logfile: config.logfile.clone(), run_tests: true, bench_benchmarks: true, @@ -584,15 +424,14 @@ pub fn make_tests(config: &Config) -> Vec { &PathBuf::new(), &inputs, &mut tests, - ).expect(&format!("Could not read tests from {}", config.src_base.display())); + ) + .expect(&format!("Could not read tests from {}", config.src_base.display())); tests } /// Returns a stamp constructed from input files common to all test cases. 
fn common_inputs_stamp(config: &Config) -> Stamp { - let rust_src_dir = config - .find_rust_src_root() - .expect("Could not find Rust source root"); + let rust_src_dir = config.find_rust_src_root().expect("Could not find Rust source root"); let mut stamp = Stamp::from_path(&config.rustc_path); @@ -661,18 +500,21 @@ fn collect_tests_from_dir( let file_name = file.file_name(); if is_test(&file_name) { debug!("found test file: {:?}", file_path.display()); - let paths = TestPaths { - file: file_path, - relative_dir: relative_dir_path.to_path_buf(), - }; + let paths = + TestPaths { file: file_path, relative_dir: relative_dir_path.to_path_buf() }; tests.extend(make_test(config, &paths, inputs)) } else if file_path.is_dir() { let relative_file_path = relative_dir_path.join(file.file_name()); if &file_name != "auxiliary" { debug!("found directory: {:?}", file_path.display()); collect_tests_from_dir( - config, base, &file_path, &relative_file_path, - inputs, tests)?; + config, + base, + &file_path, + &relative_file_path, + inputs, + tests, + )?; } } else { debug!("found other file/directory: {:?}", file_path.display()); @@ -681,7 +523,6 @@ fn collect_tests_from_dir( Ok(()) } - /// Returns true if `file_name` looks like a proper test file name. pub fn is_test(file_name: &OsString) -> bool { let file_name = file_name.to_str().unwrap(); @@ -708,11 +549,13 @@ fn make_test(config: &Config, testpaths: &TestPaths, inputs: &Stamp) -> Vec test::ShouldPanic::No, - _ => if early_props.should_fail { - test::ShouldPanic::Yes - } else { - test::ShouldPanic::No - }, + _ => { + if early_props.should_fail { + test::ShouldPanic::Yes + } else { + test::ShouldPanic::No + } + } }; // Incremental tests are special, they inherently cannot be run in parallel. 
@@ -782,10 +625,7 @@ fn is_up_to_date( inputs.add_path(&testpaths.file); for aux in &props.aux { - let path = testpaths.file.parent() - .unwrap() - .join("auxiliary") - .join(aux); + let path = testpaths.file.parent().unwrap().join("auxiliary").join(aux); inputs.add_path(&path); } @@ -821,7 +661,9 @@ impl Stamp { for entry in WalkDir::new(path) { let entry = entry.unwrap(); if entry.file_type().is_file() { - let modified = entry.metadata().ok() + let modified = entry + .metadata() + .ok() .and_then(|metadata| metadata.modified().ok()) .unwrap_or(SystemTime::UNIX_EPOCH); self.time = self.time.max(modified); @@ -898,13 +740,13 @@ fn find_cdb(target: &String) -> Option { } let pf86 = env::var_os("ProgramFiles(x86)").or(env::var_os("ProgramFiles"))?; - let cdb_arch = if cfg!(target_arch="x86") { + let cdb_arch = if cfg!(target_arch = "x86") { "x86" - } else if cfg!(target_arch="x86_64") { + } else if cfg!(target_arch = "x86_64") { "x64" - } else if cfg!(target_arch="aarch64") { + } else if cfg!(target_arch = "aarch64") { "arm64" - } else if cfg!(target_arch="arm") { + } else if cfg!(target_arch = "arm") { "arm" } else { return None; // No compatible CDB.exe in the Windows 10 SDK @@ -929,8 +771,11 @@ fn analyze_cdb(cdb: Option, target: &String) -> Option { } /// Returns (Path to GDB, GDB Version, GDB has Rust Support) -fn analyze_gdb(gdb: Option, target: &String, android_cross_path: &PathBuf) - -> (Option, Option, bool) { +fn analyze_gdb( + gdb: Option, + target: &String, + android_cross_path: &PathBuf, +) -> (Option, Option, bool) { #[cfg(not(windows))] const GDB_FALLBACK: &str = "gdb"; #[cfg(windows)] @@ -1008,23 +853,20 @@ fn extract_gdb_version(full_version_line: &str) -> Option { let line = &line[next_split + 1..]; let (minor, patch) = match line.find(|c: char| !c.is_digit(10)) { - Some(idx) => if line.as_bytes()[idx] == b'.' 
{ - let patch = &line[idx + 1..]; - - let patch_len = patch - .find(|c: char| !c.is_digit(10)) - .unwrap_or_else(|| patch.len()); - let patch = &patch[..patch_len]; - let patch = if patch_len > 3 || patch_len == 0 { - None - } else { - Some(patch) - }; + Some(idx) => { + if line.as_bytes()[idx] == b'.' { + let patch = &line[idx + 1..]; - (&line[..idx], patch) - } else { - (&line[..idx], None) - }, + let patch_len = + patch.find(|c: char| !c.is_digit(10)).unwrap_or_else(|| patch.len()); + let patch = &patch[..patch_len]; + let patch = if patch_len > 3 || patch_len == 0 { None } else { Some(patch) }; + + (&line[..idx], patch) + } else { + (&line[..idx], None) + } + } None => (line, None), }; diff --git a/src/tools/compiletest/src/raise_fd_limit.rs b/src/tools/compiletest/src/raise_fd_limit.rs index e9c91094104..faded7c8024 100644 --- a/src/tools/compiletest/src/raise_fd_limit.rs +++ b/src/tools/compiletest/src/raise_fd_limit.rs @@ -23,24 +23,15 @@ pub unsafe fn raise_fd_limit() { let mut mib: [libc::c_int; 2] = [CTL_KERN, KERN_MAXFILESPERPROC]; let mut maxfiles: libc::c_int = 0; let mut size: libc::size_t = size_of_val(&maxfiles) as libc::size_t; - if libc::sysctl( - &mut mib[0], - 2, - &mut maxfiles as *mut _ as *mut _, - &mut size, - null_mut(), - 0, - ) != 0 + if libc::sysctl(&mut mib[0], 2, &mut maxfiles as *mut _ as *mut _, &mut size, null_mut(), 0) + != 0 { let err = io::Error::last_os_error(); panic!("raise_fd_limit: error calling sysctl: {}", err); } // Fetch the current resource limits - let mut rlim = libc::rlimit { - rlim_cur: 0, - rlim_max: 0, - }; + let mut rlim = libc::rlimit { rlim_cur: 0, rlim_max: 0 }; if libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) != 0 { let err = io::Error::last_os_error(); panic!("raise_fd_limit: error calling getrlimit: {}", err); diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs index 6dfd8e97c63..da1d3db49d7 100644 --- a/src/tools/compiletest/src/read2.rs +++ 
b/src/tools/compiletest/src/read2.rs @@ -197,9 +197,6 @@ mod imp { if v.capacity() == v.len() { v.reserve(1); } - slice::from_raw_parts_mut( - v.as_mut_ptr().offset(v.len() as isize), - v.capacity() - v.len(), - ) + slice::from_raw_parts_mut(v.as_mut_ptr().offset(v.len() as isize), v.capacity() - v.len()) } } diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 83a69c321bb..02225d0ea01 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -1,21 +1,21 @@ // ignore-tidy-filelength -use crate::common::{CompareMode, PassMode, FailMode}; use crate::common::{expected_output_path, UI_EXTENSIONS, UI_FIXED, UI_STDERR, UI_STDOUT}; -use crate::common::{UI_RUN_STDERR, UI_RUN_STDOUT}; use crate::common::{output_base_dir, output_base_name, output_testname_unique}; +use crate::common::{Assembly, Incremental, JsDocTest, MirOpt, RunMake, Ui}; use crate::common::{Codegen, CodegenUnits, Rustdoc}; -use crate::common::{DebugInfoCdb, DebugInfoGdbLldb, DebugInfoGdb, DebugInfoLldb}; +use crate::common::{CompareMode, FailMode, PassMode}; use crate::common::{CompileFail, Pretty, RunFail, RunPassValgrind}; use crate::common::{Config, TestPaths}; -use crate::common::{Incremental, MirOpt, RunMake, Ui, JsDocTest, Assembly}; -use diff; +use crate::common::{DebugInfoCdb, DebugInfoGdb, DebugInfoGdbLldb, DebugInfoLldb}; +use crate::common::{UI_RUN_STDERR, UI_RUN_STDOUT}; use crate::errors::{self, Error, ErrorKind}; use crate::header::TestProps; use crate::json; +use crate::util::{logv, PathBufExt}; +use diff; use regex::{Captures, Regex}; use rustfix::{apply_suggestions, get_suggestions_from_json, Filter}; -use crate::util::{logv, PathBufExt}; use std::collections::hash_map::DefaultHasher; use std::collections::{HashMap, HashSet, VecDeque}; @@ -117,10 +117,7 @@ pub struct Mismatch { impl Mismatch { fn new(line_number: u32) -> Mismatch { - Mismatch { - line_number: line_number, - lines: Vec::new(), - } + Mismatch { 
line_number: line_number, lines: Vec::new() } } } @@ -210,21 +207,13 @@ pub fn run(config: Config, testpaths: &TestPaths, revision: Option<&str>) { debug!("running {:?}", testpaths.file.display()); let props = TestProps::from_file(&testpaths.file, revision, &config); - let cx = TestCx { - config: &config, - props: &props, - testpaths, - revision: revision, - }; + let cx = TestCx { config: &config, props: &props, testpaths, revision: revision }; create_dir_all(&cx.output_base_dir()).unwrap(); if config.mode == Incremental { // Incremental tests are special because they cannot be run in // parallel. - assert!( - !props.revisions.is_empty(), - "Incremental tests require revisions." - ); + assert!(!props.revisions.is_empty(), "Incremental tests require revisions."); cx.init_incremental_test(); for revision in &props.revisions { let revision_props = TestProps::from_file(&testpaths.file, Some(revision), &config); @@ -296,19 +285,24 @@ enum TestOutput { /// Will this test be executed? Should we use `make_exe_name`? #[derive(Copy, Clone, PartialEq)] -enum WillExecute { Yes, No } +enum WillExecute { + Yes, + No, +} /// Should `--emit metadata` be used? #[derive(Copy, Clone)] -enum EmitMetadata { Yes, No } +enum EmitMetadata { + Yes, + No, +} impl<'test> TestCx<'test> { /// Code executed for each revision in turn (or, if there are no /// revisions, exactly once, with revision == None). 
fn run_revision(&self) { if self.props.should_ice { - if self.config.mode != CompileFail && - self.config.mode != Incremental { + if self.config.mode != CompileFail && self.config.mode != Incremental { self.fatal("cannot use should-ice in a test that is not cfail"); } } @@ -320,7 +314,7 @@ impl<'test> TestCx<'test> { DebugInfoGdbLldb => { self.run_debuginfo_gdb_test(); self.run_debuginfo_lldb_test(); - }, + } DebugInfoCdb => self.run_debuginfo_cdb_test(), DebugInfoGdb => self.run_debuginfo_gdb_test(), DebugInfoLldb => self.run_debuginfo_lldb_test(), @@ -363,8 +357,8 @@ impl<'test> TestCx<'test> { JsDocTest => true, Ui => pm.is_some() || self.props.fail_mode > Some(FailMode::Build), Incremental => { - let revision = self.revision - .expect("incremental tests require a list of revisions"); + let revision = + self.revision.expect("incremental tests require a list of revisions"); if revision.starts_with("rpass") || revision.starts_with("rfail") { true } else if revision.starts_with("cfail") { @@ -502,10 +496,7 @@ impl<'test> TestCx<'test> { let mut new_config = self.config.clone(); new_config.runtool = new_config.valgrind_path.clone(); - let new_cx = TestCx { - config: &new_config, - ..*self - }; + let new_cx = TestCx { config: &new_config, ..*self }; proc_res = new_cx.exec_compiled_test(); if !proc_res.status.success() { @@ -517,10 +508,7 @@ impl<'test> TestCx<'test> { if self.props.pp_exact.is_some() { logv(self.config, "testing for exact pretty-printing".to_owned()); } else { - logv( - self.config, - "testing for converging pretty-printing".to_owned(), - ); + logv(self.config, "testing for converging pretty-printing".to_owned()); } let rounds = match self.props.pp_exact { @@ -535,19 +523,12 @@ impl<'test> TestCx<'test> { while round < rounds { logv( self.config, - format!( - "pretty-printing round {} revision {:?}", - round, self.revision - ), + format!("pretty-printing round {} revision {:?}", round, self.revision), ); - let read_from = if round == 0 { - 
ReadFrom::Path - } else { - ReadFrom::Stdin(srcs[round].to_owned()) - }; + let read_from = + if round == 0 { ReadFrom::Path } else { ReadFrom::Stdin(srcs[round].to_owned()) }; - let proc_res = self.print_source(read_from, - &self.props.pretty_mode); + let proc_res = self.print_source(read_from, &self.props.pretty_mode); if !proc_res.status.success() { self.fatal_proc_rec( &format!( @@ -602,16 +583,10 @@ impl<'test> TestCx<'test> { self.fatal_proc_rec("pretty-printing (expanded) failed", &proc_res); } - let ProcRes { - stdout: expanded_src, - .. - } = proc_res; + let ProcRes { stdout: expanded_src, .. } = proc_res; let proc_res = self.typecheck_source(expanded_src); if !proc_res.status.success() { - self.fatal_proc_rec( - "pretty-printed source (expanded) does not typecheck", - &proc_res, - ); + self.fatal_proc_rec("pretty-printed source (expanded) does not typecheck", &proc_res); } } @@ -631,12 +606,14 @@ impl<'test> TestCx<'test> { .arg(&aux_dir) .args(&self.props.compile_flags) .envs(self.props.exec_env.clone()); - self.maybe_add_external_args(&mut rustc, - self.split_maybe_args(&self.config.target_rustcflags)); + self.maybe_add_external_args( + &mut rustc, + self.split_maybe_args(&self.config.target_rustcflags), + ); let src = match read_from { ReadFrom::Stdin(src) => Some(src), - ReadFrom::Path => None + ReadFrom::Path => None, }; self.compose_and_run( @@ -660,8 +637,8 @@ impl<'test> TestCx<'test> { {}\n\ ------------------------------------------\n\ \n", - expected, actual) - ); + expected, actual + )); } } @@ -681,11 +658,7 @@ impl<'test> TestCx<'test> { let _ = fs::remove_dir_all(&out_dir); create_dir_all(&out_dir).unwrap(); - let target = if self.props.force_host { - &*self.config.host - } else { - &*self.config.target - }; + let target = if self.props.force_host { &*self.config.host } else { &*self.config.target }; let aux_dir = self.aux_output_dir_name(); @@ -700,8 +673,10 @@ impl<'test> TestCx<'test> { .arg("-L") .arg(aux_dir); 
self.set_revision_flags(&mut rustc); - self.maybe_add_external_args(&mut rustc, - self.split_maybe_args(&self.config.target_rustcflags)); + self.maybe_add_external_args( + &mut rustc, + self.split_maybe_args(&self.config.target_rustcflags), + ); rustc.args(&self.props.compile_flags); self.compose_and_run_compiler(rustc, Some(src)) @@ -717,10 +692,7 @@ impl<'test> TestCx<'test> { ..self.config.clone() }; - let test_cx = TestCx { - config: &config, - ..*self - }; + let test_cx = TestCx { config: &config, ..*self }; test_cx.run_debuginfo_cdb_test_no_opt(); } @@ -741,12 +713,8 @@ impl<'test> TestCx<'test> { }; // Parse debugger commands etc from test files - let DebuggerCommands { - commands, - check_lines, - breakpoint_lines, - .. - } = self.parse_debugger_commands(prefixes); + let DebuggerCommands { commands, check_lines, breakpoint_lines, .. } = + self.parse_debugger_commands(prefixes); // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/debugger-commands let mut script_str = String::with_capacity(2048); @@ -756,10 +724,7 @@ impl<'test> TestCx<'test> { // Set breakpoints on every line that contains the string "#break" let source_file_name = self.testpaths.file.file_name().unwrap().to_string_lossy(); for line in &breakpoint_lines { - script_str.push_str(&format!( - "bp `{}:{}`\n", - source_file_name, line - )); + script_str.push_str(&format!("bp `{}:{}`\n", source_file_name, line)); } // Append the other `cdb-command:`s @@ -777,16 +742,16 @@ impl<'test> TestCx<'test> { let cdb_path = &self.config.cdb.as_ref().unwrap(); let mut cdb = Command::new(cdb_path); - cdb - .arg("-lines") // Enable source line debugging. - .arg("-cf").arg(&debugger_script) + cdb.arg("-lines") // Enable source line debugging. 
+ .arg("-cf") + .arg(&debugger_script) .arg(&exe_file); let debugger_run_result = self.compose_and_run( cdb, self.config.run_lib_path.to_str().unwrap(), None, // aux_path - None // input + None, // input ); if !debugger_run_result.status.success() { @@ -806,10 +771,7 @@ impl<'test> TestCx<'test> { ..self.config.clone() }; - let test_cx = TestCx { - config: &config, - ..*self - }; + let test_cx = TestCx { config: &config, ..*self }; test_cx.run_debuginfo_gdb_test_no_opt(); } @@ -827,11 +789,8 @@ impl<'test> TestCx<'test> { PREFIXES }; - let DebuggerCommands { - commands, - check_lines, - breakpoint_lines, - } = self.parse_debugger_commands(prefixes); + let DebuggerCommands { commands, check_lines, breakpoint_lines } = + self.parse_debugger_commands(prefixes); let mut cmds = commands.join("\n"); // compile test file (it should have 'compile-flags:-g' in the header) @@ -895,11 +854,7 @@ impl<'test> TestCx<'test> { "export LD_LIBRARY_PATH={}; \ gdbserver{} :5039 {}/{}", self.config.adb_test_dir.clone(), - if self.config.target.contains("aarch64") { - "64" - } else { - "" - }, + if self.config.target.contains("aarch64") { "64" } else { "" }, self.config.adb_test_dir.clone(), exe_file.file_name().unwrap().to_str().unwrap() ); @@ -928,19 +883,11 @@ impl<'test> TestCx<'test> { let mut debugger_script = OsString::from("-command="); debugger_script.push(self.make_out_name("debugger.script")); - let debugger_opts: &[&OsStr] = &[ - "-quiet".as_ref(), - "-batch".as_ref(), - "-nx".as_ref(), - &debugger_script, - ]; + let debugger_opts: &[&OsStr] = + &["-quiet".as_ref(), "-batch".as_ref(), "-nx".as_ref(), &debugger_script]; let gdb_path = self.config.gdb.as_ref().unwrap(); - let Output { - status, - stdout, - stderr, - } = Command::new(&gdb_path) + let Output { status, stdout, stderr } = Command::new(&gdb_path) .args(debugger_opts) .output() .expect(&format!("failed to exec `{:?}`", gdb_path)); @@ -962,16 +909,11 @@ impl<'test> TestCx<'test> { println!("Adb process is already 
finished."); } } else { - let rust_src_root = self - .config - .find_rust_src_root() - .expect("Could not find Rust source root"); + let rust_src_root = + self.config.find_rust_src_root().expect("Could not find Rust source root"); let rust_pp_module_rel_path = Path::new("./src/etc"); - let rust_pp_module_abs_path = rust_src_root - .join(rust_pp_module_rel_path) - .to_str() - .unwrap() - .to_owned(); + let rust_pp_module_abs_path = + rust_src_root.join(rust_pp_module_rel_path).to_str().unwrap().to_owned(); // write debugger script let mut script_str = String::with_capacity(2048); script_str.push_str(&format!("set charset {}\n", Self::charset())); @@ -979,10 +921,7 @@ impl<'test> TestCx<'test> { match self.config.gdb_version { Some(version) => { - println!( - "NOTE: compiletest thinks it is using GDB version {}", - version - ); + println!("NOTE: compiletest thinks it is using GDB version {}", version); if version > extract_gdb_version("7.4").unwrap() { // Add the directory containing the pretty printers to @@ -1009,10 +948,8 @@ impl<'test> TestCx<'test> { script_str.push_str(&format!("directory {}\n", rust_pp_module_abs_path)); // Load the target executable - script_str.push_str(&format!( - "file {}\n", - exe_file.to_str().unwrap().replace(r"\", r"\\") - )); + script_str + .push_str(&format!("file {}\n", exe_file.to_str().unwrap().replace(r"\", r"\\"))); // Force GDB to print values in the Rust format. 
if self.config.gdb_native_rust { @@ -1037,23 +974,14 @@ impl<'test> TestCx<'test> { let mut debugger_script = OsString::from("-command="); debugger_script.push(self.make_out_name("debugger.script")); - let debugger_opts: &[&OsStr] = &[ - "-quiet".as_ref(), - "-batch".as_ref(), - "-nx".as_ref(), - &debugger_script, - ]; + let debugger_opts: &[&OsStr] = + &["-quiet".as_ref(), "-batch".as_ref(), "-nx".as_ref(), &debugger_script]; let mut gdb = Command::new(self.config.gdb.as_ref().unwrap()); - gdb.args(debugger_opts) - .env("PYTHONPATH", rust_pp_module_abs_path); - - debugger_run_result = self.compose_and_run( - gdb, - self.config.run_lib_path.to_str().unwrap(), - None, - None, - ); + gdb.args(debugger_opts).env("PYTHONPATH", rust_pp_module_abs_path); + + debugger_run_result = + self.compose_and_run(gdb, self.config.run_lib_path.to_str().unwrap(), None, None); } if !debugger_run_result.status.success() { @@ -1077,10 +1005,7 @@ impl<'test> TestCx<'test> { ..self.config.clone() }; - let test_cx = TestCx { - config: &config, - ..*self - }; + let test_cx = TestCx { config: &config, ..*self }; test_cx.run_debuginfo_lldb_test_no_opt(); } @@ -1096,10 +1021,7 @@ impl<'test> TestCx<'test> { match self.config.lldb_version { Some(ref version) => { - println!( - "NOTE: compiletest thinks it is using LLDB version {}", - version - ); + println!("NOTE: compiletest thinks it is using LLDB version {}", version); } _ => { println!( @@ -1120,12 +1042,8 @@ impl<'test> TestCx<'test> { }; // Parse debugger commands etc from test files - let DebuggerCommands { - commands, - check_lines, - breakpoint_lines, - .. - } = self.parse_debugger_commands(prefixes); + let DebuggerCommands { commands, check_lines, breakpoint_lines, .. 
} = + self.parse_debugger_commands(prefixes); // Write debugger script: // We don't want to hang when calling `quit` while the process is still running @@ -1135,16 +1053,11 @@ impl<'test> TestCx<'test> { script_str.push_str("version\n"); // Switch LLDB into "Rust mode" - let rust_src_root = self - .config - .find_rust_src_root() - .expect("Could not find Rust source root"); + let rust_src_root = + self.config.find_rust_src_root().expect("Could not find Rust source root"); let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py"); - let rust_pp_module_abs_path = rust_src_root - .join(rust_pp_module_rel_path) - .to_str() - .unwrap() - .to_owned(); + let rust_pp_module_abs_path = + rust_src_root.join(rust_pp_module_rel_path).to_str().unwrap().to_owned(); script_str .push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[..]); @@ -1205,15 +1118,9 @@ impl<'test> TestCx<'test> { fn cmd2procres(&self, cmd: &mut Command) -> ProcRes { let (status, out, err) = match cmd.output() { - Ok(Output { - status, - stdout, - stderr, - }) => ( - status, - String::from_utf8(stdout).unwrap(), - String::from_utf8(stderr).unwrap(), - ), + Ok(Output { status, stdout, stderr }) => { + (status, String::from_utf8(stdout).unwrap(), String::from_utf8(stderr).unwrap()) + } Err(e) => self.fatal(&format!( "Failed to setup Python process for \ LLDB script: {}", @@ -1222,12 +1129,7 @@ impl<'test> TestCx<'test> { }; self.dump_output(&out, &err); - ProcRes { - status, - stdout: out, - stderr: err, - cmdline: format!("{:?}", cmd), - } + ProcRes { status, stdout: out, stderr: err, cmdline: format!("{:?}", cmd) } } fn parse_debugger_commands(&self, debugger_prefixes: &[&str]) -> DebuggerCommands { @@ -1244,11 +1146,8 @@ impl<'test> TestCx<'test> { for line in reader.lines() { match line { Ok(line) => { - let line = if line.starts_with("//") { - line[2..].trim_start() - } else { - line.as_str() - }; + let line = + if line.starts_with("//") { line[2..].trim_start() 
} else { line.as_str() }; if line.contains("#break") { breakpoint_lines.push(counter); @@ -1269,11 +1168,7 @@ impl<'test> TestCx<'test> { counter += 1; } - DebuggerCommands { - commands, - check_lines, - breakpoint_lines, - } + DebuggerCommands { commands, check_lines, breakpoint_lines } } fn cleanup_debug_info_options(&self, options: &Option) -> Option { @@ -1297,18 +1192,19 @@ impl<'test> TestCx<'test> { // // Notable use-cases are: do not add our optimisation flag if // `compile-flags: -Copt-level=x` and similar for debug-info level as well. - const OPT_FLAGS: &[&str] = &["-O", "-Copt-level=", /*-C*/"opt-level="]; - const DEBUG_FLAGS: &[&str] = &["-g", "-Cdebuginfo=", /*-C*/"debuginfo="]; + const OPT_FLAGS: &[&str] = &["-O", "-Copt-level=", /*-C*/ "opt-level="]; + const DEBUG_FLAGS: &[&str] = &["-g", "-Cdebuginfo=", /*-C*/ "debuginfo="]; // FIXME: ideally we would "just" check the `cmd` itself, but it does not allow inspecting // its arguments. They need to be collected separately. For now I cannot be bothered to // implement this the "right" way. 
- let have_opt_flag = self.props.compile_flags.iter().any(|arg| { - OPT_FLAGS.iter().any(|f| arg.starts_with(f)) - }); - let have_debug_flag = self.props.compile_flags.iter().any(|arg| { - DEBUG_FLAGS.iter().any(|f| arg.starts_with(f)) - }); + let have_opt_flag = + self.props.compile_flags.iter().any(|arg| OPT_FLAGS.iter().any(|f| arg.starts_with(f))); + let have_debug_flag = self + .props + .compile_flags + .iter() + .any(|arg| DEBUG_FLAGS.iter().any(|f| arg.starts_with(f))); for arg in args { if OPT_FLAGS.iter().any(|f| arg.starts_with(f)) && have_opt_flag { @@ -1336,10 +1232,7 @@ impl<'test> TestCx<'test> { } if check_line_index != num_check_lines && num_check_lines > 0 { self.fatal_proc_rec( - &format!( - "line not found in debugger output: {}", - check_lines[check_line_index] - ), + &format!("line not found in debugger output: {}", check_lines[check_line_index]), debugger_run_result, ); } @@ -1352,10 +1245,8 @@ impl<'test> TestCx<'test> { let can_start_anywhere = check_line.starts_with("[...]"); let can_end_anywhere = check_line.ends_with("[...]"); - let check_fragments: Vec<&str> = check_line - .split("[...]") - .filter(|frag| !frag.is_empty()) - .collect(); + let check_fragments: Vec<&str> = + check_line.split("[...]").filter(|frag| !frag.is_empty()).collect(); if check_fragments.is_empty() { return true; } @@ -1451,12 +1342,12 @@ impl<'test> TestCx<'test> { } fn check_expected_errors(&self, expected_errors: Vec, proc_res: &ProcRes) { - debug!("check_expected_errors: expected_errors={:?} proc_res.status={:?}", - expected_errors, proc_res.status); + debug!( + "check_expected_errors: expected_errors={:?} proc_res.status={:?}", + expected_errors, proc_res.status + ); if proc_res.status.success() - && expected_errors - .iter() - .any(|x| x.kind == Some(ErrorKind::Error)) + && expected_errors.iter().any(|x| x.kind == Some(ErrorKind::Error)) { self.fatal_proc_rec("process did not return an error status", proc_res); } @@ -1472,26 +1363,22 @@ impl<'test> 
TestCx<'test> { // message, then we'll ensure that all "help" messages are expected. // Otherwise, all "help" messages reported by the compiler will be ignored. // This logic also applies to "note" messages. - let expect_help = expected_errors - .iter() - .any(|ee| ee.kind == Some(ErrorKind::Help)); - let expect_note = expected_errors - .iter() - .any(|ee| ee.kind == Some(ErrorKind::Note)); + let expect_help = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Help)); + let expect_note = expected_errors.iter().any(|ee| ee.kind == Some(ErrorKind::Note)); // Parse the JSON output from the compiler and extract out the messages. let actual_errors = json::parse_output(&os_file_name, &proc_res.stderr, proc_res); let mut unexpected = Vec::new(); let mut found = vec![false; expected_errors.len()]; for actual_error in &actual_errors { - let opt_index = expected_errors.iter().enumerate().position( - |(index, expected_error)| { - !found[index] && actual_error.line_num == expected_error.line_num + let opt_index = + expected_errors.iter().enumerate().position(|(index, expected_error)| { + !found[index] + && actual_error.line_num == expected_error.line_num && (expected_error.kind.is_none() || actual_error.kind == expected_error.kind) && actual_error.msg.contains(&expected_error.msg) - }, - ); + }); match opt_index { Some(index) => { @@ -1526,10 +1413,7 @@ impl<'test> TestCx<'test> { "{}:{}: expected {} not found: {}", file_name, expected_error.line_num, - expected_error - .kind - .as_ref() - .map_or("message".into(), |k| k.to_string()), + expected_error.kind.as_ref().map_or("message".into(), |k| k.to_string()), expected_error.msg )); not_found.push(expected_error); @@ -1625,8 +1509,7 @@ impl<'test> TestCx<'test> { for rel_ab in &self.props.aux_builds { let aux_testpaths = self.compute_aux_test_paths(rel_ab); let aux_props = - self.props - .from_aux_file(&aux_testpaths.file, self.revision, self.config); + self.props.from_aux_file(&aux_testpaths.file, self.revision, 
self.config); let aux_cx = TestCx { config: self.config, props: &aux_props, @@ -1644,11 +1527,7 @@ impl<'test> TestCx<'test> { let aux_dir = self.aux_output_dir_name(); - let rustdoc_path = self - .config - .rustdoc_path - .as_ref() - .expect("--rustdoc-path passed"); + let rustdoc_path = self.config.rustdoc_path.as_ref().expect("--rustdoc-path passed"); let mut rustdoc = Command::new(rustdoc_path); rustdoc @@ -1700,10 +1579,7 @@ impl<'test> TestCx<'test> { } let mut test_client = Command::new(self.config.remote_test_client.as_ref().unwrap()); - test_client - .args(&["run", &prog]) - .args(args) - .envs(env.clone()); + test_client.args(&["run", &prog]).args(args).envs(env.clone()); self.compose_and_run( test_client, self.config.run_lib_path.to_str().unwrap(), @@ -1727,10 +1603,7 @@ impl<'test> TestCx<'test> { let aux_dir = self.aux_output_dir_name(); let ProcArgs { prog, args } = self.make_run_args(); let mut program = Command::new(&prog); - program - .args(args) - .current_dir(&self.output_base_dir()) - .envs(env.clone()); + program.args(args).current_dir(&self.output_base_dir()).envs(env.clone()); self.compose_and_run( program, self.config.run_lib_path.to_str().unwrap(), @@ -1760,10 +1633,7 @@ impl<'test> TestCx<'test> { .join("auxiliary") .join(rel_ab); if !test_ab.exists() { - self.fatal(&format!( - "aux-build `{}` source not found", - test_ab.display() - )) + self.fatal(&format!("aux-build `{}` source not found", test_ab.display())) } TestPaths { @@ -1784,7 +1654,7 @@ impl<'test> TestCx<'test> { if self.config.target.contains("vxworks") { match env::var("RUST_VXWORKS_TEST_DYLINK") { Ok(s) => s != "1", - _ => true + _ => true, } } else { false @@ -1809,15 +1679,12 @@ impl<'test> TestCx<'test> { for (aux_name, aux_path) in &self.props.aux_crates { let is_dylib = self.build_auxiliary(&aux_path, &aux_dir); - let lib_name = get_lib_name(&aux_path.trim_end_matches(".rs").replace('-', "_"), - is_dylib); - rustc.arg("--extern") - .arg(format!("{}={}/{}", aux_name, 
aux_dir.display(), lib_name)); + let lib_name = + get_lib_name(&aux_path.trim_end_matches(".rs").replace('-', "_"), is_dylib); + rustc.arg("--extern").arg(format!("{}={}/{}", aux_name, aux_dir.display(), lib_name)); } - self.props.unset_rustc_env.clone() - .iter() - .fold(&mut rustc, |rustc, v| rustc.env_remove(v)); + self.props.unset_rustc_env.clone().iter().fold(&mut rustc, |rustc, v| rustc.env_remove(v)); rustc.envs(self.props.rustc_env.clone()); self.compose_and_run( rustc, @@ -1832,9 +1699,7 @@ impl<'test> TestCx<'test> { /// Returns whether or not it is a dylib. fn build_auxiliary(&self, source_path: &str, aux_dir: &Path) -> bool { let aux_testpaths = self.compute_aux_test_paths(source_path); - let aux_props = - self.props - .from_aux_file(&aux_testpaths.file, self.revision, self.config); + let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision, self.config); let aux_output = TargetLocation::ThisDirectory(self.aux_output_dir_name()); let aux_cx = TestCx { config: self.config, @@ -1909,10 +1774,7 @@ impl<'test> TestCx<'test> { cmdline }; - command - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .stdin(Stdio::piped()); + command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::piped()); // Need to be sure to put both the lib_path and the aux path in the dylib // search path for the child. 
@@ -1930,19 +1792,11 @@ impl<'test> TestCx<'test> { let mut child = disable_error_reporting(|| command.spawn()) .expect(&format!("failed to exec `{:?}`", &command)); if let Some(input) = input { - child - .stdin - .as_mut() - .unwrap() - .write_all(input.as_bytes()) - .unwrap(); + child.stdin.as_mut().unwrap().write_all(input.as_bytes()).unwrap(); } - let Output { - status, - stdout, - stderr, - } = read2_abbreviated(child).expect("failed to read output"); + let Output { status, stdout, stderr } = + read2_abbreviated(child).expect("failed to read output"); let result = ProcRes { status, @@ -1957,8 +1811,7 @@ impl<'test> TestCx<'test> { } fn is_rustdoc(&self) -> bool { - self.config.src_base.ends_with("rustdoc-ui") - || self.config.src_base.ends_with("rustdoc-js") + self.config.src_base.ends_with("rustdoc-ui") || self.config.src_base.ends_with("rustdoc-js") } fn make_compile_args( @@ -1971,13 +1824,7 @@ impl<'test> TestCx<'test> { let mut rustc = if !is_rustdoc { Command::new(&self.config.rustc_path) } else { - Command::new( - &self - .config - .rustdoc_path - .clone() - .expect("no rustdoc built yet"), - ) + Command::new(&self.config.rustdoc_path.clone().expect("no rustdoc built yet")) }; // FIXME Why is -L here? rustc.arg(input_file); //.arg("-L").arg(&self.config.build_base); @@ -1986,18 +1833,11 @@ impl<'test> TestCx<'test> { rustc.arg("-Zthreads=1"); // Optionally prevent default --target if specified in test compile-flags. 
- let custom_target = self - .props - .compile_flags - .iter() - .any(|x| x.starts_with("--target")); + let custom_target = self.props.compile_flags.iter().any(|x| x.starts_with("--target")); if !custom_target { - let target = if self.props.force_host { - &*self.config.host - } else { - &*self.config.target - }; + let target = + if self.props.force_host { &*self.config.host } else { &*self.config.target }; rustc.arg(&format!("--target={}", target)); } @@ -2028,12 +1868,7 @@ impl<'test> TestCx<'test> { } } Ui => { - if !self - .props - .compile_flags - .iter() - .any(|s| s.starts_with("--error-format")) - { + if !self.props.compile_flags.iter().any(|s| s.starts_with("--error-format")) { rustc.args(&["--error-format", "json"]); } if !self.props.disable_ui_testing_normalization { @@ -2067,8 +1902,7 @@ impl<'test> TestCx<'test> { } if !is_rustdoc { - if self.config.target == "wasm32-unknown-unknown" - || self.is_vxworks_pure_static() { + if self.config.target == "wasm32-unknown-unknown" || self.is_vxworks_pure_static() { // rustc.arg("-g"); // get any backtrace at all on errors } else if !self.props.no_prefer_dynamic { rustc.args(&["-C", "prefer-dynamic"]); @@ -2100,11 +1934,15 @@ impl<'test> TestCx<'test> { } if self.props.force_host { - self.maybe_add_external_args(&mut rustc, - self.split_maybe_args(&self.config.host_rustcflags)); + self.maybe_add_external_args( + &mut rustc, + self.split_maybe_args(&self.config.host_rustcflags), + ); } else { - self.maybe_add_external_args(&mut rustc, - self.split_maybe_args(&self.config.target_rustcflags)); + self.maybe_add_external_args( + &mut rustc, + self.split_maybe_args(&self.config.target_rustcflags), + ); if !is_rustdoc { if let Some(ref linker) = self.config.linker { rustc.arg(format!("-Clinker={}", linker)); @@ -2113,8 +1951,7 @@ impl<'test> TestCx<'test> { } // Use dynamic musl for tests because static doesn't allow creating dylibs - if self.config.host.contains("musl") - || self.is_vxworks_pure_dynamic() { + if 
self.config.host.contains("musl") || self.is_vxworks_pure_dynamic() { rustc.arg("-Ctarget-feature=-crt-static"); } @@ -2161,10 +1998,15 @@ impl<'test> TestCx<'test> { self.fatal("no NodeJS binary found (--nodejs)"); } - let src = self.config.src_base - .parent().unwrap() // chop off `ui` - .parent().unwrap() // chop off `test` - .parent().unwrap(); // chop off `src` + let src = self + .config + .src_base + .parent() + .unwrap() // chop off `ui` + .parent() + .unwrap() // chop off `test` + .parent() + .unwrap(); // chop off `src` args.push(src.join("src/etc/wasm32-shim.js").display().to_string()); } @@ -2185,11 +2027,7 @@ impl<'test> TestCx<'test> { Some(ref s) => s .split(' ') .filter_map(|s| { - if s.chars().all(|c| c.is_whitespace()) { - None - } else { - Some(s.to_owned()) - } + if s.chars().all(|c| c.is_whitespace()) { None } else { Some(s.to_owned()) } }) .collect(), None => Vec::new(), @@ -2206,11 +2044,7 @@ impl<'test> TestCx<'test> { // Build the LD_LIBRARY_PATH variable as it would be seen on the command line // for diagnostic purposes fn lib_path_cmd_prefix(path: &str) -> String { - format!( - "{}=\"{}\"", - util::lib_path_env_var(), - util::make_new_path(path) - ) + format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path)) } format!("{} {:?}", lib_path_cmd_prefix(libpath), command) @@ -2218,11 +2052,7 @@ impl<'test> TestCx<'test> { } fn dump_output(&self, out: &str, err: &str) { - let revision = if let Some(r) = self.revision { - format!("{}.", r) - } else { - String::new() - }; + let revision = if let Some(r) = self.revision { format!("{}.", r) } else { String::new() }; self.dump_output_file(out, &format!("{}out", revision)); self.dump_output_file(err, &format!("{}err", revision)); @@ -2256,11 +2086,7 @@ impl<'test> TestCx<'test> { /// The revision, ignored for incremental compilation since it wants all revisions in /// the same directory. 
fn safe_revision(&self) -> Option<&str> { - if self.config.mode == Incremental { - None - } else { - self.revision - } + if self.config.mode == Incremental { None } else { self.revision } } /// Gets the absolute path to the directory where all output for the given @@ -2347,10 +2173,7 @@ impl<'test> TestCx<'test> { fn verify_with_filecheck(&self, output: &Path) -> ProcRes { let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap()); - filecheck - .arg("--input-file") - .arg(output) - .arg(&self.testpaths.file); + filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file); // It would be more appropriate to make most of the arguments configurable through // a comment-attribute similar to `compile-flags`. For example, --check-prefixes is a very // useful flag. @@ -2398,11 +2221,7 @@ impl<'test> TestCx<'test> { fn charset() -> &'static str { // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset - if cfg!(target_os = "freebsd") { - "ISO-8859-1" - } else { - "UTF-8" - } + if cfg!(target_os = "freebsd") { "ISO-8859-1" } else { "UTF-8" } } fn run_rustdoc_test(&self) { @@ -2489,47 +2308,39 @@ impl<'test> TestCx<'test> { let mut path = self.testpaths.file.clone(); path.set_file_name(&format!("{}.rs", other_file)); files.insert( - path.strip_prefix(&cwd) - .unwrap_or(&path) - .to_str() - .unwrap() - .replace('\\', "/"), + path.strip_prefix(&cwd).unwrap_or(&path).to_str().unwrap().replace('\\', "/"), self.get_lines(&path, None), ); } let mut tested = 0; - for _ in res - .stdout - .split('\n') - .filter(|s| s.starts_with("test ")) - .inspect(|s| { - let tmp: Vec<&str> = s.split(" - ").collect(); - if tmp.len() == 2 { - let path = tmp[0].rsplit("test ").next().unwrap(); - if let Some(ref mut v) = files.get_mut(&path.replace('\\', "/")) { - tested += 1; - let mut iter = tmp[1].split("(line "); - iter.next(); - let line = iter - .next() - .unwrap_or(")") - .split(')') - .next() - .unwrap_or("0") - .parse() - .unwrap_or(0); - if let 
Ok(pos) = v.binary_search(&line) { - v.remove(pos); - } else { - self.fatal_proc_rec( - &format!("Not found doc test: \"{}\" in \"{}\":{:?}", s, path, v), - &res, - ); - } + for _ in res.stdout.split('\n').filter(|s| s.starts_with("test ")).inspect(|s| { + let tmp: Vec<&str> = s.split(" - ").collect(); + if tmp.len() == 2 { + let path = tmp[0].rsplit("test ").next().unwrap(); + if let Some(ref mut v) = files.get_mut(&path.replace('\\', "/")) { + tested += 1; + let mut iter = tmp[1].split("(line "); + iter.next(); + let line = iter + .next() + .unwrap_or(")") + .split(')') + .next() + .unwrap_or("0") + .parse() + .unwrap_or(0); + if let Ok(pos) = v.binary_search(&line) { + v.remove(pos); + } else { + self.fatal_proc_rec( + &format!("Not found doc test: \"{}\" in \"{}\":{:?}", s, path, v), + &res, + ); } } - }) {} + } + }) {} if tested == 0 { self.fatal_proc_rec(&format!("No test has been found... {:?}", files), &res); } else { @@ -2633,14 +2444,8 @@ impl<'test> TestCx<'test> { for &(ref expected_item, ref actual_item) in &wrong_cgus { println!("{}", expected_item.name); - println!( - " expected: {}", - codegen_units_to_str(&expected_item.codegen_units) - ); - println!( - " actual: {}", - codegen_units_to_str(&actual_item.codegen_units) - ); + println!(" expected: {}", codegen_units_to_str(&expected_item.codegen_units)); + println!(" actual: {}", codegen_units_to_str(&actual_item.codegen_units)); println!(); } } @@ -2658,19 +2463,12 @@ impl<'test> TestCx<'test> { // [MONO_ITEM] name [@@ (cgu)+] fn str_to_mono_item(s: &str, cgu_has_crate_disambiguator: bool) -> MonoItem { - let s = if s.starts_with(PREFIX) { - (&s[PREFIX.len()..]).trim() - } else { - s.trim() - }; + let s = if s.starts_with(PREFIX) { (&s[PREFIX.len()..]).trim() } else { s.trim() }; let full_string = format!("{}{}", PREFIX, s); - let parts: Vec<&str> = s - .split(CGU_MARKER) - .map(str::trim) - .filter(|s| !s.is_empty()) - .collect(); + let parts: Vec<&str> = + 
s.split(CGU_MARKER).map(str::trim).filter(|s| !s.is_empty()).collect(); let name = parts[0].trim(); @@ -2693,11 +2491,7 @@ impl<'test> TestCx<'test> { HashSet::new() }; - MonoItem { - name: name.to_owned(), - codegen_units: cgus, - string: full_string, - } + MonoItem { name: name.to_owned(), codegen_units: cgus, string: full_string } } fn codegen_units_to_str(cgus: &HashSet) -> String { @@ -2718,23 +2512,22 @@ impl<'test> TestCx<'test> { // remove all crate-disambiguators. fn remove_crate_disambiguator_from_cgu(cgu: &str) -> String { lazy_static! { - static ref RE: Regex = Regex::new( - r"^[^\.]+(?P\.[[:alnum:]]+)(-in-[^\.]+(?P\.[[:alnum:]]+))?" - ).unwrap(); + static ref RE: Regex = + Regex::new(r"^[^\.]+(?P\.[[:alnum:]]+)(-in-[^\.]+(?P\.[[:alnum:]]+))?") + .unwrap(); } - let captures = RE.captures(cgu).unwrap_or_else(|| { - panic!("invalid cgu name encountered: {}", cgu) - }); + let captures = + RE.captures(cgu).unwrap_or_else(|| panic!("invalid cgu name encountered: {}", cgu)); let mut new_name = cgu.to_owned(); if let Some(d2) = captures.name("d2") { - new_name.replace_range(d2.start() .. d2.end(), ""); + new_name.replace_range(d2.start()..d2.end(), ""); } let d1 = captures.name("d1").unwrap(); - new_name.replace_range(d1.start() .. d1.end(), ""); + new_name.replace_range(d1.start()..d1.end(), ""); new_name } @@ -2757,10 +2550,7 @@ impl<'test> TestCx<'test> { fs::create_dir_all(&incremental_dir).unwrap(); if self.config.verbose { - print!( - "init_incremental_test: incremental_dir={}", - incremental_dir.display() - ); + print!("init_incremental_test: incremental_dir={}", incremental_dir.display()); } } @@ -2784,16 +2574,11 @@ impl<'test> TestCx<'test> { // FIXME -- use non-incremental mode as an oracle? 
That doesn't apply // to #[rustc_dirty] and clean tests I guess - let revision = self - .revision - .expect("incremental tests require a list of revisions"); + let revision = self.revision.expect("incremental tests require a list of revisions"); // Incremental workproduct directory should have already been created. let incremental_dir = self.incremental_dir(); - assert!( - incremental_dir.exists(), - "init_incremental_test failed to create incremental dir" - ); + assert!(incremental_dir.exists(), "init_incremental_test failed to create incremental dir"); // Add an extra flag pointing at the incremental directory. let mut revision_props = self.props.clone(); @@ -2807,10 +2592,7 @@ impl<'test> TestCx<'test> { }; if self.config.verbose { - print!( - "revision={:?} revision_props={:#?}", - revision, revision_props - ); + print!("revision={:?} revision_props={:#?}", revision, revision_props); } if revision.starts_with("rpass") { @@ -2837,15 +2619,7 @@ impl<'test> TestCx<'test> { fn run_rmake_test(&self) { let cwd = env::current_dir().unwrap(); - let src_root = self - .config - .src_base - .parent() - .unwrap() - .parent() - .unwrap() - .parent() - .unwrap(); + let src_root = self.config.src_base.parent().unwrap().parent().unwrap().parent().unwrap(); let src_root = cwd.join(&src_root); let tmpdir = cwd.join(self.output_base_name()); @@ -2880,7 +2654,6 @@ impl<'test> TestCx<'test> { .env("TARGET_RPATH_DIR", cwd.join(&self.config.run_lib_path)) .env("LLVM_COMPONENTS", &self.config.llvm_components) .env("LLVM_CXXFLAGS", &self.config.llvm_cxxflags) - // We for sure don't want these tests to run in parallel, so make // sure they don't have access to these vars if we run via `make` // at the top level @@ -2918,8 +2691,7 @@ impl<'test> TestCx<'test> { // Use dynamic musl for tests because static doesn't allow creating dylibs if self.config.host.contains("musl") { - cmd.env("RUSTFLAGS", "-Ctarget-feature=-crt-static") - .env("IS_MUSL_HOST", "1"); + cmd.env("RUSTFLAGS", 
"-Ctarget-feature=-crt-static").env("IS_MUSL_HOST", "1"); } if self.config.target.contains("msvc") && self.config.cc != "" { @@ -2953,10 +2725,7 @@ impl<'test> TestCx<'test> { } } - let output = cmd - .spawn() - .and_then(read2_abbreviated) - .expect("failed to spawn `make`"); + let output = cmd.spawn().and_then(read2_abbreviated).expect("failed to spawn `make`"); if !output.status.success() { let res = ProcRes { status: output.status, @@ -3012,12 +2781,15 @@ impl<'test> TestCx<'test> { } } - fn load_compare_outputs(&self, proc_res: &ProcRes, - output_kind: TestOutput, explicit_format: bool) -> usize { - + fn load_compare_outputs( + &self, + proc_res: &ProcRes, + output_kind: TestOutput, + explicit_format: bool, + ) -> usize { let (stderr_kind, stdout_kind) = match output_kind { TestOutput::Compile => (UI_STDERR, UI_STDOUT), - TestOutput::Run => (UI_RUN_STDERR, UI_RUN_STDOUT) + TestOutput::Run => (UI_RUN_STDERR, UI_RUN_STDOUT), }; let expected_stderr = self.load_expected_output(stderr_kind); @@ -3032,14 +2804,17 @@ impl<'test> TestCx<'test> { lazy_static! 
{ static ref REMOTE_TEST_RE: Regex = Regex::new( "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-]+)+\", waiting for result\n" - ).unwrap(); + ) + .unwrap(); } - REMOTE_TEST_RE.replace( - &self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout), - "" - ).to_string() - } - _ => self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout) + REMOTE_TEST_RE + .replace( + &self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout), + "", + ) + .to_string() + } + _ => self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout), }; let stderr = if explicit_format { @@ -3084,11 +2859,7 @@ impl<'test> TestCx<'test> { // if the user specified a format in the ui test // print the output to the stderr file, otherwise extract // the rendered error messages from json and print them - let explicit = self - .props - .compile_flags - .iter() - .any(|s| s.contains("--error-format")); + let explicit = self.props.compile_flags.iter().any(|s| s.contains("--error-format")); let expected_fixed = self.load_expected_output(UI_FIXED); @@ -3108,30 +2879,30 @@ impl<'test> TestCx<'test> { let suggestions = get_suggestions_from_json( &proc_res.stderr, &HashSet::new(), - Filter::MachineApplicableOnly - ).unwrap_or_default(); + Filter::MachineApplicableOnly, + ) + .unwrap_or_default(); if suggestions.len() > 0 && !self.props.run_rustfix - && !self.props.rustfix_only_machine_applicable { - let mut coverage_file_path = self.config.build_base.clone(); - coverage_file_path.push("rustfix_missing_coverage.txt"); - debug!("coverage_file_path: {}", coverage_file_path.display()); - - let mut file = OpenOptions::new() - .create(true) - .append(true) - .open(coverage_file_path.as_path()) - .expect("could not create or open file"); - - if let Err(_) = writeln!(file, "{}", self.testpaths.file.display()) { - panic!("couldn't write to {}", coverage_file_path.display()); - } + && !self.props.rustfix_only_machine_applicable + { + let mut coverage_file_path = 
self.config.build_base.clone(); + coverage_file_path.push("rustfix_missing_coverage.txt"); + debug!("coverage_file_path: {}", coverage_file_path.display()); + + let mut file = OpenOptions::new() + .create(true) + .append(true) + .open(coverage_file_path.as_path()) + .expect("could not create or open file"); + + if let Err(_) = writeln!(file, "{}", self.testpaths.file.display()) { + panic!("couldn't write to {}", coverage_file_path.display()); + } } } else if self.props.run_rustfix { // Apply suggestions from rustc to the code itself - let unfixed_code = self - .load_expected_output_from_path(&self.testpaths.file) - .unwrap(); + let unfixed_code = self.load_expected_output_from_path(&self.testpaths.file).unwrap(); let suggestions = get_suggestions_from_json( &proc_res.stderr, &HashSet::new(), @@ -3140,7 +2911,8 @@ impl<'test> TestCx<'test> { } else { Filter::Everything }, - ).unwrap(); + ) + .unwrap(); let fixed_code = apply_suggestions(&unfixed_code, &suggestions).expect(&format!( "failed to apply suggestions for {:?} with rustfix", self.testpaths.file @@ -3156,10 +2928,8 @@ impl<'test> TestCx<'test> { if errors > 0 { println!("To update references, rerun the tests and pass the `--bless` flag"); - let relative_path_to_file = self - .testpaths - .relative_dir - .join(self.testpaths.file.file_name().unwrap()); + let relative_path_to_file = + self.testpaths.relative_dir.join(self.testpaths.file.file_name().unwrap()); println!( "To only update this specific test, also pass `--test-args {}`", relative_path_to_file.display(), @@ -3200,18 +2970,20 @@ impl<'test> TestCx<'test> { } } - debug!("run_ui_test: explicit={:?} config.compare_mode={:?} expected_errors={:?} \ + debug!( + "run_ui_test: explicit={:?} config.compare_mode={:?} expected_errors={:?} \ proc_res.status={:?} props.error_patterns={:?}", - explicit, self.config.compare_mode, expected_errors, proc_res.status, - self.props.error_patterns); + explicit, + self.config.compare_mode, + expected_errors, + 
proc_res.status, + self.props.error_patterns + ); if !explicit && self.config.compare_mode.is_none() { let check_patterns = - should_run == WillExecute::No && - !self.props.error_patterns.is_empty(); + should_run == WillExecute::No && !self.props.error_patterns.is_empty(); - let check_annotations = - !check_patterns || - !expected_errors.is_empty(); + let check_annotations = !check_patterns || !expected_errors.is_empty(); if check_patterns { // "// error-pattern" comments @@ -3297,10 +3069,7 @@ impl<'test> TestCx<'test> { let output_time = t(output_file); let source_time = t(source_file); if source_time > output_time { - debug!( - "source file time: {:?} output file time: {:?}", - source_time, output_time - ); + debug!("source file time: {:?} output file time: {:?}", source_time, output_time); panic!( "test source file `{}` is newer than potentially stale output file `{}`.", source_file.display(), @@ -3324,19 +3093,11 @@ impl<'test> TestCx<'test> { self.check_mir_test_timestamp(test_name, &output_file); let dumped_string = fs::read_to_string(&output_file).unwrap(); - let mut dumped_lines = dumped_string - .lines() - .map(|l| nocomment_mir_line(l)) - .filter(|l| !l.is_empty()); + let mut dumped_lines = + dumped_string.lines().map(|l| nocomment_mir_line(l)).filter(|l| !l.is_empty()); let mut expected_lines = expected_content .iter() - .filter(|&l| { - if let &ExpectedLine::Text(l) = l { - !l.is_empty() - } else { - true - } - }) + .filter(|&l| if let &ExpectedLine::Text(l) = l { !l.is_empty() } else { true }) .peekable(); let compare = |expected_line, dumped_line| { @@ -3358,11 +3119,8 @@ impl<'test> TestCx<'test> { &ExpectedLine::Elision => "... 
(elided)".into(), &ExpectedLine::Text(t) => t, }; - let expected_content = expected_content - .iter() - .map(|l| f(l)) - .collect::>() - .join("\n"); + let expected_content = + expected_content.iter().map(|l| f(l)).collect::>().join("\n"); panic!( "Did not find expected line, error: {}\n\ Expected Line: {:?}\n\ @@ -3480,16 +3238,18 @@ impl<'test> TestCx<'test> { // with placeholders as we do not want tests needing updated when compiler source code // changes. // eg. $SRC_DIR/libcore/mem.rs:323:14 becomes $SRC_DIR/libcore/mem.rs:LL:COL - normalized = Regex::new("SRC_DIR(.+):\\d+:\\d+").unwrap() - .replace_all(&normalized, "SRC_DIR$1:LL:COL").into_owned(); + normalized = Regex::new("SRC_DIR(.+):\\d+:\\d+") + .unwrap() + .replace_all(&normalized, "SRC_DIR$1:LL:COL") + .into_owned(); normalized = Self::normalize_platform_differences(&normalized); normalized = normalized.replace("\t", "\\t"); // makes tabs visible // Remove test annotations like `//~ ERROR text` from the output, // since they duplicate actual errors and make the output hard to read. 
- normalized = Regex::new("\\s*//(\\[.*\\])?~.*").unwrap() - .replace_all(&normalized, "").into_owned(); + normalized = + Regex::new("\\s*//(\\[.*\\])?~.*").unwrap().replace_all(&normalized, "").into_owned(); for rule in custom_rules { let re = Regex::new(&rule.0).expect("bad regex in custom normalization rule"); @@ -3522,19 +3282,17 @@ impl<'test> TestCx<'test> { let output = output.replace(r"\\", r"\"); - PATH_BACKSLASH_RE.replace_all(&output, |caps: &Captures<'_>| { - println!("{}", &caps[0]); - caps[0].replace(r"\", "/") - }).replace("\r\n", "\n") + PATH_BACKSLASH_RE + .replace_all(&output, |caps: &Captures<'_>| { + println!("{}", &caps[0]); + caps[0].replace(r"\", "/") + }) + .replace("\r\n", "\n") } fn expected_output_path(&self, kind: &str) -> PathBuf { - let mut path = expected_output_path( - &self.testpaths, - self.revision, - &self.config.compare_mode, - kind, - ); + let mut path = + expected_output_path(&self.testpaths, self.revision, &self.config.compare_mode, kind); if !path.exists() { if let Some(CompareMode::Polonius) = self.config.compare_mode { @@ -3574,11 +3332,7 @@ impl<'test> TestCx<'test> { fn delete_file(&self, file: &PathBuf) { if let Err(e) = fs::remove_file(file) { - self.fatal(&format!( - "failed to delete `{}`: {}", - file.display(), - e, - )); + self.fatal(&format!("failed to delete `{}`: {}", file.display(), e,)); } } @@ -3649,24 +3403,14 @@ impl<'test> TestCx<'test> { for output_file in files { println!("Actual {} saved to {}", kind, output_file.display()); } - if self.config.bless { - 0 - } else { - 1 - } + if self.config.bless { 0 } else { 1 } } fn prune_duplicate_output(&self, mode: CompareMode, kind: &str, canon_content: &str) { - let examined_path = expected_output_path( - &self.testpaths, - self.revision, - &Some(mode), - kind, - ); + let examined_path = expected_output_path(&self.testpaths, self.revision, &Some(mode), kind); - let examined_content = self - .load_expected_output_from_path(&examined_path) - .unwrap_or_else(|_| 
String::new()); + let examined_content = + self.load_expected_output_from_path(&examined_path).unwrap_or_else(|_| String::new()); if examined_path.exists() && canon_content == &examined_content { self.delete_file(&examined_path); @@ -3676,12 +3420,8 @@ impl<'test> TestCx<'test> { fn prune_duplicate_outputs(&self, modes: &[CompareMode]) { if self.config.bless { for kind in UI_EXTENSIONS { - let canon_comparison_path = expected_output_path( - &self.testpaths, - self.revision, - &None, - kind, - ); + let canon_comparison_path = + expected_output_path(&self.testpaths, self.revision, &None, kind); if let Ok(canon) = self.load_expected_output_from_path(&canon_comparison_path) { for mode in modes { @@ -3728,7 +3468,8 @@ impl ProcRes { {}\n\ ------------------------------------------\n\ \n", - self.status, self.cmdline, + self.status, + self.cmdline, json::extract_rendered(&self.stdout), json::extract_rendered(&self.stderr), ); @@ -3784,11 +3525,7 @@ fn read2_abbreviated(mut child: Child) -> io::Result { enum ProcOutput { Full(Vec), - Abbreviated { - head: Vec, - skipped: usize, - tail: Box<[u8]>, - }, + Abbreviated { head: Vec, skipped: usize, tail: Box<[u8]> }, } impl ProcOutput { @@ -3803,17 +3540,9 @@ fn read2_abbreviated(mut child: Child) -> io::Result { let tail = bytes.split_off(new_len - TAIL_LEN).into_boxed_slice(); let head = replace(bytes, Vec::new()); let skipped = new_len - HEAD_LEN - TAIL_LEN; - ProcOutput::Abbreviated { - head, - skipped, - tail, - } + ProcOutput::Abbreviated { head, skipped, tail } } - ProcOutput::Abbreviated { - ref mut skipped, - ref mut tail, - .. - } => { + ProcOutput::Abbreviated { ref mut skipped, ref mut tail, .. 
} => { *skipped += data.len(); if data.len() <= TAIL_LEN { tail[..data.len()].copy_from_slice(data); @@ -3830,11 +3559,7 @@ fn read2_abbreviated(mut child: Child) -> io::Result { fn into_bytes(self) -> Vec { match self { ProcOutput::Full(bytes) => bytes, - ProcOutput::Abbreviated { - mut head, - skipped, - tail, - } => { + ProcOutput::Abbreviated { mut head, skipped, tail } => { write!(&mut head, "\n\n<<<<<< SKIPPED {} BYTES >>>>>>\n\n", skipped).unwrap(); head.extend_from_slice(&tail); head @@ -3857,9 +3582,5 @@ fn read2_abbreviated(mut child: Child) -> io::Result { )?; let status = child.wait()?; - Ok(Output { - status, - stdout: stdout.into_bytes(), - stderr: stderr.into_bytes(), - }) + Ok(Output { status, stdout: stdout.into_bytes(), stderr: stderr.into_bytes() }) } diff --git a/src/tools/compiletest/src/runtest/tests.rs b/src/tools/compiletest/src/runtest/tests.rs index 79128aa9c69..51105111175 100644 --- a/src/tools/compiletest/src/runtest/tests.rs +++ b/src/tools/compiletest/src/runtest/tests.rs @@ -2,10 +2,7 @@ use super::*; #[test] fn normalize_platform_differences() { - assert_eq!( - TestCx::normalize_platform_differences(r"$DIR\foo.rs"), - "$DIR/foo.rs" - ); + assert_eq!(TestCx::normalize_platform_differences(r"$DIR\foo.rs"), "$DIR/foo.rs"); assert_eq!( TestCx::normalize_platform_differences(r"$BUILD_DIR\..\parser.rs"), "$BUILD_DIR/../parser.rs" @@ -18,14 +15,8 @@ fn normalize_platform_differences() { TestCx::normalize_platform_differences(r"either bar\baz.rs or bar\baz\mod.rs"), r"either bar/baz.rs or bar/baz/mod.rs", ); - assert_eq!( - TestCx::normalize_platform_differences(r"`.\some\path.rs`"), - r"`./some/path.rs`", - ); - assert_eq!( - TestCx::normalize_platform_differences(r"`some\path.rs`"), - r"`some/path.rs`", - ); + assert_eq!(TestCx::normalize_platform_differences(r"`.\some\path.rs`"), r"`./some/path.rs`",); + assert_eq!(TestCx::normalize_platform_differences(r"`some\path.rs`"), r"`some/path.rs`",); assert_eq!( 
TestCx::normalize_platform_differences(r"$DIR\path-with-dashes.rs"), r"$DIR/path-with-dashes.rs" @@ -34,9 +25,7 @@ fn normalize_platform_differences() { TestCx::normalize_platform_differences(r"$DIR\path_with_underscores.rs"), r"$DIR/path_with_underscores.rs", ); - assert_eq!( - TestCx::normalize_platform_differences(r"$DIR\foo.rs:12:11"), "$DIR/foo.rs:12:11", - ); + assert_eq!(TestCx::normalize_platform_differences(r"$DIR\foo.rs:12:11"), "$DIR/foo.rs:12:11",); assert_eq!( TestCx::normalize_platform_differences(r"$DIR\path with spaces 'n' quotes"), "$DIR/path with spaces 'n' quotes", diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index 3a2ee445087..003f51a0f43 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -1,7 +1,7 @@ -use std::ffi::OsStr; +use crate::common::Config; use std::env; +use std::ffi::OsStr; use std::path::PathBuf; -use crate::common::Config; use log::*; @@ -106,11 +106,7 @@ pub fn get_arch(triple: &str) -> &'static str { } pub fn matches_env(triple: &str, name: &str) -> bool { - if let Some(env) = triple.split('-').nth(3) { - env.starts_with(name) - } else { - false - } + if let Some(env) = triple.split('-').nth(3) { env.starts_with(name) } else { false } } pub fn get_pointer_width(triple: &str) -> &'static str { diff --git a/src/tools/error_index_generator/build.rs b/src/tools/error_index_generator/build.rs index b6e7ede17e7..efa4177d1d8 100644 --- a/src/tools/error_index_generator/build.rs +++ b/src/tools/error_index_generator/build.rs @@ -1,6 +1,6 @@ -use walkdir::WalkDir; use std::path::PathBuf; use std::{env, fs}; +use walkdir::WalkDir; fn main() { // The src directory (we are in src/tools/error_index_generator) @@ -12,8 +12,9 @@ fn main() { let error_codes_path = "../../../src/librustc_error_codes/error_codes.rs"; println!("cargo:rerun-if-changed={}", error_codes_path); - let file = fs::read_to_string(error_codes_path).unwrap() - .replace(": 
include_str!(\"./error_codes/", ": include_str!(\"./"); + let file = fs::read_to_string(error_codes_path) + .unwrap() + .replace(": include_str!(\"./error_codes/", ": include_str!(\"./"); let contents = format!("(|| {{\n{}\n}})()", file); fs::write(&out_dir.join("all_error_codes.rs"), &contents).unwrap(); @@ -30,7 +31,8 @@ fn main() { } let mut all = String::new(); - all.push_str(r###" + all.push_str( + r###" fn register_all() -> Vec<(&'static str, Option<&'static str>)> { let mut long_codes: Vec<(&'static str, Option<&'static str>)> = Vec::new(); macro_rules! register_diagnostics { @@ -51,7 +53,8 @@ fn register_all() -> Vec<(&'static str, Option<&'static str>)> { )* ) } -"###); +"###, + ); all.push_str(r#"include!(concat!(env!("OUT_DIR"), "/all_error_codes.rs"));"#); all.push_str("\nlong_codes\n"); all.push_str("}\n"); diff --git a/src/tools/error_index_generator/main.rs b/src/tools/error_index_generator/main.rs index 31a802706cb..1a4df167183 100644 --- a/src/tools/error_index_generator/main.rs +++ b/src/tools/error_index_generator/main.rs @@ -4,6 +4,7 @@ extern crate env_logger; extern crate syntax; +use std::cell::RefCell; use std::collections::BTreeMap; use std::env; use std::error::Error; @@ -11,11 +12,10 @@ use std::fs::File; use std::io::Write; use std::path::Path; use std::path::PathBuf; -use std::cell::RefCell; use syntax::edition::DEFAULT_EDITION; -use rustdoc::html::markdown::{Markdown, IdMap, ErrorCodes, Playground}; +use rustdoc::html::markdown::{ErrorCodes, IdMap, Markdown, Playground}; pub struct ErrorMetadata { pub description: Option, @@ -33,10 +33,12 @@ enum OutputFormat { impl OutputFormat { fn from(format: &str, resource_suffix: &str) -> OutputFormat { match &*format.to_lowercase() { - "html" => OutputFormat::HTML(HTMLFormatter(RefCell::new(IdMap::new()), - resource_suffix.to_owned())), + "html" => OutputFormat::HTML(HTMLFormatter( + RefCell::new(IdMap::new()), + resource_suffix.to_owned(), + )), "markdown" => 
OutputFormat::Markdown(MarkdownFormatter), - s => OutputFormat::Unknown(s.to_owned()), + s => OutputFormat::Unknown(s.to_owned()), } } } @@ -44,8 +46,12 @@ impl OutputFormat { trait Formatter { fn header(&self, output: &mut dyn Write) -> Result<(), Box>; fn title(&self, output: &mut dyn Write) -> Result<(), Box>; - fn error_code_block(&self, output: &mut dyn Write, info: &ErrorMetadata, - err_code: &str) -> Result<(), Box>; + fn error_code_block( + &self, + output: &mut dyn Write, + info: &ErrorMetadata, + err_code: &str, + ) -> Result<(), Box>; fn footer(&self, output: &mut dyn Write) -> Result<(), Box>; } @@ -54,7 +60,9 @@ struct MarkdownFormatter; impl Formatter for HTMLFormatter { fn header(&self, output: &mut dyn Write) -> Result<(), Box> { - write!(output, r##" + write!( + output, + r##" Rust Compiler Error Index @@ -69,7 +77,9 @@ impl Formatter for HTMLFormatter { -"##, suffix=self.1)?; +"##, + suffix = self.1 + )?; Ok(()) } @@ -78,8 +88,12 @@ impl Formatter for HTMLFormatter { Ok(()) } - fn error_code_block(&self, output: &mut dyn Write, info: &ErrorMetadata, - err_code: &str) -> Result<(), Box> { + fn error_code_block( + &self, + output: &mut dyn Write, + info: &ErrorMetadata, + err_code: &str, + ) -> Result<(), Box> { // Enclose each error in a div so they can be shown/hidden en masse. let desc_desc = match info.description { Some(_) => "error-described", @@ -88,9 +102,11 @@ impl Formatter for HTMLFormatter { write!(output, "
", desc_desc)?; // Error title (with self-link). - write!(output, - "

{0}

\n", - err_code)?; + write!( + output, + "

{0}

\n", + err_code + )?; // Description rendered as markdown. match info.description { @@ -100,10 +116,20 @@ impl Formatter for HTMLFormatter { crate_name: None, url: String::from("https://play.rust-lang.org/"), }; - write!(output, "{}", - Markdown(desc, &[], &mut id_map, - ErrorCodes::Yes, DEFAULT_EDITION, &Some(playground)).to_string())? - }, + write!( + output, + "{}", + Markdown( + desc, + &[], + &mut id_map, + ErrorCodes::Yes, + DEFAULT_EDITION, + &Some(playground) + ) + .to_string() + )? + } None => write!(output, "

No description.

\n")?, } @@ -112,7 +138,9 @@ impl Formatter for HTMLFormatter { } fn footer(&self, output: &mut dyn Write) -> Result<(), Box> { - write!(output, r##" -"##)?; +"## + )?; Ok(()) } } @@ -186,8 +215,12 @@ impl Formatter for MarkdownFormatter { Ok(()) } - fn error_code_block(&self, output: &mut dyn Write, info: &ErrorMetadata, - err_code: &str) -> Result<(), Box> { + fn error_code_block( + &self, + output: &mut dyn Write, + info: &ErrorMetadata, + err_code: &str, + ) -> Result<(), Box> { Ok(match info.description { Some(ref desc) => write!(output, "## {}\n{}\n", err_code, desc)?, None => (), @@ -201,8 +234,11 @@ impl Formatter for MarkdownFormatter { } /// Output an HTML page for the errors in `err_map` to `output_path`. -fn render_error_page(err_map: &ErrorMetadataMap, output_path: &Path, - formatter: T) -> Result<(), Box> { +fn render_error_page( + err_map: &ErrorMetadataMap, + output_path: &Path, + formatter: T, +) -> Result<(), Box> { let mut output_file = File::create(output_path)?; formatter.header(&mut output_file)?; @@ -219,13 +255,11 @@ fn main_with_result(format: OutputFormat, dst: &Path) -> Result<(), Box panic!("Unknown output format: {}", s), - OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?, + OutputFormat::Unknown(s) => panic!("Unknown output format: {}", s), + OutputFormat::HTML(h) => render_error_page(&err_map, dst, h)?, OutputFormat::Markdown(m) => render_error_page(&err_map, dst, m)?, } Ok(()) @@ -236,14 +270,13 @@ fn parse_args() -> (OutputFormat, PathBuf) { let format = args.next(); let dst = args.next(); let resource_suffix = args.next().unwrap_or_else(String::new); - let format = format.map(|a| OutputFormat::from(&a, &resource_suffix)) - .unwrap_or(OutputFormat::from("html", &resource_suffix)); - let dst = dst.map(PathBuf::from).unwrap_or_else(|| { - match format { - OutputFormat::HTML(..) => PathBuf::from("doc/error-index.html"), - OutputFormat::Markdown(..) => PathBuf::from("doc/error-index.md"), - OutputFormat::Unknown(..) 
=> PathBuf::from(""), - } + let format = format + .map(|a| OutputFormat::from(&a, &resource_suffix)) + .unwrap_or(OutputFormat::from("html", &resource_suffix)); + let dst = dst.map(PathBuf::from).unwrap_or_else(|| match format { + OutputFormat::HTML(..) => PathBuf::from("doc/error-index.html"), + OutputFormat::Markdown(..) => PathBuf::from("doc/error-index.md"), + OutputFormat::Unknown(..) => PathBuf::from(""), }); (format, dst) } @@ -251,9 +284,7 @@ fn parse_args() -> (OutputFormat, PathBuf) { fn main() { env_logger::init(); let (format, dst) = parse_args(); - let result = syntax::with_default_globals(move || { - main_with_result(format, &dst) - }); + let result = syntax::with_default_globals(move || main_with_result(format, &dst)); if let Err(e) = result { panic!("{}", e.description()); } diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 79c98b780eb..558913d84ad 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -20,16 +20,18 @@ use std::collections::hash_map::Entry; use std::collections::{HashMap, HashSet}; use std::env; use std::fs; -use std::path::{Path, PathBuf, Component}; +use std::path::{Component, Path, PathBuf}; use std::rc::Rc; use crate::Redirect::*; macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {:?}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {:?}", stringify!($e), e), + } + }; } fn main() { @@ -63,17 +65,17 @@ type Cache = HashMap; fn small_url_encode(s: &str) -> String { s.replace("<", "%3C") - .replace(">", "%3E") - .replace(" ", "%20") - .replace("?", "%3F") - .replace("'", "%27") - .replace("&", "%26") - .replace(",", "%2C") - .replace(":", "%3A") - .replace(";", "%3B") - .replace("[", "%5B") - .replace("]", "%5D") - .replace("\"", "%22") + .replace(">", "%3E") + .replace(" ", "%20") + .replace("?", "%3F") + .replace("'", "%27") + .replace("&", "%26") + .replace(",", "%2C") + .replace(":", "%3A") + .replace(";", "%3B") + .replace("[", "%5B") + .replace("]", "%5D") + .replace("\"", "%22") } impl FileEntry { @@ -111,11 +113,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) { } } -fn check(cache: &mut Cache, - root: &Path, - file: &Path, - errors: &mut bool) - -> Option { +fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option { // Ignore none HTML files. if file.extension().and_then(|s| s.to_str()) != Some("html") { return None; @@ -124,18 +122,19 @@ fn check(cache: &mut Cache, // Unfortunately we're not 100% full of valid links today to we need a few // whitelists to get this past `make check` today. 
// FIXME(#32129) - if file.ends_with("std/string/struct.String.html") || - file.ends_with("interpret/struct.ImmTy.html") || - file.ends_with("ast/struct.ThinVec.html") || - file.ends_with("util/struct.ThinVec.html") || - file.ends_with("layout/struct.TyLayout.html") || - file.ends_with("humantime/struct.Timestamp.html") || - file.ends_with("log/index.html") || - file.ends_with("ty/struct.Slice.html") || - file.ends_with("ty/enum.Attributes.html") || - file.ends_with("ty/struct.SymbolName.html") || - file.ends_with("io/struct.IoSlice.html") || - file.ends_with("io/struct.IoSliceMut.html") { + if file.ends_with("std/string/struct.String.html") + || file.ends_with("interpret/struct.ImmTy.html") + || file.ends_with("ast/struct.ThinVec.html") + || file.ends_with("util/struct.ThinVec.html") + || file.ends_with("layout/struct.TyLayout.html") + || file.ends_with("humantime/struct.Timestamp.html") + || file.ends_with("log/index.html") + || file.ends_with("ty/struct.Slice.html") + || file.ends_with("ty/enum.Attributes.html") + || file.ends_with("ty/struct.SymbolName.html") + || file.ends_with("io/struct.IoSlice.html") + || file.ends_with("io/struct.IoSliceMut.html") + { return None; } // FIXME(#32553) @@ -143,13 +142,14 @@ fn check(cache: &mut Cache, return None; } // FIXME(#32130) - if file.ends_with("btree_set/struct.BTreeSet.html") || - file.ends_with("struct.BTreeSet.html") || - file.ends_with("btree_map/struct.BTreeMap.html") || - file.ends_with("hash_map/struct.HashMap.html") || - file.ends_with("hash_set/struct.HashSet.html") || - file.ends_with("sync/struct.Lrc.html") || - file.ends_with("sync/struct.RwLock.html") { + if file.ends_with("btree_set/struct.BTreeSet.html") + || file.ends_with("struct.BTreeSet.html") + || file.ends_with("btree_map/struct.BTreeMap.html") + || file.ends_with("hash_map/struct.HashMap.html") + || file.ends_with("hash_set/struct.HashSet.html") + || file.ends_with("sync/struct.Lrc.html") + || file.ends_with("sync/struct.RwLock.html") + { return 
None; } @@ -159,17 +159,19 @@ fn check(cache: &mut Cache, Err(_) => return None, }; { - cache.get_mut(&pretty_file) - .unwrap() - .parse_ids(&pretty_file, &contents, errors); + cache.get_mut(&pretty_file).unwrap().parse_ids(&pretty_file, &contents, errors); } // Search for anything that's the regex 'href[ ]*=[ ]*".*?"' with_attrs_in_source(&contents, " href", |url, i, base| { // Ignore external URLs - if url.starts_with("http:") || url.starts_with("https:") || - url.starts_with("javascript:") || url.starts_with("ftp:") || - url.starts_with("irc:") || url.starts_with("data:") { + if url.starts_with("http:") + || url.starts_with("https:") + || url.starts_with("javascript:") + || url.starts_with("ftp:") + || url.starts_with("irc:") + || url.starts_with("data:") + { return; } let mut parts = url.splitn(2, "#"); @@ -185,21 +187,26 @@ fn check(cache: &mut Cache, path.pop(); for part in Path::new(base).join(url).components() { match part { - Component::Prefix(_) | - Component::RootDir => { + Component::Prefix(_) | Component::RootDir => { // Avoid absolute paths as they make the docs not // relocatable by making assumptions on where the docs // are hosted relative to the site root. *errors = true; - println!("{}:{}: absolute path - {}", - pretty_file.display(), - i + 1, - Path::new(base).join(url).display()); + println!( + "{}:{}: absolute path - {}", + pretty_file.display(), + i + 1, + Path::new(base).join(url).display() + ); return; } Component::CurDir => {} - Component::ParentDir => { path.pop(); } - Component::Normal(s) => { path.push(s); } + Component::ParentDir => { + path.pop(); + } + Component::Normal(s) => { + path.push(s); + } } } } @@ -212,10 +219,12 @@ fn check(cache: &mut Cache, // the docs offline so it's best to avoid them. 
*errors = true; let pretty_path = path.strip_prefix(root).unwrap_or(&path); - println!("{}:{}: directory link - {}", - pretty_file.display(), - i + 1, - pretty_path.display()); + println!( + "{}:{}: directory link - {}", + pretty_file.display(), + i + 1, + pretty_path.display() + ); return; } if let Some(extension) = path.extension() { @@ -232,10 +241,12 @@ fn check(cache: &mut Cache, } Err(LoadError::BrokenRedirect(target, _)) => { *errors = true; - println!("{}:{}: broken redirect to {}", - pretty_file.display(), - i + 1, - target.display()); + println!( + "{}:{}: broken redirect to {}", + pretty_file.display(), + i + 1, + target.display() + ); return; } Err(LoadError::IsRedirect) => unreachable!(), @@ -244,8 +255,7 @@ fn check(cache: &mut Cache, if let Some(ref fragment) = fragment { // Fragments like `#1-6` are most likely line numbers to be // interpreted by javascript, so we're ignoring these - if fragment.splitn(2, '-') - .all(|f| f.chars().all(|c| c.is_numeric())) { + if fragment.splitn(2, '-').all(|f| f.chars().all(|c| c.is_numeric())) { return; } @@ -259,9 +269,7 @@ fn check(cache: &mut Cache, if !entry.ids.contains(*fragment) { *errors = true; - print!("{}:{}: broken link fragment ", - pretty_file.display(), - i + 1); + print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1); println!("`#{}` pointing to `{}`", fragment, pretty_path.display()); }; } @@ -275,17 +283,16 @@ fn check(cache: &mut Cache, Some(pretty_file) } -fn load_file(cache: &mut Cache, - root: &Path, - file: &Path, - redirect: Redirect) - -> Result<(PathBuf, Rc), LoadError> { +fn load_file( + cache: &mut Cache, + root: &Path, + file: &Path, + redirect: Redirect, +) -> Result<(PathBuf, Rc), LoadError> { let pretty_file = PathBuf::from(file.strip_prefix(root).unwrap_or(&file)); let (maybe_redirect, contents) = match cache.entry(pretty_file.clone()) { - Entry::Occupied(entry) => { - (None, entry.get().source.clone()) - } + Entry::Occupied(entry) => (None, 
entry.get().source.clone()), Entry::Vacant(entry) => { let contents = match fs::read_to_string(file) { Ok(s) => Rc::new(s), @@ -294,7 +301,7 @@ fn load_file(cache: &mut Cache, LoadError::BrokenRedirect(file.to_path_buf(), err) } else { LoadError::IOError(err) - }) + }); } }; @@ -304,18 +311,13 @@ fn load_file(cache: &mut Cache, return Err(LoadError::IsRedirect); } } else { - entry.insert(FileEntry { - source: contents.clone(), - ids: HashSet::new(), - }); + entry.insert(FileEntry { source: contents.clone(), ids: HashSet::new() }); } (maybe, contents) } }; match maybe_redirect.map(|url| file.parent().unwrap().join(url)) { - Some(redirect_file) => { - load_file(cache, root, &redirect_file, FromRedirect(true)) - } + Some(redirect_file) => load_file(cache, root, &redirect_file, FromRedirect(true)), None => Ok((pretty_file, contents)), } } diff --git a/src/tools/remote-test-client/src/main.rs b/src/tools/remote-test-client/src/main.rs index d7f031a6150..d0ae8300bd6 100644 --- a/src/tools/remote-test-client/src/main.rs +++ b/src/tools/remote-test-client/src/main.rs @@ -20,36 +20,31 @@ use std::time::Duration; const REMOTE_ADDR_ENV: &str = "TEST_DEVICE_ADDR"; macro_rules! t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } fn main() { let mut args = env::args().skip(1); match &args.next().unwrap()[..] 
{ - "spawn-emulator" => { - spawn_emulator(&args.next().unwrap(), - Path::new(&args.next().unwrap()), - Path::new(&args.next().unwrap()), - args.next().map(|s| s.into())) - } - "push" => { - push(Path::new(&args.next().unwrap())) - } - "run" => { - run(args.next().unwrap(), args.collect()) - } + "spawn-emulator" => spawn_emulator( + &args.next().unwrap(), + Path::new(&args.next().unwrap()), + Path::new(&args.next().unwrap()), + args.next().map(|s| s.into()), + ), + "push" => push(Path::new(&args.next().unwrap())), + "run" => run(args.next().unwrap(), args.collect()), cmd => panic!("unknown command: {}", cmd), } } -fn spawn_emulator(target: &str, - server: &Path, - tmpdir: &Path, - rootfs: Option) { +fn spawn_emulator(target: &str, server: &Path, tmpdir: &Path, rootfs: Option) { let device_address = env::var(REMOTE_ADDR_ENV).unwrap_or("127.0.0.1:12345".to_string()); if env::var(REMOTE_ADDR_ENV).is_ok() { @@ -70,7 +65,7 @@ fn spawn_emulator(target: &str, if client.write_all(b"ping").is_ok() { let mut b = [0; 4]; if client.read_exact(&mut b).is_ok() { - break + break; } } } @@ -80,42 +75,24 @@ fn spawn_emulator(target: &str, fn start_android_emulator(server: &Path) { println!("waiting for device to come online"); - let status = Command::new("adb") - .arg("wait-for-device") - .status() - .unwrap(); + let status = Command::new("adb").arg("wait-for-device").status().unwrap(); assert!(status.success()); println!("pushing server"); - let status = Command::new("adb") - .arg("push") - .arg(server) - .arg("/data/tmp/testd") - .status() - .unwrap(); + let status = + Command::new("adb").arg("push").arg(server).arg("/data/tmp/testd").status().unwrap(); assert!(status.success()); println!("forwarding tcp"); - let status = Command::new("adb") - .arg("forward") - .arg("tcp:12345") - .arg("tcp:12345") - .status() - .unwrap(); + let status = + Command::new("adb").arg("forward").arg("tcp:12345").arg("tcp:12345").status().unwrap(); assert!(status.success()); println!("executing 
server"); - Command::new("adb") - .arg("shell") - .arg("/data/tmp/testd") - .spawn() - .unwrap(); + Command::new("adb").arg("shell").arg("/data/tmp/testd").spawn().unwrap(); } -fn start_qemu_emulator(target: &str, - rootfs: &Path, - server: &Path, - tmpdir: &Path) { +fn start_qemu_emulator(target: &str, rootfs: &Path, server: &Path, tmpdir: &Path) { // Generate a new rootfs image now that we've updated the test server // executable. This is the equivalent of: // @@ -124,49 +101,61 @@ fn start_qemu_emulator(target: &str, let rootfs_img = tmpdir.join("rootfs.img"); let mut cmd = Command::new("cpio"); cmd.arg("--null") - .arg("-o") - .arg("--format=newc") - .stdin(Stdio::piped()) - .stdout(Stdio::piped()) - .current_dir(rootfs); + .arg("-o") + .arg("--format=newc") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .current_dir(rootfs); let mut child = t!(cmd.spawn()); let mut stdin = child.stdin.take().unwrap(); let rootfs = rootfs.to_path_buf(); thread::spawn(move || add_files(&mut stdin, &rootfs, &rootfs)); - t!(io::copy(&mut child.stdout.take().unwrap(), - &mut t!(File::create(&rootfs_img)))); + t!(io::copy(&mut child.stdout.take().unwrap(), &mut t!(File::create(&rootfs_img)))); assert!(t!(child.wait()).success()); // Start up the emulator, in the background match target { "arm-unknown-linux-gnueabihf" => { let mut cmd = Command::new("qemu-system-arm"); - cmd.arg("-M").arg("vexpress-a15") - .arg("-m").arg("1024") - .arg("-kernel").arg("/tmp/zImage") - .arg("-initrd").arg(&rootfs_img) - .arg("-dtb").arg("/tmp/vexpress-v2p-ca15-tc1.dtb") - .arg("-append") - .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") - .arg("-nographic") - .arg("-redir").arg("tcp:12345::12345"); + cmd.arg("-M") + .arg("vexpress-a15") + .arg("-m") + .arg("1024") + .arg("-kernel") + .arg("/tmp/zImage") + .arg("-initrd") + .arg(&rootfs_img) + .arg("-dtb") + .arg("/tmp/vexpress-v2p-ca15-tc1.dtb") + .arg("-append") + .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init 
init=/sbin/init") + .arg("-nographic") + .arg("-redir") + .arg("tcp:12345::12345"); t!(cmd.spawn()); } "aarch64-unknown-linux-gnu" => { let mut cmd = Command::new("qemu-system-aarch64"); - cmd.arg("-machine").arg("virt") - .arg("-cpu").arg("cortex-a57") - .arg("-m").arg("1024") - .arg("-kernel").arg("/tmp/Image") - .arg("-initrd").arg(&rootfs_img) - .arg("-append") - .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") - .arg("-nographic") - .arg("-netdev").arg("user,id=net0,hostfwd=tcp::12345-:12345") - .arg("-device").arg("virtio-net-device,netdev=net0,mac=00:00:00:00:00:00"); + cmd.arg("-machine") + .arg("virt") + .arg("-cpu") + .arg("cortex-a57") + .arg("-m") + .arg("1024") + .arg("-kernel") + .arg("/tmp/Image") + .arg("-initrd") + .arg(&rootfs_img) + .arg("-append") + .arg("console=ttyAMA0 root=/dev/ram rdinit=/sbin/init init=/sbin/init") + .arg("-nographic") + .arg("-netdev") + .arg("user,id=net0,hostfwd=tcp::12345-:12345") + .arg("-device") + .arg("virtio-net-device,netdev=net0,mac=00:00:00:00:00:00"); t!(cmd.spawn()); } - _ => panic!("cannot start emulator for: {}"< target), + _ => panic!("cannot start emulator for: {}", target), } fn add_files(w: &mut dyn Write, root: &Path, cur: &Path) { @@ -218,9 +207,7 @@ fn run(files: String, args: Vec) { // by the client. for (k, v) in env::vars() { match &k[..]
{ - "PATH" | - "LD_LIBRARY_PATH" | - "PWD" => continue, + "PATH" | "LD_LIBRARY_PATH" | "PWD" => continue, _ => {} } t!(client.write_all(k.as_bytes())); @@ -253,10 +240,10 @@ fn run(files: String, args: Vec) { let mut stderr = io::stderr(); while !stdout_done || !stderr_done { t!(client.read_exact(&mut header)); - let amt = ((header[1] as u64) << 24) | - ((header[2] as u64) << 16) | - ((header[3] as u64) << 8) | - ((header[4] as u64) << 0); + let amt = ((header[1] as u64) << 24) + | ((header[2] as u64) << 16) + | ((header[3] as u64) << 8) + | ((header[4] as u64) << 0); if header[0] == 0 { if amt == 0 { stdout_done = true; @@ -277,10 +264,10 @@ fn run(files: String, args: Vec) { // Finally, read out the exit status let mut status = [0; 5]; t!(client.read_exact(&mut status)); - let code = ((status[1] as i32) << 24) | - ((status[2] as i32) << 16) | - ((status[3] as i32) << 8) | - ((status[4] as i32) << 0); + let code = ((status[1] as i32) << 24) + | ((status[2] as i32) << 16) + | ((status[3] as i32) << 8) + | ((status[4] as i32) << 0); if status[0] == 0 { std::process::exit(code); } else { @@ -294,11 +281,6 @@ fn send(path: &Path, dst: &mut dyn Write) { t!(dst.write_all(&[0])); let mut file = t!(File::open(&path)); let amt = t!(file.metadata()).len(); - t!(dst.write_all(&[ - (amt >> 24) as u8, - (amt >> 16) as u8, - (amt >> 8) as u8, - (amt >> 0) as u8, - ])); + t!(dst.write_all(&[(amt >> 24) as u8, (amt >> 16) as u8, (amt >> 8) as u8, (amt >> 0) as u8,])); t!(io::copy(&mut file, dst)); } diff --git a/src/tools/remote-test-server/src/main.rs b/src/tools/remote-test-server/src/main.rs index 0462b719b7b..826e3d05111 100644 --- a/src/tools/remote-test-server/src/main.rs +++ b/src/tools/remote-test-server/src/main.rs @@ -27,10 +27,12 @@ use std::sync::{Arc, Mutex}; use std::thread; macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } static TEST: AtomicUsize = AtomicUsize::new(0); @@ -42,10 +44,7 @@ struct Config { impl Config { pub fn default() -> Config { - Config { - remote: false, - verbose: false, - } + Config { remote: false, verbose: false } } pub fn parse_args() -> Config { @@ -56,7 +55,7 @@ impl Config { match &argument[..] { "remote" => { config.remote = true; - }, + } "verbose" | "-v" => { config.verbose = true; } @@ -95,7 +94,7 @@ fn main() { let mut socket = t!(socket); let mut buf = [0; 4]; if socket.read_exact(&mut buf).is_err() { - continue + continue; } if &buf[..] == b"ping" { t!(socket.write_all(b"pong")); @@ -207,15 +206,12 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) { // Support libraries were uploaded to `work` earlier, so make sure that's // in `LD_LIBRARY_PATH`. Also include our own current dir which may have // had some libs uploaded. 
- cmd.env("LD_LIBRARY_PATH", - format!("{}:{}", work.display(), path.display())); + cmd.env("LD_LIBRARY_PATH", format!("{}:{}", work.display(), path.display())); // Spawn the child and ferry over stdout/stderr to the socket in a framed // fashion (poor man's style) - let mut child = t!(cmd.stdin(Stdio::null()) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn()); + let mut child = + t!(cmd.stdin(Stdio::null()).stdout(Stdio::piped()).stderr(Stdio::piped()).spawn()); drop(lock); let mut stdout = child.stdout.take().unwrap(); let mut stderr = child.stderr.take().unwrap(); @@ -235,8 +231,8 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) { which, (code >> 24) as u8, (code >> 16) as u8, - (code >> 8) as u8, - (code >> 0) as u8, + (code >> 8) as u8, + (code >> 0) as u8, ])); } @@ -256,8 +252,7 @@ fn recv(dir: &Path, io: &mut B) -> PathBuf { let len = cmp::min(filename.len() - 1, 50); let dst = dir.join(t!(str::from_utf8(&filename[..len]))); let amt = read_u32(io) as u64; - t!(io::copy(&mut io.take(amt), - &mut t!(File::create(&dst)))); + t!(io::copy(&mut io.take(amt), &mut t!(File::create(&dst)))); t!(fs::set_permissions(&dst, Permissions::from_mode(0o755))); dst } @@ -271,13 +266,13 @@ fn my_copy(src: &mut dyn Read, which: u8, dst: &Mutex) { which, (n >> 24) as u8, (n >> 16) as u8, - (n >> 8) as u8, - (n >> 0) as u8, + (n >> 8) as u8, + (n >> 0) as u8, ])); if n > 0 { t!(dst.write_all(&b[..n])); } else { - break + break; } } } @@ -285,8 +280,8 @@ fn my_copy(src: &mut dyn Read, which: u8, dst: &Mutex) { fn read_u32(r: &mut dyn Read) -> u32 { let mut len = [0; 4]; t!(r.read_exact(&mut len)); - ((len[0] as u32) << 24) | - ((len[1] as u32) << 16) | - ((len[2] as u32) << 8) | - ((len[3] as u32) << 0) + ((len[0] as u32) << 24) + | ((len[1] as u32) << 16) + | ((len[2] as u32) << 8) + | ((len[3] as u32) << 0) } diff --git a/src/tools/rustdoc-themes/main.rs b/src/tools/rustdoc-themes/main.rs index a549b0d0efa..7cac985a9a3 100644 --- 
a/src/tools/rustdoc-themes/main.rs +++ b/src/tools/rustdoc-themes/main.rs @@ -1,7 +1,7 @@ use std::env::args; use std::fs::read_dir; use std::path::Path; -use std::process::{Command, exit}; +use std::process::{exit, Command}; const FILES_TO_IGNORE: &[&str] = &["light.css"]; @@ -13,11 +13,11 @@ fn get_folders>(folder_path: P) -> Vec { let path = entry.path(); if !path.is_file() { - continue + continue; } let filename = path.file_name().expect("file_name failed"); if FILES_TO_IGNORE.iter().any(|x| x == &filename) { - continue + continue; } ret.push(format!("{}", path.display())); } @@ -40,11 +40,9 @@ fn main() { } let arg_name = "--check-theme".to_owned(); let status = Command::new(rustdoc_bin) - .args(&themes.iter() - .flat_map(|t| vec![&arg_name, t].into_iter()) - .collect::>()) - .status() - .expect("failed to execute child"); + .args(&themes.iter().flat_map(|t| vec![&arg_name, t].into_iter()).collect::>()) + .status() + .expect("failed to execute child"); if !status.success() { exit(1); } diff --git a/src/tools/rustdoc/main.rs b/src/tools/rustdoc/main.rs index 99573cadb95..5b499a1fa1f 100644 --- a/src/tools/rustdoc/main.rs +++ b/src/tools/rustdoc/main.rs @@ -1 +1,3 @@ -fn main() { rustdoc::main() } +fn main() { + rustdoc::main() +} diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs index 680585a6e04..589be26dc27 100644 --- a/src/tools/tidy/src/bins.rs +++ b/src/tools/tidy/src/bins.rs @@ -14,8 +14,8 @@ pub fn check(_path: &Path, _bad: &mut bool) {} #[cfg(unix)] pub fn check(path: &Path, bad: &mut bool) { use std::fs; - use std::process::{Command, Stdio}; use std::os::unix::prelude::*; + use std::process::{Command, Stdio}; if let Ok(contents) = fs::read_to_string("/proc/version") { // Probably on Windows Linux Subsystem or Docker via VirtualBox, @@ -25,33 +25,35 @@ pub fn check(path: &Path, bad: &mut bool) { } } - super::walk_no_read(path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), - &mut |entry| { - let file = 
entry.path(); - let filename = file.file_name().unwrap().to_string_lossy(); - let extensions = [".py", ".sh"]; - if extensions.iter().any(|e| filename.ends_with(e)) { - return; - } + super::walk_no_read( + path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), + &mut |entry| { + let file = entry.path(); + let filename = file.file_name().unwrap().to_string_lossy(); + let extensions = [".py", ".sh"]; + if extensions.iter().any(|e| filename.ends_with(e)) { + return; + } - let metadata = t!(entry.metadata(), file); - if metadata.mode() & 0o111 != 0 { - let rel_path = file.strip_prefix(path).unwrap(); - let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); - let output = Command::new("git") - .arg("ls-files") - .arg(&git_friendly_path) - .current_dir(path) - .stderr(Stdio::null()) - .output() - .unwrap_or_else(|e| { - panic!("could not run git ls-files: {}", e); - }); - let path_bytes = rel_path.as_os_str().as_bytes(); - if output.status.success() && output.stdout.starts_with(path_bytes) { - tidy_error!(bad, "binary checked into source: {}", file.display()); + let metadata = t!(entry.metadata(), file); + if metadata.mode() & 0o111 != 0 { + let rel_path = file.strip_prefix(path).unwrap(); + let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); + let output = Command::new("git") + .arg("ls-files") + .arg(&git_friendly_path) + .current_dir(path) + .stderr(Stdio::null()) + .output() + .unwrap_or_else(|e| { + panic!("could not run git ls-files: {}", e); + }); + let path_bytes = rel_path.as_os_str().as_bytes(); + if output.status.success() && output.stdout.starts_with(path_bytes) { + tidy_error!(bad, "binary checked into source: {}", file.display()); + } } - } - }) + }, + ) } diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs index 26ced7fc829..7c45efba5ea 100644 --- a/src/tools/tidy/src/cargo.rs +++ b/src/tools/tidy/src/cargo.rs @@ -10,7 +10,7 @@ use std::path::Path; pub fn check(path: &Path, bad: &mut 
bool) { if !super::filter_dirs(path) { - return + return; } for entry in t!(path.read_dir(), path).map(|e| t!(e)) { // Look for `Cargo.toml` with a sibling `src/lib.rs` or `lib.rs`. @@ -34,7 +34,7 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { let librs = t!(fs::read_to_string(&libfile)); if toml.contains("name = \"bootstrap\"") { - return + return; } // "Poor man's TOML parser" -- just assume we use one syntax for now. @@ -51,36 +51,41 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { // If we encounter a line starting with `[` then we assume it's the end of // the dependency section and bail out. let deps = match toml.find("[dependencies]") { - Some(i) => &toml[i+1..], + Some(i) => &toml[i + 1..], None => return, }; for line in deps.lines() { if line.starts_with('[') { - break + break; } let mut parts = line.splitn(2, '='); let krate = parts.next().unwrap().trim(); if parts.next().is_none() { - continue + continue; } // Don't worry about depending on core/std while not writing `extern crate // core/std` -- that's intentional. if krate == "core" || krate == "std" { - continue + continue; } // This is intentional -- this dependency just makes the crate available // for others later on. 
let whitelisted = krate.starts_with("panic"); if toml.contains("name = \"std\"") && whitelisted { - continue + continue; } if !librs.contains(&format!("extern crate {}", krate)) { - tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \ - depends on it", libfile.display(), krate); + tidy_error!( + bad, + "{} doesn't have `extern crate {}`, but Cargo.toml \ + depends on it", + libfile.display(), + krate + ); } } } diff --git a/src/tools/tidy/src/debug_artifacts.rs b/src/tools/tidy/src/debug_artifacts.rs index ee555a3e5bd..4664e2ef9a5 100644 --- a/src/tools/tidy/src/debug_artifacts.rs +++ b/src/tools/tidy/src/debug_artifacts.rs @@ -2,8 +2,7 @@ use std::path::{Path, PathBuf}; -const GRAPHVIZ_POSTFLOW_MSG: &'static str = - "`borrowck_graphviz_postflow` attribute in test"; +const GRAPHVIZ_POSTFLOW_MSG: &'static str = "`borrowck_graphviz_postflow` attribute in test"; pub fn check(path: &Path, bad: &mut bool) { let test_dir: PathBuf = path.join("test"); diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 36e412975b9..cb48f723d86 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -1,6 +1,6 @@ //! Checks the licenses of third-party dependencies by inspecting vendors. -use std::collections::{BTreeSet, HashSet, HashMap}; +use std::collections::{BTreeSet, HashMap, HashSet}; use std::fs; use std::path::Path; use std::process::Command; @@ -62,10 +62,8 @@ const EXCEPTIONS: &[&str] = &[ ]; /// Which crates to check against the whitelist? -const WHITELIST_CRATES: &[CrateVersion<'_>] = &[ - CrateVersion("rustc", "0.0.0"), - CrateVersion("rustc_codegen_llvm", "0.0.0"), -]; +const WHITELIST_CRATES: &[CrateVersion<'_>] = + &[CrateVersion("rustc", "0.0.0"), CrateVersion("rustc_codegen_llvm", "0.0.0")]; /// Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible. const WHITELIST: &[Crate<'_>] = &[ @@ -258,10 +256,7 @@ pub fn check(path: &Path, bad: &mut bool) { // Skip our exceptions. 
let is_exception = EXCEPTIONS.iter().any(|exception| { - dir.path() - .to_str() - .unwrap() - .contains(&format!("vendor/{}", exception)) + dir.path().to_str().unwrap().contains(&format!("vendor/{}", exception)) }); if is_exception { continue; @@ -408,20 +403,17 @@ fn check_crate_duplicate(resolve: &Resolve, bad: &mut bool) { // These two crates take quite a long time to build, so don't allow two versions of them // to accidentally sneak into our dependency graph, in order to ensure we keep our CI times // under control. - "cargo", "rustc-ap-syntax", ]; let mut name_to_id: HashMap<_, Vec<_>> = HashMap::new(); for node in resolve.nodes.iter() { - name_to_id.entry(node.id.split_whitespace().next().unwrap()) - .or_default() - .push(&node.id); + name_to_id.entry(node.id.split_whitespace().next().unwrap()).or_default().push(&node.id); } for name in FORBIDDEN_TO_HAVE_DUPLICATES { if name_to_id[name].len() <= 1 { - continue + continue; } println!("crate `{}` is duplicated in `Cargo.lock`", name); for id in name_to_id[name].iter() { diff --git a/src/tools/tidy/src/error_codes_check.rs b/src/tools/tidy/src/error_codes_check.rs index 22e4111ff3a..ebaa81d2a8d 100644 --- a/src/tools/tidy/src/error_codes_check.rs +++ b/src/tools/tidy/src/error_codes_check.rs @@ -8,48 +8,11 @@ use std::path::Path; // A few of those error codes can't be tested but all the others can and *should* be tested! 
const WHITELIST: &[&str] = &[ - "E0183", - "E0227", - "E0279", - "E0280", - "E0311", - "E0313", - "E0314", - "E0315", - "E0377", - "E0456", - "E0461", - "E0462", - "E0464", - "E0465", - "E0472", - "E0473", - "E0474", - "E0475", - "E0476", - "E0479", - "E0480", - "E0481", - "E0482", - "E0483", - "E0484", - "E0485", - "E0486", - "E0487", - "E0488", - "E0489", - "E0514", - "E0519", - "E0523", - "E0553", - "E0554", - "E0570", - "E0629", - "E0630", - "E0640", - "E0717", - "E0727", - "E0729", + "E0183", "E0227", "E0279", "E0280", "E0311", "E0313", "E0314", "E0315", "E0377", "E0456", + "E0461", "E0462", "E0464", "E0465", "E0472", "E0473", "E0474", "E0475", "E0476", "E0479", + "E0480", "E0481", "E0482", "E0483", "E0484", "E0485", "E0486", "E0487", "E0488", "E0489", + "E0514", "E0519", "E0523", "E0553", "E0554", "E0570", "E0629", "E0630", "E0640", "E0717", + "E0727", "E0729", ]; fn check_error_code_explanation( @@ -70,12 +33,12 @@ fn check_error_code_explanation( } macro_rules! some_or_continue { - ($e:expr) => ( + ($e:expr) => { match $e { Some(e) => e, None => continue, } - ); + }; } fn extract_error_codes(f: &str, error_codes: &mut HashMap, path: &Path) { @@ -95,11 +58,7 @@ fn extract_error_codes(f: &str, error_codes: &mut HashMap, path: & let path = some_or_continue!(path.parent()).join(md_file_name); match read_to_string(&path) { Ok(content) => { - check_error_code_explanation( - &content, - error_codes, - err_code, - ); + check_error_code_explanation(&content, error_codes, err_code); } Err(e) => { eprintln!("Couldn't read `{}`: {}", path.display(), e); @@ -109,7 +68,8 @@ fn extract_error_codes(f: &str, error_codes: &mut HashMap, path: & } else if reached_no_explanation && s.starts_with('E') { if let Some(err_code) = s.splitn(2, ',').next() { let err_code = err_code.to_owned(); - if !error_codes.contains_key(&err_code) { // this check should *never* fail! + if !error_codes.contains_key(&err_code) { + // this check should *never* fail! 
error_codes.insert(err_code, false); } } @@ -136,9 +96,7 @@ fn extract_error_codes_from_tests(f: &str, error_codes: &mut HashMap = HashMap::new(); - super::walk(path, - &mut |path| super::filter_dirs(path), - &mut |entry, contents| { + super::walk(path, &mut |path| super::filter_dirs(path), &mut |entry, contents| { let file_name = entry.file_name(); if file_name == "error_codes.rs" { extract_error_codes(contents, &mut error_codes, entry.path()); diff --git a/src/tools/tidy/src/errors.rs b/src/tools/tidy/src/errors.rs index 1bc27745376..dbcc9341a08 100644 --- a/src/tools/tidy/src/errors.rs +++ b/src/tools/tidy/src/errors.rs @@ -8,54 +8,51 @@ use std::path::Path; pub fn check(path: &Path, bad: &mut bool) { let mut map: HashMap<_, Vec<_>> = HashMap::new(); - super::walk(path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), - &mut |entry, contents| { - let file = entry.path(); - let filename = file.file_name().unwrap().to_string_lossy(); - if filename != "error_codes.rs" { - return - } - - // In the `register_long_diagnostics!` macro, entries look like this: - // - // ``` - // EXXXX: r##" - // - // "##, - // ``` - // - // and these long messages often have error codes themselves inside - // them, but we don't want to report duplicates in these cases. This - // variable keeps track of whether we're currently inside one of these - // long diagnostic messages. 
- let mut inside_long_diag = false; - for (num, line) in contents.lines().enumerate() { - if inside_long_diag { - inside_long_diag = !line.contains("\"##"); - continue + super::walk( + path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), + &mut |entry, contents| { + let file = entry.path(); + let filename = file.file_name().unwrap().to_string_lossy(); + if filename != "error_codes.rs" { + return; } - let mut search = line; - while let Some(i) = search.find('E') { - search = &search[i + 1..]; - let code = if search.len() > 4 { - search[..4].parse::() - } else { - continue - }; - let code = match code { - Ok(n) => n, - Err(..) => continue, - }; - map.entry(code).or_default() - .push((file.to_owned(), num + 1, line.to_owned())); - break - } + // In the `register_long_diagnostics!` macro, entries look like this: + // + // ``` + // EXXXX: r##" + // + // "##, + // ``` + // + // and these long messages often have error codes themselves inside + // them, but we don't want to report duplicates in these cases. This + // variable keeps track of whether we're currently inside one of these + // long diagnostic messages. + let mut inside_long_diag = false; + for (num, line) in contents.lines().enumerate() { + if inside_long_diag { + inside_long_diag = !line.contains("\"##"); + continue; + } - inside_long_diag = line.contains("r##\""); - } - }); + let mut search = line; + while let Some(i) = search.find('E') { + search = &search[i + 1..]; + let code = if search.len() > 4 { search[..4].parse::() } else { continue }; + let code = match code { + Ok(n) => n, + Err(..) 
=> continue, + }; + map.entry(code).or_default().push((file.to_owned(), num + 1, line.to_owned())); + break; + } + + inside_long_diag = line.contains("r##\""); + } + }, + ); let mut max = 0; for (&code, entries) in map.iter() { @@ -63,7 +60,7 @@ pub fn check(path: &Path, bad: &mut bool) { max = code; } if entries.len() == 1 { - continue + continue; } tidy_error!(bad, "duplicate error code: {}", code); diff --git a/src/tools/tidy/src/extdeps.rs b/src/tools/tidy/src/extdeps.rs index 52e263df5e3..e3f92d48061 100644 --- a/src/tools/tidy/src/extdeps.rs +++ b/src/tools/tidy/src/extdeps.rs @@ -4,9 +4,7 @@ use std::fs; use std::path::Path; /// List of whitelisted sources for packages. -const WHITELISTED_SOURCES: &[&str] = &[ - "\"registry+https://github.com/rust-lang/crates.io-index\"", -]; +const WHITELISTED_SOURCES: &[&str] = &["\"registry+https://github.com/rust-lang/crates.io-index\""]; /// Checks for external package sources. pub fn check(path: &Path, bad: &mut bool) { @@ -19,7 +17,7 @@ pub fn check(path: &Path, bad: &mut bool) { // Process each line. for line in cargo_lock.lines() { // Consider only source entries. - if ! line.starts_with("source = ") { + if !line.starts_with("source = ") { continue; } diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index 82292a6912c..325b45e0a70 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -67,20 +67,22 @@ pub fn collect_lib_features(base_src_path: &Path) -> Features { // has been moved out-of-tree. Now it can no longer be auto-discovered by // `tidy`, because we need to filter out its (submodule) directory. Manually // add it to the set of known library features so we can still generate docs. 
- lib_features.insert("compiler_builtins_lib".to_owned(), Feature { - level: Status::Unstable, - since: None, - has_gate_test: false, - tracking_issue: None, - }); - - map_lib_features(base_src_path, - &mut |res, _, _| { + lib_features.insert( + "compiler_builtins_lib".to_owned(), + Feature { + level: Status::Unstable, + since: None, + has_gate_test: false, + tracking_issue: None, + }, + ); + + map_lib_features(base_src_path, &mut |res, _, _| { if let Ok((name, feature)) = res { lib_features.insert(name.to_owned(), feature); } }); - lib_features + lib_features } pub fn check(path: &Path, bad: &mut bool, verbose: bool) -> CollectedFeatures { @@ -90,67 +92,77 @@ pub fn check(path: &Path, bad: &mut bool, verbose: bool) -> CollectedFeatures { let lib_features = get_and_check_lib_features(path, bad, &features); assert!(!lib_features.is_empty()); - super::walk_many(&[&path.join("test/ui"), - &path.join("test/ui-fulldeps"), - &path.join("test/compile-fail")], - &mut |path| super::filter_dirs(path), - &mut |entry, contents| { - let file = entry.path(); - let filename = file.file_name().unwrap().to_string_lossy(); - if !filename.ends_with(".rs") || filename == "features.rs" || - filename == "diagnostic_list.rs" { - return; - } + super::walk_many( + &[&path.join("test/ui"), &path.join("test/ui-fulldeps"), &path.join("test/compile-fail")], + &mut |path| super::filter_dirs(path), + &mut |entry, contents| { + let file = entry.path(); + let filename = file.file_name().unwrap().to_string_lossy(); + if !filename.ends_with(".rs") + || filename == "features.rs" + || filename == "diagnostic_list.rs" + { + return; + } - let filen_underscore = filename.replace('-',"_").replace(".rs",""); - let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features); + let filen_underscore = filename.replace('-', "_").replace(".rs", ""); + let filename_is_gate_test = test_filen_gate(&filen_underscore, &mut features); - for (i, line) in contents.lines().enumerate() { - let mut err = 
|msg: &str| { - tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); - }; + for (i, line) in contents.lines().enumerate() { + let mut err = |msg: &str| { + tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); + }; - let gate_test_str = "gate-test-"; + let gate_test_str = "gate-test-"; - let feature_name = match line.find(gate_test_str) { - Some(i) => { - line[i+gate_test_str.len()..].splitn(2, ' ').next().unwrap() - }, - None => continue, - }; - match features.get_mut(feature_name) { - Some(f) => { - if filename_is_gate_test { - err(&format!("The file is already marked as gate test \ + let feature_name = match line.find(gate_test_str) { + Some(i) => line[i + gate_test_str.len()..].splitn(2, ' ').next().unwrap(), + None => continue, + }; + match features.get_mut(feature_name) { + Some(f) => { + if filename_is_gate_test { + err(&format!( + "The file is already marked as gate test \ through its name, no need for a \ 'gate-test-{}' comment", - feature_name)); + feature_name + )); + } + f.has_gate_test = true; + } + None => { + err(&format!( + "gate-test test found referencing a nonexistent feature '{}'", + feature_name + )); } - f.has_gate_test = true; - } - None => { - err(&format!("gate-test test found referencing a nonexistent feature '{}'", - feature_name)); } } - } - }); + }, + ); // Only check the number of lang features. // Obligatory testing for library features is dumb. 
- let gate_untested = features.iter() - .filter(|&(_, f)| f.level == Status::Unstable) - .filter(|&(_, f)| !f.has_gate_test) - .collect::>(); + let gate_untested = features + .iter() + .filter(|&(_, f)| f.level == Status::Unstable) + .filter(|&(_, f)| !f.has_gate_test) + .collect::>(); for &(name, _) in gate_untested.iter() { println!("Expected a gate test for the feature '{}'.", name); - println!("Hint: create a failing test file named 'feature-gate-{}.rs'\ + println!( + "Hint: create a failing test file named 'feature-gate-{}.rs'\ \n in the 'ui' test suite, with its failures due to\ - \n missing usage of `#![feature({})]`.", name, name); - println!("Hint: If you already have such a test and don't want to rename it,\ + \n missing usage of `#![feature({})]`.", + name, name + ); + println!( + "Hint: If you already have such a test and don't want to rename it,\ \n you can also add a // gate-test-{} line to the test file.", - name); + name + ); } if !gate_untested.is_empty() { @@ -182,12 +194,13 @@ fn format_features<'a>( family: &'a str, ) -> impl Iterator + 'a { features.iter().map(move |(name, feature)| { - format!("{:<32} {:<8} {:<12} {:<8}", - name, - family, - feature.level, - feature.since.map_or("None".to_owned(), - |since| since.to_string())) + format!( + "{:<32} {:<8} {:<12} {:<8}", + name, + family, + feature.level, + feature.since.map_or("None".to_owned(), |since| since.to_string()) + ) }) } @@ -205,9 +218,7 @@ fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> { _ => unimplemented!("{} not handled", attr), }; - r.captures(line) - .and_then(|c| c.get(1)) - .map(|m| m.as_str()) + r.captures(line).and_then(|c| c.get(1)).map(|m| m.as_str()) } fn test_filen_gate(filen_underscore: &str, features: &mut Features) -> bool { @@ -243,7 +254,9 @@ fn collect_lang_features_in(base: &Path, file: &str, bad: &mut bool) -> Features let mut in_feature_group = false; let mut prev_since = None; - contents.lines().zip(1..) + contents + .lines() + .zip(1..) 
.filter_map(|(line, line_number)| { let line = line.trim(); @@ -334,160 +347,157 @@ fn collect_lang_features_in(base: &Path, file: &str, bad: &mut bool) -> Features let s = issue_str.split('(').nth(1).unwrap().split(')').nth(0).unwrap(); Some(s.parse().unwrap()) }; - Some((name.to_owned(), - Feature { - level, - since, - has_gate_test: false, - tracking_issue, - })) + Some((name.to_owned(), Feature { level, since, has_gate_test: false, tracking_issue })) }) .collect() } -fn get_and_check_lib_features(base_src_path: &Path, - bad: &mut bool, - lang_features: &Features) -> Features { +fn get_and_check_lib_features( + base_src_path: &Path, + bad: &mut bool, + lang_features: &Features, +) -> Features { let mut lib_features = Features::new(); - map_lib_features(base_src_path, - &mut |res, file, line| { - match res { - Ok((name, f)) => { - let mut check_features = |f: &Feature, list: &Features, display: &str| { - if let Some(ref s) = list.get(name) { - if f.tracking_issue != s.tracking_issue && f.level != Status::Stable { - tidy_error!(bad, - "{}:{}: mismatches the `issue` in {}", - file.display(), - line, - display); - } - } - }; - check_features(&f, &lang_features, "corresponding lang feature"); - check_features(&f, &lib_features, "previous"); - lib_features.insert(name.to_owned(), f); - }, - Err(msg) => { - tidy_error!(bad, "{}:{}: {}", file.display(), line, msg); - }, - } - + map_lib_features(base_src_path, &mut |res, file, line| match res { + Ok((name, f)) => { + let mut check_features = |f: &Feature, list: &Features, display: &str| { + if let Some(ref s) = list.get(name) { + if f.tracking_issue != s.tracking_issue && f.level != Status::Stable { + tidy_error!( + bad, + "{}:{}: mismatches the `issue` in {}", + file.display(), + line, + display + ); + } + } + }; + check_features(&f, &lang_features, "corresponding lang feature"); + check_features(&f, &lib_features, "previous"); + lib_features.insert(name.to_owned(), f); + } + Err(msg) => { + tidy_error!(bad, "{}:{}: 
{}", file.display(), line, msg); + } }); lib_features } -fn map_lib_features(base_src_path: &Path, - mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize)) { - super::walk(base_src_path, - &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), - &mut |entry, contents| { - let file = entry.path(); - let filename = file.file_name().unwrap().to_string_lossy(); - if !filename.ends_with(".rs") || filename == "features.rs" || - filename == "diagnostic_list.rs" || filename == "error_codes.rs" { - return; - } - - // This is an early exit -- all the attributes we're concerned with must contain this: - // * rustc_const_unstable( - // * unstable( - // * stable( - if !contents.contains("stable(") { - return; - } +fn map_lib_features( + base_src_path: &Path, + mf: &mut dyn FnMut(Result<(&str, Feature), &str>, &Path, usize), +) { + super::walk( + base_src_path, + &mut |path| super::filter_dirs(path) || path.ends_with("src/test"), + &mut |entry, contents| { + let file = entry.path(); + let filename = file.file_name().unwrap().to_string_lossy(); + if !filename.ends_with(".rs") + || filename == "features.rs" + || filename == "diagnostic_list.rs" + || filename == "error_codes.rs" + { + return; + } - let handle_issue_none = |s| match s { - "none" => None, - issue => { - let n = issue.parse().expect("issue number is not a valid integer"); - assert_ne!(n, 0, "\"none\" should be used when there is no issue, not \"0\""); - NonZeroU32::new(n) + // This is an early exit -- all the attributes we're concerned with must contain this: + // * rustc_const_unstable( + // * unstable( + // * stable( + if !contents.contains("stable(") { + return; } - }; - let mut becoming_feature: Option<(&str, Feature)> = None; - let mut iter_lines = contents.lines().enumerate().peekable(); - while let Some((i, line)) = iter_lines.next() { - macro_rules! 
err { - ($msg:expr) => {{ - mf(Err($msg), file, i + 1); - continue; - }}; + + let handle_issue_none = |s| match s { + "none" => None, + issue => { + let n = issue.parse().expect("issue number is not a valid integer"); + assert_ne!(n, 0, "\"none\" should be used when there is no issue, not \"0\""); + NonZeroU32::new(n) + } }; - if let Some((ref name, ref mut f)) = becoming_feature { - if f.tracking_issue.is_none() { - f.tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none); + let mut becoming_feature: Option<(&str, Feature)> = None; + let mut iter_lines = contents.lines().enumerate().peekable(); + while let Some((i, line)) = iter_lines.next() { + macro_rules! err { + ($msg:expr) => {{ + mf(Err($msg), file, i + 1); + continue; + }}; + }; + if let Some((ref name, ref mut f)) = becoming_feature { + if f.tracking_issue.is_none() { + f.tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none); + } + if line.ends_with(']') { + mf(Ok((name, f.clone())), file, i + 1); + } else if !line.ends_with(',') && !line.ends_with('\\') && !line.ends_with('"') + { + // We need to bail here because we might have missed the + // end of a stability attribute above because the ']' + // might not have been at the end of the line. + // We could then get into the very unfortunate situation that + // we continue parsing the file assuming the current stability + // attribute has not ended, and ignoring possible feature + // attributes in the process. + err!("malformed stability attribute"); + } else { + continue; + } } - if line.ends_with(']') { - mf(Ok((name, f.clone())), file, i + 1); - } else if !line.ends_with(',') && !line.ends_with('\\') && !line.ends_with('"') { - // We need to bail here because we might have missed the - // end of a stability attribute above because the ']' - // might not have been at the end of the line. 
- // We could then get into the very unfortunate situation that - // we continue parsing the file assuming the current stability - // attribute has not ended, and ignoring possible feature - // attributes in the process. - err!("malformed stability attribute"); - } else { + becoming_feature = None; + if line.contains("rustc_const_unstable(") { + // `const fn` features are handled specially. + let feature_name = match find_attr_val(line, "feature") { + Some(name) => name, + None => err!("malformed stability attribute: missing `feature` key"), + }; + let feature = Feature { + level: Status::Unstable, + since: None, + has_gate_test: false, + // FIXME(#57563): #57563 is now used as a common tracking issue, + // although we would like to have specific tracking issues for each + // `rustc_const_unstable` in the future. + tracking_issue: NonZeroU32::new(57563), + }; + mf(Ok((feature_name, feature)), file, i + 1); continue; } - } - becoming_feature = None; - if line.contains("rustc_const_unstable(") { - // `const fn` features are handled specially. - let feature_name = match find_attr_val(line, "feature") { + let level = if line.contains("[unstable(") { + Status::Unstable + } else if line.contains("[stable(") { + Status::Stable + } else { + continue; + }; + let feature_name = match find_attr_val(line, "feature") + .or_else(|| iter_lines.peek().and_then(|next| find_attr_val(next.1, "feature"))) + { Some(name) => name, None => err!("malformed stability attribute: missing `feature` key"), }; - let feature = Feature { - level: Status::Unstable, - since: None, - has_gate_test: false, - // FIXME(#57563): #57563 is now used as a common tracking issue, - // although we would like to have specific tracking issues for each - // `rustc_const_unstable` in the future. 
- tracking_issue: NonZeroU32::new(57563), + let since = match find_attr_val(line, "since").map(|x| x.parse()) { + Some(Ok(since)) => Some(since), + Some(Err(_err)) => { + err!("malformed stability attribute: can't parse `since` key"); + } + None if level == Status::Stable => { + err!("malformed stability attribute: missing the `since` key"); + } + None => None, }; - mf(Ok((feature_name, feature)), file, i + 1); - continue; - } - let level = if line.contains("[unstable(") { - Status::Unstable - } else if line.contains("[stable(") { - Status::Stable - } else { - continue; - }; - let feature_name = match find_attr_val(line, "feature") - .or_else(|| iter_lines.peek().and_then(|next| find_attr_val(next.1, "feature"))) - { - Some(name) => name, - None => err!("malformed stability attribute: missing `feature` key"), - }; - let since = match find_attr_val(line, "since").map(|x| x.parse()) { - Some(Ok(since)) => Some(since), - Some(Err(_err)) => { - err!("malformed stability attribute: can't parse `since` key"); - }, - None if level == Status::Stable => { - err!("malformed stability attribute: missing the `since` key"); - } - None => None, - }; - let tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none); + let tracking_issue = find_attr_val(line, "issue").and_then(handle_issue_none); - let feature = Feature { - level, - since, - has_gate_test: false, - tracking_issue, - }; - if line.contains(']') { - mf(Ok((feature_name, feature)), file, i + 1); - } else { - becoming_feature = Some((feature_name, feature)); + let feature = Feature { level, since, has_gate_test: false, tracking_issue }; + if line.contains(']') { + mf(Ok((feature_name, feature)), file, i + 1); + } else { + becoming_feature = Some((feature_name, feature)); + } } - } - }); + }, + ); } diff --git a/src/tools/tidy/src/features/version.rs b/src/tools/tidy/src/features/version.rs index cc3a6510244..c8c39ad27e0 100644 --- a/src/tools/tidy/src/features/version.rs +++ 
b/src/tools/tidy/src/features/version.rs @@ -1,6 +1,6 @@ -use std::str::FromStr; -use std::num::ParseIntError; use std::fmt; +use std::num::ParseIntError; +use std::str::FromStr; #[cfg(test)] mod tests; @@ -34,10 +34,7 @@ impl FromStr for Version { fn from_str(s: &str) -> Result { let mut iter = s.split('.').map(|part| Ok(part.parse()?)); - let mut part = || { - iter.next() - .unwrap_or(Err(ParseVersionError::WrongNumberOfParts)) - }; + let mut part = || iter.next().unwrap_or(Err(ParseVersionError::WrongNumberOfParts)); let parts = [part()?, part()?, part()?]; diff --git a/src/tools/tidy/src/lib.rs b/src/tools/tidy/src/lib.rs index d9db68ff66e..c8c61b6fb50 100644 --- a/src/tools/tidy/src/lib.rs +++ b/src/tools/tidy/src/lib.rs @@ -3,22 +3,26 @@ //! This library contains the tidy lints and exposes it //! to be used by tools. -use walkdir::{DirEntry, WalkDir}; use std::fs::File; use std::io::Read; +use walkdir::{DirEntry, WalkDir}; use std::path::Path; macro_rules! t { - ($e:expr, $p:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed on {} with {}", stringify!($e), ($p).display(), e), - }); + ($e:expr, $p:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed on {} with {}", stringify!($e), ($p).display(), e), + } + }; - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } macro_rules! tidy_error { @@ -30,19 +34,19 @@ macro_rules! 
tidy_error { } pub mod bins; -pub mod style; +pub mod cargo; pub mod debug_artifacts; +pub mod deps; +pub mod edition; +pub mod error_codes_check; pub mod errors; +pub mod extdeps; pub mod features; -pub mod cargo; -pub mod edition; pub mod pal; -pub mod deps; -pub mod extdeps; +pub mod style; pub mod ui_tests; pub mod unit_tests; pub mod unstable_book; -pub mod error_codes_check; fn filter_dirs(path: &Path) -> bool { let skip = [ @@ -54,7 +58,6 @@ fn filter_dirs(path: &Path) -> bool { "src/tools/rls", "src/tools/rust-installer", "src/tools/rustfmt", - // Filter RLS output directories "target/rls", ]; @@ -62,7 +65,9 @@ fn filter_dirs(path: &Path) -> bool { } fn walk_many( - paths: &[&Path], skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry, &str) + paths: &[&Path], + skip: &mut dyn FnMut(&Path) -> bool, + f: &mut dyn FnMut(&DirEntry, &str), ) { for path in paths { walk(path, skip, f); @@ -81,8 +86,7 @@ fn walk(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&Dir } fn walk_no_read(path: &Path, skip: &mut dyn FnMut(&Path) -> bool, f: &mut dyn FnMut(&DirEntry)) { - let walker = WalkDir::new(path).into_iter() - .filter_entry(|e| !skip(e.path())); + let walker = WalkDir::new(path).into_iter().filter_entry(|e| !skip(e.path())); for entry in walker { if let Ok(entry) = entry { if entry.file_type().is_dir() { diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index de6b0c5b28d..ec8b14c288a 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -8,9 +8,9 @@ use tidy::*; -use std::process; -use std::path::PathBuf; use std::env; +use std::path::PathBuf; +use std::process; fn main() { let path: PathBuf = env::args_os().nth(1).expect("need path to src").into(); diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs index c6bb16318b6..dcd4c9e8ef7 100644 --- a/src/tools/tidy/src/pal.rs +++ b/src/tools/tidy/src/pal.rs @@ -31,8 +31,8 @@ //! platform-specific cfgs are allowed. 
Not sure yet how to deal with //! this in the long term. -use std::path::Path; use std::iter::Iterator; +use std::path::Path; // Paths that may contain platform-specific code. const EXCEPTION_PATHS: &[&str] = &[ @@ -45,17 +45,16 @@ const EXCEPTION_PATHS: &[&str] = &[ // (e.g. `wasm32-unknown-emscripten` vs `wasm32-unknown-unknown`): "src/libcore/hint.rs", "src/libstd/sys/", // Platform-specific code for std lives here. - // This has the trailing slash so that sys_common is not excepted. + // This has the trailing slash so that sys_common is not excepted. "src/libstd/os", // Platform-specific public interfaces "src/rtstartup", // Not sure what to do about this. magic stuff for mingw - // temporary exceptions "src/libstd/lib.rs", "src/libstd/path.rs", "src/libstd/f32.rs", "src/libstd/f64.rs", // Integration test for platform-specific run-time feature detection: - "src/libstd/tests/run-time-detect.rs" , + "src/libstd/tests/run-time-detect.rs", "src/libstd/net/test.rs", "src/libstd/sys_common/mod.rs", "src/libstd/sys_common/net.rs", @@ -63,18 +62,15 @@ const EXCEPTION_PATHS: &[&str] = &[ "src/libterm", // Not sure how to make this crate portable, but test crate needs it. "src/libtest", // Probably should defer to unstable `std::sys` APIs. "src/libstd/sync/mpsc", // some tests are only run on non-emscripten - // std testing crates, okay for now at least "src/libcore/tests", "src/liballoc/tests/lib.rs", - // The `VaList` implementation must have platform specific code. // The Windows implementation of a `va_list` is always a character // pointer regardless of the target architecture. As a result, // we must use `#[cfg(windows)]` to conditionally compile the // correct `VaList` structure for windows. 
"src/libcore/ffi.rs", - // non-std crates "src/test", "src/tools", @@ -91,10 +87,14 @@ pub fn check(path: &Path, bad: &mut bool) { super::walk(path, &mut super::filter_dirs, &mut |entry, contents| { let file = entry.path(); let filestr = file.to_string_lossy().replace("\\", "/"); - if !filestr.ends_with(".rs") { return } + if !filestr.ends_with(".rs") { + return; + } let is_exception_path = EXCEPTION_PATHS.iter().any(|s| filestr.contains(&**s)); - if is_exception_path { return } + if is_exception_path { + return; + } check_cfgs(contents, &file, bad, &mut saw_target_arch, &mut saw_cfg_bang); }); @@ -103,8 +103,13 @@ pub fn check(path: &Path, bad: &mut bool) { assert!(saw_cfg_bang); } -fn check_cfgs(contents: &str, file: &Path, - bad: &mut bool, saw_target_arch: &mut bool, saw_cfg_bang: &mut bool) { +fn check_cfgs( + contents: &str, + file: &Path, + bad: &mut bool, + saw_target_arch: &mut bool, + saw_cfg_bang: &mut bool, +) { // For now it's ok to have platform-specific code after 'mod tests'. let mod_tests_idx = find_test_mod(contents); let contents = &contents[..mod_tests_idx]; @@ -119,24 +124,29 @@ fn check_cfgs(contents: &str, file: &Path, let line_numbers = line_numbers.as_ref().expect(""); let line = match line_numbers.binary_search(&idx) { Ok(_) => unreachable!(), - Err(i) => i + 1 + Err(i) => i + 1, }; tidy_error!(bad, "{}:{}: platform-specific cfg: {}", file.display(), line, cfg); }; for (idx, cfg) in cfgs { // Sanity check that the parsing here works. 
- if !*saw_target_arch && cfg.contains("target_arch") { *saw_target_arch = true } - if !*saw_cfg_bang && cfg.contains("cfg!") { *saw_cfg_bang = true } + if !*saw_target_arch && cfg.contains("target_arch") { + *saw_target_arch = true + } + if !*saw_cfg_bang && cfg.contains("cfg!") { + *saw_cfg_bang = true + } - let contains_platform_specific_cfg = - cfg.contains("target_os") + let contains_platform_specific_cfg = cfg.contains("target_os") || cfg.contains("target_env") || cfg.contains("target_vendor") || cfg.contains("unix") || cfg.contains("windows"); - if !contains_platform_specific_cfg { continue } + if !contains_platform_specific_cfg { + continue; + } let preceeded_by_doc_comment = { let pre_contents = &contents[..idx]; @@ -149,7 +159,9 @@ fn check_cfgs(contents: &str, file: &Path, } }; - if preceeded_by_doc_comment { continue } + if preceeded_by_doc_comment { + continue; + } err(idx, cfg); } @@ -161,9 +173,10 @@ fn find_test_mod(contents: &str) -> usize { let prev_newline_idx = contents[..mod_tests_idx].rfind('\n').unwrap_or(mod_tests_idx); let prev_newline_idx = contents[..prev_newline_idx].rfind('\n'); if let Some(nl) = prev_newline_idx { - let prev_line = &contents[nl + 1 .. mod_tests_idx]; + let prev_line = &contents[nl + 1..mod_tests_idx]; if prev_line.contains("cfg(all(test, not(target_os") - || prev_line.contains("cfg(all(test, not(any(target_os") { + || prev_line.contains("cfg(all(test, not(any(target_os") + { nl } else { mod_tests_idx @@ -183,7 +196,9 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { // that appear to be tokens followed by a parenthesis. 
let cfgs = candidate_cfg_idxs.filter(|i| { let pre_idx = i.saturating_sub(*i); - let succeeds_non_ident = !contents.as_bytes().get(pre_idx) + let succeeds_non_ident = !contents + .as_bytes() + .get(pre_idx) .cloned() .map(char::from) .map(char::is_alphanumeric) @@ -191,10 +206,12 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { let contents_after = &contents[*i..]; let first_paren = contents_after.find('('); let paren_idx = first_paren.map(|ip| i + ip); - let preceeds_whitespace_and_paren = paren_idx.map(|ip| { - let maybe_space = &contents[*i + "cfg".len() .. ip]; - maybe_space.chars().all(|c| char::is_whitespace(c) || c == '!') - }).unwrap_or(false); + let preceeds_whitespace_and_paren = paren_idx + .map(|ip| { + let maybe_space = &contents[*i + "cfg".len()..ip]; + maybe_space.chars().all(|c| char::is_whitespace(c) || c == '!') + }) + .unwrap_or(false); succeeds_non_ident && preceeds_whitespace_and_paren }); @@ -213,12 +230,13 @@ fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { return Some((i, &contents_from[..=j])); } } - _ => { } + _ => {} } } // if the parentheses are unbalanced just ignore this cfg -- it'll be caught when attempting // to run the compiler, and there's no real reason to lint it separately here None - }).collect() + }) + .collect() } diff --git a/src/tools/tidy/src/style.rs b/src/tools/tidy/src/style.rs index 20636c86e1e..addee1108e8 100644 --- a/src/tools/tidy/src/style.rs +++ b/src/tools/tidy/src/style.rs @@ -55,8 +55,7 @@ enum LIUState { fn line_is_url(columns: usize, line: &str) -> bool { // more basic check for error_codes.rs, to avoid complexity in implementing two state machines if columns == ERROR_CODE_COLS { - return line.starts_with("[") && - line.contains("]:") && line.contains("http"); + return line.starts_with("[") && line.contains("]:") && line.contains("http"); } use self::LIUState::*; @@ -65,25 +64,21 @@ fn line_is_url(columns: usize, line: &str) -> bool { for tok in line.split_whitespace() { 
match (state, tok) { - (EXP_COMMENT_START, "//") | - (EXP_COMMENT_START, "///") | - (EXP_COMMENT_START, "//!") => state = EXP_LINK_LABEL_OR_URL, + (EXP_COMMENT_START, "//") | (EXP_COMMENT_START, "///") | (EXP_COMMENT_START, "//!") => { + state = EXP_LINK_LABEL_OR_URL + } (EXP_LINK_LABEL_OR_URL, w) - if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") - => state = EXP_URL, + if w.len() >= 4 && w.starts_with('[') && w.ends_with("]:") => + { + state = EXP_URL + } - (EXP_LINK_LABEL_OR_URL, w) - if is_url(w) - => state = EXP_END, + (EXP_LINK_LABEL_OR_URL, w) if is_url(w) => state = EXP_END, - (EXP_URL, w) - if is_url(w) || w.starts_with("../") - => state = EXP_END, + (EXP_URL, w) if is_url(w) || w.starts_with("../") => state = EXP_END, - (_, w) - if w.len() > columns && is_url(w) - => state = EXP_END, + (_, w) if w.len() > columns && is_url(w) => state = EXP_END, (_, _) => {} } @@ -119,8 +114,9 @@ fn contains_ignore_directive(can_contain: bool, contents: &str, check: &str) -> return Directive::Deny; } // Update `can_contain` when changing this - if contents.contains(&format!("// ignore-tidy-{}", check)) || - contents.contains(&format!("# ignore-tidy-{}", check)) { + if contents.contains(&format!("// ignore-tidy-{}", check)) + || contents.contains(&format!("# ignore-tidy-{}", check)) + { Directive::Ignore(false) } else { Directive::Deny @@ -142,17 +138,13 @@ pub fn check(path: &Path, bad: &mut bool) { let file = entry.path(); let filename = file.file_name().unwrap().to_string_lossy(); let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md"]; - if extensions.iter().all(|e| !filename.ends_with(e)) || - filename.starts_with(".#") { - return + if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") { + return; } - if filename.ends_with(".md") && - file.parent() - .unwrap() - .file_name() - .unwrap() - .to_string_lossy() != "error_codes" { + if filename.ends_with(".md") + && 
file.parent().unwrap().file_name().unwrap().to_string_lossy() != "error_codes" + { // We don't want to check all ".md" files (almost of of them aren't compliant // currently), just the long error code explanation ones. return; @@ -168,8 +160,8 @@ pub fn check(path: &Path, bad: &mut bool) { COLS }; - let can_contain = contents.contains("// ignore-tidy-") || - contents.contains("# ignore-tidy-"); + let can_contain = + contents.contains("// ignore-tidy-") || contents.contains("# ignore-tidy-"); let mut skip_cr = contains_ignore_directive(can_contain, &contents, "cr"); let mut skip_undocumented_unsafe = contains_ignore_directive(can_contain, &contents, "undocumented-unsafe"); @@ -189,8 +181,7 @@ pub fn check(path: &Path, bad: &mut bool) { let mut err = |msg: &str| { tidy_error!(bad, "{}:{}: {}", file.display(), i + 1, msg); }; - if line.chars().count() > max_columns && - !long_line_is_ok(max_columns, line) { + if line.chars().count() > max_columns && !long_line_is_ok(max_columns, line) { suppressible_tidy_err!( err, skip_line_length, @@ -228,11 +219,11 @@ pub fn check(path: &Path, bad: &mut bool) { } else { last_safety_comment = false; } - if (line.starts_with("// Copyright") || - line.starts_with("# Copyright") || - line.starts_with("Copyright")) - && (line.contains("Rust Developers") || - line.contains("Rust Project Developers")) { + if (line.starts_with("// Copyright") + || line.starts_with("# Copyright") + || line.starts_with("Copyright")) + && (line.contains("Rust Developers") || line.contains("Rust Project Developers")) + { suppressible_tidy_err!( err, skip_copyright, diff --git a/src/tools/tidy/src/ui_tests.rs b/src/tools/tidy/src/ui_tests.rs index 2c52cecccb5..47b328dae47 100644 --- a/src/tools/tidy/src/ui_tests.rs +++ b/src/tools/tidy/src/ui_tests.rs @@ -27,11 +27,7 @@ pub fn check(path: &Path, bad: &mut bool) { .splitn(2, '.') .next() .unwrap(); - if !file_path - .with_file_name(testname) - .with_extension("rs") - .exists() - { + if 
!file_path.with_file_name(testname).with_extension("rs").exists() { println!("Stray file with UI testing output: {:?}", file_path); *bad = true; } diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs index 6286945ad26..5f33f32f8f9 100644 --- a/src/tools/tidy/src/unit_tests.rs +++ b/src/tools/tidy/src/unit_tests.rs @@ -14,8 +14,8 @@ pub fn check(root_path: &Path, bad: &mut bool) { let libcore_tests = &root_path.join("libcore/tests"); let libcore_benches = &root_path.join("libcore/benches"); let is_core = |path: &Path| { - path.starts_with(libcore) && - !(path.starts_with(libcore_tests) || path.starts_with(libcore_benches)) + path.starts_with(libcore) + && !(path.starts_with(libcore_tests) || path.starts_with(libcore_benches)) }; let mut skip = |path: &Path| { @@ -28,38 +28,38 @@ pub fn check(root_path: &Path, bad: &mut bool) { (file_name == "tests" || file_name == "benches") && !is_core(path) } else { let extension = path.extension().unwrap_or_default(); - extension != "rs" || - (file_name == "tests.rs" || file_name == "benches.rs") && !is_core(path) + extension != "rs" + || (file_name == "tests.rs" || file_name == "benches.rs") && !is_core(path) } }; - super::walk( - root_path, - &mut skip, - &mut |entry, contents| { - let path = entry.path(); - let is_libcore = path.starts_with(libcore); - for (i, line) in contents.lines().enumerate() { - let line = line.trim(); - let is_test = || line.contains("#[test]") && !line.contains("`#[test]"); - let is_bench = || line.contains("#[bench]") && !line.contains("`#[bench]"); - if !line.starts_with("//") && (is_test() || is_bench()) { - let explanation = if is_libcore { - "libcore unit tests and benchmarks must be placed into \ + super::walk(root_path, &mut skip, &mut |entry, contents| { + let path = entry.path(); + let is_libcore = path.starts_with(libcore); + for (i, line) in contents.lines().enumerate() { + let line = line.trim(); + let is_test = || line.contains("#[test]") && 
!line.contains("`#[test]"); + let is_bench = || line.contains("#[bench]") && !line.contains("`#[bench]"); + if !line.starts_with("//") && (is_test() || is_bench()) { + let explanation = if is_libcore { + "libcore unit tests and benchmarks must be placed into \ `libcore/tests` or `libcore/benches`" - } else { - "unit tests and benchmarks must be placed into \ + } else { + "unit tests and benchmarks must be placed into \ separate files or directories named \ `tests.rs`, `benches.rs`, `tests` or `benches`" - }; - let name = if is_test() { "test" } else { "bench" }; - tidy_error!( - bad, "`{}:{}` contains `#[{}]`; {}", - path.display(), i + 1, name, explanation, - ); - return; - } + }; + let name = if is_test() { "test" } else { "bench" }; + tidy_error!( + bad, + "`{}:{}` contains `#[{}]`; {}", + path.display(), + i + 1, + name, + explanation, + ); + return; } - }, - ); + } + }); } diff --git a/src/tools/tidy/src/unstable_book.rs b/src/tools/tidy/src/unstable_book.rs index fb63520f068..472d66459d7 100644 --- a/src/tools/tidy/src/unstable_book.rs +++ b/src/tools/tidy/src/unstable_book.rs @@ -1,7 +1,7 @@ +use crate::features::{CollectedFeatures, Feature, Features, Status}; use std::collections::BTreeSet; use std::fs; -use std::path::{PathBuf, Path}; -use crate::features::{CollectedFeatures, Features, Feature, Status}; +use std::path::{Path, PathBuf}; pub const PATH_STR: &str = "doc/unstable-book"; @@ -30,10 +30,7 @@ pub fn unstable_book_lib_features_path(base_src_path: &Path) -> PathBuf { /// Tests whether `DirEntry` is a file. fn dir_entry_is_file(dir_entry: &fs::DirEntry) -> bool { - dir_entry - .file_type() - .expect("could not determine file type of directory entry") - .is_file() + dir_entry.file_type().expect("could not determine file type of directory entry").is_file() } /// Retrieves names of all unstable features. 
@@ -60,8 +57,9 @@ pub fn collect_unstable_book_section_file_names(dir: &Path) -> BTreeSet /// /// * hyphens replaced by underscores, /// * the markdown suffix ('.md') removed. -fn collect_unstable_book_lang_features_section_file_names(base_src_path: &Path) - -> BTreeSet { +fn collect_unstable_book_lang_features_section_file_names( + base_src_path: &Path, +) -> BTreeSet { collect_unstable_book_section_file_names(&unstable_book_lang_features_path(base_src_path)) } @@ -75,20 +73,25 @@ fn collect_unstable_book_lib_features_section_file_names(base_src_path: &Path) - pub fn check(path: &Path, features: CollectedFeatures, bad: &mut bool) { let lang_features = features.lang; - let mut lib_features = features.lib.into_iter().filter(|&(ref name, _)| { - !lang_features.contains_key(name) - }).collect::(); + let mut lib_features = features + .lib + .into_iter() + .filter(|&(ref name, _)| !lang_features.contains_key(name)) + .collect::(); // This library feature is defined in the `compiler_builtins` crate, which // has been moved out-of-tree. Now it can no longer be auto-discovered by // `tidy`, because we need to filter out its (submodule) directory. Manually // add it to the set of known library features so we can still generate docs. 
- lib_features.insert("compiler_builtins_lib".to_owned(), Feature { - level: Status::Unstable, - since: None, - has_gate_test: false, - tracking_issue: None, - }); + lib_features.insert( + "compiler_builtins_lib".to_owned(), + Feature { + level: Status::Unstable, + since: None, + has_gate_test: false, + tracking_issue: None, + }, + ); // Library features let unstable_lib_feature_names = collect_unstable_feature_names(&lib_features); @@ -101,23 +104,28 @@ pub fn check(path: &Path, features: CollectedFeatures, bad: &mut bool) { collect_unstable_book_lang_features_section_file_names(path); // Check for Unstable Book sections that don't have a corresponding unstable feature - for feature_name in &unstable_book_lib_features_section_file_names - - &unstable_lib_feature_names { + for feature_name in &unstable_book_lib_features_section_file_names - &unstable_lib_feature_names + { if !unstable_lang_feature_names.contains(&feature_name) { - tidy_error!(bad, - "The Unstable Book has a 'library feature' section '{}' which doesn't \ + tidy_error!( + bad, + "The Unstable Book has a 'library feature' section '{}' which doesn't \ correspond to an unstable library feature", - feature_name); + feature_name + ); } } // Check for Unstable Book sections that don't have a corresponding unstable feature. - for feature_name in &unstable_book_lang_features_section_file_names - - &unstable_lang_feature_names { - tidy_error!(bad, - "The Unstable Book has a 'language feature' section '{}' which doesn't \ + for feature_name in + &unstable_book_lang_features_section_file_names - &unstable_lang_feature_names + { + tidy_error!( + bad, + "The Unstable Book has a 'language feature' section '{}' which doesn't \ correspond to an unstable language feature", - feature_name) + feature_name + ) } // List unstable features that don't have Unstable Book sections. 
diff --git a/src/tools/unstable-book-gen/src/main.rs b/src/tools/unstable-book-gen/src/main.rs index 39e8d61edeb..c9d1561a9d3 100644 --- a/src/tools/unstable-book-gen/src/main.rs +++ b/src/tools/unstable-book-gen/src/main.rs @@ -2,14 +2,16 @@ #![deny(warnings)] -use tidy::features::{Features, collect_lib_features, collect_lang_features}; -use tidy::unstable_book::{collect_unstable_feature_names, collect_unstable_book_section_file_names, - PATH_STR, LANG_FEATURES_DIR, LIB_FEATURES_DIR}; use std::collections::BTreeSet; -use std::io::Write; -use std::fs::{self, File}; use std::env; +use std::fs::{self, File}; +use std::io::Write; use std::path::Path; +use tidy::features::{collect_lang_features, collect_lib_features, Features}; +use tidy::unstable_book::{ + collect_unstable_book_section_file_names, collect_unstable_feature_names, LANG_FEATURES_DIR, + LIB_FEATURES_DIR, PATH_STR, +}; /// A helper macro to `unwrap` a result except also print out details like: /// @@ -17,60 +19,51 @@ use std::path::Path; /// * The expression that failed /// * The error itself macro_rules! 
t { - ($e:expr) => (match $e { - Ok(e) => e, - Err(e) => panic!("{} failed with {}", stringify!($e), e), - }) + ($e:expr) => { + match $e { + Ok(e) => e, + Err(e) => panic!("{} failed with {}", stringify!($e), e), + } + }; } fn generate_stub_issue(path: &Path, name: &str, issue: u32) { let mut file = t!(File::create(path)); - t!(file.write_fmt(format_args!(include_str!("stub-issue.md"), - name = name, - issue = issue))); + t!(file.write_fmt(format_args!(include_str!("stub-issue.md"), name = name, issue = issue))); } fn generate_stub_no_issue(path: &Path, name: &str) { let mut file = t!(File::create(path)); - t!(file.write_fmt(format_args!(include_str!("stub-no-issue.md"), - name = name))); + t!(file.write_fmt(format_args!(include_str!("stub-no-issue.md"), name = name))); } -fn set_to_summary_str(set: &BTreeSet, dir: &str -) -> String { - set - .iter() - .map(|ref n| format!(" - [{}]({}/{}.md)", - n.replace('-', "_"), - dir, - n)) +fn set_to_summary_str(set: &BTreeSet, dir: &str) -> String { + set.iter() + .map(|ref n| format!(" - [{}]({}/{}.md)", n.replace('-', "_"), dir, n)) .fold("".to_owned(), |s, a| s + &a + "\n") } fn generate_summary(path: &Path, lang_features: &Features, lib_features: &Features) { - let compiler_flags = collect_unstable_book_section_file_names( - &path.join("src/compiler-flags")); + let compiler_flags = collect_unstable_book_section_file_names(&path.join("src/compiler-flags")); - let compiler_flags_str = set_to_summary_str(&compiler_flags, - "compiler-flags"); + let compiler_flags_str = set_to_summary_str(&compiler_flags, "compiler-flags"); let unstable_lang_features = collect_unstable_feature_names(&lang_features); let unstable_lib_features = collect_unstable_feature_names(&lib_features); - let lang_features_str = set_to_summary_str(&unstable_lang_features, - "language-features"); - let lib_features_str = set_to_summary_str(&unstable_lib_features, - "library-features"); + let lang_features_str = set_to_summary_str(&unstable_lang_features, 
"language-features"); + let lib_features_str = set_to_summary_str(&unstable_lib_features, "library-features"); let mut file = t!(File::create(&path.join("src/SUMMARY.md"))); - t!(file.write_fmt(format_args!(include_str!("SUMMARY.md"), - compiler_flags = compiler_flags_str, - language_features = lang_features_str, - library_features = lib_features_str))); - + t!(file.write_fmt(format_args!( + include_str!("SUMMARY.md"), + compiler_flags = compiler_flags_str, + language_features = lang_features_str, + library_features = lib_features_str + ))); } -fn generate_unstable_book_files(src :&Path, out: &Path, features :&Features) { +fn generate_unstable_book_files(src: &Path, out: &Path, features: &Features) { let unstable_features = collect_unstable_feature_names(features); let unstable_section_file_names = collect_unstable_book_section_file_names(src); t!(fs::create_dir_all(&out)); @@ -81,9 +74,7 @@ fn generate_unstable_book_files(src :&Path, out: &Path, features :&Features) { let feature = &features[&feature_name_underscore]; if let Some(issue) = feature.tracking_issue { - generate_stub_issue(&out_file_path, - &feature_name_underscore, - issue.get()); + generate_stub_issue(&out_file_path, &feature_name_underscore, issue.get()); } else { generate_stub_no_issue(&out_file_path, &feature_name_underscore); } @@ -111,20 +102,25 @@ fn main() { let dest_path = Path::new(&dest_path_str); let lang_features = collect_lang_features(src_path, &mut false); - let lib_features = collect_lib_features(src_path).into_iter().filter(|&(ref name, _)| { - !lang_features.contains_key(name) - }).collect(); + let lib_features = collect_lib_features(src_path) + .into_iter() + .filter(|&(ref name, _)| !lang_features.contains_key(name)) + .collect(); let doc_src_path = src_path.join(PATH_STR); t!(fs::create_dir_all(&dest_path)); - generate_unstable_book_files(&doc_src_path.join(LANG_FEATURES_DIR), - &dest_path.join(LANG_FEATURES_DIR), - &lang_features); - 
generate_unstable_book_files(&doc_src_path.join(LIB_FEATURES_DIR), - &dest_path.join(LIB_FEATURES_DIR), - &lib_features); + generate_unstable_book_files( + &doc_src_path.join(LANG_FEATURES_DIR), + &dest_path.join(LANG_FEATURES_DIR), + &lang_features, + ); + generate_unstable_book_files( + &doc_src_path.join(LIB_FEATURES_DIR), + &dest_path.join(LIB_FEATURES_DIR), + &lib_features, + ); copy_recursive(&doc_src_path, &dest_path); -- cgit 1.4.1-3-g733a5