about summary refs log tree commit diff
diff options
context:
space:
mode:
author	bors <bors@rust-lang.org>	2017-07-22 18:27:29 +0000
committer	bors <bors@rust-lang.org>	2017-07-22 18:27:29 +0000
commit	8d22af87d812d9a132f0a030753a5cdf53c87ee8 (patch)
tree	8e63144c83e9b2a43701de768dc8ddaece405c31
parent	f8d485f53dbe87e0d7b4ad14904fd7b0447a8cbe (diff)
parent	1c118231adaa941899773420b21bc6b55ca3014f (diff)
download	rust-8d22af87d812d9a132f0a030753a5cdf53c87ee8.tar.gz
download	rust-8d22af87d812d9a132f0a030753a5cdf53c87ee8.zip
Auto merge of #43059 - Mark-Simulacrum:rustbuild-2.0, r=alexcrichton
Rework Rustbuild to an eagerly compiling approach

This introduces a new dependency on `serde`; I don't believe that's a problem since bootstrap is compiled with nightly/beta always so proc macros are available. Compile times are slightly longer -- about 2-3x (30 seconds vs. 10 seconds). I don't think this is too big a problem, especially since recompiling bootstrap is somewhat rare. I think we can remove the dependency on Serde if necessary, though, so let me know.

r? @alexcrichton
-rw-r--r--src/Cargo.lock21
-rw-r--r--src/bootstrap/Cargo.toml7
-rw-r--r--src/bootstrap/builder.rs618
-rw-r--r--src/bootstrap/cache.rs267
-rw-r--r--src/bootstrap/cc.rs15
-rw-r--r--src/bootstrap/check.rs1529
-rw-r--r--src/bootstrap/compile.rs930
-rw-r--r--src/bootstrap/config.rs146
-rw-r--r--src/bootstrap/dist.rs2024
-rw-r--r--src/bootstrap/doc.rs857
-rw-r--r--src/bootstrap/flags.rs27
-rw-r--r--src/bootstrap/install.rs267
-rw-r--r--src/bootstrap/lib.rs553
-rw-r--r--src/bootstrap/metadata.rs30
-rw-r--r--src/bootstrap/native.rs624
-rw-r--r--src/bootstrap/sanity.rs10
-rw-r--r--src/bootstrap/step.rs1820
-rw-r--r--src/bootstrap/tool.rs353
-rw-r--r--src/tools/build-manifest/Cargo.toml5
-rw-r--r--src/tools/build-manifest/src/main.rs34
20 files changed, 5460 insertions, 4677 deletions
diff --git a/src/Cargo.lock b/src/Cargo.lock
index 04a556705b5..692252a8b05 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -134,10 +134,13 @@ dependencies = [
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)",
  "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.26 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -153,8 +156,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "build-manifest"
 version = "0.1.0"
 dependencies = [
- "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -1903,14 +1907,6 @@ version = "0.1.0"
 
 [[package]]
 name = "toml"
-version = "0.1.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "toml"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
@@ -2206,7 +2202,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
 "checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
 "checksum thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1697c4b57aeeb7a536b647165a2825faddffb1d3bad386d507709bd51a90bb14"
-"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
 "checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
 "checksum toml 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b0601da6c97135c8d330c7a13a013ca6cd4143221b01de2f8d4edc50a9e551c7"
 "checksum typed-arena 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5934776c3ac1bea4a9d56620d6bf2d483b20d394e49581db40f187e1118ff667"
diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml
index 8842dce0257..daa2a3d0a0f 100644
--- a/src/bootstrap/Cargo.toml
+++ b/src/bootstrap/Cargo.toml
@@ -33,8 +33,11 @@ build_helper = { path = "../build_helper" }
 cmake = "0.1.23"
 filetime = "0.1"
 num_cpus = "1.0"
-toml = "0.1"
 getopts = "0.2"
-rustc-serialize = "0.3"
 gcc = "0.3.50"
 libc = "0.2"
+serde = "1.0.8"
+serde_derive = "1.0.8"
+serde_json = "1.0.2"
+toml = "0.4"
+lazy_static = "0.2"
diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs
new file mode 100644
index 00000000000..7be391e5420
--- /dev/null
+++ b/src/bootstrap/builder.rs
@@ -0,0 +1,618 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::fmt::Debug;
+use std::hash::Hash;
+use std::cell::RefCell;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+use std::fs;
+use std::ops::Deref;
+use std::any::Any;
+use std::collections::BTreeSet;
+
+use compile;
+use install;
+use dist;
+use util::{exe, libdir, add_lib_path};
+use {Build, Mode};
+use cache::{INTERNER, Interned, Cache};
+use check;
+use flags::Subcommand;
+use doc;
+use tool;
+
+pub use Compiler;
+
+pub struct Builder<'a> {
+    pub build: &'a Build,
+    pub top_stage: u32,
+    pub kind: Kind,
+    cache: Cache,
+    stack: RefCell<Vec<Box<Any>>>,
+}
+
+impl<'a> Deref for Builder<'a> {
+    type Target = Build;
+
+    fn deref(&self) -> &Self::Target {
+        self.build
+    }
+}
+
+pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
+    /// `PathBuf` when directories are created or to return a `Compiler` once
+    /// it's been assembled.
+    type Output: Clone;
+
+    const DEFAULT: bool = false;
+
+    /// Run this rule for all hosts without cross compiling.
+    const ONLY_HOSTS: bool = false;
+
+    /// Run this rule for all targets, but only with the native host.
+    const ONLY_BUILD_TARGETS: bool = false;
+
+    /// Only run this step with the build triple as host and target.
+    const ONLY_BUILD: bool = false;
+
+    /// Primary function to execute this rule. Can call `builder.ensure(...)`
+    /// with other steps to run those.
+    fn run(self, builder: &Builder) -> Self::Output;
+
+    /// When bootstrap is passed a set of paths, this controls whether this rule
+    /// will execute. However, it does not get called in a "default" context
+    /// when we are not passed any paths; in that case, make_run is called
+    /// directly.
+    fn should_run(run: ShouldRun) -> ShouldRun;
+
+    /// Build up a "root" rule, either as a default rule or from a path passed
+    /// to us.
+    ///
+    /// When path is `None`, we are executing in a context where no paths were
+    /// passed. When `./x.py build` is run, for example, this rule could get
+    /// called if it is in the correct list below with a path of `None`.
+    fn make_run(_run: RunConfig) {
+        // It is reasonable to not have an implementation of make_run for rules
+        // who do not want to get called from the root context. This means that
+        // they are likely dependencies (e.g., sysroot creation) or similar, and
+        // as such calling them from ./x.py isn't logical.
+        unimplemented!()
+    }
+}
+
+pub struct RunConfig<'a> {
+    pub builder: &'a Builder<'a>,
+    pub host: Interned<String>,
+    pub target: Interned<String>,
+    pub path: Option<&'a Path>,
+}
+
+struct StepDescription {
+    default: bool,
+    only_hosts: bool,
+    only_build_targets: bool,
+    only_build: bool,
+    should_run: fn(ShouldRun) -> ShouldRun,
+    make_run: fn(RunConfig),
+}
+
+impl StepDescription {
+    fn from<S: Step>() -> StepDescription {
+        StepDescription {
+            default: S::DEFAULT,
+            only_hosts: S::ONLY_HOSTS,
+            only_build_targets: S::ONLY_BUILD_TARGETS,
+            only_build: S::ONLY_BUILD,
+            should_run: S::should_run,
+            make_run: S::make_run,
+        }
+    }
+
+    fn maybe_run(&self, builder: &Builder, path: Option<&Path>) {
+        let build = builder.build;
+        let hosts = if self.only_build_targets || self.only_build {
+            &build.config.host[..1]
+        } else {
+            &build.hosts
+        };
+
+        // Determine the actual targets participating in this rule.
+        // NOTE: We should keep the full projection from build triple to
+        // the hosts for the dist steps, now that the hosts array above is
+        // truncated to avoid duplication of work in that case. Therefore
+        // the original non-shadowed hosts array is used below.
+        let targets = if self.only_hosts {
+            // If --target was specified but --host wasn't specified,
+            // don't run any host-only tests. Also, respect any `--host`
+            // overrides as done for `hosts`.
+            if build.flags.host.len() > 0 {
+                &build.flags.host[..]
+            } else if build.flags.target.len() > 0 {
+                &[]
+            } else if self.only_build {
+                &build.config.host[..1]
+            } else {
+                &build.config.host[..]
+            }
+        } else {
+            &build.targets
+        };
+
+        for host in hosts {
+            for target in targets {
+                let run = RunConfig {
+                    builder,
+                    path,
+                    host: *host,
+                    target: *target,
+                };
+                (self.make_run)(run);
+            }
+        }
+    }
+
+    fn run(v: &[StepDescription], builder: &Builder, paths: &[PathBuf]) {
+        let should_runs = v.iter().map(|desc| {
+            (desc.should_run)(ShouldRun::new(builder))
+        }).collect::<Vec<_>>();
+        if paths.is_empty() {
+            for (desc, should_run) in v.iter().zip(should_runs) {
+                if desc.default && should_run.is_really_default {
+                    desc.maybe_run(builder, None);
+                }
+            }
+        } else {
+            for path in paths {
+                let mut attempted_run = false;
+                for (desc, should_run) in v.iter().zip(&should_runs) {
+                    if should_run.run(path) {
+                        attempted_run = true;
+                        desc.maybe_run(builder, Some(path));
+                    }
+                }
+
+                if !attempted_run {
+                    eprintln!("Warning: no rules matched {}.", path.display());
+                }
+            }
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct ShouldRun<'a> {
+    pub builder: &'a Builder<'a>,
+    // use a BTreeSet to maintain sort order
+    paths: BTreeSet<PathBuf>,
+
+    // If this is a default rule, this is an additional constraint placed on
+    // its run. Generally something like compiler docs being enabled.
+    is_really_default: bool,
+}
+
+impl<'a> ShouldRun<'a> {
+    fn new(builder: &'a Builder) -> ShouldRun<'a> {
+        ShouldRun {
+            builder: builder,
+            paths: BTreeSet::new(),
+            is_really_default: true, // by default no additional conditions
+        }
+    }
+
+    pub fn default_condition(mut self, cond: bool) -> Self {
+        self.is_really_default = cond;
+        self
+    }
+
+    pub fn krate(mut self, name: &str) -> Self {
+        for (_, krate_path) in self.builder.crates(name) {
+            self.paths.insert(PathBuf::from(krate_path));
+        }
+        self
+    }
+
+    pub fn path(mut self, path: &str) -> Self {
+        self.paths.insert(PathBuf::from(path));
+        self
+    }
+
+    // allows being more explicit about why should_run in Step returns the value passed to it
+    pub fn never(self) -> ShouldRun<'a> {
+        self
+    }
+
+    fn run(&self, path: &Path) -> bool {
+        self.paths.iter().any(|p| path.ends_with(p))
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum Kind {
+    Build,
+    Test,
+    Bench,
+    Dist,
+    Doc,
+    Install,
+}
+
+impl<'a> Builder<'a> {
+    fn get_step_descriptions(kind: Kind) -> Vec<StepDescription> {
+        macro_rules! describe {
+            ($($rule:ty),+ $(,)*) => {{
+                vec![$(StepDescription::from::<$rule>()),+]
+            }};
+        }
+        match kind {
+            Kind::Build => describe!(compile::Std, compile::Test, compile::Rustc,
+                compile::StartupObjects, tool::BuildManifest, tool::Rustbook, tool::ErrorIndex,
+                tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest,
+                tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
+                tool::RustInstaller, tool::Cargo, tool::Rls),
+            Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
+                check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Linkcheck,
+                check::Cargotest, check::Cargo, check::Rls, check::Docs, check::ErrorIndex,
+                check::Distcheck),
+            Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
+            Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
+                doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,
+                doc::Reference),
+            Kind::Dist => describe!(dist::Docs, dist::Mingw, dist::Rustc, dist::DebuggerScripts,
+                dist::Std, dist::Analysis, dist::Src, dist::PlainSourceTarball, dist::Cargo,
+                dist::Rls, dist::Extended, dist::HashSign),
+            Kind::Install => describe!(install::Docs, install::Std, install::Cargo, install::Rls,
+                install::Analysis, install::Src, install::Rustc),
+        }
+    }
+
+    pub fn get_help(build: &Build, subcommand: &str) -> Option<String> {
+        let kind = match subcommand {
+            "build" => Kind::Build,
+            "doc" => Kind::Doc,
+            "test" => Kind::Test,
+            "bench" => Kind::Bench,
+            "dist" => Kind::Dist,
+            "install" => Kind::Install,
+            _ => return None,
+        };
+
+        let builder = Builder {
+            build: build,
+            top_stage: build.flags.stage.unwrap_or(2),
+            kind: kind,
+            cache: Cache::new(),
+            stack: RefCell::new(Vec::new()),
+        };
+
+        let builder = &builder;
+        let mut should_run = ShouldRun::new(builder);
+        for desc in Builder::get_step_descriptions(builder.kind) {
+            should_run = (desc.should_run)(should_run);
+        }
+        let mut help = String::from("Available paths:\n");
+        for path in should_run.paths {
+            help.push_str(format!("    ./x.py {} {}\n", subcommand, path.display()).as_str());
+        }
+        Some(help)
+    }
+
+    pub fn run(build: &Build) {
+        let (kind, paths) = match build.flags.cmd {
+            Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
+            Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
+            Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
+            Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
+            Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
+            Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
+            Subcommand::Clean => panic!(),
+        };
+
+        let builder = Builder {
+            build: build,
+            top_stage: build.flags.stage.unwrap_or(2),
+            kind: kind,
+            cache: Cache::new(),
+            stack: RefCell::new(Vec::new()),
+        };
+
+        StepDescription::run(&Builder::get_step_descriptions(builder.kind), &builder, paths);
+    }
+
+    pub fn default_doc(&self, paths: Option<&[PathBuf]>) {
+        let paths = paths.unwrap_or(&[]);
+        StepDescription::run(&Builder::get_step_descriptions(Kind::Doc), self, paths);
+    }
+
+    /// Obtain a compiler at a given stage and for a given host. Explicitly does
+    /// not take `Compiler` since all `Compiler` instances are meant to be
+    /// obtained through this function, since it ensures that they are valid
+    /// (i.e., built and assembled).
+    pub fn compiler(&self, stage: u32, host: Interned<String>) -> Compiler {
+        self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } })
+    }
+
+    pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> {
+        self.ensure(compile::Sysroot { compiler })
+    }
+
+    /// Returns the libdir where the standard library and other artifacts are
+    /// found for a compiler's sysroot.
+    pub fn sysroot_libdir(
+        &self, compiler: Compiler, target: Interned<String>
+    ) -> Interned<PathBuf> {
+        #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+        struct Libdir {
+            compiler: Compiler,
+            target: Interned<String>,
+        }
+        impl Step for Libdir {
+            type Output = Interned<PathBuf>;
+
+            fn should_run(run: ShouldRun) -> ShouldRun {
+                run.never()
+            }
+
+            fn run(self, builder: &Builder) -> Interned<PathBuf> {
+                let compiler = self.compiler;
+                let lib = if compiler.stage >= 2 && builder.build.config.libdir_relative.is_some() {
+                    builder.build.config.libdir_relative.clone().unwrap()
+                } else {
+                    PathBuf::from("lib")
+                };
+                let sysroot = builder.sysroot(self.compiler).join(lib)
+                    .join("rustlib").join(self.target).join("lib");
+                let _ = fs::remove_dir_all(&sysroot);
+                t!(fs::create_dir_all(&sysroot));
+                INTERNER.intern_path(sysroot)
+            }
+        }
+        self.ensure(Libdir { compiler, target })
+    }
+
+    /// Returns the compiler's libdir where it stores the dynamic libraries that
+    /// it itself links against.
+    ///
+    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
+    /// Windows.
+    pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.build.rustc_snapshot_libdir()
+        } else {
+            self.sysroot(compiler).join(libdir(&compiler.host))
+        }
+    }
+
+    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
+    /// library lookup path.
+    pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) {
+        // Windows doesn't need dylib path munging because the dlls for the
+        // compiler live next to the compiler and the system will find them
+        // automatically.
+        if cfg!(windows) {
+            return
+        }
+
+        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
+    }
+
+    /// Get a path to the compiler specified.
+    pub fn rustc(&self, compiler: Compiler) -> PathBuf {
+        if compiler.is_snapshot(self) {
+            self.initial_rustc.clone()
+        } else {
+            self.sysroot(compiler).join("bin").join(exe("rustc", &compiler.host))
+        }
+    }
+
+    /// Get the `rustdoc` executable next to the specified compiler
+    pub fn rustdoc(&self, compiler: Compiler) -> PathBuf {
+        let mut rustdoc = self.rustc(compiler);
+        rustdoc.pop();
+        rustdoc.push(exe("rustdoc", &compiler.host));
+        rustdoc
+    }
+
+    /// Prepares an invocation of `cargo` to be run.
+    ///
+    /// This will create a `Command` that represents a pending execution of
+    /// Cargo. This cargo will be configured to use `compiler` as the actual
+    /// rustc compiler, its output will be scoped by `mode`'s output directory,
+    /// it will pass the `--target` flag for the specified `target`, and will be
+    /// executing the Cargo command `cmd`.
+    pub fn cargo(&self,
+             compiler: Compiler,
+             mode: Mode,
+             target: Interned<String>,
+             cmd: &str) -> Command {
+        let mut cargo = Command::new(&self.initial_cargo);
+        let out_dir = self.stage_out(compiler, mode);
+        cargo.env("CARGO_TARGET_DIR", out_dir)
+             .arg(cmd)
+             .arg("-j").arg(self.jobs().to_string())
+             .arg("--target").arg(target);
+
+        // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
+        // Force cargo to output binaries with disambiguating hashes in the name
+        cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel);
+
+        let stage;
+        if compiler.stage == 0 && self.local_rebuild {
+            // Assume the local-rebuild rustc already has stage1 features.
+            stage = 1;
+        } else {
+            stage = compiler.stage;
+        }
+
+        // Customize the compiler we're running. Specify the compiler to cargo
+        // as our shim and then pass it some various options used to configure
+        // how the actual compiler itself is called.
+        //
+        // These variables are primarily all read by
+        // src/bootstrap/bin/{rustc.rs,rustdoc.rs}
+        cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
+             .env("RUSTC", self.out.join("bootstrap/debug/rustc"))
+             .env("RUSTC_REAL", self.rustc(compiler))
+             .env("RUSTC_STAGE", stage.to_string())
+             .env("RUSTC_CODEGEN_UNITS",
+                  self.config.rust_codegen_units.to_string())
+             .env("RUSTC_DEBUG_ASSERTIONS",
+                  self.config.rust_debug_assertions.to_string())
+             .env("RUSTC_SYSROOT", self.sysroot(compiler))
+             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
+             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
+             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
+             .env("RUSTDOC_REAL", self.rustdoc(compiler))
+             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
+
+        if mode != Mode::Tool {
+            // Tools don't get debuginfo right now, e.g. cargo and rls don't
+            // get compiled with debuginfo.
+            cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+                 .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
+                 .env("RUSTC_FORCE_UNSTABLE", "1");
+
+            // Currently the compiler depends on crates from crates.io, and
+            // then other crates can depend on the compiler (e.g. proc-macro
+            // crates). Let's say, for example that rustc itself depends on the
+            // bitflags crate. If an external crate then depends on the
+            // bitflags crate as well, we need to make sure they don't
+            // conflict, even if they pick the same version of bitflags. We'll
+            // want to make sure that e.g. a plugin and rustc each get their
+            // own copy of bitflags.
+
+            // Cargo ensures that this works in general through the -C metadata
+            // flag. This flag will frob the symbols in the binary to make sure
+            // they're different, even though the source code is the exact
+            // same. To solve this problem for the compiler we extend Cargo's
+            // already-passed -C metadata flag with our own. Our rustc.rs
+            // wrapper around the actual rustc will detect -C metadata being
+            // passed and frob it with this extra string we're passing in.
+            cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
+        }
+
+        // Enable usage of unstable features
+        cargo.env("RUSTC_BOOTSTRAP", "1");
+        self.add_rust_test_threads(&mut cargo);
+
+        // Almost all of the crates that we compile as part of the bootstrap may
+        // have a build script, including the standard library. To compile a
+        // build script, however, it itself needs a standard library! This
+        // introduces a bit of a pickle when we're compiling the standard
+        // library itself.
+        //
+        // To work around this we actually end up using the snapshot compiler
+        // (stage0) for compiling build scripts of the standard library itself.
+        // The stage0 compiler is guaranteed to have a libstd available for use.
+        //
+        // For other crates, however, we know that we've already got a standard
+        // library up and running, so we can use the normal compiler to compile
+        // build scripts in that situation.
+        if mode == Mode::Libstd {
+            cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc)
+                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
+        } else {
+            cargo.env("RUSTC_SNAPSHOT", self.rustc(compiler))
+                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
+        }
+
+        // Ignore incremental modes except for stage0, since we're
+        // not guaranteeing correctness across builds if the compiler
+        // is changing under your feet.
+        if self.flags.incremental && compiler.stage == 0 {
+            let incr_dir = self.incremental_dir(compiler);
+            cargo.env("RUSTC_INCREMENTAL", incr_dir);
+        }
+
+        if let Some(ref on_fail) = self.flags.on_fail {
+            cargo.env("RUSTC_ON_FAIL", on_fail);
+        }
+
+        cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
+
+        // Specify some various options for build scripts used throughout
+        // the build.
+        //
+        // FIXME: the guard against msvc shouldn't need to be here
+        if !target.contains("msvc") {
+            cargo.env(format!("CC_{}", target), self.cc(target))
+                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
+                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
+
+            if let Ok(cxx) = self.cxx(target) {
+                 cargo.env(format!("CXX_{}", target), cxx);
+            }
+        }
+
+        if mode == Mode::Libstd && self.config.extended && compiler.is_final_stage(self) {
+            cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
+        }
+
+        // Environment variables *required* throughout the build
+        //
+        // FIXME: should update code to not require this env var
+        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
+
+        if self.is_verbose() {
+            cargo.arg("-v");
+        }
+        // FIXME: cargo bench does not accept `--release`
+        if self.config.rust_optimize && cmd != "bench" {
+            cargo.arg("--release");
+        }
+        if self.config.locked_deps {
+            cargo.arg("--locked");
+        }
+        if self.config.vendor || self.is_sudo {
+            cargo.arg("--frozen");
+        }
+
+        self.ci_env.force_coloring_in_ci(&mut cargo);
+
+        cargo
+    }
+
+    /// Ensure that a given step is built, returning its output. This will
+    /// cache the step, so it is safe (and good!) to call this as often as
+    /// needed to ensure that all dependencies are built.
+    pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
+        {
+            let mut stack = self.stack.borrow_mut();
+            for stack_step in stack.iter() {
+                // should skip
+                if stack_step.downcast_ref::<S>().map_or(true, |stack_step| *stack_step != step) {
+                    continue;
+                }
+                let mut out = String::new();
+                out += &format!("\n\nCycle in build detected when adding {:?}\n", step);
+                for el in stack.iter().rev() {
+                    out += &format!("\t{:?}\n", el);
+                }
+                panic!(out);
+            }
+            if let Some(out) = self.cache.get(&step) {
+                self.build.verbose(&format!("{}c {:?}", "  ".repeat(stack.len()), step));
+
+                return out;
+            }
+            self.build.verbose(&format!("{}> {:?}", "  ".repeat(stack.len()), step));
+            stack.push(Box::new(step.clone()));
+        }
+        let out = step.clone().run(self);
+        {
+            let mut stack = self.stack.borrow_mut();
+            let cur_step = stack.pop().expect("step stack empty");
+            assert_eq!(cur_step.downcast_ref(), Some(&step));
+        }
+        self.build.verbose(&format!("{}< {:?}", "  ".repeat(self.stack.borrow().len()), step));
+        self.cache.put(step, out.clone());
+        out
+    }
+}
diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs
new file mode 100644
index 00000000000..c2749315882
--- /dev/null
+++ b/src/bootstrap/cache.rs
@@ -0,0 +1,267 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::any::{Any, TypeId};
+use std::borrow::Borrow;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::convert::AsRef;
+use std::ffi::OsStr;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
+use std::mem;
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+use std::sync::Mutex;
+
+use builder::Step;
+
+pub struct Interned<T>(usize, PhantomData<*const T>);
+
+impl Default for Interned<String> {
+    fn default() -> Self {
+        INTERNER.intern_string(String::default())
+    }
+}
+
+impl Default for Interned<PathBuf> {
+    fn default() -> Self {
+        INTERNER.intern_path(PathBuf::default())
+    }
+}
+
+impl<T> Copy for Interned<T> {}
+impl<T> Clone for Interned<T> {
+    fn clone(&self) -> Interned<T> {
+        *self
+    }
+}
+
+impl<T> PartialEq for Interned<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.0 == other.0
+    }
+}
+impl<T> Eq for Interned<T> {}
+
+impl PartialEq<str> for Interned<String> {
+    fn eq(&self, other: &str) -> bool {
+       *self == other
+    }
+}
+impl<'a> PartialEq<&'a str> for Interned<String> {
+    fn eq(&self, other: &&str) -> bool {
+        **self == **other
+    }
+}
+impl<'a, T> PartialEq<&'a Interned<T>> for Interned<T> {
+    fn eq(&self, other: &&Self) -> bool {
+        self.0 == other.0
+    }
+}
+impl<'a, T> PartialEq<Interned<T>> for &'a Interned<T> {
+    fn eq(&self, other: &Interned<T>) -> bool {
+        self.0 == other.0
+    }
+}
+
+unsafe impl<T> Send for Interned<T> {}
+unsafe impl<T> Sync for Interned<T> {}
+
+impl fmt::Display for Interned<String> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let s: &str = &*self;
+        f.write_str(s)
+    }
+}
+
+impl fmt::Debug for Interned<String> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let s: &str = &*self;
+        f.write_fmt(format_args!("{:?}", s))
+    }
+}
+impl fmt::Debug for Interned<PathBuf> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let s: &Path = &*self;
+        f.write_fmt(format_args!("{:?}", s))
+    }
+}
+
+impl Hash for Interned<String> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        let l = INTERNER.strs.lock().unwrap();
+        l.get(*self).hash(state)
+    }
+}
+
+impl Hash for Interned<PathBuf> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        let l = INTERNER.paths.lock().unwrap();
+        l.get(*self).hash(state)
+    }
+}
+
+impl Deref for Interned<String> {
+    type Target = str;
+    fn deref(&self) -> &'static str {
+        let l = INTERNER.strs.lock().unwrap();
+        unsafe { mem::transmute::<&str, &'static str>(l.get(*self)) }
+    }
+}
+
+impl Deref for Interned<PathBuf> {
+    type Target = Path;
+    fn deref(&self) -> &'static Path {
+        let l = INTERNER.paths.lock().unwrap();
+        unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) }
+    }
+}
+
+impl AsRef<Path> for Interned<PathBuf> {
+    fn as_ref(&self) -> &'static Path {
+        let l = INTERNER.paths.lock().unwrap();
+        unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self)) }
+    }
+}
+
+impl AsRef<Path> for Interned<String> {
+    fn as_ref(&self) -> &'static Path {
+        let l = INTERNER.strs.lock().unwrap();
+        unsafe { mem::transmute::<&Path, &'static Path>(l.get(*self).as_ref()) }
+    }
+}
+
+impl AsRef<OsStr> for Interned<PathBuf> {
+    fn as_ref(&self) -> &'static OsStr {
+        let l = INTERNER.paths.lock().unwrap();
+        unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) }
+    }
+}
+
+impl AsRef<OsStr> for Interned<String> {
+    fn as_ref(&self) -> &'static OsStr {
+        let l = INTERNER.strs.lock().unwrap();
+        unsafe { mem::transmute::<&OsStr, &'static OsStr>(l.get(*self).as_ref()) }
+    }
+}
+
+
+struct TyIntern<T> {
+    items: Vec<T>,
+    set: HashMap<T, Interned<T>>,
+}
+
+impl<T: Hash + Clone + Eq> TyIntern<T> {
+    fn new() -> TyIntern<T> {
+        TyIntern {
+            items: Vec::new(),
+            set: HashMap::new(),
+        }
+    }
+
+    fn intern_borrow<B>(&mut self, item: &B) -> Interned<T>
+    where
+        B: Eq + Hash + ToOwned<Owned=T> + ?Sized,
+        T: Borrow<B>,
+    {
+        if let Some(i) = self.set.get(&item) {
+            return *i;
+        }
+        let item = item.to_owned();
+        let interned =  Interned(self.items.len(), PhantomData::<*const T>);
+        self.set.insert(item.clone(), interned);
+        self.items.push(item);
+        interned
+    }
+
+    fn intern(&mut self, item: T) -> Interned<T> {
+        if let Some(i) = self.set.get(&item) {
+            return *i;
+        }
+        let interned =  Interned(self.items.len(), PhantomData::<*const T>);
+        self.set.insert(item.clone(), interned);
+        self.items.push(item);
+        interned
+    }
+
+    fn get(&self, i: Interned<T>) -> &T {
+        &self.items[i.0]
+    }
+}
+
+pub struct Interner {
+    strs: Mutex<TyIntern<String>>,
+    paths: Mutex<TyIntern<PathBuf>>,
+}
+
+impl Interner {
+    fn new() -> Interner {
+        Interner {
+            strs: Mutex::new(TyIntern::new()),
+            paths: Mutex::new(TyIntern::new()),
+        }
+    }
+
+    pub fn intern_str(&self, s: &str) -> Interned<String> {
+        self.strs.lock().unwrap().intern_borrow(s)
+    }
+    pub fn intern_string(&self, s: String) -> Interned<String> {
+        self.strs.lock().unwrap().intern(s)
+    }
+
+    pub fn intern_path(&self, s: PathBuf) -> Interned<PathBuf> {
+        self.paths.lock().unwrap().intern(s)
+    }
+}
+
+lazy_static! {
+    pub static ref INTERNER: Interner = Interner::new();
+}
+
+/// This is essentially a HashMap which allows storing any type in its input and
+/// any type in its output. It is a write-once cache; values are never evicted,
+/// which means that references to the value can safely be returned from the
+/// get() method.
+#[derive(Debug)]
+pub struct Cache(
+    RefCell<HashMap<
+        TypeId,
+        Box<Any>, // actually a HashMap<Step, Interned<Step::Output>>
+    >>
+);
+
+impl Cache {
+    pub fn new() -> Cache {
+        Cache(RefCell::new(HashMap::new()))
+    }
+
+    pub fn put<S: Step>(&self, step: S, value: S::Output) {
+        let mut cache = self.0.borrow_mut();
+        let type_id = TypeId::of::<S>();
+        let stepcache = cache.entry(type_id)
+                        .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
+                        .downcast_mut::<HashMap<S, S::Output>>()
+                        .expect("invalid type mapped");
+        assert!(!stepcache.contains_key(&step), "processing {:?} a second time", step);
+        stepcache.insert(step, value);
+    }
+
+    pub fn get<S: Step>(&self, step: &S) -> Option<S::Output> {
+        let mut cache = self.0.borrow_mut();
+        let type_id = TypeId::of::<S>();
+        let stepcache = cache.entry(type_id)
+                        .or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
+                        .downcast_mut::<HashMap<S, S::Output>>()
+                        .expect("invalid type mapped");
+        stepcache.get(step).cloned()
+    }
+}
diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs
index 7c7161916ee..739904e4f7c 100644
--- a/src/bootstrap/cc.rs
+++ b/src/bootstrap/cc.rs
@@ -38,6 +38,7 @@ use gcc;
 
 use Build;
 use config::Target;
+use cache::Interned;
 
 pub fn find(build: &mut Build) {
     // For all targets we're going to need a C compiler for building some shims
@@ -50,11 +51,11 @@ pub fn find(build: &mut Build) {
         cfg.cargo_metadata(false).opt_level(0).debug(false)
            .target(target).host(&build.build);
 
-        let config = build.config.target_config.get(target);
+        let config = build.config.target_config.get(&target);
         if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
             cfg.compiler(cc);
         } else {
-            set_compiler(&mut cfg, "gcc", target, config, build);
+            set_compiler(&mut cfg, "gcc", *target, config, build);
         }
 
         let compiler = cfg.get_compiler();
@@ -63,7 +64,7 @@ pub fn find(build: &mut Build) {
         if let Some(ref ar) = ar {
             build.verbose(&format!("AR_{} = {:?}", target, ar));
         }
-        build.cc.insert(target.to_string(), (compiler, ar));
+        build.cc.insert(*target, (compiler, ar));
     }
 
     // For all host triples we need to find a C++ compiler as well
@@ -78,20 +79,20 @@ pub fn find(build: &mut Build) {
         if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
             cfg.compiler(cxx);
         } else {
-            set_compiler(&mut cfg, "g++", host, config, build);
+            set_compiler(&mut cfg, "g++", *host, config, build);
         }
         let compiler = cfg.get_compiler();
         build.verbose(&format!("CXX_{} = {:?}", host, compiler.path()));
-        build.cxx.insert(host.to_string(), compiler);
+        build.cxx.insert(*host, compiler);
     }
 }
 
 fn set_compiler(cfg: &mut gcc::Config,
                 gnu_compiler: &str,
-                target: &str,
+                target: Interned<String>,
                 config: Option<&Target>,
                 build: &Build) {
-    match target {
+    match &*target {
         // When compiling for android we may have the NDK configured in the
         // config.toml in which case we look there. Otherwise the default
         // compiler already takes into account the triple in question.
diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs
index e4b0e2fb9ca..ee589261752 100644
--- a/src/bootstrap/check.rs
+++ b/src/bootstrap/check.rs
@@ -25,14 +25,20 @@ use std::io::Read;
 
 use build_helper::{self, output};
 
-use {Build, Compiler, Mode};
+use {Build, Mode};
 use dist;
-use util::{self, dylib_path, dylib_path_var, exe};
+use util::{self, dylib_path, dylib_path_var};
+
+use compile;
+use native;
+use builder::{Kind, RunConfig, ShouldRun, Builder, Compiler, Step};
+use tool::{self, Tool};
+use cache::{INTERNER, Interned};
 
 const ADB_TEST_DIR: &str = "/data/tmp/work";
 
 /// The two modes of the test runner; tests or benchmarks.
-#[derive(Copy, Clone)]
+#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
 pub enum TestKind {
     /// Run `cargo test`
     Test,
@@ -81,320 +87,718 @@ fn try_run_quiet(build: &Build, cmd: &mut Command) {
     }
 }
 
-/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will verify the validity of all our links in the
-/// documentation to ensure we don't have a bunch of dead ones.
-pub fn linkcheck(build: &Build, host: &str) {
-    println!("Linkcheck ({})", host);
-    let compiler = Compiler::new(0, host);
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Linkcheck {
+    host: Interned<String>,
+}
+
+impl Step for Linkcheck {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+    const DEFAULT: bool = true;
 
-    let _time = util::timeit();
-    try_run(build, build.tool_cmd(&compiler, "linkchecker")
-                        .arg(build.out.join(host).join("doc")));
+    /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` will verify the validity of all our links in the
+    /// documentation to ensure we don't have a bunch of dead ones.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+
+        println!("Linkcheck ({})", host);
+
+        builder.default_doc(None);
+
+        let _time = util::timeit();
+        try_run(build, builder.tool_cmd(Tool::Linkchecker)
+                            .arg(build.out.join(host).join("doc")));
+    }
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/tools/linkchecker").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Linkcheck { host: run.host });
+    }
 }
 
-/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` will check out a few Rust projects and run `cargo
-/// test` to ensure that we don't regress the test suites there.
-pub fn cargotest(build: &Build, stage: u32, host: &str) {
-    let compiler = Compiler::new(stage, host);
-
-    // Note that this is a short, cryptic, and not scoped directory name. This
-    // is currently to minimize the length of path on Windows where we otherwise
-    // quickly run into path name limit constraints.
-    let out_dir = build.out.join("ct");
-    t!(fs::create_dir_all(&out_dir));
-
-    let _time = util::timeit();
-    let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
-    build.prepare_tool_cmd(&compiler, &mut cmd);
-    try_run(build, cmd.arg(&build.initial_cargo)
-                      .arg(&out_dir)
-                      .env("RUSTC", build.compiler_path(&compiler))
-                      .env("RUSTDOC", build.rustdoc(&compiler)));
-}
-
-/// Runs `cargo test` for `cargo` packaged with Rust.
-pub fn cargo(build: &Build, stage: u32, host: &str) {
-    let compiler = &Compiler::new(stage, host);
-
-    let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
-    cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
-    if !build.fail_fast {
-        cargo.arg("--no-fail-fast");
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargotest {
+    stage: u32,
+    host: Interned<String>,
+}
+
+impl Step for Cargotest {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/cargotest")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Cargotest {
+            stage: run.builder.top_stage,
+            host: run.host,
+        });
+    }
+
+    /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` will check out a few Rust projects and run `cargo
+    /// test` to ensure that we don't regress the test suites there.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = builder.compiler(self.stage, self.host);
+        builder.ensure(compile::Rustc { compiler, target: compiler.host });
+
+        // Note that this is a short, cryptic, and not scoped directory name. This
+        // is currently to minimize the length of path on Windows where we otherwise
+        // quickly run into path name limit constraints.
+        let out_dir = build.out.join("ct");
+        t!(fs::create_dir_all(&out_dir));
+
+        let _time = util::timeit();
+        let mut cmd = builder.tool_cmd(Tool::CargoTest);
+        try_run(build, cmd.arg(&build.initial_cargo)
+                          .arg(&out_dir)
+                          .env("RUSTC", builder.rustc(compiler))
+                          .env("RUSTDOC", builder.rustdoc(compiler)));
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Cargo {
+    stage: u32,
+    host: Interned<String>,
+}
+
+impl Step for Cargo {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/cargo")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Cargo {
+            stage: run.builder.top_stage,
+            host: run.target,
+        });
     }
 
-    // Don't build tests dynamically, just a pain to work with
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+    /// Runs `cargo test` for `cargo` packaged with Rust.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = builder.compiler(self.stage, self.host);
 
-    // Don't run cross-compile tests, we may not have cross-compiled libstd libs
-    // available.
-    cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
+        builder.ensure(tool::Cargo { stage: self.stage, target: self.host });
+        let mut cargo = builder.cargo(compiler, Mode::Tool, self.host, "test");
+        cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
+        if !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
+
+        // Don't build tests dynamically, just a pain to work with
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+        // Don't run cross-compile tests, we may not have cross-compiled libstd libs
+        // available.
+        cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
+
+        try_run(build, cargo.env("PATH", &path_for_cargo(builder, compiler)));
+    }
+}
 
-    try_run(build, cargo.env("PATH", &path_for_cargo(build, compiler)));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rls {
+    stage: u32,
+    host: Interned<String>,
 }
 
-/// Runs `cargo test` for the rls.
-pub fn rls(build: &Build, stage: u32, host: &str) {
-    let compiler = &Compiler::new(stage, host);
+impl Step for Rls {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
 
-    let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
-    cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml"));
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/rls")
+    }
 
-    // Don't build tests dynamically, just a pain to work with
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rls {
+            stage: run.builder.top_stage,
+            host: run.target,
+        });
+    }
 
-    build.add_rustc_lib_path(compiler, &mut cargo);
+    /// Runs `cargo test` for the rls.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let host = self.host;
+        let compiler = builder.compiler(stage, host);
 
-    try_run(build, &mut cargo);
+        builder.ensure(tool::Rls { stage: self.stage, target: self.host });
+        let mut cargo = builder.cargo(compiler, Mode::Tool, host, "test");
+        cargo.arg("--manifest-path").arg(build.src.join("src/tools/rls/Cargo.toml"));
+
+        // Don't build tests dynamically, just a pain to work with
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+        builder.add_rustc_lib_path(compiler, &mut cargo);
+
+        try_run(build, &mut cargo);
+    }
 }
 
-fn path_for_cargo(build: &Build, compiler: &Compiler) -> OsString {
+fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString {
     // Configure PATH to find the right rustc. NB. we have to use PATH
     // and not RUSTC because the Cargo test suite has tests that will
     // fail if rustc is not spelled `rustc`.
-    let path = build.sysroot(compiler).join("bin");
+    let path = builder.sysroot(compiler).join("bin");
     let old_path = env::var_os("PATH").unwrap_or_default();
     env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("")
 }
 
-/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
-///
-/// This tool in `src/tools` checks up on various bits and pieces of style and
-/// otherwise just implements a few lint-like checks that are specific to the
-/// compiler itself.
-pub fn tidy(build: &Build, host: &str) {
-    let _folder = build.fold_output(|| "tidy");
-    println!("tidy check ({})", host);
-    let compiler = Compiler::new(0, host);
-    let mut cmd = build.tool_cmd(&compiler, "tidy");
-    cmd.arg(build.src.join("src"));
-    if !build.config.vendor {
-        cmd.arg("--no-vendor");
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Tidy {
+    host: Interned<String>,
+}
+
+impl Step for Tidy {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    /// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
+    ///
+    /// This tool in `src/tools` checks up on various bits and pieces of style and
+    /// otherwise just implements a few lint-like checks that are specific to the
+    /// compiler itself.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let host = self.host;
+
+        let _folder = build.fold_output(|| "tidy");
+        println!("tidy check ({})", host);
+        let mut cmd = builder.tool_cmd(Tool::Tidy);
+        cmd.arg(build.src.join("src"));
+        if !build.config.vendor {
+            cmd.arg("--no-vendor");
+        }
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
+        try_run(build, &mut cmd);
     }
-    if build.config.quiet_tests {
-        cmd.arg("--quiet");
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/tidy")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Tidy {
+            host: run.builder.build.build,
+        });
     }
-    try_run(build, &mut cmd);
 }
 
-fn testdir(build: &Build, host: &str) -> PathBuf {
+fn testdir(build: &Build, host: Interned<String>) -> PathBuf {
     build.out.join(host).join("test")
 }
 
-/// Executes the `compiletest` tool to run a suite of tests.
-///
-/// Compiles all tests with `compiler` for `target` with the specified
-/// compiletest `mode` and `suite` arguments. For example `mode` can be
-/// "run-pass" or `suite` can be something like `debuginfo`.
-pub fn compiletest(build: &Build,
-                   compiler: &Compiler,
-                   target: &str,
-                   mode: &str,
-                   suite: &str) {
-    let _folder = build.fold_output(|| format!("test_{}", suite));
-    println!("Check compiletest suite={} mode={} ({} -> {})",
-             suite, mode, compiler.host, target);
-    let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
-                                          "compiletest"));
-    build.prepare_tool_cmd(compiler, &mut cmd);
-
-    // compiletest currently has... a lot of arguments, so let's just pass all
-    // of them!
-
-    cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler));
-    cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target));
-    cmd.arg("--rustc-path").arg(build.compiler_path(compiler));
-    cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler));
-    cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
-    cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
-    cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
-    cmd.arg("--mode").arg(mode);
-    cmd.arg("--target").arg(target);
-    cmd.arg("--host").arg(compiler.host);
-    cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.build));
-
-    if let Some(ref nodejs) = build.config.nodejs {
-        cmd.arg("--nodejs").arg(nodejs);
-    }
-
-    let mut flags = vec!["-Crpath".to_string()];
-    if build.config.rust_optimize_tests {
-        flags.push("-O".to_string());
-    }
-    if build.config.rust_debuginfo_tests {
-        flags.push("-g".to_string());
-    }
-
-    let mut hostflags = build.rustc_flags(&compiler.host);
-    hostflags.extend(flags.clone());
-    cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
-
-    let mut targetflags = build.rustc_flags(&target);
-    targetflags.extend(flags);
-    targetflags.push(format!("-Lnative={}",
-                             build.test_helpers_out(target).display()));
-    cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
-
-    cmd.arg("--docck-python").arg(build.python());
-
-    if build.build.ends_with("apple-darwin") {
-        // Force /usr/bin/python on macOS for LLDB tests because we're loading the
-        // LLDB plugin's compiled module which only works with the system python
-        // (namely not Homebrew-installed python)
-        cmd.arg("--lldb-python").arg("/usr/bin/python");
-    } else {
-        cmd.arg("--lldb-python").arg(build.python());
-    }
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Test {
+    path: &'static str,
+    mode: &'static str,
+    suite: &'static str,
+}
 
-    if let Some(ref gdb) = build.config.gdb {
-        cmd.arg("--gdb").arg(gdb);
-    }
-    if let Some(ref vers) = build.lldb_version {
-        cmd.arg("--lldb-version").arg(vers);
+static DEFAULT_COMPILETESTS: &[Test] = &[
+    Test { path: "src/test/ui", mode: "ui", suite: "ui" },
+    Test { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" },
+    Test { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" },
+    Test { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" },
+    Test { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" },
+    Test {
+        path: "src/test/run-pass-valgrind",
+        mode: "run-pass-valgrind",
+        suite: "run-pass-valgrind"
+    },
+    Test { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" },
+    Test { path: "src/test/codegen", mode: "codegen", suite: "codegen" },
+    Test { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" },
+    Test { path: "src/test/incremental", mode: "incremental", suite: "incremental" },
+
+    // What this runs varies depending on the native platform being apple
+    Test { path: "src/test/debuginfo", mode: "debuginfo-XXX", suite: "debuginfo" },
+];
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct DefaultCompiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
+
+impl Step for DefaultCompiletest {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(mut run: ShouldRun) -> ShouldRun {
+        for test in DEFAULT_COMPILETESTS {
+            run = run.path(test.path);
+        }
+        run
     }
-    if let Some(ref dir) = build.lldb_python_dir {
-        cmd.arg("--lldb-python-dir").arg(dir);
+
+    fn make_run(run: RunConfig) {
+        let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+        let test = run.path.map(|path| {
+            DEFAULT_COMPILETESTS.iter().find(|&&test| {
+                path.ends_with(test.path)
+            }).unwrap_or_else(|| {
+                panic!("make_run in compile test to receive test path, received {:?}", path);
+            })
+        });
+
+        if let Some(test) = test {
+            run.builder.ensure(DefaultCompiletest {
+                compiler,
+                target: run.target,
+                mode: test.mode,
+                suite: test.suite,
+            });
+        } else {
+            for test in DEFAULT_COMPILETESTS {
+                run.builder.ensure(DefaultCompiletest {
+                    compiler,
+                    target: run.target,
+                    mode: test.mode,
+                    suite: test.suite
+                });
+            }
+        }
     }
-    let llvm_config = build.llvm_config(target);
-    let llvm_version = output(Command::new(&llvm_config).arg("--version"));
-    cmd.arg("--llvm-version").arg(llvm_version);
-    if !build.is_rust_llvm(target) {
-        cmd.arg("--system-llvm");
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(Compiletest {
+            compiler: self.compiler,
+            target: self.target,
+            mode: self.mode,
+            suite: self.suite,
+        })
     }
+}
 
-    cmd.args(&build.flags.cmd.test_args());
+// Also default, but host-only.
+static HOST_COMPILETESTS: &[Test] = &[
+    Test { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" },
+    Test { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" },
+    Test { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" },
+    Test {
+        path: "src/test/compile-fail-fulldeps",
+        mode: "compile-fail",
+        suite: "compile-fail-fulldeps",
+    },
+    Test { path: "src/test/run-make", mode: "run-make", suite: "run-make" },
+    Test { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" },
+
+    Test { path: "src/test/pretty", mode: "pretty", suite: "pretty" },
+    Test { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass" },
+    Test { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail" },
+    Test { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind" },
+    Test { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps" },
+    Test { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps" },
+];
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct HostCompiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
 
-    if build.is_verbose() {
-        cmd.arg("--verbose");
+impl Step for HostCompiletest {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(mut run: ShouldRun) -> ShouldRun {
+        for test in HOST_COMPILETESTS {
+            run = run.path(test.path);
+        }
+        run
     }
 
-    if build.config.quiet_tests {
-        cmd.arg("--quiet");
-    }
-
-    // Only pass correct values for these flags for the `run-make` suite as it
-    // requires that a C++ compiler was configured which isn't always the case.
-    if suite == "run-make" {
-        let llvm_components = output(Command::new(&llvm_config).arg("--components"));
-        let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
-        cmd.arg("--cc").arg(build.cc(target))
-           .arg("--cxx").arg(build.cxx(target).unwrap())
-           .arg("--cflags").arg(build.cflags(target).join(" "))
-           .arg("--llvm-components").arg(llvm_components.trim())
-           .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
-    } else {
-        cmd.arg("--cc").arg("")
-           .arg("--cxx").arg("")
-           .arg("--cflags").arg("")
-           .arg("--llvm-components").arg("")
-           .arg("--llvm-cxxflags").arg("");
-    }
-
-    if build.remote_tested(target) {
-        cmd.arg("--remote-test-client")
-           .arg(build.tool(&Compiler::new(0, &build.build),
-                           "remote-test-client"));
-    }
-
-    // Running a C compiler on MSVC requires a few env vars to be set, to be
-    // sure to set them here.
-    //
-    // Note that if we encounter `PATH` we make sure to append to our own `PATH`
-    // rather than stomp over it.
-    if target.contains("msvc") {
-        for &(ref k, ref v) in build.cc[target].0.env() {
-            if k != "PATH" {
-                cmd.env(k, v);
+    fn make_run(run: RunConfig) {
+        let compiler = run.builder.compiler(run.builder.top_stage, run.host);
+
+        let test = run.path.map(|path| {
+            HOST_COMPILETESTS.iter().find(|&&test| {
+                path.ends_with(test.path)
+            }).unwrap_or_else(|| {
+                panic!("make_run in compile test to receive test path, received {:?}", path);
+            })
+        });
+
+        if let Some(test) = test {
+            run.builder.ensure(HostCompiletest {
+                compiler,
+                target: run.target,
+                mode: test.mode,
+                suite: test.suite,
+            });
+        } else {
+            for test in HOST_COMPILETESTS {
+                run.builder.ensure(HostCompiletest {
+                    compiler,
+                    target: run.target,
+                    mode: test.mode,
+                    suite: test.suite
+                });
             }
         }
     }
-    cmd.env("RUSTC_BOOTSTRAP", "1");
-    build.add_rust_test_threads(&mut cmd);
 
-    if build.config.sanitizers {
-        cmd.env("SANITIZER_SUPPORT", "1");
+    fn run(self, builder: &Builder) {
+        builder.ensure(Compiletest {
+            compiler: self.compiler,
+            target: self.target,
+            mode: self.mode,
+            suite: self.suite,
+        })
     }
+}
 
-    if build.config.profiler {
-        cmd.env("PROFILER_SUPPORT", "1");
-    }
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct Compiletest {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: &'static str,
+    suite: &'static str,
+}
 
-    cmd.arg("--adb-path").arg("adb");
-    cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
-    if target.contains("android") {
-        // Assume that cc for this target comes from the android sysroot
-        cmd.arg("--android-cross-path")
-           .arg(build.cc(target).parent().unwrap().parent().unwrap());
-    } else {
-        cmd.arg("--android-cross-path").arg("");
+impl Step for Compiletest {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
 
-    build.ci_env.force_coloring_in_ci(&mut cmd);
+    /// Executes the `compiletest` tool to run a suite of tests.
+    ///
+    /// Compiles all tests with `compiler` for `target` with the specified
+    /// compiletest `mode` and `suite` arguments. For example `mode` can be
+    /// "run-pass" or `suite` can be something like `debuginfo`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let suite = self.suite;
+
+        // Skip codegen tests if they aren't enabled in configuration.
+        if !build.config.codegen_tests && suite == "codegen" {
+            return;
+        }
 
-    let _time = util::timeit();
-    try_run(build, &mut cmd);
-}
+        if suite == "debuginfo" {
+            // Skip debuginfo tests on MSVC
+            if build.build.contains("msvc") {
+                return;
+            }
 
-/// Run `rustdoc --test` for all documentation in `src/doc`.
-///
-/// This will run all tests in our markdown documentation (e.g. the book)
-/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
-/// `compiler`.
-pub fn docs(build: &Build, compiler: &Compiler) {
-    // Do a breadth-first traversal of the `src/doc` directory and just run
-    // tests for all files that end in `*.md`
-    let mut stack = vec![build.src.join("src/doc")];
-    let _time = util::timeit();
-    let _folder = build.fold_output(|| "test_docs");
-
-    while let Some(p) = stack.pop() {
-        if p.is_dir() {
-            stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
-            continue
+            if mode == "debuginfo" {
+                return if build.build.contains("apple") {
+                    builder.ensure(Compiletest {
+                        mode: "debuginfo-lldb",
+                        ..self
+                    });
+                } else {
+                    builder.ensure(Compiletest {
+                        mode: "debuginfo-gdb",
+                        ..self
+                    });
+                };
+            }
+
+            builder.ensure(dist::DebuggerScripts {
+                sysroot: builder.sysroot(compiler),
+                target: target
+            });
+        }
+
+        if suite.ends_with("fulldeps") ||
+            // FIXME: Does pretty need librustc compiled? Note that there are
+            // fulldeps test suites with mode = pretty as well.
+            mode == "pretty" ||
+            mode == "rustdoc" ||
+            mode == "run-make" {
+            builder.ensure(compile::Rustc { compiler, target });
+        }
+
+        builder.ensure(compile::Test { compiler, target });
+        builder.ensure(native::TestHelpers { target });
+        builder.ensure(RemoteCopyLibs { compiler, target });
+
+        let _folder = build.fold_output(|| format!("test_{}", suite));
+        println!("Check compiletest suite={} mode={} ({} -> {})",
+                 suite, mode, &compiler.host, target);
+        let mut cmd = builder.tool_cmd(Tool::Compiletest);
+
+        // compiletest currently has... a lot of arguments, so let's just pass all
+        // of them!
+
+        cmd.arg("--compile-lib-path").arg(builder.rustc_libdir(compiler));
+        cmd.arg("--run-lib-path").arg(builder.sysroot_libdir(compiler, target));
+        cmd.arg("--rustc-path").arg(builder.rustc(compiler));
+        cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler));
+        cmd.arg("--src-base").arg(build.src.join("src/test").join(suite));
+        cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite));
+        cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
+        cmd.arg("--mode").arg(mode);
+        cmd.arg("--target").arg(target);
+        cmd.arg("--host").arg(&*compiler.host);
+        cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(build.build));
+
+        if let Some(ref nodejs) = build.config.nodejs {
+            cmd.arg("--nodejs").arg(nodejs);
+        }
+
+        let mut flags = vec!["-Crpath".to_string()];
+        if build.config.rust_optimize_tests {
+            flags.push("-O".to_string());
+        }
+        if build.config.rust_debuginfo_tests {
+            flags.push("-g".to_string());
+        }
+
+        let mut hostflags = build.rustc_flags(compiler.host);
+        hostflags.extend(flags.clone());
+        cmd.arg("--host-rustcflags").arg(hostflags.join(" "));
+
+        let mut targetflags = build.rustc_flags(target);
+        targetflags.extend(flags);
+        targetflags.push(format!("-Lnative={}",
+                                 build.test_helpers_out(target).display()));
+        cmd.arg("--target-rustcflags").arg(targetflags.join(" "));
+
+        cmd.arg("--docck-python").arg(build.python());
+
+        if build.build.ends_with("apple-darwin") {
+            // Force /usr/bin/python on macOS for LLDB tests because we're loading the
+            // LLDB plugin's compiled module which only works with the system python
+            // (namely not Homebrew-installed python)
+            cmd.arg("--lldb-python").arg("/usr/bin/python");
+        } else {
+            cmd.arg("--lldb-python").arg(build.python());
+        }
+
+        if let Some(ref gdb) = build.config.gdb {
+            cmd.arg("--gdb").arg(gdb);
+        }
+        if let Some(ref vers) = build.lldb_version {
+            cmd.arg("--lldb-version").arg(vers);
+        }
+        if let Some(ref dir) = build.lldb_python_dir {
+            cmd.arg("--lldb-python-dir").arg(dir);
+        }
+        let llvm_config = build.llvm_config(target);
+        let llvm_version = output(Command::new(&llvm_config).arg("--version"));
+        cmd.arg("--llvm-version").arg(llvm_version);
+        if !build.is_rust_llvm(target) {
+            cmd.arg("--system-llvm");
+        }
+
+        cmd.args(&build.flags.cmd.test_args());
+
+        if build.is_verbose() {
+            cmd.arg("--verbose");
+        }
+
+        if build.config.quiet_tests {
+            cmd.arg("--quiet");
+        }
+
+        // Only pass correct values for these flags for the `run-make` suite as it
+        // requires that a C++ compiler was configured which isn't always the case.
+        if suite == "run-make" {
+            let llvm_components = output(Command::new(&llvm_config).arg("--components"));
+            let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags"));
+            cmd.arg("--cc").arg(build.cc(target))
+               .arg("--cxx").arg(build.cxx(target).unwrap())
+               .arg("--cflags").arg(build.cflags(target).join(" "))
+               .arg("--llvm-components").arg(llvm_components.trim())
+               .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim());
+        } else {
+            cmd.arg("--cc").arg("")
+               .arg("--cxx").arg("")
+               .arg("--cflags").arg("")
+               .arg("--llvm-components").arg("")
+               .arg("--llvm-cxxflags").arg("");
+        }
+
+        if build.remote_tested(target) {
+            cmd.arg("--remote-test-client").arg(builder.tool_exe(Tool::RemoteTestClient));
+        }
+
+        // Running a C compiler on MSVC requires a few env vars to be set, to be
+        // sure to set them here.
+        //
+        // Note that if we encounter `PATH` we make sure to append to our own `PATH`
+        // rather than stomp over it.
+        if target.contains("msvc") {
+            for &(ref k, ref v) in build.cc[&target].0.env() {
+                if k != "PATH" {
+                    cmd.env(k, v);
+                }
+            }
+        }
+        cmd.env("RUSTC_BOOTSTRAP", "1");
+        build.add_rust_test_threads(&mut cmd);
+
+        if build.config.sanitizers {
+            cmd.env("SANITIZER_SUPPORT", "1");
         }
 
-        if p.extension().and_then(|s| s.to_str()) != Some("md") {
-            continue;
+        if build.config.profiler {
+            cmd.env("PROFILER_SUPPORT", "1");
         }
 
-        // The nostarch directory in the book is for no starch, and so isn't
-        // guaranteed to build. We don't care if it doesn't build, so skip it.
-        if p.to_str().map_or(false, |p| p.contains("nostarch")) {
-            continue;
+        cmd.arg("--adb-path").arg("adb");
+        cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR);
+        if target.contains("android") {
+            // Assume that cc for this target comes from the android sysroot
+            cmd.arg("--android-cross-path")
+               .arg(build.cc(target).parent().unwrap().parent().unwrap());
+        } else {
+            cmd.arg("--android-cross-path").arg("");
         }
 
-        markdown_test(build, compiler, &p);
+        build.ci_env.force_coloring_in_ci(&mut cmd);
+
+        let _time = util::timeit();
+        try_run(build, &mut cmd);
     }
 }
 
-/// Run the error index generator tool to execute the tests located in the error
-/// index.
-///
-/// The `error_index_generator` tool lives in `src/tools` and is used to
-/// generate a markdown file from the error indexes of the code base which is
-/// then passed to `rustdoc --test`.
-pub fn error_index(build: &Build, compiler: &Compiler) {
-    let _folder = build.fold_output(|| "test_error_index");
-    println!("Testing error-index stage{}", compiler.stage);
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Docs {
+    compiler: Compiler,
+}
+
+impl Step for Docs {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/doc")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Docs {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+        });
+    }
+
+    /// Run `rustdoc --test` for all documentation in `src/doc`.
+    ///
+    /// This will run all tests in our markdown documentation (e.g. the book)
+    /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
+    /// `compiler`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+
+        builder.ensure(compile::Test { compiler, target: compiler.host });
+
+        // Do a breadth-first traversal of the `src/doc` directory and just run
+        // tests for all files that end in `*.md`
+        let mut stack = vec![build.src.join("src/doc")];
+        let _time = util::timeit();
+        let _folder = build.fold_output(|| "test_docs");
+
+        while let Some(p) = stack.pop() {
+            if p.is_dir() {
+                stack.extend(t!(p.read_dir()).map(|p| t!(p).path()));
+                continue
+            }
+
+            if p.extension().and_then(|s| s.to_str()) != Some("md") {
+                continue;
+            }
+
+            // The nostarch directory in the book is for no starch, and so isn't
+            // guaranteed to build. We don't care if it doesn't build, so skip it.
+            if p.to_str().map_or(false, |p| p.contains("nostarch")) {
+                continue;
+            }
+
+            markdown_test(builder, compiler, &p);
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ErrorIndex {
+    compiler: Compiler,
+}
+
+impl Step for ErrorIndex {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/error_index_generator")
+    }
 
-    let dir = testdir(build, compiler.host);
-    t!(fs::create_dir_all(&dir));
-    let output = dir.join("error-index.md");
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(ErrorIndex {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+        });
+    }
+
+    /// Run the error index generator tool to execute the tests located in the error
+    /// index.
+    ///
+    /// The `error_index_generator` tool lives in `src/tools` and is used to
+    /// generate a markdown file from the error indexes of the code base which is
+    /// then passed to `rustdoc --test`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+
+        builder.ensure(compile::Std { compiler, target: compiler.host });
+
+        let _folder = build.fold_output(|| "test_error_index");
+        println!("Testing error-index stage{}", compiler.stage);
 
-    let _time = util::timeit();
-    build.run(build.tool_cmd(&Compiler::new(0, compiler.host),
-                             "error_index_generator")
-                   .arg("markdown")
-                   .arg(&output)
-                   .env("CFG_BUILD", &build.build));
+        let dir = testdir(build, compiler.host);
+        t!(fs::create_dir_all(&dir));
+        let output = dir.join("error-index.md");
 
-    markdown_test(build, compiler, &output);
+        let _time = util::timeit();
+        build.run(builder.tool_cmd(Tool::ErrorIndex)
+                    .arg("markdown")
+                    .arg(&output)
+                    .env("CFG_BUILD", &build.build));
+
+        markdown_test(builder, compiler, &output);
+    }
 }
 
-fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
+fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) {
+    let build = builder.build;
     let mut file = t!(File::open(markdown));
     let mut contents = String::new();
     t!(file.read_to_string(&mut contents));
@@ -403,8 +807,8 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
     }
 
     println!("doc tests for: {}", markdown.display());
-    let mut cmd = Command::new(build.rustdoc(compiler));
-    build.add_rustc_lib_path(compiler, &mut cmd);
+    let mut cmd = Command::new(builder.rustdoc(compiler));
+    builder.add_rustc_lib_path(compiler, &mut cmd);
     build.add_rust_test_threads(&mut cmd);
     cmd.arg("--test");
     cmd.arg(markdown);
@@ -420,126 +824,251 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
     }
 }
 
-/// Run all unit tests plus documentation tests for an entire crate DAG defined
-/// by a `Cargo.toml`
-///
-/// This is what runs tests for crates like the standard library, compiler, etc.
-/// It essentially is the driver for running `cargo test`.
-///
-/// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
-/// arguments, and those arguments are discovered from `cargo metadata`.
-pub fn krate(build: &Build,
-             compiler: &Compiler,
-             target: &str,
-             mode: Mode,
-             test_kind: TestKind,
-             krate: Option<&str>) {
-    let (name, path, features, root) = match mode {
-        Mode::Libstd => {
-            ("libstd", "src/libstd", build.std_features(), "std")
-        }
-        Mode::Libtest => {
-            ("libtest", "src/libtest", String::new(), "test")
-        }
-        Mode::Librustc => {
-            ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
-        }
-        _ => panic!("can only test libraries"),
-    };
-    let _folder = build.fold_output(|| {
-        format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
-    });
-    println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
-             compiler.host, target);
-
-    // If we're not doing a full bootstrap but we're testing a stage2 version of
-    // libstd, then what we're actually testing is the libstd produced in
-    // stage1. Reflect that here by updating the compiler that we're working
-    // with automatically.
-    let compiler = if build.force_use_stage1(compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler.clone()
-    };
-
-    // Build up the base `cargo test` command.
-    //
-    // Pass in some standard flags then iterate over the graph we've discovered
-    // in `cargo metadata` with the maps above and figure out what `-p`
-    // arguments need to get passed.
-    let mut cargo = build.cargo(&compiler, mode, target, test_kind.subcommand());
-    cargo.arg("--manifest-path")
-         .arg(build.src.join(path).join("Cargo.toml"))
-         .arg("--features").arg(features);
-    if test_kind.subcommand() == "test" && !build.fail_fast {
-        cargo.arg("--no-fail-fast");
-    }
-
-    match krate {
-        Some(krate) => {
-            cargo.arg("-p").arg(krate);
-        }
-        None => {
-            let mut visited = HashSet::new();
-            let mut next = vec![root];
-            while let Some(name) = next.pop() {
-                // Right now jemalloc is our only target-specific crate in the
-                // sense that it's not present on all platforms. Custom skip it
-                // here for now, but if we add more this probably wants to get
-                // more generalized.
-                //
-                // Also skip `build_helper` as it's not compiled normally for
-                // target during the bootstrap and it's just meant to be a
-                // helper crate, not tested. If it leaks through then it ends up
-                // messing with various mtime calculations and such.
-                if !name.contains("jemalloc") && name != "build_helper" {
-                    cargo.arg("-p").arg(&format!("{}:0.0.0", name));
-                }
-                for dep in build.crates[name].deps.iter() {
-                    if visited.insert(dep) {
-                        next.push(dep);
-                    }
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct CrateLibrustc {
+    compiler: Compiler,
+    target: Interned<String>,
+    test_kind: TestKind,
+    krate: Option<Interned<String>>,
+}
+
+impl Step for CrateLibrustc {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.krate("rustc-main")
+    }
+
+    fn make_run(run: RunConfig) {
+        let builder = run.builder;
+        let compiler = builder.compiler(builder.top_stage, run.host);
+
+        let make = |name: Option<Interned<String>>| {
+            let test_kind = if builder.kind == Kind::Test {
+                TestKind::Test
+            } else if builder.kind == Kind::Bench {
+                TestKind::Bench
+            } else {
+                panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+            };
+
+            builder.ensure(CrateLibrustc {
+                compiler,
+                target: run.target,
+                test_kind: test_kind,
+                krate: name,
+            });
+        };
+
+        if let Some(path) = run.path {
+            for (name, krate_path) in builder.crates("rustc-main") {
+                if path.ends_with(krate_path) {
+                    make(Some(name));
                 }
             }
+        } else {
+            make(None);
         }
     }
 
-    // The tests are going to run with the *target* libraries, so we need to
-    // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
-    //
-    // Note that to run the compiler we need to run with the *host* libraries,
-    // but our wrapper scripts arrange for that to be the case anyway.
-    let mut dylib_path = dylib_path();
-    dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
-    cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
 
-    if target.contains("emscripten") || build.remote_tested(target) {
-        cargo.arg("--no-run");
+    fn run(self, builder: &Builder) {
+        builder.ensure(Crate {
+            compiler: self.compiler,
+            target: self.target,
+            mode: Mode::Librustc,
+            test_kind: self.test_kind,
+            krate: self.krate,
+        });
     }
+}
 
-    cargo.arg("--");
 
-    if build.config.quiet_tests {
-        cargo.arg("--quiet");
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Crate {
+    compiler: Compiler,
+    target: Interned<String>,
+    mode: Mode,
+    test_kind: TestKind,
+    krate: Option<Interned<String>>,
+}
+
+impl Step for Crate {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.krate("std").krate("test")
     }
 
-    let _time = util::timeit();
+    fn make_run(run: RunConfig) {
+        let builder = run.builder;
+        let compiler = builder.compiler(builder.top_stage, run.host);
+
+        let make = |mode: Mode, name: Option<Interned<String>>| {
+            let test_kind = if builder.kind == Kind::Test {
+                TestKind::Test
+            } else if builder.kind == Kind::Bench {
+                TestKind::Bench
+            } else {
+                panic!("unexpected builder.kind in crate: {:?}", builder.kind);
+            };
+
+            builder.ensure(Crate {
+                compiler,
+                target: run.target,
+                mode: mode,
+                test_kind: test_kind,
+                krate: name,
+            });
+        };
+
+        if let Some(path) = run.path {
+            for (name, krate_path) in builder.crates("std") {
+                if path.ends_with(krate_path) {
+                    make(Mode::Libstd, Some(name));
+                }
+            }
+            for (name, krate_path) in builder.crates("test") {
+                if path.ends_with(krate_path) {
+                    make(Mode::Libtest, Some(name));
+                }
+            }
+        } else {
+            make(Mode::Libstd, None);
+            make(Mode::Libtest, None);
+        }
+    }
 
-    if target.contains("emscripten") {
-        build.run(&mut cargo);
-        krate_emscripten(build, &compiler, target, mode);
-    } else if build.remote_tested(target) {
-        build.run(&mut cargo);
-        krate_remote(build, &compiler, target, mode);
-    } else {
-        cargo.args(&build.flags.cmd.test_args());
-        try_run(build, &mut cargo);
+    /// Run all unit tests plus documentation tests for an entire crate DAG defined
+    /// by a `Cargo.toml`
+    ///
+    /// This is what runs tests for crates like the standard library, compiler, etc.
+    /// It essentially is the driver for running `cargo test`.
+    ///
+    /// Currently this runs all tests for a DAG by passing a bunch of `-p foo`
+    /// arguments, and those arguments are discovered from `cargo metadata`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        let mode = self.mode;
+        let test_kind = self.test_kind;
+        let krate = self.krate;
+
+        builder.ensure(compile::Test { compiler, target });
+        builder.ensure(RemoteCopyLibs { compiler, target });
+        let (name, path, features, root) = match mode {
+            Mode::Libstd => {
+                ("libstd", "src/libstd", build.std_features(), "std")
+            }
+            Mode::Libtest => {
+                ("libtest", "src/libtest", String::new(), "test")
+            }
+            Mode::Librustc => {
+                builder.ensure(compile::Rustc { compiler, target });
+                ("librustc", "src/rustc", build.rustc_features(), "rustc-main")
+            }
+            _ => panic!("can only test libraries"),
+        };
+        let root = INTERNER.intern_string(String::from(root));
+        let _folder = build.fold_output(|| {
+            format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
+        });
+        println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
+                &compiler.host, target);
+
+        // If we're not doing a full bootstrap but we're testing a stage2 version of
+        // libstd, then what we're actually testing is the libstd produced in
+        // stage1. Reflect that here by updating the compiler that we're working
+        // with automatically.
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
+        } else {
+            compiler.clone()
+        };
+
+        // Build up the base `cargo test` command.
+        //
+        // Pass in some standard flags then iterate over the graph we've discovered
+        // in `cargo metadata` with the maps above and figure out what `-p`
+        // arguments need to get passed.
+        let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
+        cargo.arg("--manifest-path")
+            .arg(build.src.join(path).join("Cargo.toml"))
+            .arg("--features").arg(features);
+        if test_kind.subcommand() == "test" && !build.fail_fast {
+            cargo.arg("--no-fail-fast");
+        }
+
+        match krate {
+            Some(krate) => {
+                cargo.arg("-p").arg(krate);
+            }
+            None => {
+                let mut visited = HashSet::new();
+                let mut next = vec![root];
+                while let Some(name) = next.pop() {
+                    // Right now jemalloc is our only target-specific crate in the
+                    // sense that it's not present on all platforms. Custom skip it
+                    // here for now, but if we add more this probably wants to get
+                    // more generalized.
+                    //
+                    // Also skip `build_helper` as it's not compiled normally for
+                    // target during the bootstrap and it's just meant to be a
+                    // helper crate, not tested. If it leaks through then it ends up
+                    // messing with various mtime calculations and such.
+                    if !name.contains("jemalloc") && *name != *"build_helper" {
+                        cargo.arg("-p").arg(&format!("{}:0.0.0", name));
+                    }
+                    for dep in build.crates[&name].deps.iter() {
+                        if visited.insert(dep) {
+                            next.push(*dep);
+                        }
+                    }
+                }
+            }
+        }
+
+        // The tests are going to run with the *target* libraries, so we need to
+        // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent.
+        //
+        // Note that to run the compiler we need to run with the *host* libraries,
+        // but our wrapper scripts arrange for that to be the case anyway.
+        let mut dylib_path = dylib_path();
+        dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target)));
+        cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
+
+        if target.contains("emscripten") || build.remote_tested(target) {
+            cargo.arg("--no-run");
+        }
+
+        cargo.arg("--");
+
+        if build.config.quiet_tests {
+            cargo.arg("--quiet");
+        }
+
+        let _time = util::timeit();
+
+        if target.contains("emscripten") {
+            build.run(&mut cargo);
+            krate_emscripten(build, compiler, target, mode);
+        } else if build.remote_tested(target) {
+            build.run(&mut cargo);
+            krate_remote(builder, compiler, target, mode);
+        } else {
+            cargo.args(&build.flags.cmd.test_args());
+            try_run(build, &mut cargo);
+        }
     }
 }
 
 fn krate_emscripten(build: &Build,
-                    compiler: &Compiler,
-                    target: &str,
+                    compiler: Compiler,
+                    target: Interned<String>,
                     mode: Mode) {
     let out_dir = build.cargo_out(compiler, mode, target);
     let tests = find_tests(&out_dir.join("deps"), target);
@@ -556,15 +1085,15 @@ fn krate_emscripten(build: &Build,
     }
 }
 
-fn krate_remote(build: &Build,
-                compiler: &Compiler,
-                target: &str,
+fn krate_remote(builder: &Builder,
+                compiler: Compiler,
+                target: Interned<String>,
                 mode: Mode) {
+    let build = builder.build;
     let out_dir = build.cargo_out(compiler, mode, target);
     let tests = find_tests(&out_dir.join("deps"), target);
 
-    let tool = build.tool(&Compiler::new(0, &build.build),
-                          "remote-test-client");
+    let tool = builder.tool_exe(Tool::RemoteTestClient);
     for test in tests {
         let mut cmd = Command::new(&tool);
         cmd.arg("run")
@@ -577,7 +1106,7 @@ fn krate_remote(build: &Build,
     }
 }
 
-fn find_tests(dir: &Path, target: &str) -> Vec<PathBuf> {
+fn find_tests(dir: &Path, target: Interned<String>) -> Vec<PathBuf> {
     let mut dst = Vec::new();
     for e in t!(dir.read_dir()).map(|e| t!(e)) {
         let file_type = t!(e.file_type());
@@ -596,105 +1125,171 @@ fn find_tests(dir: &Path, target: &str) -> Vec<PathBuf> {
     dst
 }
 
-pub fn remote_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
-    if !build.remote_tested(target) {
-        return
-    }
-
-    println!("REMOTE copy libs to emulator ({})", target);
-    t!(fs::create_dir_all(build.out.join("tmp")));
-
-    let server = build.cargo_out(compiler, Mode::Tool, target)
-                      .join(exe("remote-test-server", target));
-
-    // Spawn the emulator and wait for it to come online
-    let tool = build.tool(&Compiler::new(0, &build.build),
-                          "remote-test-client");
-    let mut cmd = Command::new(&tool);
-    cmd.arg("spawn-emulator")
-       .arg(target)
-       .arg(&server)
-       .arg(build.out.join("tmp"));
-    if let Some(rootfs) = build.qemu_rootfs(target) {
-        cmd.arg(rootfs);
-    }
-    build.run(&mut cmd);
-
-    // Push all our dylibs to the emulator
-    for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
-        let f = t!(f);
-        let name = f.file_name().into_string().unwrap();
-        if util::is_dylib(&name) {
-            build.run(Command::new(&tool)
-                              .arg("push")
-                              .arg(f.path()));
-        }
-    }
-}
-
-/// Run "distcheck", a 'make check' from a tarball
-pub fn distcheck(build: &Build) {
-    if build.build != "x86_64-unknown-linux-gnu" {
-        return
-    }
-    if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
-        return
-    }
-    if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
-        return
-    }
-
-    println!("Distcheck");
-    let dir = build.out.join("tmp").join("distcheck");
-    let _ = fs::remove_dir_all(&dir);
-    t!(fs::create_dir_all(&dir));
-
-    let mut cmd = Command::new("tar");
-    cmd.arg("-xzf")
-       .arg(dist::rust_src_location(build))
-       .arg("--strip-components=1")
-       .current_dir(&dir);
-    build.run(&mut cmd);
-    build.run(Command::new("./configure")
-                     .args(&build.config.configure_args)
-                     .arg("--enable-vendor")
-                     .current_dir(&dir));
-    build.run(Command::new(build_helper::make(&build.build))
-                     .arg("check")
-                     .current_dir(&dir));
-
-    // Now make sure that rust-src has all of libstd's dependencies
-    println!("Distcheck rust-src");
-    let dir = build.out.join("tmp").join("distcheck-src");
-    let _ = fs::remove_dir_all(&dir);
-    t!(fs::create_dir_all(&dir));
-
-    let mut cmd = Command::new("tar");
-    cmd.arg("-xzf")
-       .arg(dist::rust_src_installer(build))
-       .arg("--strip-components=1")
-       .current_dir(&dir);
-    build.run(&mut cmd);
-
-    let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
-    build.run(Command::new(&build.initial_cargo)
-                     .arg("generate-lockfile")
-                     .arg("--manifest-path")
-                     .arg(&toml)
-                     .current_dir(&dir));
-}
-
-/// Test the build system itself
-pub fn bootstrap(build: &Build) {
-    let mut cmd = Command::new(&build.initial_cargo);
-    cmd.arg("test")
-       .current_dir(build.src.join("src/bootstrap"))
-       .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
-       .env("RUSTC_BOOTSTRAP", "1")
-       .env("RUSTC", &build.initial_rustc);
-    if !build.fail_fast {
-        cmd.arg("--no-fail-fast");
+/// Some test suites are run inside emulators or on remote devices, and most
+/// of our test binaries are linked dynamically which means we need to ship
+/// the standard library and such to the emulator ahead of time. This step
+/// represents this and is a dependency of all test suites.
+///
+/// Most of the time this is a noop. For some steps such as shipping data to
+/// QEMU we have to build our own tools so we've got conditional dependencies
+/// on those programs as well. Note that the remote test client is built for
+/// the build target (us) and the server is built for the target.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct RemoteCopyLibs {
+    compiler: Compiler,
+    target: Interned<String>,
+}
+
+impl Step for RemoteCopyLibs {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        if !build.remote_tested(target) {
+            return
+        }
+
+        builder.ensure(compile::Test { compiler, target });
+
+        println!("REMOTE copy libs to emulator ({})", target);
+        t!(fs::create_dir_all(build.out.join("tmp")));
+
+        let server = builder.ensure(tool::RemoteTestServer { stage: compiler.stage, target });
+
+        // Spawn the emulator and wait for it to come online
+        let tool = builder.tool_exe(Tool::RemoteTestClient);
+        let mut cmd = Command::new(&tool);
+        cmd.arg("spawn-emulator")
+           .arg(target)
+           .arg(&server)
+           .arg(build.out.join("tmp"));
+        if let Some(rootfs) = build.qemu_rootfs(target) {
+            cmd.arg(rootfs);
+        }
+        build.run(&mut cmd);
+
+        // Push all our dylibs to the emulator
+        for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) {
+            let f = t!(f);
+            let name = f.file_name().into_string().unwrap();
+            if util::is_dylib(&name) {
+                build.run(Command::new(&tool)
+                                  .arg("push")
+                                  .arg(f.path()));
+            }
+        }
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Distcheck;
+
+impl Step for Distcheck {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("distcheck")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Distcheck);
+    }
+
+    /// Run "distcheck", a 'make check' from a tarball
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+
+        if *build.build != *"x86_64-unknown-linux-gnu" {
+            return
+        }
+        if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
+            return
+        }
+        if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") {
+            return
+        }
+
+        println!("Distcheck");
+        let dir = build.out.join("tmp").join("distcheck");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        // Guarantee that these are built before we begin running.
+        builder.ensure(dist::PlainSourceTarball);
+        builder.ensure(dist::Src);
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(builder.ensure(dist::PlainSourceTarball))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+        build.run(Command::new("./configure")
+                         .args(&build.config.configure_args)
+                         .arg("--enable-vendor")
+                         .current_dir(&dir));
+        build.run(Command::new(build_helper::make(&build.build))
+                         .arg("check")
+                         .current_dir(&dir));
+
+        // Now make sure that rust-src has all of libstd's dependencies
+        println!("Distcheck rust-src");
+        let dir = build.out.join("tmp").join("distcheck-src");
+        let _ = fs::remove_dir_all(&dir);
+        t!(fs::create_dir_all(&dir));
+
+        let mut cmd = Command::new("tar");
+        cmd.arg("-xzf")
+           .arg(builder.ensure(dist::Src))
+           .arg("--strip-components=1")
+           .current_dir(&dir);
+        build.run(&mut cmd);
+
+        let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml");
+        build.run(Command::new(&build.initial_cargo)
+                         .arg("generate-lockfile")
+                         .arg("--manifest-path")
+                         .arg(&toml)
+                         .current_dir(&dir));
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Bootstrap;
+
+impl Step for Bootstrap {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    /// Test the build system itself
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let mut cmd = Command::new(&build.initial_cargo);
+        cmd.arg("test")
+           .current_dir(build.src.join("src/bootstrap"))
+           .env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
+           .env("RUSTC_BOOTSTRAP", "1")
+           .env("RUSTC", &build.initial_rustc);
+        if !build.fail_fast {
+            cmd.arg("--no-fail-fast");
+        }
+        cmd.arg("--").args(&build.flags.cmd.test_args());
+        try_run(build, &mut cmd);
+    }
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/bootstrap")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Bootstrap);
     }
-    cmd.arg("--").args(&build.flags.cmd.test_args());
-    try_run(build, &mut cmd);
 }
diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs
index 5a3106c7d5e..cbe2be2d26a 100644
--- a/src/bootstrap/compile.rs
+++ b/src/bootstrap/compile.rs
@@ -23,113 +23,181 @@ use std::io::prelude::*;
 use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
 use std::str;
+use std::cmp::min;
 
 use build_helper::{output, mtime, up_to_date};
 use filetime::FileTime;
-use rustc_serialize::json;
+use serde_json;
 
-use channel::GitInfo;
 use util::{exe, libdir, is_dylib, copy};
 use {Build, Compiler, Mode};
+use native;
 
-/// Build the standard library.
-///
-/// This will build the standard library for a particular stage of the build
-/// using the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn std(build: &Build, target: &str, compiler: &Compiler) {
-    let libdir = build.sysroot_libdir(compiler, target);
-    t!(fs::create_dir_all(&libdir));
-
-    let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
-    println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
-    build.clear_if_dirty(&out_dir, &build.compiler_path(compiler));
-    let mut cargo = build.cargo(compiler, Mode::Libstd, target, "build");
-    let mut features = build.std_features();
-
-    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
-        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
-    }
-
-    // When doing a local rebuild we tell cargo that we're stage1 rather than
-    // stage0. This works fine if the local rust and being-built rust have the
-    // same view of what the default allocator is, but fails otherwise. Since
-    // we don't have a way to express an allocator preference yet, work
-    // around the issue in the case of a local rebuild with jemalloc disabled.
-    if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
-        features.push_str(" force_alloc_system");
-    }
-
-    if compiler.stage != 0 && build.config.sanitizers {
-        // This variable is used by the sanitizer runtime crates, e.g.
-        // rustc_lsan, to build the sanitizer runtime from C code
-        // When this variable is missing, those crates won't compile the C code,
-        // so we don't set this variable during stage0 where llvm-config is
-        // missing
-        // We also only build the runtimes when --enable-sanitizers (or its
-        // config.toml equivalent) is used
-        cargo.env("LLVM_CONFIG", build.llvm_config(target));
+use cache::{INTERNER, Interned};
+use builder::{Step, RunConfig, ShouldRun, Builder};
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Std {
+    pub target: Interned<String>,
+    pub compiler: Compiler,
+}
+
+impl Step for Std {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/libstd").krate("std")
     }
-    cargo.arg("--features").arg(features)
-         .arg("--manifest-path")
-         .arg(build.src.join("src/libstd/Cargo.toml"));
 
-    if let Some(target) = build.config.target_config.get(target) {
-        if let Some(ref jemalloc) = target.jemalloc {
-            cargo.env("JEMALLOC_OVERRIDE", jemalloc);
-        }
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Std {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
     }
-    if target.contains("musl") {
-        if let Some(p) = build.musl_root(target) {
-            cargo.env("MUSL_ROOT", p);
+
+    /// Build the standard library.
+    ///
+    /// This will build the standard library for a particular stage of the build
+    /// using the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let compiler = self.compiler;
+
+        builder.ensure(StartupObjects { compiler, target });
+
+        if build.force_use_stage1(compiler, target) {
+            let from = builder.compiler(1, build.build);
+            builder.ensure(Std {
+                compiler: from,
+                target: target,
+            });
+            println!("Uplifting stage1 std ({} -> {})", from.host, target);
+            builder.ensure(StdLink {
+                compiler: from,
+                target_compiler: compiler,
+                target: target,
+            });
+            return;
+        }
+
+        let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
+        println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
+                &compiler.host, target);
+
+        let out_dir = build.cargo_out(compiler, Mode::Libstd, target);
+        build.clear_if_dirty(&out_dir, &builder.rustc(compiler));
+        let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "build");
+        let mut features = build.std_features();
+
+        if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
+        }
+
+        // When doing a local rebuild we tell cargo that we're stage1 rather than
+        // stage0. This works fine if the local rust and being-built rust have the
+        // same view of what the default allocator is, but fails otherwise. Since
+        // we don't have a way to express an allocator preference yet, work
+        // around the issue in the case of a local rebuild with jemalloc disabled.
+        if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
+            features.push_str(" force_alloc_system");
+        }
+
+        if compiler.stage != 0 && build.config.sanitizers {
+            // This variable is used by the sanitizer runtime crates, e.g.
+            // rustc_lsan, to build the sanitizer runtime from C code
+            // When this variable is missing, those crates won't compile the C code,
+            // so we don't set this variable during stage0 where llvm-config is
+            // missing
+            // We also only build the runtimes when --enable-sanitizers (or its
+            // config.toml equivalent) is used
+            cargo.env("LLVM_CONFIG", build.llvm_config(target));
+        }
+
+        cargo.arg("--features").arg(features)
+            .arg("--manifest-path")
+            .arg(build.src.join("src/libstd/Cargo.toml"));
+
+        if let Some(target) = build.config.target_config.get(&target) {
+            if let Some(ref jemalloc) = target.jemalloc {
+                cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+            }
+        }
+        if target.contains("musl") {
+            if let Some(p) = build.musl_root(target) {
+                cargo.env("MUSL_ROOT", p);
+            }
         }
+
+        run_cargo(build,
+                &mut cargo,
+                &libstd_stamp(build, compiler, target));
+
+        builder.ensure(StdLink {
+            compiler: builder.compiler(compiler.stage, build.build),
+            target_compiler: compiler,
+            target: target,
+        });
     }
+}
 
-    run_cargo(build,
-              &mut cargo,
-              &libstd_stamp(build, &compiler, target));
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct StdLink {
+    pub compiler: Compiler,
+    pub target_compiler: Compiler,
+    pub target: Interned<String>,
 }
 
-/// Link all libstd rlibs/dylibs into the sysroot location.
-///
-/// Links those artifacts generated by `compiler` to a the `stage` compiler's
-/// sysroot for the specified `host` and `target`.
-///
-/// Note that this assumes that `compiler` has already generated the libstd
-/// libraries for `target`, and this method will find them in the relevant
-/// output directory.
-pub fn std_link(build: &Build,
-                compiler: &Compiler,
-                target_compiler: &Compiler,
-                target: &str) {
-    println!("Copying stage{} std from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    let libdir = build.sysroot_libdir(target_compiler, target);
-    add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
-
-    if target.contains("musl") && !target.contains("mips") {
-        copy_musl_third_party_objects(build, target, &libdir);
-    }
-
-    if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
-        // The sanitizers are only built in stage1 or above, so the dylibs will
-        // be missing in stage0 and causes panic. See the `std()` function above
-        // for reason why the sanitizers are not built in stage0.
-        copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
+impl Step for StdLink {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Link all libstd rlibs/dylibs into the sysroot location.
+    ///
+    /// Links those artifacts generated by `compiler` to a the `stage` compiler's
+    /// sysroot for the specified `host` and `target`.
+    ///
+    /// Note that this assumes that `compiler` has already generated the libstd
+    /// libraries for `target`, and this method will find them in the relevant
+    /// output directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} std from stage{} ({} -> {} / {})",
+                target_compiler.stage,
+                compiler.stage,
+                &compiler.host,
+                target_compiler.host,
+                target);
+        let libdir = builder.sysroot_libdir(target_compiler, target);
+        add_to_sysroot(&libdir, &libstd_stamp(build, compiler, target));
+
+        if target.contains("musl") && !target.contains("mips") {
+            copy_musl_third_party_objects(build, target, &libdir);
+        }
+
+        if build.config.sanitizers && compiler.stage != 0 && target == "x86_64-apple-darwin" {
+            // The sanitizers are only built in stage1 or above, so the dylibs will
+            // be missing in stage0 and causes panic. See the `std()` function above
+            // for reason why the sanitizers are not built in stage0.
+            copy_apple_sanitizer_dylibs(&build.native_dir(target), "osx", &libdir);
+        }
     }
 }
 
 /// Copies the crt(1,i,n).o startup objects
 ///
 /// Only required for musl targets that statically link to libc
-fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) {
+fn copy_musl_third_party_objects(build: &Build, target: Interned<String>, into: &Path) {
     for &obj in &["crt1.o", "crti.o", "crtn.o"] {
         copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj));
     }
@@ -147,192 +215,353 @@ fn copy_apple_sanitizer_dylibs(native_dir: &Path, platform: &str, into: &Path) {
     }
 }
 
-/// Build and prepare startup objects like rsbegin.o and rsend.o
-///
-/// These are primarily used on Windows right now for linking executables/dlls.
-/// They don't require any library support as they're just plain old object
-/// files, so we just use the nightly snapshot compiler to always build them (as
-/// no other compilers are guaranteed to be available).
-pub fn build_startup_objects(build: &Build, for_compiler: &Compiler, target: &str) {
-    if !target.contains("pc-windows-gnu") {
-        return
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct StartupObjects {
+    pub compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for StartupObjects {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/rtstartup")
     }
 
-    let compiler = Compiler::new(0, &build.build);
-    let compiler_path = build.compiler_path(&compiler);
-    let src_dir = &build.src.join("src/rtstartup");
-    let dst_dir = &build.native_dir(target).join("rtstartup");
-    let sysroot_dir = &build.sysroot_libdir(for_compiler, target);
-    t!(fs::create_dir_all(dst_dir));
-    t!(fs::create_dir_all(sysroot_dir));
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(StartupObjects {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
+    }
 
-    for file in &["rsbegin", "rsend"] {
-        let src_file = &src_dir.join(file.to_string() + ".rs");
-        let dst_file = &dst_dir.join(file.to_string() + ".o");
-        if !up_to_date(src_file, dst_file) {
-            let mut cmd = Command::new(&compiler_path);
-            build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
-                        .arg("--cfg").arg(format!("stage{}", compiler.stage))
-                        .arg("--target").arg(target)
-                        .arg("--emit=obj")
-                        .arg("--out-dir").arg(dst_dir)
-                        .arg(src_file));
+    /// Build and prepare startup objects like rsbegin.o and rsend.o
+    ///
+    /// These are primarily used on Windows right now for linking executables/dlls.
+    /// They don't require any library support as they're just plain old object
+    /// files, so we just use the nightly snapshot compiler to always build them (as
+    /// no other compilers are guaranteed to be available).
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let for_compiler = self.compiler;
+        let target = self.target;
+        if !target.contains("pc-windows-gnu") {
+            return
         }
 
-        copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
-    }
+        let src_dir = &build.src.join("src/rtstartup");
+        let dst_dir = &build.native_dir(target).join("rtstartup");
+        let sysroot_dir = &builder.sysroot_libdir(for_compiler, target);
+        t!(fs::create_dir_all(dst_dir));
+
+        for file in &["rsbegin", "rsend"] {
+            let src_file = &src_dir.join(file.to_string() + ".rs");
+            let dst_file = &dst_dir.join(file.to_string() + ".o");
+            if !up_to_date(src_file, dst_file) {
+                let mut cmd = Command::new(&build.initial_rustc);
+                build.run(cmd.env("RUSTC_BOOTSTRAP", "1")
+                            .arg("--cfg").arg("stage0")
+                            .arg("--target").arg(target)
+                            .arg("--emit=obj")
+                            .arg("-o").arg(dst_file)
+                            .arg(src_file));
+            }
+
+            copy(dst_file, &sysroot_dir.join(file.to_string() + ".o"));
+        }
 
-    for obj in ["crt2.o", "dllcrt2.o"].iter() {
-        copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
+        for obj in ["crt2.o", "dllcrt2.o"].iter() {
+            copy(&compiler_file(build.cc(target), obj), &sysroot_dir.join(obj));
+        }
     }
 }
 
-/// Build libtest.
-///
-/// This will build libtest and supporting libraries for a particular stage of
-/// the build using the `compiler` targeting the `target` architecture. The
-/// artifacts created will also be linked into the sysroot directory.
-pub fn test(build: &Build, target: &str, compiler: &Compiler) {
-    let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
-    println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
-             compiler.host, target);
-    let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
-    build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
-    let mut cargo = build.cargo(compiler, Mode::Libtest, target, "build");
-    if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
-        cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
-    }
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libtest/Cargo.toml"));
-    run_cargo(build,
-              &mut cargo,
-              &libtest_stamp(build, compiler, target));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Test {
+    pub compiler: Compiler,
+    pub target: Interned<String>,
 }
 
-/// Same as `std_link`, only for libtest
-pub fn test_link(build: &Build,
-                 compiler: &Compiler,
-                 target_compiler: &Compiler,
-                 target: &str) {
-    println!("Copying stage{} test from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
-                   &libtest_stamp(build, compiler, target));
-}
+impl Step for Test {
+    type Output = ();
+    const DEFAULT: bool = true;
 
-/// Build the compiler.
-///
-/// This will build the compiler for a particular stage of the build using
-/// the `compiler` targeting the `target` architecture. The artifacts
-/// created will also be linked into the sysroot directory.
-pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
-    let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
-    println!("Building stage{} compiler artifacts ({} -> {})",
-             compiler.stage, compiler.host, target);
-
-    let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
-    build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
-
-    let mut cargo = build.cargo(compiler, Mode::Librustc, target, "build");
-    cargo.arg("--features").arg(build.rustc_features())
-         .arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"));
-
-    // Set some configuration variables picked up by build scripts and
-    // the compiler alike
-    cargo.env("CFG_RELEASE", build.rust_release())
-         .env("CFG_RELEASE_CHANNEL", &build.config.channel)
-         .env("CFG_VERSION", build.rust_version())
-         .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
-
-    if compiler.stage == 0 {
-        cargo.env("CFG_LIBDIR_RELATIVE", "lib");
-    } else {
-        let libdir_relative = build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
-        cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/libtest").krate("test")
     }
 
-    // If we're not building a compiler with debugging information then remove
-    // these two env vars which would be set otherwise.
-    if build.config.rust_debuginfo_only_std {
-        cargo.env_remove("RUSTC_DEBUGINFO");
-        cargo.env_remove("RUSTC_DEBUGINFO_LINES");
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Test {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
     }
 
-    if let Some(ref ver_date) = build.rust_info.commit_date() {
-        cargo.env("CFG_VER_DATE", ver_date);
-    }
-    if let Some(ref ver_hash) = build.rust_info.sha() {
-        cargo.env("CFG_VER_HASH", ver_hash);
-    }
-    if !build.unstable_features() {
-        cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
-    }
-    // Flag that rust llvm is in use
-    if build.is_rust_llvm(target) {
-        cargo.env("LLVM_RUSTLLVM", "1");
+    /// Build libtest.
+    ///
+    /// This will build libtest and supporting libraries for a particular stage of
+    /// the build using the `compiler` targeting the `target` architecture. The
+    /// artifacts created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let compiler = self.compiler;
+
+        builder.ensure(Std { compiler, target });
+
+        if build.force_use_stage1(compiler, target) {
+            builder.ensure(Test {
+                compiler: builder.compiler(1, build.build),
+                target: target,
+            });
+            println!("Uplifting stage1 test ({} -> {})", &build.build, target);
+            builder.ensure(TestLink {
+                compiler: builder.compiler(1, build.build),
+                target_compiler: compiler,
+                target: target,
+            });
+            return;
+        }
+
+        let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
+        println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
+                &compiler.host, target);
+        let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
+        build.clear_if_dirty(&out_dir, &libstd_stamp(build, compiler, target));
+        let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "build");
+        if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
+            cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
+        }
+        cargo.arg("--manifest-path")
+            .arg(build.src.join("src/libtest/Cargo.toml"));
+        run_cargo(build,
+                &mut cargo,
+                &libtest_stamp(build, compiler, target));
+
+        builder.ensure(TestLink {
+            compiler: builder.compiler(compiler.stage, build.build),
+            target_compiler: compiler,
+            target: target,
+        });
     }
-    cargo.env("LLVM_CONFIG", build.llvm_config(target));
-    let target_config = build.config.target_config.get(target);
-    if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-        cargo.env("CFG_LLVM_ROOT", s);
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TestLink {
+    pub compiler: Compiler,
+    pub target_compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for TestLink {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
-    // Building with a static libstdc++ is only supported on linux right now,
-    // not for MSVC or macOS
-    if build.config.llvm_static_stdcpp &&
-       !target.contains("windows") &&
-       !target.contains("apple") {
-        cargo.env("LLVM_STATIC_STDCPP",
-                  compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
+
+    /// Same as `std_link`, only for libtest
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} test from stage{} ({} -> {} / {})",
+                target_compiler.stage,
+                compiler.stage,
+                &compiler.host,
+                target_compiler.host,
+                target);
+        add_to_sysroot(&builder.sysroot_libdir(target_compiler, target),
+                    &libtest_stamp(build, compiler, target));
     }
-    if build.config.llvm_link_shared {
-        cargo.env("LLVM_LINK_SHARED", "1");
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Rustc {
+    pub compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for Rustc {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/librustc").krate("rustc-main")
     }
-    if let Some(ref s) = build.config.rustc_default_linker {
-        cargo.env("CFG_DEFAULT_LINKER", s);
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rustc {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
     }
-    if let Some(ref s) = build.config.rustc_default_ar {
-        cargo.env("CFG_DEFAULT_AR", s);
+
+    /// Build the compiler.
+    ///
+    /// This will build the compiler for a particular stage of the build using
+    /// the `compiler` targeting the `target` architecture. The artifacts
+    /// created will also be linked into the sysroot directory.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+
+        builder.ensure(Test { compiler, target });
+
+        // Build LLVM for our target. This will implicitly build the host LLVM
+        // if necessary.
+        builder.ensure(native::Llvm { target });
+
+        if build.force_use_stage1(compiler, target) {
+            builder.ensure(Rustc {
+                compiler: builder.compiler(1, build.build),
+                target: target,
+            });
+            println!("Uplifting stage1 rustc ({} -> {})", &build.build, target);
+            builder.ensure(RustcLink {
+                compiler: builder.compiler(1, build.build),
+                target_compiler: compiler,
+                target,
+            });
+            return;
+        }
+
+        // Ensure that build scripts have a std to link against.
+        builder.ensure(Std {
+            compiler: builder.compiler(self.compiler.stage, build.build),
+            target: build.build,
+        });
+
+        let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
+        println!("Building stage{} compiler artifacts ({} -> {})",
+                 compiler.stage, &compiler.host, target);
+
+        let out_dir = build.cargo_out(compiler, Mode::Librustc, target);
+        build.clear_if_dirty(&out_dir, &libtest_stamp(build, compiler, target));
+
+        let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "build");
+        cargo.arg("--features").arg(build.rustc_features())
+             .arg("--manifest-path")
+             .arg(build.src.join("src/rustc/Cargo.toml"));
+
+        // Set some configuration variables picked up by build scripts and
+        // the compiler alike
+        cargo.env("CFG_RELEASE", build.rust_release())
+             .env("CFG_RELEASE_CHANNEL", &build.config.channel)
+             .env("CFG_VERSION", build.rust_version())
+             .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
+
+        if compiler.stage == 0 {
+            cargo.env("CFG_LIBDIR_RELATIVE", "lib");
+        } else {
+            let libdir_relative =
+                build.config.libdir_relative.clone().unwrap_or(PathBuf::from("lib"));
+            cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
+        }
+
+        // If we're not building a compiler with debugging information then remove
+        // these two env vars which would be set otherwise.
+        if build.config.rust_debuginfo_only_std {
+            cargo.env_remove("RUSTC_DEBUGINFO");
+            cargo.env_remove("RUSTC_DEBUGINFO_LINES");
+        }
+
+        if let Some(ref ver_date) = build.rust_info.commit_date() {
+            cargo.env("CFG_VER_DATE", ver_date);
+        }
+        if let Some(ref ver_hash) = build.rust_info.sha() {
+            cargo.env("CFG_VER_HASH", ver_hash);
+        }
+        if !build.unstable_features() {
+            cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1");
+        }
+        // Flag that rust llvm is in use
+        if build.is_rust_llvm(target) {
+            cargo.env("LLVM_RUSTLLVM", "1");
+        }
+        cargo.env("LLVM_CONFIG", build.llvm_config(target));
+        let target_config = build.config.target_config.get(&target);
+        if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
+            cargo.env("CFG_LLVM_ROOT", s);
+        }
+        // Building with a static libstdc++ is only supported on linux right now,
+        // not for MSVC or macOS
+        if build.config.llvm_static_stdcpp &&
+           !target.contains("windows") &&
+           !target.contains("apple") {
+            cargo.env("LLVM_STATIC_STDCPP",
+                      compiler_file(build.cxx(target).unwrap(), "libstdc++.a"));
+        }
+        if build.config.llvm_link_shared {
+            cargo.env("LLVM_LINK_SHARED", "1");
+        }
+        if let Some(ref s) = build.config.rustc_default_linker {
+            cargo.env("CFG_DEFAULT_LINKER", s);
+        }
+        if let Some(ref s) = build.config.rustc_default_ar {
+            cargo.env("CFG_DEFAULT_AR", s);
+        }
+        run_cargo(build,
+                  &mut cargo,
+                  &librustc_stamp(build, compiler, target));
+
+        builder.ensure(RustcLink {
+            compiler: builder.compiler(compiler.stage, build.build),
+            target_compiler: compiler,
+            target,
+        });
     }
-    run_cargo(build,
-              &mut cargo,
-              &librustc_stamp(build, compiler, target));
 }
 
-/// Same as `std_link`, only for librustc
-pub fn rustc_link(build: &Build,
-                  compiler: &Compiler,
-                  target_compiler: &Compiler,
-                  target: &str) {
-    println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
-             target_compiler.stage,
-             compiler.stage,
-             compiler.host,
-             target_compiler.host,
-             target);
-    add_to_sysroot(&build.sysroot_libdir(target_compiler, target),
-                   &librustc_stamp(build, compiler, target));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+struct RustcLink {
+    pub compiler: Compiler,
+    pub target_compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for RustcLink {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Same as `std_link`, only for librustc
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target_compiler = self.target_compiler;
+        let target = self.target;
+        println!("Copying stage{} rustc from stage{} ({} -> {} / {})",
+                 target_compiler.stage,
+                 compiler.stage,
+                 &compiler.host,
+                 target_compiler.host,
+                 target);
+        add_to_sysroot(&builder.sysroot_libdir(target_compiler, target),
+                       &librustc_stamp(build, compiler, target));
+    }
 }
 
 /// Cargo's output path for the standard library in a given stage, compiled
 /// by a particular compiler for the specified target.
-fn libstd_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Libstd, target).join(".libstd.stamp")
 }
 
 /// Cargo's output path for libtest in a given stage, compiled by a particular
 /// compiler for the specified target.
-fn libtest_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn libtest_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Libtest, target).join(".libtest.stamp")
 }
 
 /// Cargo's output path for librustc in a given stage, compiled by a particular
 /// compiler for the specified target.
-fn librustc_stamp(build: &Build, compiler: &Compiler, target: &str) -> PathBuf {
+pub fn librustc_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
     build.cargo_out(compiler, Mode::Librustc, target).join(".librustc.stamp")
 }
 
@@ -342,60 +571,141 @@ fn compiler_file(compiler: &Path, file: &str) -> PathBuf {
     PathBuf::from(out.trim())
 }
 
-pub fn create_sysroot(build: &Build, compiler: &Compiler) {
-    let sysroot = build.sysroot(compiler);
-    let _ = fs::remove_dir_all(&sysroot);
-    t!(fs::create_dir_all(&sysroot));
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Sysroot {
+    pub compiler: Compiler,
 }
 
-/// Prepare a new compiler from the artifacts in `stage`
-///
-/// This will assemble a compiler in `build/$host/stage$stage`. The compiler
-/// must have been previously produced by the `stage - 1` build.build
-/// compiler.
-pub fn assemble_rustc(build: &Build, stage: u32, host: &str) {
-    // nothing to do in stage0
-    if stage == 0 {
-        return
+impl Step for Sysroot {
+    type Output = Interned<PathBuf>;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
 
-    println!("Copying stage{} compiler ({})", stage, host);
+    /// Returns the sysroot for the `compiler` specified that *this build system
+    /// generates*.
+    ///
+    /// That is, the sysroot for the stage0 compiler is not what the compiler
+    /// thinks it is by default, but it's the same as the default for stages
+    /// 1-3.
+    fn run(self, builder: &Builder) -> Interned<PathBuf> {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let sysroot = if compiler.stage == 0 {
+            build.out.join(&compiler.host).join("stage0-sysroot")
+        } else {
+            build.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
+        };
+        let _ = fs::remove_dir_all(&sysroot);
+        t!(fs::create_dir_all(&sysroot));
+        INTERNER.intern_path(sysroot)
+    }
+}
 
-    // The compiler that we're assembling
-    let target_compiler = Compiler::new(stage, host);
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct Assemble {
+    /// The compiler which we will produce in this step. Assemble itself will
+    /// take care of ensuring that the necessary prerequisites to do so exist,
+    /// that is, this target can be a stage2 compiler and Assemble will build
+    /// previous stages for you.
+    pub target_compiler: Compiler,
+}
 
-    // The compiler that compiled the compiler we're assembling
-    let build_compiler = Compiler::new(stage - 1, &build.build);
+impl Step for Assemble {
+    type Output = Compiler;
 
-    // Link in all dylibs to the libdir
-    let sysroot = build.sysroot(&target_compiler);
-    let sysroot_libdir = sysroot.join(libdir(host));
-    t!(fs::create_dir_all(&sysroot_libdir));
-    let src_libdir = build.sysroot_libdir(&build_compiler, host);
-    for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
-        let filename = f.file_name().into_string().unwrap();
-        if is_dylib(&filename) {
-            copy(&f.path(), &sysroot_libdir.join(&filename));
-        }
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/rustc")
     }
 
-    let out_dir = build.cargo_out(&build_compiler, Mode::Librustc, host);
+    /// Prepare a new compiler from the artifacts in `stage`
+    ///
+    /// This will assemble a compiler in `build/$host/stage$stage`. The compiler
+    /// must have been previously produced by the `stage - 1` build.build
+    /// compiler.
+    fn run(self, builder: &Builder) -> Compiler {
+        let build = builder.build;
+        let target_compiler = self.target_compiler;
+
+        if target_compiler.stage == 0 {
+            assert_eq!(build.build, target_compiler.host,
+                "Cannot obtain compiler for non-native build triple at stage 0");
+            // The stage 0 compiler for the build triple is always pre-built.
+            return target_compiler;
+        }
+
+        // Get the compiler that we'll use to bootstrap ourselves.
+        let build_compiler = if target_compiler.host != build.build {
+            // Build a compiler for the host platform. We cannot use the stage0
+            // compiler for the host platform for this because it doesn't have
+            // the libraries we need.  FIXME: Perhaps we should download those
+            // libraries? It would make builds faster...
+            // FIXME: It may be faster if we build just a stage 1
+            // compiler and then use that to bootstrap this compiler
+            // forward.
+            builder.compiler(target_compiler.stage - 1, build.build)
+        } else {
+            // Build the compiler we'll use to build the stage requested. This
+            // may build more than one compiler (going down to stage 0).
+            builder.compiler(target_compiler.stage - 1, target_compiler.host)
+        };
+
+        // Build the libraries for this compiler to link to (i.e., the libraries
+        // it uses at runtime). NOTE: Crates the target compiler compiles don't
+        // link to these. (FIXME: Is that correct? It seems to be correct most
+        // of the time but I think we do link to these for stage2/bin compilers
+        // when not performing a full bootstrap).
+        if builder.build.flags.keep_stage.map_or(false, |s| target_compiler.stage <= s) {
+            builder.verbose("skipping compilation of compiler due to --keep-stage");
+            let compiler = build_compiler;
+            for stage in 0..min(target_compiler.stage, builder.flags.keep_stage.unwrap()) {
+                let target_compiler = builder.compiler(stage, target_compiler.host);
+                let target = target_compiler.host;
+                builder.ensure(StdLink { compiler, target_compiler, target });
+                builder.ensure(TestLink { compiler, target_compiler, target });
+                builder.ensure(RustcLink { compiler, target_compiler, target });
+            }
+        } else {
+            builder.ensure(Rustc { compiler: build_compiler, target: target_compiler.host });
+        }
 
-    // Link the compiler binary itself into place
-    let rustc = out_dir.join(exe("rustc", host));
-    let bindir = sysroot.join("bin");
-    t!(fs::create_dir_all(&bindir));
-    let compiler = build.compiler_path(&target_compiler);
-    let _ = fs::remove_file(&compiler);
-    copy(&rustc, &compiler);
+        let stage = target_compiler.stage;
+        let host = target_compiler.host;
+        println!("Assembling stage{} compiler ({})", stage, host);
+
+        // Link in all dylibs to the libdir
+        let sysroot = builder.sysroot(target_compiler);
+        let sysroot_libdir = sysroot.join(libdir(&*host));
+        t!(fs::create_dir_all(&sysroot_libdir));
+        let src_libdir = builder.sysroot_libdir(build_compiler, host);
+        for f in t!(fs::read_dir(&src_libdir)).map(|f| t!(f)) {
+            let filename = f.file_name().into_string().unwrap();
+            if is_dylib(&filename) {
+                copy(&f.path(), &sysroot_libdir.join(&filename));
+            }
+        }
 
-    // See if rustdoc exists to link it into place
-    let rustdoc = exe("rustdoc", host);
-    let rustdoc_src = out_dir.join(&rustdoc);
-    let rustdoc_dst = bindir.join(&rustdoc);
-    if fs::metadata(&rustdoc_src).is_ok() {
-        let _ = fs::remove_file(&rustdoc_dst);
-        copy(&rustdoc_src, &rustdoc_dst);
+        let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host);
+
+        // Link the compiler binary itself into place
+        let rustc = out_dir.join(exe("rustc", &*host));
+        let bindir = sysroot.join("bin");
+        t!(fs::create_dir_all(&bindir));
+        let compiler = builder.rustc(target_compiler);
+        let _ = fs::remove_file(&compiler);
+        copy(&rustc, &compiler);
+
+        // See if rustdoc exists to link it into place
+        let rustdoc = exe("rustdoc", &*host);
+        let rustdoc_src = out_dir.join(&rustdoc);
+        let rustdoc_dst = bindir.join(&rustdoc);
+        if fs::metadata(&rustdoc_src).is_ok() {
+            let _ = fs::remove_file(&rustdoc_dst);
+            copy(&rustdoc_src, &rustdoc_dst);
+        }
+
+        target_compiler
     }
 }
 
@@ -418,64 +728,6 @@ fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
     }
 }
 
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn maybe_clean_tools(build: &Build, stage: u32, target: &str, mode: Mode) {
-    let compiler = Compiler::new(stage, &build.build);
-
-    let stamp = match mode {
-        Mode::Libstd => libstd_stamp(build, &compiler, target),
-        Mode::Libtest => libtest_stamp(build, &compiler, target),
-        Mode::Librustc => librustc_stamp(build, &compiler, target),
-        _ => panic!(),
-    };
-    let out_dir = build.cargo_out(&compiler, Mode::Tool, target);
-    build.clear_if_dirty(&out_dir, &stamp);
-}
-
-/// Build a tool in `src/tools`
-///
-/// This will build the specified tool with the specified `host` compiler in
-/// `stage` into the normal cargo output directory.
-pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
-    let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
-    println!("Building stage{} tool {} ({})", stage, tool, target);
-
-    let compiler = Compiler::new(stage, &build.build);
-
-    let mut cargo = build.cargo(&compiler, Mode::Tool, target, "build");
-    let dir = build.src.join("src/tools").join(tool);
-    cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
-
-    // We don't want to build tools dynamically as they'll be running across
-    // stages and such and it's just easier if they're not dynamically linked.
-    cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
-
-    if let Some(dir) = build.openssl_install_dir(target) {
-        cargo.env("OPENSSL_STATIC", "1");
-        cargo.env("OPENSSL_DIR", dir);
-        cargo.env("LIBZ_SYS_STATIC", "1");
-    }
-
-    cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
-
-    let info = GitInfo::new(&dir);
-    if let Some(sha) = info.sha() {
-        cargo.env("CFG_COMMIT_HASH", sha);
-    }
-    if let Some(sha_short) = info.sha_short() {
-        cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
-    }
-    if let Some(date) = info.commit_date() {
-        cargo.env("CFG_COMMIT_DATE", date);
-    }
-
-    build.run(&mut cargo);
-}
-
-
 // Avoiding a dependency on winapi to keep compile times down
 #[cfg(unix)]
 fn stderr_isatty() -> bool {
@@ -535,18 +787,18 @@ fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
     let stdout = BufReader::new(child.stdout.take().unwrap());
     for line in stdout.lines() {
         let line = t!(line);
-        let json = if line.starts_with("{") {
-            t!(line.parse::<json::Json>())
+        let json: serde_json::Value = if line.starts_with("{") {
+            t!(serde_json::from_str(&line))
         } else {
             // If this was informational, just print it out and continue
             println!("{}", line);
             continue
         };
-        if json.find("reason").and_then(|j| j.as_string()) != Some("compiler-artifact") {
+        if json["reason"].as_str() != Some("compiler-artifact") {
             continue
         }
         for filename in json["filenames"].as_array().unwrap() {
-            let filename = filename.as_string().unwrap();
+            let filename = filename.as_str().unwrap();
             // Skip files like executables
             if !filename.ends_with(".rlib") &&
                !filename.ends_with(".lib") &&
diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs
index 34628852ab3..b0c58235fc3 100644
--- a/src/bootstrap/config.rs
+++ b/src/bootstrap/config.rs
@@ -21,9 +21,9 @@ use std::path::PathBuf;
 use std::process;
 
 use num_cpus;
-use rustc_serialize::Decodable;
-use toml::{Parser, Decoder, Value};
+use toml;
 use util::{exe, push_exe_path};
+use cache::{INTERNER, Interned};
 
 /// Global configuration for the entire build and/or bootstrap.
 ///
@@ -46,7 +46,7 @@ pub struct Config {
     pub docs: bool,
     pub locked_deps: bool,
     pub vendor: bool,
-    pub target_config: HashMap<String, Target>,
+    pub target_config: HashMap<Interned<String>, Target>,
     pub full_bootstrap: bool,
     pub extended: bool,
     pub sanitizers: bool,
@@ -78,9 +78,9 @@ pub struct Config {
     pub rust_debuginfo_tests: bool,
     pub rust_dist_src: bool,
 
-    pub build: String,
-    pub host: Vec<String>,
-    pub target: Vec<String>,
+    pub build: Interned<String>,
+    pub host: Vec<Interned<String>>,
+    pub target: Vec<Interned<String>>,
     pub local_rebuild: bool,
 
     // dist misc
@@ -138,7 +138,8 @@ pub struct Target {
 /// This structure uses `Decodable` to automatically decode a TOML configuration
 /// file into this format, and then this is traversed and written into the above
 /// `Config` structure.
-#[derive(RustcDecodable, Default)]
+#[derive(Deserialize, Default)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct TomlConfig {
     build: Option<Build>,
     install: Option<Install>,
@@ -149,10 +150,13 @@ struct TomlConfig {
 }
 
 /// TOML representation of various global build decisions.
-#[derive(RustcDecodable, Default, Clone)]
+#[derive(Deserialize, Default, Clone)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct Build {
     build: Option<String>,
+    #[serde(default)]
     host: Vec<String>,
+    #[serde(default)]
     target: Vec<String>,
     cargo: Option<String>,
     rustc: Option<String>,
@@ -174,7 +178,8 @@ struct Build {
 }
 
 /// TOML representation of various global install decisions.
-#[derive(RustcDecodable, Default, Clone)]
+#[derive(Deserialize, Default, Clone)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct Install {
     prefix: Option<String>,
     sysconfdir: Option<String>,
@@ -185,7 +190,8 @@ struct Install {
 }
 
 /// TOML representation of how the LLVM build is configured.
-#[derive(RustcDecodable, Default)]
+#[derive(Deserialize, Default)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct Llvm {
     ccache: Option<StringOrBool>,
     ninja: Option<bool>,
@@ -200,7 +206,8 @@ struct Llvm {
     clean_rebuild: Option<bool>,
 }
 
-#[derive(RustcDecodable, Default, Clone)]
+#[derive(Deserialize, Default, Clone)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct Dist {
     sign_folder: Option<String>,
     gpg_password_file: Option<String>,
@@ -208,7 +215,8 @@ struct Dist {
     src_tarball: Option<bool>,
 }
 
-#[derive(RustcDecodable)]
+#[derive(Deserialize)]
+#[serde(untagged)]
 enum StringOrBool {
     String(String),
     Bool(bool),
@@ -221,7 +229,8 @@ impl Default for StringOrBool {
 }
 
 /// TOML representation of how the Rust build is configured.
-#[derive(RustcDecodable, Default)]
+#[derive(Deserialize, Default)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct Rust {
     optimize: Option<bool>,
     codegen_units: Option<u32>,
@@ -243,7 +252,8 @@ struct Rust {
 }
 
 /// TOML representation of how each build target is configured.
-#[derive(RustcDecodable, Default)]
+#[derive(Deserialize, Default)]
+#[serde(deny_unknown_fields, rename_all = "kebab-case")]
 struct TomlTarget {
     llvm_config: Option<String>,
     jemalloc: Option<String>,
@@ -266,50 +276,39 @@ impl Config {
         config.docs = true;
         config.rust_rpath = true;
         config.rust_codegen_units = 1;
-        config.build = build.to_string();
+        config.build = INTERNER.intern_str(build);
         config.channel = "dev".to_string();
         config.codegen_tests = true;
         config.rust_dist_src = true;
 
         let toml = file.map(|file| {
             let mut f = t!(File::open(&file));
-            let mut toml = String::new();
-            t!(f.read_to_string(&mut toml));
-            let mut p = Parser::new(&toml);
-            let table = match p.parse() {
-                Some(table) => table,
-                None => {
-                    println!("failed to parse TOML configuration '{}':", file.to_str().unwrap());
-                    for err in p.errors.iter() {
-                        let (loline, locol) = p.to_linecol(err.lo);
-                        let (hiline, hicol) = p.to_linecol(err.hi);
-                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
-                                 hicol, err.desc);
-                    }
-                    process::exit(2);
-                }
-            };
-            let mut d = Decoder::new(Value::Table(table));
-            match Decodable::decode(&mut d) {
-                Ok(cfg) => cfg,
-                Err(e) => {
-                    println!("failed to decode TOML: {}", e);
+            let mut contents = String::new();
+            t!(f.read_to_string(&mut contents));
+            match toml::from_str(&contents) {
+                Ok(table) => table,
+                Err(err) => {
+                    println!("failed to parse TOML configuration '{}': {}",
+                        file.display(), err);
                     process::exit(2);
                 }
             }
         }).unwrap_or_else(|| TomlConfig::default());
 
         let build = toml.build.clone().unwrap_or(Build::default());
-        set(&mut config.build, build.build.clone());
+        set(&mut config.build, build.build.clone().map(|x| INTERNER.intern_string(x)));
         config.host.push(config.build.clone());
         for host in build.host.iter() {
-            if !config.host.contains(host) {
-                config.host.push(host.clone());
+            let host = INTERNER.intern_str(host);
+            if !config.host.contains(&host) {
+                config.host.push(host);
             }
         }
-        for target in config.host.iter().chain(&build.target) {
-            if !config.target.contains(target) {
-                config.target.push(target.clone());
+        for target in config.host.iter().cloned()
+            .chain(build.target.iter().map(|s| INTERNER.intern_str(s)))
+        {
+            if !config.target.contains(&target) {
+                config.target.push(target);
             }
         }
         config.nodejs = build.nodejs.map(PathBuf::from);
@@ -402,7 +401,7 @@ impl Config {
                 target.musl_root = cfg.musl_root.clone().map(PathBuf::from);
                 target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);
 
-                config.target_config.insert(triple.clone(), target);
+                config.target_config.insert(INTERNER.intern_string(triple.clone()), target);
             }
         }
 
@@ -504,13 +503,13 @@ impl Config {
             }
 
             match key {
-                "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
+                "CFG_BUILD" if value.len() > 0 => self.build = INTERNER.intern_str(value),
                 "CFG_HOST" if value.len() > 0 => {
-                    self.host.extend(value.split(" ").map(|s| s.to_string()));
+                    self.host.extend(value.split(" ").map(|s| INTERNER.intern_str(s)));
 
                 }
                 "CFG_TARGET" if value.len() > 0 => {
-                    self.target.extend(value.split(" ").map(|s| s.to_string()));
+                    self.target.extend(value.split(" ").map(|s| INTERNER.intern_str(s)));
                 }
                 "CFG_EXPERIMENTAL_TARGETS" if value.len() > 0 => {
                     self.llvm_experimental_targets = Some(value.to_string());
@@ -519,33 +518,28 @@ impl Config {
                     self.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => {
-                    let target = "x86_64-unknown-linux-musl".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("x86_64-unknown-linux-musl");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_MUSL_ROOT_I686" if value.len() > 0 => {
-                    let target = "i686-unknown-linux-musl".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("i686-unknown-linux-musl");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_MUSL_ROOT_ARM" if value.len() > 0 => {
-                    let target = "arm-unknown-linux-musleabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("arm-unknown-linux-musleabi");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => {
-                    let target = "arm-unknown-linux-musleabihf".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("arm-unknown-linux-musleabihf");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => {
-                    let target = "armv7-unknown-linux-musleabihf".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("armv7-unknown-linux-musleabihf");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.musl_root = Some(parse_configure_path(value));
                 }
                 "CFG_DEFAULT_AR" if value.len() > 0 => {
@@ -593,33 +587,28 @@ impl Config {
                     target.jemalloc = Some(parse_configure_path(value).join("libjemalloc_pic.a"));
                 }
                 "CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "arm-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("arm-linux-androideabi");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.ndk = Some(parse_configure_path(value));
                 }
                 "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
-                    let target = "armv7-linux-androideabi".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("armv7-linux-androideabi");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.ndk = Some(parse_configure_path(value));
                 }
                 "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "i686-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("i686-linux-android");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.ndk = Some(parse_configure_path(value));
                 }
                 "CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "aarch64-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("aarch64-linux-android");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.ndk = Some(parse_configure_path(value));
                 }
                 "CFG_X86_64_LINUX_ANDROID_NDK" if value.len() > 0 => {
-                    let target = "x86_64-linux-android".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("x86_64-linux-android");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.ndk = Some(parse_configure_path(value));
                 }
                 "CFG_LOCAL_RUST_ROOT" if value.len() > 0 => {
@@ -643,9 +632,8 @@ impl Config {
                                                .collect();
                 }
                 "CFG_QEMU_ARMHF_ROOTFS" if value.len() > 0 => {
-                    let target = "arm-unknown-linux-gnueabihf".to_string();
-                    let target = self.target_config.entry(target)
-                                     .or_insert(Target::default());
+                    let target = INTERNER.intern_str("arm-unknown-linux-gnueabihf");
+                    let target = self.target_config.entry(target).or_insert(Target::default());
                     target.qemu_rootfs = Some(parse_configure_path(value));
                 }
                 _ => {}
diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs
index 8fae1dd99d8..cdaab9d9c8d 100644
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
@@ -29,6 +29,10 @@ use build_helper::output;
 use {Build, Compiler, Mode};
 use channel;
 use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
+use builder::{Builder, RunConfig, ShouldRun, Step};
+use compile;
+use tool::{self, Tool};
+use cache::{INTERNER, Interned};
 
 pub fn pkgname(build: &Build, component: &str) -> String {
     if component == "cargo" {
@@ -49,50 +53,81 @@ pub fn tmpdir(build: &Build) -> PathBuf {
     build.out.join("tmp/dist")
 }
 
-fn rust_installer(build: &Build) -> Command {
-    build.tool_cmd(&Compiler::new(0, &build.build), "rust-installer")
+fn rust_installer(builder: &Builder) -> Command {
+    builder.tool_cmd(Tool::RustInstaller)
 }
 
-/// Builds the `rust-docs` installer component.
-///
-/// Slurps up documentation from the `stage`'s `host`.
-pub fn docs(build: &Build, stage: u32, host: &str) {
-    println!("Dist docs stage{} ({})", stage, host);
-    if !build.config.docs {
-        println!("\tskipping - docs disabled");
-        return
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Docs {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for Docs {
+    type Output = Option<PathBuf>;
+    const DEFAULT: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/doc")
     }
 
-    let name = pkgname(build, "rust-docs");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("share/doc/rust/html");
-    t!(fs::create_dir_all(&dst));
-    let src = build.out.join(host).join("doc");
-    cp_r(&src, &dst);
-
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust-Documentation")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-documentation-is-installed.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-docs")
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--bulk-dirs=share/doc/rust/html");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-
-    // As part of this step, *also* copy the docs directory to a directory which
-    // buildbot typically uploads.
-    if host == build.build {
-        let dst = distdir(build).join("doc").join(build.rust_package_vers());
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Docs {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    /// Builds the `rust-docs` installer component.
+    ///
+    /// Slurps up documentation from the `stage`'s `target`.
+    fn run(self, builder: &Builder) -> Option<PathBuf> {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+
+        builder.default_doc(None);
+
+        println!("Dist docs stage{} ({})", stage, target);
+        if !build.config.docs {
+            println!("\tskipping - docs disabled");
+            return None;
+        }
+
+        let name = pkgname(build, "rust-docs");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let _ = fs::remove_dir_all(&image);
+
+        let dst = image.join("share/doc/rust/html");
         t!(fs::create_dir_all(&dst));
+        let src = build.out.join(target).join("doc");
         cp_r(&src, &dst);
+
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust-Documentation")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-documentation-is-installed.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=rust-docs")
+           .arg("--legacy-manifest-dirs=rustlib,cargo")
+           .arg("--bulk-dirs=share/doc/rust/html");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+
+        // As part of this step, *also* copy the docs directory to a directory which
+        // buildbot typically uploads.
+        if target == build.build {
+            let dst = distdir(build).join("doc").join(build.rust_package_vers());
+            t!(fs::create_dir_all(&dst));
+            cp_r(&src, &dst);
+        }
+
+        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
     }
 }
 
@@ -115,7 +150,9 @@ fn find_files(files: &[&str], path: &[PathBuf]) -> Vec<PathBuf> {
     found
 }
 
-fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build: &Build) {
+fn make_win_dist(
+    rust_root: &Path, plat_root: &Path, target_triple: Interned<String>, build: &Build
+) {
     //Ask gcc where it keeps its stuff
     let mut cmd = Command::new(build.cc(target_triple));
     cmd.arg("-print-search-dirs");
@@ -222,262 +259,403 @@ fn make_win_dist(rust_root: &Path, plat_root: &Path, target_triple: &str, build:
     }
 }
 
-/// Build the `rust-mingw` installer component.
-///
-/// This contains all the bits and pieces to run the MinGW Windows targets
-/// without any extra installed software (e.g. we bundle gcc, libraries, etc).
-pub fn mingw(build: &Build, host: &str) {
-    println!("Dist mingw ({})", host);
-    let name = pkgname(build, "rust-mingw");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    t!(fs::create_dir_all(&image));
-
-    // The first argument is a "temporary directory" which is just
-    // thrown away (this contains the runtime DLLs included in the rustc package
-    // above) and the second argument is where to place all the MinGW components
-    // (which is what we want).
-    make_win_dist(&tmpdir(build), &image, host, &build);
-
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust-MinGW")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-MinGW-is-installed.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rust-mingw")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Mingw {
+    target: Interned<String>,
 }
 
-/// Creates the `rustc` installer component.
-pub fn rustc(build: &Build, stage: u32, host: &str) {
-    println!("Dist rustc stage{} ({})", stage, host);
-    let name = pkgname(build, "rustc");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, host));
-    let _ = fs::remove_dir_all(&image);
-    let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, host));
-    let _ = fs::remove_dir_all(&overlay);
-
-    // Prepare the rustc "image", what will actually end up getting installed
-    prepare_image(build, stage, host, &image);
-
-    // Prepare the overlay which is part of the tarball but won't actually be
-    // installed
-    let cp = |file: &str| {
-        install(&build.src.join(file), &overlay, 0o644);
-    };
-    cp("COPYRIGHT");
-    cp("LICENSE-APACHE");
-    cp("LICENSE-MIT");
-    cp("README.md");
-    // tiny morsel of metadata is used by rust-packaging
-    let version = build.rust_version();
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // On MinGW we've got a few runtime DLL dependencies that we need to
-    // include. The first argument to this script is where to put these DLLs
-    // (the image we're creating), and the second argument is a junk directory
-    // to ignore all other MinGW stuff the script creates.
-    //
-    // On 32-bit MinGW we're always including a DLL which needs some extra
-    // licenses to distribute. On 64-bit MinGW we don't actually distribute
-    // anything requiring us to distribute a license, but it's likely the
-    // install will *also* include the rust-mingw package, which also needs
-    // licenses, so to be safe we just include it here in all MinGW packages.
-    if host.contains("pc-windows-gnu") {
-        make_win_dist(&image, &tmpdir(build), host, build);
-
-        let dst = image.join("share/doc");
-        t!(fs::create_dir_all(&dst));
-        cp_r(&build.src.join("src/etc/third-party"), &dst);
+impl Step for Mingw {
+    type Output = Option<PathBuf>;
+    const DEFAULT: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
 
-    // Finally, wrap everything up in a nice tarball!
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, host))
-       .arg("--component-name=rustc")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&overlay));
-
-    fn prepare_image(build: &Build, stage: u32, host: &str, image: &Path) {
-        let src = build.sysroot(&Compiler::new(stage, host));
-        let libdir = libdir(host);
-
-        // Copy rustc/rustdoc binaries
-        t!(fs::create_dir_all(image.join("bin")));
-        cp_r(&src.join("bin"), &image.join("bin"));
-
-        // Copy runtime DLLs needed by the compiler
-        if libdir != "bin" {
-            for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
-                let name = entry.file_name();
-                if let Some(s) = name.to_str() {
-                    if is_dylib(s) {
-                        install(&entry.path(), &image.join(libdir), 0o644);
-                    }
-                }
-            }
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Mingw { target: run.target });
+    }
+
+    /// Build the `rust-mingw` installer component.
+    ///
+    /// This contains all the bits and pieces to run the MinGW Windows targets
+    /// without any extra installed software (e.g. we bundle gcc, libraries, etc).
+    fn run(self, builder: &Builder) -> Option<PathBuf> {
+        let build = builder.build;
+        let target = self.target;
+
+        if !target.contains("pc-windows-gnu") {
+            return None;
         }
 
-        // Man pages
-        t!(fs::create_dir_all(image.join("share/man/man1")));
-        cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+        println!("Dist mingw ({})", target);
+        let name = pkgname(build, "rust-mingw");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let _ = fs::remove_dir_all(&image);
+        t!(fs::create_dir_all(&image));
+
+        // The first argument is a "temporary directory" which is just
+        // thrown away (this contains the runtime DLLs included in the rustc package
+        // above) and the second argument is where to place all the MinGW components
+        // (which is what we want).
+        make_win_dist(&tmpdir(build), &image, target, &build);
+
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust-MinGW")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-MinGW-is-installed.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=rust-mingw")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Rustc {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for Rustc {
+    type Output = PathBuf;
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/librustc")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rustc {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
 
-        // Debugger scripts
-        debugger_scripts(build, &image, host);
+    /// Creates the `rustc` installer component.
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
 
-        // Misc license info
+        let compiler = builder.ensure(compile::Assemble {
+            target_compiler: builder.compiler(stage, build.build),
+        });
+
+        println!("Dist rustc stage{} ({})", stage, target);
+        let name = pkgname(build, "rustc");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let _ = fs::remove_dir_all(&image);
+        let overlay = tmpdir(build).join(format!("{}-{}-overlay", name, target));
+        let _ = fs::remove_dir_all(&overlay);
+
+        // Prepare the rustc "image", what will actually end up getting installed
+        prepare_image(builder, compiler, target, &image);
+
+        // Prepare the overlay which is part of the tarball but won't actually be
+        // installed
         let cp = |file: &str| {
-            install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+            install(&build.src.join(file), &overlay, 0o644);
         };
         cp("COPYRIGHT");
         cp("LICENSE-APACHE");
         cp("LICENSE-MIT");
         cp("README.md");
+        // tiny morsel of metadata is used by rust-packaging
+        let version = build.rust_version();
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // On MinGW we've got a few runtime DLL dependencies that we need to
+        // include. The first argument to this script is where to put these DLLs
+        // (the image we're creating), and the second argument is a junk directory
+        // to ignore all other MinGW stuff the script creates.
+        //
+        // On 32-bit MinGW we're always including a DLL which needs some extra
+        // licenses to distribute. On 64-bit MinGW we don't actually distribute
+        // anything requiring us to distribute a license, but it's likely the
+        // install will *also* include the rust-mingw package, which also needs
+        // licenses, so to be safe we just include it here in all MinGW packages.
+        if target.contains("pc-windows-gnu") {
+            make_win_dist(&image, &tmpdir(build), target, build);
+
+            let dst = image.join("share/doc");
+            t!(fs::create_dir_all(&dst));
+            cp_r(&build.src.join("src/etc/third-party"), &dst);
+        }
+
+        // Finally, wrap everything up in a nice tarball!
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-is-ready-to-roll.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=rustc")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+        t!(fs::remove_dir_all(&overlay));
+
+        return distdir(build).join(format!("{}-{}.tar.gz", name, target));
+
+        fn prepare_image(
+            builder: &Builder, compiler: Compiler, target: Interned<String>, image: &Path
+        ) {
+            let build = builder.build;
+            let src = builder.sysroot(compiler);
+            let libdir = libdir(&target);
+
+            // Copy rustc/rustdoc binaries
+            t!(fs::create_dir_all(image.join("bin")));
+            cp_r(&src.join("bin"), &image.join("bin"));
+
+            // Copy runtime DLLs needed by the compiler
+            if libdir != "bin" {
+                for entry in t!(src.join(libdir).read_dir()).map(|e| t!(e)) {
+                    let name = entry.file_name();
+                    if let Some(s) = name.to_str() {
+                        if is_dylib(s) {
+                            install(&entry.path(), &image.join(libdir), 0o644);
+                        }
+                    }
+                }
+            }
+
+            // Man pages
+            t!(fs::create_dir_all(image.join("share/man/man1")));
+            cp_r(&build.src.join("man"), &image.join("share/man/man1"));
+
+            // Debugger scripts
+            builder.ensure(DebuggerScripts {
+                sysroot: INTERNER.intern_path(image.to_owned()),
+                target: target,
+            });
+
+            // Misc license info
+            let cp = |file: &str| {
+                install(&build.src.join(file), &image.join("share/doc/rust"), 0o644);
+            };
+            cp("COPYRIGHT");
+            cp("LICENSE-APACHE");
+            cp("LICENSE-MIT");
+            cp("README.md");
+        }
     }
 }
 
-/// Copies debugger scripts for `host` into the `sysroot` specified.
-pub fn debugger_scripts(build: &Build,
-                        sysroot: &Path,
-                        host: &str) {
-    let dst = sysroot.join("lib/rustlib/etc");
-    t!(fs::create_dir_all(&dst));
-    let cp_debugger_script = |file: &str| {
-        install(&build.src.join("src/etc/").join(file), &dst, 0o644);
-    };
-    if host.contains("windows-msvc") {
-        // windbg debugger scripts
-        install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
-            0o755);
-
-        cp_debugger_script("natvis/liballoc.natvis");
-        cp_debugger_script("natvis/libcore.natvis");
-    } else {
-        cp_debugger_script("debugger_pretty_printers_common.py");
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct DebuggerScripts {
+    pub sysroot: Interned<PathBuf>,
+    pub target: Interned<String>,
+}
 
-        // gdb debugger scripts
-        install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
-                0o755);
+impl Step for DebuggerScripts {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/lldb_batchmode.py")
+    }
 
-        cp_debugger_script("gdb_load_rust_pretty_printers.py");
-        cp_debugger_script("gdb_rust_pretty_printing.py");
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(DebuggerScripts {
+            sysroot: run.builder.sysroot(run.builder.compiler(run.builder.top_stage, run.host)),
+            target: run.target,
+        });
+    }
 
-        // lldb debugger scripts
-        install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+    /// Copies debugger scripts for `target` into the `sysroot` specified.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let sysroot = self.sysroot;
+        let dst = sysroot.join("lib/rustlib/etc");
+        t!(fs::create_dir_all(&dst));
+        let cp_debugger_script = |file: &str| {
+            install(&build.src.join("src/etc/").join(file), &dst, 0o644);
+        };
+        if target.contains("windows-msvc") {
+            // windbg debugger scripts
+            install(&build.src.join("src/etc/rust-windbg.cmd"), &sysroot.join("bin"),
                 0o755);
 
-        cp_debugger_script("lldb_rust_formatters.py");
+            cp_debugger_script("natvis/liballoc.natvis");
+            cp_debugger_script("natvis/libcore.natvis");
+        } else {
+            cp_debugger_script("debugger_pretty_printers_common.py");
+
+            // gdb debugger scripts
+            install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"),
+                    0o755);
+
+            cp_debugger_script("gdb_load_rust_pretty_printers.py");
+            cp_debugger_script("gdb_rust_pretty_printing.py");
+
+            // lldb debugger scripts
+            install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"),
+                    0o755);
+
+            cp_debugger_script("lldb_rust_formatters.py");
+        }
     }
 }
 
-/// Creates the `rust-std` installer component as compiled by `compiler` for the
-/// target `target`.
-pub fn std(build: &Build, compiler: &Compiler, target: &str) {
-    println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
-             target);
-
-    // The only true set of target libraries came from the build triple, so
-    // let's reduce redundant work by only producing archives from that host.
-    if compiler.host != build.build {
-        println!("\tskipping, not a build host");
-        return
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Std {
+    pub compiler: Compiler,
+    pub target: Interned<String>,
+}
+
+impl Step for Std {
+    type Output = Option<PathBuf>;
+    const DEFAULT: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/libstd")
     }
 
-    let name = pkgname(build, "rust-std");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("lib/rustlib").join(target);
-    t!(fs::create_dir_all(&dst));
-    let mut src = build.sysroot_libdir(compiler, target);
-    src.pop(); // Remove the trailing /lib folder from the sysroot_libdir
-    cp_r(&src, &dst);
-
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=std-is-standing-at-the-ready.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-std-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
-}
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Std {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) -> Option<PathBuf> {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
 
-/// The path to the complete rustc-src tarball
-pub fn rust_src_location(build: &Build) -> PathBuf {
-    let plain_name = format!("rustc-{}-src", build.rust_package_vers());
-    distdir(build).join(&format!("{}.tar.gz", plain_name))
+        println!("Dist std stage{} ({} -> {})", compiler.stage, &compiler.host,
+                 target);
+
+        // The only true set of target libraries come from the build triple, so
+        // let's reduce redundant work by only producing archives from that host.
+        if compiler.host != build.build {
+            println!("\tskipping, not a build host");
+            return None;
+        }
+
+        // We want to package up as many target libraries as possible
+        // for the `rust-std` package, so if this is a host target we
+        // depend on librustc and otherwise we just depend on libtest.
+        if build.config.host.iter().any(|t| t == target) {
+            builder.ensure(compile::Rustc { compiler, target });
+        } else {
+            builder.ensure(compile::Test { compiler, target });
+        }
+
+        let name = pkgname(build, "rust-std");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+        let _ = fs::remove_dir_all(&image);
+
+        let dst = image.join("lib/rustlib").join(target);
+        t!(fs::create_dir_all(&dst));
+        let mut src = builder.sysroot_libdir(compiler, target).to_path_buf();
+        src.pop(); // Remove the trailing /lib folder from the sysroot_libdir
+        cp_r(&src, &dst);
+
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=std-is-standing-at-the-ready.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg(format!("--component-name=rust-std-{}", target))
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+    }
 }
 
-/// The path to the rust-src component installer
-pub fn rust_src_installer(build: &Build) -> PathBuf {
-    let name = pkgname(build, "rust-src");
-    distdir(build).join(&format!("{}.tar.gz", name))
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Analysis {
+    pub compiler: Compiler,
+    pub target: Interned<String>,
 }
 
-/// Creates a tarball of save-analysis metadata, if available.
-pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
-    assert!(build.config.extended);
-    println!("Dist analysis");
+impl Step for Analysis {
+    type Output = Option<PathBuf>;
+    const DEFAULT: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
 
-    if compiler.host != build.build {
-        println!("\tskipping, not a build host");
-        return;
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("analysis").default_condition(builder.build.config.extended)
     }
 
-    // Package save-analysis from stage1 if not doing a full bootstrap, as the
-    // stage2 artifacts is simply copied from stage1 in that case.
-    let compiler = if build.force_use_stage1(compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler.clone()
-    };
-
-    let name = pkgname(build, "rust-analysis");
-    let image = tmpdir(build).join(format!("{}-{}-image", name, target));
-
-    let src = build.stage_out(&compiler, Mode::Libstd).join(target).join("release").join("deps");
-
-    let image_src = src.join("save-analysis");
-    let dst = image.join("lib/rustlib").join(target).join("analysis");
-    t!(fs::create_dir_all(&dst));
-    println!("image_src: {:?}, dst: {:?}", image_src, dst);
-    cp_r(&image_src, &dst);
-
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=save-analysis-saved.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg(format!("--component-name=rust-analysis-{}", target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-    t!(fs::remove_dir_all(&image));
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Analysis {
+            compiler: run.builder.compiler(run.builder.top_stage, run.host),
+            target: run.target,
+        });
+    }
+
+    /// Creates a tarball of save-analysis metadata, if available.
+    fn run(self, builder: &Builder) -> Option<PathBuf> {
+        let build = builder.build;
+        let compiler = self.compiler;
+        let target = self.target;
+        assert!(build.config.extended);
+        println!("Dist analysis");
+
+        if &compiler.host != build.build {
+            println!("\tskipping, not a build host");
+            return None;
+        }
+
+        // Package save-analysis from stage1 if not doing a full bootstrap, as the
+        // stage2 artifacts are simply copied from stage1 in that case.
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
+        } else {
+            compiler.clone()
+        };
+
+        let name = pkgname(build, "rust-analysis");
+        let image = tmpdir(build).join(format!("{}-{}-image", name, target));
+
+        let src = build.stage_out(compiler, Mode::Libstd)
+            .join(target).join("release").join("deps");
+
+        let image_src = src.join("save-analysis");
+        let dst = image.join("lib/rustlib").join(target).join("analysis");
+        t!(fs::create_dir_all(&dst));
+        println!("image_src: {:?}, dst: {:?}", image_src, dst);
+        cp_r(&image_src, &dst);
+
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=save-analysis-saved.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg(format!("--component-name=rust-analysis-{}", target))
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        t!(fs::remove_dir_all(&image));
+        Some(distdir(build).join(format!("{}-{}.tar.gz", name, target)))
+    }
 }
 
 fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
@@ -520,149 +698,196 @@ fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_di
     }
 }
 
-/// Creates the `rust-src` installer component
-pub fn rust_src(build: &Build) {
-    println!("Dist src");
-
-    let name = pkgname(build, "rust-src");
-    let image = tmpdir(build).join(format!("{}-image", name));
-    let _ = fs::remove_dir_all(&image);
-
-    let dst = image.join("lib/rustlib/src");
-    let dst_src = dst.join("rust");
-    t!(fs::create_dir_all(&dst_src));
-
-    // This is the reduced set of paths which will become the rust-src component
-    // (essentially libstd and all of its path dependencies)
-    let std_src_dirs = [
-        "src/build_helper",
-        "src/liballoc",
-        "src/liballoc_jemalloc",
-        "src/liballoc_system",
-        "src/libbacktrace",
-        "src/libcollections",
-        "src/libcompiler_builtins",
-        "src/libcore",
-        "src/liblibc",
-        "src/libpanic_abort",
-        "src/libpanic_unwind",
-        "src/librand",
-        "src/librustc_asan",
-        "src/librustc_lsan",
-        "src/librustc_msan",
-        "src/librustc_tsan",
-        "src/libstd",
-        "src/libstd_unicode",
-        "src/libunwind",
-        "src/rustc/compiler_builtins_shim",
-        "src/rustc/libc_shim",
-        "src/libtest",
-        "src/libterm",
-        "src/jemalloc",
-        "src/libprofiler_builtins",
-    ];
-    let std_src_dirs_exclude = [
-        "src/compiler-rt/test",
-        "src/jemalloc/test/unit",
-    ];
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Src;
+
+impl Step for Src {
+    /// The output path of the src installer tarball
+    type Output = PathBuf;
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Src);
+    }
+
+    /// Creates the `rust-src` installer component
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        println!("Dist src");
+
+        let name = pkgname(build, "rust-src");
+        let image = tmpdir(build).join(format!("{}-image", name));
+        let _ = fs::remove_dir_all(&image);
+
+        let dst = image.join("lib/rustlib/src");
+        let dst_src = dst.join("rust");
+        t!(fs::create_dir_all(&dst_src));
+
+        // This is the reduced set of paths which will become the rust-src component
+        // (essentially libstd and all of its path dependencies)
+        let std_src_dirs = [
+            "src/build_helper",
+            "src/liballoc",
+            "src/liballoc_jemalloc",
+            "src/liballoc_system",
+            "src/libbacktrace",
+            "src/libcollections",
+            "src/libcompiler_builtins",
+            "src/libcore",
+            "src/liblibc",
+            "src/libpanic_abort",
+            "src/libpanic_unwind",
+            "src/librand",
+            "src/librustc_asan",
+            "src/librustc_lsan",
+            "src/librustc_msan",
+            "src/librustc_tsan",
+            "src/libstd",
+            "src/libstd_unicode",
+            "src/libunwind",
+            "src/rustc/compiler_builtins_shim",
+            "src/rustc/libc_shim",
+            "src/libtest",
+            "src/libterm",
+            "src/jemalloc",
+            "src/libprofiler_builtins",
+        ];
+        let std_src_dirs_exclude = [
+            "src/compiler-rt/test",
+            "src/jemalloc/test/unit",
+        ];
+
+        copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
+
+        // Create source tarball in rust-installer format
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Awesome-Source.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg(format!("--package-name={}", name))
+           .arg("--component-name=rust-src")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
 
-    copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
-
-    // Create source tarball in rust-installer format
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Awesome-Source.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}", name))
-       .arg("--component-name=rust-src")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
-
-    t!(fs::remove_dir_all(&image));
+        t!(fs::remove_dir_all(&image));
+        distdir(build).join(&format!("{}.tar.gz", name))
+    }
 }
 
 const CARGO_VENDOR_VERSION: &str = "0.1.4";
 
-/// Creates the plain source tarball
-pub fn plain_source_tarball(build: &Build) {
-    println!("Create plain source tarball");
-
-    // Make sure that the root folder of tarball has the correct name
-    let plain_name = format!("{}-src", pkgname(build, "rustc"));
-    let plain_dst_src = tmpdir(build).join(&plain_name);
-    let _ = fs::remove_dir_all(&plain_dst_src);
-    t!(fs::create_dir_all(&plain_dst_src));
-
-    // This is the set of root paths which will become part of the source package
-    let src_files = [
-        "COPYRIGHT",
-        "LICENSE-APACHE",
-        "LICENSE-MIT",
-        "CONTRIBUTING.md",
-        "README.md",
-        "RELEASES.md",
-        "configure",
-        "x.py",
-    ];
-    let src_dirs = [
-        "man",
-        "src",
-    ];
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct PlainSourceTarball;
 
-    copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+impl Step for PlainSourceTarball {
+    /// Produces the location of the tarball generated
+    type Output = PathBuf;
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_BUILD: bool = true;
 
-    // Copy the files normally
-    for item in &src_files {
-        copy(&build.src.join(item), &plain_dst_src.join(item));
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src").default_condition(builder.config.rust_dist_src)
     }
 
-    // Create the version file
-    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(PlainSourceTarball);
+    }
+
+    /// Creates the plain source tarball
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        println!("Create plain source tarball");
+
+        // Make sure that the root folder of tarball has the correct name
+        let plain_name = format!("{}-src", pkgname(build, "rustc"));
+        let plain_dst_src = tmpdir(build).join(&plain_name);
+        let _ = fs::remove_dir_all(&plain_dst_src);
+        t!(fs::create_dir_all(&plain_dst_src));
+
+        // This is the set of root paths which will become part of the source package
+        let src_files = [
+            "COPYRIGHT",
+            "LICENSE-APACHE",
+            "LICENSE-MIT",
+            "CONTRIBUTING.md",
+            "README.md",
+            "RELEASES.md",
+            "configure",
+            "x.py",
+        ];
+        let src_dirs = [
+            "man",
+            "src",
+        ];
 
-    // If we're building from git sources, we need to vendor a complete distribution.
-    if build.rust_info.is_git() {
-        // Get cargo-vendor installed, if it isn't already.
-        let mut has_cargo_vendor = false;
-        let mut cmd = Command::new(&build.initial_cargo);
-        for line in output(cmd.arg("install").arg("--list")).lines() {
-            has_cargo_vendor |= line.starts_with("cargo-vendor ");
+        copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+
+        // Copy the files normally
+        for item in &src_files {
+            copy(&build.src.join(item), &plain_dst_src.join(item));
         }
-        if !has_cargo_vendor {
+
+        // Create the version file
+        write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+
+        // If we're building from git sources, we need to vendor a complete distribution.
+        if build.rust_info.is_git() {
+            // Get cargo-vendor installed, if it isn't already.
+            let mut has_cargo_vendor = false;
             let mut cmd = Command::new(&build.initial_cargo);
-            cmd.arg("install")
-               .arg("--force")
-               .arg("--debug")
-               .arg("--vers").arg(CARGO_VENDOR_VERSION)
-               .arg("cargo-vendor")
-               .env("RUSTC", &build.initial_rustc);
+            for line in output(cmd.arg("install").arg("--list")).lines() {
+                has_cargo_vendor |= line.starts_with("cargo-vendor ");
+            }
+            if !has_cargo_vendor {
+                let mut cmd = Command::new(&build.initial_cargo);
+                cmd.arg("install")
+                   .arg("--force")
+                   .arg("--debug")
+                   .arg("--vers").arg(CARGO_VENDOR_VERSION)
+                   .arg("cargo-vendor")
+                   .env("RUSTC", &build.initial_rustc);
+                build.run(&mut cmd);
+            }
+
+            // Vendor all Cargo dependencies
+            let mut cmd = Command::new(&build.initial_cargo);
+            cmd.arg("vendor")
+               .current_dir(&plain_dst_src.join("src"));
             build.run(&mut cmd);
         }
 
-        // Vendor all Cargo dependencies
-        let mut cmd = Command::new(&build.initial_cargo);
-        cmd.arg("vendor")
-           .current_dir(&plain_dst_src.join("src"));
+        // Create plain source tarball
+        let plain_name = format!("rustc-{}-src", build.rust_package_vers());
+        let mut tarball = distdir(build).join(&format!("{}.tar.gz", plain_name));
+        tarball.set_extension(""); // strip .gz
+        tarball.set_extension(""); // strip .tar
+        if let Some(dir) = tarball.parent() {
+            t!(fs::create_dir_all(dir));
+        }
+        println!("running installer");
+        let mut cmd = rust_installer(builder);
+        cmd.arg("tarball")
+           .arg("--input").arg(&plain_name)
+           .arg("--output").arg(&tarball)
+           .arg("--work-dir=.")
+           .current_dir(tmpdir(build));
         build.run(&mut cmd);
+        distdir(build).join(&format!("{}.tar.gz", plain_name))
     }
-
-    // Create plain source tarball
-    let mut tarball = rust_src_location(build);
-    tarball.set_extension(""); // strip .gz
-    tarball.set_extension(""); // strip .tar
-    if let Some(dir) = tarball.parent() {
-        t!(fs::create_dir_all(dir));
-    }
-    let mut cmd = rust_installer(build);
-    cmd.arg("tarball")
-       .arg("--input").arg(&plain_name)
-       .arg("--output").arg(&tarball)
-       .arg("--work-dir=.")
-       .current_dir(tmpdir(build));
-    build.run(&mut cmd);
 }
 
 fn install(src: &Path, dstdir: &Path, perms: u32) {
@@ -704,471 +929,546 @@ fn write_file(path: &Path, data: &[u8]) {
     t!(vf.write_all(data));
 }
 
-pub fn cargo(build: &Build, stage: u32, target: &str) {
-    println!("Dist cargo stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.build);
-
-    let src = build.src.join("src/tools/cargo");
-    let etc = src.join("src/etc");
-    let release_num = build.release_num("cargo");
-    let name = pkgname(build, "cargo");
-    let version = build.cargo_info.version(build, &release_num);
-
-    let tmp = tmpdir(build);
-    let image = tmp.join("cargo-image");
-    drop(fs::remove_dir_all(&image));
-    t!(fs::create_dir_all(&image));
-
-    // Prepare the image directory
-    t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
-    t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
-    let cargo = build.cargo_out(&compiler, Mode::Tool, target)
-                     .join(exe("cargo", target));
-    install(&cargo, &image.join("bin"), 0o755);
-    for man in t!(etc.join("man").read_dir()) {
-        let man = t!(man);
-        install(&man.path(), &image.join("share/man/man1"), 0o644);
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Cargo {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for Cargo {
+    type Output = PathBuf;
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("cargo")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Cargo {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+
+        builder.ensure(tool::Cargo { stage, target });
+
+        println!("Dist cargo stage{} ({})", stage, target);
+        let compiler = builder.compiler(stage, build.build);
+
+        let src = build.src.join("src/tools/cargo");
+        let etc = src.join("src/etc");
+        let release_num = build.release_num("cargo");
+        let name = pkgname(build, "cargo");
+        let version = builder.cargo_info.version(build, &release_num);
+
+        let tmp = tmpdir(build);
+        let image = tmp.join("cargo-image");
+        drop(fs::remove_dir_all(&image));
+        t!(fs::create_dir_all(&image));
+
+        // Prepare the image directory
+        t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
+        t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
+        let cargo = build.cargo_out(compiler, Mode::Tool, target)
+                         .join(exe("cargo", &target));
+        install(&cargo, &image.join("bin"), 0o755);
+        for man in t!(etc.join("man").read_dir()) {
+            let man = t!(man);
+            install(&man.path(), &image.join("share/man/man1"), 0o644);
+        }
+        install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
+        copy(&etc.join("cargo.bashcomp.sh"),
+             &image.join("etc/bash_completion.d/cargo"));
+        let doc = image.join("share/doc/cargo");
+        install(&src.join("README.md"), &doc, 0o644);
+        install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+        install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
+
+        // Prepare the overlay
+        let overlay = tmp.join("cargo-overlay");
+        drop(fs::remove_dir_all(&overlay));
+        t!(fs::create_dir_all(&overlay));
+        install(&src.join("README.md"), &overlay, 0o644);
+        install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // Generate the installer tarball
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=Rust-is-ready-to-roll.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=cargo")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
+        build.run(&mut cmd);
+        distdir(build).join(format!("{}-{}.tar.gz", name, target))
     }
-    install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
-    copy(&etc.join("cargo.bashcomp.sh"),
-         &image.join("etc/bash_completion.d/cargo"));
-    let doc = image.join("share/doc/cargo");
-    install(&src.join("README.md"), &doc, 0o644);
-    install(&src.join("LICENSE-MIT"), &doc, 0o644);
-    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
-    install(&src.join("LICENSE-THIRD-PARTY"), &doc, 0o644);
-
-    // Prepare the overlay
-    let overlay = tmp.join("cargo-overlay");
-    drop(fs::remove_dir_all(&overlay));
-    t!(fs::create_dir_all(&overlay));
-    install(&src.join("README.md"), &overlay, 0o644);
-    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-    install(&src.join("LICENSE-THIRD-PARTY"), &overlay, 0o644);
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // Generate the installer tarball
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg("--component-name=cargo")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
 }
 
-pub fn rls(build: &Build, stage: u32, target: &str) {
-    assert!(build.config.extended);
-    println!("Dist RLS stage{} ({})", stage, target);
-    let compiler = Compiler::new(stage, &build.build);
-
-    let src = build.src.join("src/tools/rls");
-    let release_num = build.release_num("rls");
-    let name = pkgname(build, "rls");
-    let version = build.rls_info.version(build, &release_num);
-
-    let tmp = tmpdir(build);
-    let image = tmp.join("rls-image");
-    drop(fs::remove_dir_all(&image));
-    t!(fs::create_dir_all(&image));
-
-    // Prepare the image directory
-    let rls = build.cargo_out(&compiler, Mode::Tool, target)
-                     .join(exe("rls", target));
-    install(&rls, &image.join("bin"), 0o755);
-    let doc = image.join("share/doc/rls");
-    install(&src.join("README.md"), &doc, 0o644);
-    install(&src.join("LICENSE-MIT"), &doc, 0o644);
-    install(&src.join("LICENSE-APACHE"), &doc, 0o644);
-
-    // Prepare the overlay
-    let overlay = tmp.join("rls-overlay");
-    drop(fs::remove_dir_all(&overlay));
-    t!(fs::create_dir_all(&overlay));
-    install(&src.join("README.md"), &overlay, 0o644);
-    install(&src.join("LICENSE-MIT"), &overlay, 0o644);
-    install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-
-    // Generate the installer tarball
-    let mut cmd = rust_installer(build);
-    cmd.arg("generate")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=RLS-ready-to-serve.")
-       .arg("--image-dir").arg(&image)
-       .arg("--work-dir").arg(&tmpdir(build))
-       .arg("--output-dir").arg(&distdir(build))
-       .arg("--non-installed-overlay").arg(&overlay)
-       .arg(format!("--package-name={}-{}", name, target))
-       .arg("--component-name=rls")
-       .arg("--legacy-manifest-dirs=rustlib,cargo");
-    build.run(&mut cmd);
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Rls {
+    pub stage: u32,
+    pub target: Interned<String>,
 }
 
-/// Creates a combined installer for the specified target in the provided stage.
-pub fn extended(build: &Build, stage: u32, target: &str) {
-    println!("Dist extended stage{} ({})", stage, target);
-
-    let dist = distdir(build);
-    let rustc_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "rustc"),
-                                            target));
-    let cargo_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "cargo"),
-                                            target));
-    let rls_installer = dist.join(format!("{}-{}.tar.gz",
-                                          pkgname(build, "rls"),
-                                          target));
-    let analysis_installer = dist.join(format!("{}-{}.tar.gz",
-                                               pkgname(build, "rust-analysis"),
-                                               target));
-    let docs_installer = dist.join(format!("{}-{}.tar.gz",
-                                           pkgname(build, "rust-docs"),
-                                           target));
-    let mingw_installer = dist.join(format!("{}-{}.tar.gz",
-                                            pkgname(build, "rust-mingw"),
-                                            target));
-    let std_installer = dist.join(format!("{}-{}.tar.gz",
-                                          pkgname(build, "rust-std"),
-                                          target));
-
-    let tmp = tmpdir(build);
-    let overlay = tmp.join("extended-overlay");
-    let etc = build.src.join("src/etc/installer");
-    let work = tmp.join("work");
-
-    let _ = fs::remove_dir_all(&overlay);
-    install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
-    install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
-    install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
-    let version = build.rust_version();
-    t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
-    install(&etc.join("README.md"), &overlay, 0o644);
-
-    // When rust-std package split from rustc, we needed to ensure that during
-    // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
-    // the std files during uninstall. To do this ensure that rustc comes
-    // before rust-std in the list below.
-    let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
-                            analysis_installer, docs_installer, std_installer];
-    if target.contains("pc-windows-gnu") {
-        tarballs.push(mingw_installer);
-    }
-    let mut input_tarballs = tarballs[0].as_os_str().to_owned();
-    for tarball in &tarballs[1..] {
-        input_tarballs.push(",");
-        input_tarballs.push(tarball);
+impl Step for Rls {
+    type Output = PathBuf;
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("rls")
     }
 
-    let mut cmd = rust_installer(build);
-    cmd.arg("combine")
-       .arg("--product-name=Rust")
-       .arg("--rel-manifest-dir=rustlib")
-       .arg("--success-message=Rust-is-ready-to-roll.")
-       .arg("--work-dir").arg(&work)
-       .arg("--output-dir").arg(&distdir(build))
-       .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
-       .arg("--legacy-manifest-dirs=rustlib,cargo")
-       .arg("--input-tarballs").arg(input_tarballs)
-       .arg("--non-installed-overlay").arg(&overlay);
-    build.run(&mut cmd);
-
-    let mut license = String::new();
-    t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
-    license.push_str("\n");
-    t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
-    license.push_str("\n");
-    t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));
-
-    let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
-    let mut rtf = rtf.to_string();
-    rtf.push_str("\n");
-    for line in license.lines() {
-        rtf.push_str(line);
-        rtf.push_str("\\line ");
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rls {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
     }
-    rtf.push_str("}");
-
-    if target.contains("apple-darwin") {
-        let pkg = tmp.join("pkg");
-        let _ = fs::remove_dir_all(&pkg);
-        t!(fs::create_dir_all(pkg.join("rustc")));
-        t!(fs::create_dir_all(pkg.join("cargo")));
-        t!(fs::create_dir_all(pkg.join("rust-docs")));
-        t!(fs::create_dir_all(pkg.join("rust-std")));
-        t!(fs::create_dir_all(pkg.join("rls")));
-        t!(fs::create_dir_all(pkg.join("rust-analysis")));
-
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
-             &pkg.join("rustc"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)),
-             &pkg.join("cargo"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
-             &pkg.join("rust-docs"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
-             &pkg.join("rust-std"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)),
-             &pkg.join("rls"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)),
-             &pkg.join("rust-analysis"));
-
-        install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755);
-        install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755);
-
-        let pkgbuild = |component: &str| {
-            let mut cmd = Command::new("pkgbuild");
-            cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
-               .arg("--scripts").arg(pkg.join(component))
-               .arg("--nopayload")
-               .arg(pkg.join(component).with_extension("pkg"));
-            build.run(&mut cmd);
-        };
-        pkgbuild("rustc");
-        pkgbuild("cargo");
-        pkgbuild("rust-docs");
-        pkgbuild("rust-std");
-        pkgbuild("rls");
-        pkgbuild("rust-analysis");
-
-        // create an 'uninstall' package
-        install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
-        pkgbuild("uninstall");
-
-        t!(fs::create_dir_all(pkg.join("res")));
-        t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
-        install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
-        let mut cmd = Command::new("productbuild");
-        cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
-           .arg("--resources").arg(pkg.join("res"))
-           .arg(distdir(build).join(format!("{}-{}.pkg",
-                                             pkgname(build, "rust"),
-                                             target)))
-           .arg("--package-path").arg(&pkg);
+
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        assert!(build.config.extended);
+
+        builder.ensure(tool::Rls { stage, target });
+
+        println!("Dist RLS stage{} ({})", stage, target);
+        let compiler = builder.compiler(stage, build.build);
+
+        let src = build.src.join("src/tools/rls");
+        let release_num = build.release_num("rls");
+        let name = pkgname(build, "rls");
+        let version = build.rls_info.version(build, &release_num);
+
+        let tmp = tmpdir(build);
+        let image = tmp.join("rls-image");
+        drop(fs::remove_dir_all(&image));
+        t!(fs::create_dir_all(&image));
+
+        // Prepare the image directory
+        let rls = build.cargo_out(compiler, Mode::Tool, target)
+                         .join(exe("rls", &target));
+        install(&rls, &image.join("bin"), 0o755);
+        let doc = image.join("share/doc/rls");
+        install(&src.join("README.md"), &doc, 0o644);
+        install(&src.join("LICENSE-MIT"), &doc, 0o644);
+        install(&src.join("LICENSE-APACHE"), &doc, 0o644);
+
+        // Prepare the overlay
+        let overlay = tmp.join("rls-overlay");
+        drop(fs::remove_dir_all(&overlay));
+        t!(fs::create_dir_all(&overlay));
+        install(&src.join("README.md"), &overlay, 0o644);
+        install(&src.join("LICENSE-MIT"), &overlay, 0o644);
+        install(&src.join("LICENSE-APACHE"), &overlay, 0o644);
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+
+        // Generate the installer tarball
+        let mut cmd = rust_installer(builder);
+        cmd.arg("generate")
+           .arg("--product-name=Rust")
+           .arg("--rel-manifest-dir=rustlib")
+           .arg("--success-message=RLS-ready-to-serve.")
+           .arg("--image-dir").arg(&image)
+           .arg("--work-dir").arg(&tmpdir(build))
+           .arg("--output-dir").arg(&distdir(build))
+           .arg("--non-installed-overlay").arg(&overlay)
+           .arg(format!("--package-name={}-{}", name, target))
+           .arg("--component-name=rls")
+           .arg("--legacy-manifest-dirs=rustlib,cargo");
         build.run(&mut cmd);
+        distdir(build).join(format!("{}-{}.tar.gz", name, target))
     }
+}
 
-    if target.contains("windows") {
-        let exe = tmp.join("exe");
-        let _ = fs::remove_dir_all(&exe);
-        t!(fs::create_dir_all(exe.join("rustc")));
-        t!(fs::create_dir_all(exe.join("cargo")));
-        t!(fs::create_dir_all(exe.join("rls")));
-        t!(fs::create_dir_all(exe.join("rust-analysis")));
-        t!(fs::create_dir_all(exe.join("rust-docs")));
-        t!(fs::create_dir_all(exe.join("rust-std")));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
-                  .join("rustc"),
-             &exe.join("rustc"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target))
-                  .join("cargo"),
-             &exe.join("cargo"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
-                  .join("rust-docs"),
-             &exe.join("rust-docs"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
-                  .join(format!("rust-std-{}", target)),
-             &exe.join("rust-std"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target))
-                  .join("rls"),
-             &exe.join("rls"));
-        cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target))
-                  .join(format!("rust-analysis-{}", target)),
-             &exe.join("rust-analysis"));
-
-        t!(fs::remove_file(exe.join("rustc/manifest.in")));
-        t!(fs::remove_file(exe.join("cargo/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-std/manifest.in")));
-        t!(fs::remove_file(exe.join("rls/manifest.in")));
-        t!(fs::remove_file(exe.join("rust-analysis/manifest.in")));
-
-        if target.contains("windows-gnu") {
-            t!(fs::create_dir_all(exe.join("rust-mingw")));
-            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
-                      .join("rust-mingw"),
-                 &exe.join("rust-mingw"));
-            t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
-        }
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Extended {
+    stage: u32,
+    target: Interned<String>,
+}
+
+impl Step for Extended {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_HOSTS: bool = true;
 
-        install(&etc.join("exe/rust.iss"), &exe, 0o644);
-        install(&etc.join("exe/modpath.iss"), &exe, 0o644);
-        install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
-        install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
-        t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));
-
-        // Generate exe installer
-        let mut cmd = Command::new("iscc");
-        cmd.arg("rust.iss")
-           .current_dir(&exe);
-        if target.contains("windows-gnu") {
-            cmd.arg("/dMINGW");
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("cargo").default_condition(builder.config.extended)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Extended {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    /// Creates a combined installer for the specified target in the provided stage.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        let compiler = builder.compiler(stage, build.build);
+
+        println!("Dist extended stage{} ({})", stage, target);
+
+        let rustc_installer = builder.ensure(Rustc { stage, target });
+        let cargo_installer = builder.ensure(Cargo { stage, target });
+        let rls_installer = builder.ensure(Rls { stage, target });
+        let analysis_installer = builder.ensure(Analysis { compiler, target }).unwrap();
+        let docs_installer = builder.ensure(Docs { stage, target }).unwrap();
+        let mingw_installer = builder.ensure(Mingw { target });
+        let std_installer = builder.ensure(Std { compiler, target }).unwrap();
+
+        let tmp = tmpdir(build);
+        let overlay = tmp.join("extended-overlay");
+        let etc = build.src.join("src/etc/installer");
+        let work = tmp.join("work");
+
+        let _ = fs::remove_dir_all(&overlay);
+        install(&build.src.join("COPYRIGHT"), &overlay, 0o644);
+        install(&build.src.join("LICENSE-APACHE"), &overlay, 0o644);
+        install(&build.src.join("LICENSE-MIT"), &overlay, 0o644);
+        let version = build.rust_version();
+        t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
+        install(&etc.join("README.md"), &overlay, 0o644);
+
+        // When rust-std package split from rustc, we needed to ensure that during
+        // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
+        // the std files during uninstall. To do this ensure that rustc comes
+        // before rust-std in the list below.
+        let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
+                                analysis_installer, docs_installer, std_installer];
+        if target.contains("pc-windows-gnu") {
+            tarballs.push(mingw_installer.unwrap());
+        }
+        let mut input_tarballs = tarballs[0].as_os_str().to_owned();
+        for tarball in &tarballs[1..] {
+            input_tarballs.push(",");
+            input_tarballs.push(tarball);
         }
-        add_env(build, &mut cmd, target);
+
+        let mut cmd = rust_installer(builder);
+        cmd.arg("combine")
+            .arg("--product-name=Rust")
+            .arg("--rel-manifest-dir=rustlib")
+            .arg("--success-message=Rust-is-ready-to-roll.")
+            .arg("--work-dir").arg(&work)
+            .arg("--output-dir").arg(&distdir(build))
+            .arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
+            .arg("--legacy-manifest-dirs=rustlib,cargo")
+            .arg("--input-tarballs").arg(input_tarballs)
+            .arg("--non-installed-overlay").arg(&overlay);
         build.run(&mut cmd);
-        install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
-                &distdir(build),
-                0o755);
 
-        // Generate msi installer
-        let wix = PathBuf::from(env::var_os("WIX").unwrap());
-        let heat = wix.join("bin/heat.exe");
-        let candle = wix.join("bin/candle.exe");
-        let light = wix.join("bin/light.exe");
-
-        let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rustc")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("RustcGroup")
-                        .arg("-dr").arg("Rustc")
-                        .arg("-var").arg("var.RustcDir")
-                        .arg("-out").arg(exe.join("RustcGroup.wxs")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-docs")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("DocsGroup")
-                        .arg("-dr").arg("Docs")
-                        .arg("-var").arg("var.DocsDir")
-                        .arg("-out").arg(exe.join("DocsGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/squash-components.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("cargo")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("CargoGroup")
-                        .arg("-dr").arg("Cargo")
-                        .arg("-var").arg("var.CargoDir")
-                        .arg("-out").arg(exe.join("CargoGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-std")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("StdGroup")
-                        .arg("-dr").arg("Std")
-                        .arg("-var").arg("var.StdDir")
-                        .arg("-out").arg(exe.join("StdGroup.wxs")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rls")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("RlsGroup")
-                        .arg("-dr").arg("Rls")
-                        .arg("-var").arg("var.RlsDir")
-                        .arg("-out").arg(exe.join("RlsGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        build.run(Command::new(&heat)
-                        .current_dir(&exe)
-                        .arg("dir")
-                        .arg("rust-analysis")
-                        .args(&heat_flags)
-                        .arg("-cg").arg("AnalysisGroup")
-                        .arg("-dr").arg("Analysis")
-                        .arg("-var").arg("var.AnalysisDir")
-                        .arg("-out").arg(exe.join("AnalysisGroup.wxs"))
-                        .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
-        if target.contains("windows-gnu") {
+        let mut license = String::new();
+        t!(t!(File::open(build.src.join("COPYRIGHT"))).read_to_string(&mut license));
+        license.push_str("\n");
+        t!(t!(File::open(build.src.join("LICENSE-APACHE"))).read_to_string(&mut license));
+        license.push_str("\n");
+        t!(t!(File::open(build.src.join("LICENSE-MIT"))).read_to_string(&mut license));
+
+        let rtf = r"{\rtf1\ansi\deff0{\fonttbl{\f0\fnil\fcharset0 Arial;}}\nowwrap\fs18";
+        let mut rtf = rtf.to_string();
+        rtf.push_str("\n");
+        for line in license.lines() {
+            rtf.push_str(line);
+            rtf.push_str("\\line ");
+        }
+        rtf.push_str("}");
+
+        if target.contains("apple-darwin") {
+            let pkg = tmp.join("pkg");
+            let _ = fs::remove_dir_all(&pkg);
+            t!(fs::create_dir_all(pkg.join("rustc")));
+            t!(fs::create_dir_all(pkg.join("cargo")));
+            t!(fs::create_dir_all(pkg.join("rust-docs")));
+            t!(fs::create_dir_all(pkg.join("rust-std")));
+            t!(fs::create_dir_all(pkg.join("rls")));
+            t!(fs::create_dir_all(pkg.join("rust-analysis")));
+
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target)),
+                    &pkg.join("rustc"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target)),
+                    &pkg.join("cargo"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target)),
+                    &pkg.join("rust-docs"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target)),
+                    &pkg.join("rust-std"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target)),
+                    &pkg.join("rls"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target)),
+                    &pkg.join("rust-analysis"));
+
+            install(&etc.join("pkg/postinstall"), &pkg.join("rustc"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("cargo"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-docs"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-std"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rls"), 0o755);
+            install(&etc.join("pkg/postinstall"), &pkg.join("rust-analysis"), 0o755);
+
+            let pkgbuild = |component: &str| {
+                let mut cmd = Command::new("pkgbuild");
+                cmd.arg("--identifier").arg(format!("org.rust-lang.{}", component))
+                    .arg("--scripts").arg(pkg.join(component))
+                    .arg("--nopayload")
+                    .arg(pkg.join(component).with_extension("pkg"));
+                build.run(&mut cmd);
+            };
+            pkgbuild("rustc");
+            pkgbuild("cargo");
+            pkgbuild("rust-docs");
+            pkgbuild("rust-std");
+            pkgbuild("rls");
+            pkgbuild("rust-analysis");
+
+            // create an 'uninstall' package
+            install(&etc.join("pkg/postinstall"), &pkg.join("uninstall"), 0o755);
+            pkgbuild("uninstall");
+
+            t!(fs::create_dir_all(pkg.join("res")));
+            t!(t!(File::create(pkg.join("res/LICENSE.txt"))).write_all(license.as_bytes()));
+            install(&etc.join("gfx/rust-logo.png"), &pkg.join("res"), 0o644);
+            let mut cmd = Command::new("productbuild");
+            cmd.arg("--distribution").arg(etc.join("pkg/Distribution.xml"))
+                .arg("--resources").arg(pkg.join("res"))
+                .arg(distdir(build).join(format!("{}-{}.pkg",
+                                                    pkgname(build, "rust"),
+                                                    target)))
+                .arg("--package-path").arg(&pkg);
+            build.run(&mut cmd);
+        }
+
+        if target.contains("windows") {
+            let exe = tmp.join("exe");
+            let _ = fs::remove_dir_all(&exe);
+            t!(fs::create_dir_all(exe.join("rustc")));
+            t!(fs::create_dir_all(exe.join("cargo")));
+            t!(fs::create_dir_all(exe.join("rls")));
+            t!(fs::create_dir_all(exe.join("rust-analysis")));
+            t!(fs::create_dir_all(exe.join("rust-docs")));
+            t!(fs::create_dir_all(exe.join("rust-std")));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rustc"), target))
+                        .join("rustc"),
+                    &exe.join("rustc"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "cargo"), target))
+                        .join("cargo"),
+                    &exe.join("cargo"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-docs"), target))
+                        .join("rust-docs"),
+                    &exe.join("rust-docs"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-std"), target))
+                        .join(format!("rust-std-{}", target)),
+                    &exe.join("rust-std"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rls"), target))
+                        .join("rls"),
+                    &exe.join("rls"));
+            cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-analysis"), target))
+                        .join(format!("rust-analysis-{}", target)),
+                    &exe.join("rust-analysis"));
+
+            t!(fs::remove_file(exe.join("rustc/manifest.in")));
+            t!(fs::remove_file(exe.join("cargo/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-docs/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-std/manifest.in")));
+            t!(fs::remove_file(exe.join("rls/manifest.in")));
+            t!(fs::remove_file(exe.join("rust-analysis/manifest.in")));
+
+            if target.contains("windows-gnu") {
+                t!(fs::create_dir_all(exe.join("rust-mingw")));
+                cp_r(&work.join(&format!("{}-{}", pkgname(build, "rust-mingw"), target))
+                            .join("rust-mingw"),
+                        &exe.join("rust-mingw"));
+                t!(fs::remove_file(exe.join("rust-mingw/manifest.in")));
+            }
+
+            install(&etc.join("exe/rust.iss"), &exe, 0o644);
+            install(&etc.join("exe/modpath.iss"), &exe, 0o644);
+            install(&etc.join("exe/upgrade.iss"), &exe, 0o644);
+            install(&etc.join("gfx/rust-logo.ico"), &exe, 0o644);
+            t!(t!(File::create(exe.join("LICENSE.txt"))).write_all(license.as_bytes()));
+
+            // Generate exe installer
+            let mut cmd = Command::new("iscc");
+            cmd.arg("rust.iss")
+                .current_dir(&exe);
+            if target.contains("windows-gnu") {
+                cmd.arg("/dMINGW");
+            }
+            add_env(build, &mut cmd, target);
+            build.run(&mut cmd);
+            install(&exe.join(format!("{}-{}.exe", pkgname(build, "rust"), target)),
+                    &distdir(build),
+                    0o755);
+
+            // Generate msi installer
+            let wix = PathBuf::from(env::var_os("WIX").unwrap());
+            let heat = wix.join("bin/heat.exe");
+            let candle = wix.join("bin/candle.exe");
+            let light = wix.join("bin/light.exe");
+
+            let heat_flags = ["-nologo", "-gg", "-sfrag", "-srd", "-sreg"];
             build.run(Command::new(&heat)
                             .current_dir(&exe)
                             .arg("dir")
-                            .arg("rust-mingw")
+                            .arg("rustc")
                             .args(&heat_flags)
-                            .arg("-cg").arg("GccGroup")
-                            .arg("-dr").arg("Gcc")
-                            .arg("-var").arg("var.GccDir")
-                            .arg("-out").arg(exe.join("GccGroup.wxs")));
-        }
+                            .arg("-cg").arg("RustcGroup")
+                            .arg("-dr").arg("Rustc")
+                            .arg("-var").arg("var.RustcDir")
+                            .arg("-out").arg(exe.join("RustcGroup.wxs")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-docs")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("DocsGroup")
+                            .arg("-dr").arg("Docs")
+                            .arg("-var").arg("var.DocsDir")
+                            .arg("-out").arg(exe.join("DocsGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/squash-components.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("cargo")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("CargoGroup")
+                            .arg("-dr").arg("Cargo")
+                            .arg("-var").arg("var.CargoDir")
+                            .arg("-out").arg(exe.join("CargoGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-std")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("StdGroup")
+                            .arg("-dr").arg("Std")
+                            .arg("-var").arg("var.StdDir")
+                            .arg("-out").arg(exe.join("StdGroup.wxs")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rls")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("RlsGroup")
+                            .arg("-dr").arg("Rls")
+                            .arg("-var").arg("var.RlsDir")
+                            .arg("-out").arg(exe.join("RlsGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            build.run(Command::new(&heat)
+                            .current_dir(&exe)
+                            .arg("dir")
+                            .arg("rust-analysis")
+                            .args(&heat_flags)
+                            .arg("-cg").arg("AnalysisGroup")
+                            .arg("-dr").arg("Analysis")
+                            .arg("-var").arg("var.AnalysisDir")
+                            .arg("-out").arg(exe.join("AnalysisGroup.wxs"))
+                            .arg("-t").arg(etc.join("msi/remove-duplicates.xsl")));
+            if target.contains("windows-gnu") {
+                build.run(Command::new(&heat)
+                                .current_dir(&exe)
+                                .arg("dir")
+                                .arg("rust-mingw")
+                                .args(&heat_flags)
+                                .arg("-cg").arg("GccGroup")
+                                .arg("-dr").arg("Gcc")
+                                .arg("-var").arg("var.GccDir")
+                                .arg("-out").arg(exe.join("GccGroup.wxs")));
+            }
 
-        let candle = |input: &Path| {
-            let output = exe.join(input.file_stem().unwrap())
-                            .with_extension("wixobj");
-            let arch = if target.contains("x86_64") {"x64"} else {"x86"};
-            let mut cmd = Command::new(&candle);
-            cmd.current_dir(&exe)
-               .arg("-nologo")
-               .arg("-dRustcDir=rustc")
-               .arg("-dDocsDir=rust-docs")
-               .arg("-dCargoDir=cargo")
-               .arg("-dStdDir=rust-std")
-               .arg("-dRlsDir=rls")
-               .arg("-dAnalysisDir=rust-analysis")
-               .arg("-arch").arg(&arch)
-               .arg("-out").arg(&output)
-               .arg(&input);
-            add_env(build, &mut cmd, target);
+            let candle = |input: &Path| {
+                let output = exe.join(input.file_stem().unwrap())
+                                .with_extension("wixobj");
+                let arch = if target.contains("x86_64") {"x64"} else {"x86"};
+                let mut cmd = Command::new(&candle);
+                cmd.current_dir(&exe)
+                    .arg("-nologo")
+                    .arg("-dRustcDir=rustc")
+                    .arg("-dDocsDir=rust-docs")
+                    .arg("-dCargoDir=cargo")
+                    .arg("-dStdDir=rust-std")
+                    .arg("-dRlsDir=rls")
+                    .arg("-dAnalysisDir=rust-analysis")
+                    .arg("-arch").arg(&arch)
+                    .arg("-out").arg(&output)
+                    .arg(&input);
+                add_env(build, &mut cmd, target);
+
+                if target.contains("windows-gnu") {
+                    cmd.arg("-dGccDir=rust-mingw");
+                }
+                build.run(&mut cmd);
+            };
+            candle(&etc.join("msi/rust.wxs"));
+            candle(&etc.join("msi/ui.wxs"));
+            candle(&etc.join("msi/rustwelcomedlg.wxs"));
+            candle("RustcGroup.wxs".as_ref());
+            candle("DocsGroup.wxs".as_ref());
+            candle("CargoGroup.wxs".as_ref());
+            candle("StdGroup.wxs".as_ref());
+            candle("RlsGroup.wxs".as_ref());
+            candle("AnalysisGroup.wxs".as_ref());
 
             if target.contains("windows-gnu") {
-               cmd.arg("-dGccDir=rust-mingw");
+                candle("GccGroup.wxs".as_ref());
             }
-            build.run(&mut cmd);
-        };
-        candle(&etc.join("msi/rust.wxs"));
-        candle(&etc.join("msi/ui.wxs"));
-        candle(&etc.join("msi/rustwelcomedlg.wxs"));
-        candle("RustcGroup.wxs".as_ref());
-        candle("DocsGroup.wxs".as_ref());
-        candle("CargoGroup.wxs".as_ref());
-        candle("StdGroup.wxs".as_ref());
-        candle("RlsGroup.wxs".as_ref());
-        candle("AnalysisGroup.wxs".as_ref());
-
-        if target.contains("windows-gnu") {
-            candle("GccGroup.wxs".as_ref());
-        }
 
-        t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
-        install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
-        install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
-
-        let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
-        let mut cmd = Command::new(&light);
-        cmd.arg("-nologo")
-           .arg("-ext").arg("WixUIExtension")
-           .arg("-ext").arg("WixUtilExtension")
-           .arg("-out").arg(exe.join(&filename))
-           .arg("rust.wixobj")
-           .arg("ui.wixobj")
-           .arg("rustwelcomedlg.wixobj")
-           .arg("RustcGroup.wixobj")
-           .arg("DocsGroup.wixobj")
-           .arg("CargoGroup.wixobj")
-           .arg("StdGroup.wixobj")
-           .arg("RlsGroup.wixobj")
-           .arg("AnalysisGroup.wixobj")
-           .current_dir(&exe);
-
-        if target.contains("windows-gnu") {
-           cmd.arg("GccGroup.wixobj");
-        }
-        // ICE57 wrongly complains about the shortcuts
-        cmd.arg("-sice:ICE57");
+            t!(t!(File::create(exe.join("LICENSE.rtf"))).write_all(rtf.as_bytes()));
+            install(&etc.join("gfx/banner.bmp"), &exe, 0o644);
+            install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
+
+            let filename = format!("{}-{}.msi", pkgname(build, "rust"), target);
+            let mut cmd = Command::new(&light);
+            cmd.arg("-nologo")
+                .arg("-ext").arg("WixUIExtension")
+                .arg("-ext").arg("WixUtilExtension")
+                .arg("-out").arg(exe.join(&filename))
+                .arg("rust.wixobj")
+                .arg("ui.wixobj")
+                .arg("rustwelcomedlg.wixobj")
+                .arg("RustcGroup.wixobj")
+                .arg("DocsGroup.wixobj")
+                .arg("CargoGroup.wixobj")
+                .arg("StdGroup.wixobj")
+                .arg("RlsGroup.wixobj")
+                .arg("AnalysisGroup.wixobj")
+                .current_dir(&exe);
 
-        build.run(&mut cmd);
+            if target.contains("windows-gnu") {
+                cmd.arg("GccGroup.wixobj");
+            }
+            // ICE57 wrongly complains about the shortcuts
+            cmd.arg("-sice:ICE57");
+
+            build.run(&mut cmd);
 
-        t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+            t!(fs::rename(exe.join(&filename), distdir(build).join(&filename)));
+        }
     }
 }
 
-fn add_env(build: &Build, cmd: &mut Command, target: &str) {
+fn add_env(build: &Build, cmd: &mut Command, target: Interned<String>) {
     let mut parts = channel::CFG_RELEASE_NUM.split('.');
     cmd.env("CFG_RELEASE_INFO", build.rust_version())
        .env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
@@ -1198,35 +1498,53 @@ fn add_env(build: &Build, cmd: &mut Command, target: &str) {
     }
 }
 
-pub fn hash_and_sign(build: &Build) {
-    let compiler = Compiler::new(0, &build.build);
-    let mut cmd = build.tool_cmd(&compiler, "build-manifest");
-    let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
-    });
-    let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
-    });
-    let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
-        panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
-    });
-    let mut pass = String::new();
-    t!(t!(File::open(&file)).read_to_string(&mut pass));
-
-    let today = output(Command::new("date").arg("+%Y-%m-%d"));
-
-    cmd.arg(sign);
-    cmd.arg(distdir(build));
-    cmd.arg(today.trim());
-    cmd.arg(build.rust_package_vers());
-    cmd.arg(build.package_vers(&build.release_num("cargo")));
-    cmd.arg(build.package_vers(&build.release_num("rls")));
-    cmd.arg(addr);
-
-    t!(fs::create_dir_all(distdir(build)));
-
-    let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
-    t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
-    let status = t!(child.wait());
-    assert!(status.success());
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct HashSign;
+
+impl Step for HashSign {
+    type Output = ();
+    const ONLY_BUILD_TARGETS: bool = true;
+    const ONLY_HOSTS: bool = true;
+    const ONLY_BUILD: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("hash-and-sign")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(HashSign);
+    }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let mut cmd = builder.tool_cmd(Tool::BuildManifest);
+        let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
+        });
+        let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
+        });
+        let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
+            panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
+        });
+        let mut pass = String::new();
+        t!(t!(File::open(&file)).read_to_string(&mut pass));
+
+        let today = output(Command::new("date").arg("+%Y-%m-%d"));
+
+        cmd.arg(sign);
+        cmd.arg(distdir(build));
+        cmd.arg(today.trim());
+        cmd.arg(build.rust_package_vers());
+        cmd.arg(build.package_vers(&build.release_num("cargo")));
+        cmd.arg(build.package_vers(&build.release_num("rls")));
+        cmd.arg(addr);
+
+        t!(fs::create_dir_all(distdir(build)));
+
+        let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
+        t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
+        let status = t!(child.wait());
+        assert!(status.success());
+    }
 }
diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs
index 7dbc3e55539..8834fa24d69 100644
--- a/src/bootstrap/doc.rs
+++ b/src/bootstrap/doc.rs
@@ -20,86 +20,233 @@
 use std::fs::{self, File};
 use std::io::prelude::*;
 use std::io;
-use std::path::Path;
+use std::path::{PathBuf, Path};
 use std::process::Command;
 
-use {Build, Compiler, Mode};
-use util::{cp_r, symlink_dir};
+use Mode;
 use build_helper::up_to_date;
 
-/// Invoke `rustbook` for `target` for the doc book `name`.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook(build: &Build, target: &str, name: &str) {
-    let src = build.src.join("src/doc");
-    rustbook_src(build, target, name, &src);
+use util::{cp_r, symlink_dir};
+use builder::{Builder, RunConfig, ShouldRun, Step};
+use tool::Tool;
+use compile;
+use cache::{INTERNER, Interned};
+
+macro_rules! book {
+    ($($name:ident, $path:expr, $book_name:expr;)+) => {
+        $(
+            #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+        pub struct $name {
+            target: Interned<String>,
+        }
+
+        impl Step for $name {
+            type Output = ();
+            const DEFAULT: bool = true;
+
+            fn should_run(run: ShouldRun) -> ShouldRun {
+                let builder = run.builder;
+                run.path($path).default_condition(builder.build.config.docs)
+            }
+
+            fn make_run(run: RunConfig) {
+                run.builder.ensure($name {
+                    target: run.target,
+                });
+            }
+
+            fn run(self, builder: &Builder) {
+                builder.ensure(Rustbook {
+                    target: self.target,
+                    name: INTERNER.intern_str($book_name),
+                })
+            }
+        }
+        )+
+    }
 }
 
-/// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
-///
-/// This will not actually generate any documentation if the documentation has
-/// already been generated.
-pub fn rustbook_src(build: &Build, target: &str, name: &str, src: &Path) {
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-
-    let out = out.join(name);
-    let compiler = Compiler::new(0, &build.build);
-    let src = src.join(name);
-    let index = out.join("index.html");
-    let rustbook = build.tool(&compiler, "rustbook");
-    if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
-        return
+book!(
+    Nomicon, "src/doc/book", "nomicon";
+    Reference, "src/doc/reference", "reference";
+);
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+struct Rustbook {
+    target: Interned<String>,
+    name: Interned<String>,
+}
+
+impl Step for Rustbook {
+    type Output = ();
+
+    // rustbook is never directly called, and only serves as a shim for the nomicon and the
+    // reference.
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Invoke `rustbook` for `target` for the doc book `name`.
+    ///
+    /// This will not actually generate any documentation if the documentation has
+    /// already been generated.
+    fn run(self, builder: &Builder) {
+        let src = builder.build.src.join("src/doc");
+        builder.ensure(RustbookSrc {
+            target: self.target,
+            name: self.name,
+            src: INTERNER.intern_path(src),
+        });
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct UnstableBook {
+    target: Interned<String>,
+}
+
+impl Step for UnstableBook {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/doc/unstable-book").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(UnstableBook {
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) {
+        builder.ensure(UnstableBookGen {
+            target: self.target,
+        });
+        builder.ensure(RustbookSrc {
+            target: self.target,
+            name: INTERNER.intern_str("unstable-book"),
+            src: builder.build.md_doc_out(self.target),
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+struct RustbookSrc {
+    target: Interned<String>,
+    name: Interned<String>,
+    src: Interned<PathBuf>,
+}
+
+impl Step for RustbookSrc {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
-    println!("Rustbook ({}) - {}", target, name);
-    let _ = fs::remove_dir_all(&out);
-    build.run(build.tool_cmd(&compiler, "rustbook")
-                   .arg("build")
-                   .arg(&src)
-                   .arg("-d")
-                   .arg(out));
+
+    /// Invoke `rustbook` for `target` for the doc book `name` from the `src` path.
+    ///
+    /// This will not actually generate any documentation if the documentation has
+    /// already been generated.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let name = self.name;
+        let src = self.src;
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+
+        let out = out.join(name);
+        let src = src.join(name);
+        let index = out.join("index.html");
+        let rustbook = builder.tool_exe(Tool::Rustbook);
+        if up_to_date(&src, &index) && up_to_date(&rustbook, &index) {
+            return
+        }
+        println!("Rustbook ({}) - {}", target, name);
+        let _ = fs::remove_dir_all(&out);
+        build.run(builder.tool_cmd(Tool::Rustbook)
+                       .arg("build")
+                       .arg(&src)
+                       .arg("-d")
+                       .arg(out));
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct TheBook {
+    target: Interned<String>,
+    name: &'static str,
 }
 
-/// Build the book and associated stuff.
-///
-/// We need to build:
-///
-/// * Book (first edition)
-/// * Book (second edition)
-/// * Index page
-/// * Redirect pages
-pub fn book(build: &Build, target: &str, name: &str) {
-    // build book first edition
-    rustbook(build, target, &format!("{}/first-edition", name));
-
-    // build book second edition
-    rustbook(build, target, &format!("{}/second-edition", name));
-
-    // build the index page
-    let index = format!("{}/index.md", name);
-    println!("Documenting book index ({})", target);
-    invoke_rustdoc(build, target, &index);
-
-    // build the redirect pages
-    println!("Documenting book redirect pages ({})", target);
-    for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
-        let file = t!(file);
-        let path = file.path();
-        let path = path.to_str().unwrap();
-
-        invoke_rustdoc(build, target, path);
+impl Step for TheBook {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/doc/book").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(TheBook {
+            target: run.target,
+            name: "book",
+        });
+    }
+
+    /// Build the book and associated stuff.
+    ///
+    /// We need to build:
+    ///
+    /// * Book (first edition)
+    /// * Book (second edition)
+    /// * Index page
+    /// * Redirect pages
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let name = self.name;
+        // build book first edition
+        builder.ensure(Rustbook {
+            target: target,
+            name: INTERNER.intern_string(format!("{}/first-edition", name)),
+        });
+
+        // build book second edition
+        builder.ensure(Rustbook {
+            target: target,
+            name: INTERNER.intern_string(format!("{}/second-edition", name)),
+        });
+
+        // build the index page
+        let index = format!("{}/index.md", name);
+        println!("Documenting book index ({})", target);
+        invoke_rustdoc(builder, target, &index);
+
+        // build the redirect pages
+        println!("Documenting book redirect pages ({})", target);
+        for file in t!(fs::read_dir(build.src.join("src/doc/book/redirects"))) {
+            let file = t!(file);
+            let path = file.path();
+            let path = path.to_str().unwrap();
+
+            invoke_rustdoc(builder, target, path);
+        }
     }
 }
 
-fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
+fn invoke_rustdoc(builder: &Builder, target: Interned<String>, markdown: &str) {
+    let build = builder.build;
     let out = build.doc_out(target);
 
-    let compiler = Compiler::new(0, &build.build);
+    let compiler = builder.compiler(0, build.build);
 
     let path = build.src.join("src/doc").join(markdown);
 
-    let rustdoc = build.rustdoc(&compiler);
+    let rustdoc = builder.rustdoc(compiler);
 
     let favicon = build.src.join("src/doc/favicon.inc");
     let footer = build.src.join("src/doc/footer.inc");
@@ -118,7 +265,7 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
 
     let mut cmd = Command::new(&rustdoc);
 
-    build.add_rustc_lib_path(&compiler, &mut cmd);
+    builder.add_rustc_lib_path(compiler, &mut cmd);
 
     let out = out.join("book");
 
@@ -137,242 +284,414 @@ fn invoke_rustdoc(build: &Build, target: &str, markdown: &str) {
     build.run(&mut cmd);
 }
 
-/// Generates all standalone documentation as compiled by the rustdoc in `stage`
-/// for the `target` into `out`.
-///
-/// This will list all of `src/doc` looking for markdown files and appropriately
-/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
-/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized.
-///
-/// In the end, this is just a glorified wrapper around rustdoc!
-pub fn standalone(build: &Build, target: &str) {
-    println!("Documenting standalone ({})", target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Standalone {
+    target: Interned<String>,
+}
 
-    let compiler = Compiler::new(0, &build.build);
+impl Step for Standalone {
+    type Output = ();
+    const DEFAULT: bool = true;
 
-    let favicon = build.src.join("src/doc/favicon.inc");
-    let footer = build.src.join("src/doc/footer.inc");
-    let full_toc = build.src.join("src/doc/full-toc.inc");
-    t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
-
-    let version_input = build.src.join("src/doc/version_info.html.template");
-    let version_info = out.join("version_info.html");
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/doc").default_condition(builder.build.config.docs)
+    }
 
-    if !up_to_date(&version_input, &version_info) {
-        let mut info = String::new();
-        t!(t!(File::open(&version_input)).read_to_string(&mut info));
-        let info = info.replace("VERSION", &build.rust_release())
-                       .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
-                       .replace("STAMP", build.rust_info.sha().unwrap_or(""));
-        t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Standalone {
+            target: run.target,
+        });
     }
 
-    for file in t!(fs::read_dir(build.src.join("src/doc"))) {
-        let file = t!(file);
-        let path = file.path();
-        let filename = path.file_name().unwrap().to_str().unwrap();
-        if !filename.ends_with(".md") || filename == "README.md" {
-            continue
+    /// Generates all standalone documentation as compiled by the rustdoc in `stage`
+    /// for the `target` into `out`.
+    ///
+    /// This will list all of `src/doc` looking for markdown files and appropriately
+    /// perform transformations like substituting `VERSION`, `SHORT_HASH`, and
+    /// `STAMP` along with providing the various header/footer HTML we've customized.
+    ///
+    /// In the end, this is just a glorified wrapper around rustdoc!
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        println!("Documenting standalone ({})", target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+
+        let compiler = builder.compiler(0, build.build);
+
+        let favicon = build.src.join("src/doc/favicon.inc");
+        let footer = build.src.join("src/doc/footer.inc");
+        let full_toc = build.src.join("src/doc/full-toc.inc");
+        t!(fs::copy(build.src.join("src/doc/rust.css"), out.join("rust.css")));
+
+        let version_input = build.src.join("src/doc/version_info.html.template");
+        let version_info = out.join("version_info.html");
+
+        if !up_to_date(&version_input, &version_info) {
+            let mut info = String::new();
+            t!(t!(File::open(&version_input)).read_to_string(&mut info));
+            let info = info.replace("VERSION", &build.rust_release())
+                           .replace("SHORT_HASH", build.rust_info.sha_short().unwrap_or(""))
+                           .replace("STAMP", build.rust_info.sha().unwrap_or(""));
+            t!(t!(File::create(&version_info)).write_all(info.as_bytes()));
         }
 
-        let html = out.join(filename).with_extension("html");
-        let rustdoc = build.rustdoc(&compiler);
-        if up_to_date(&path, &html) &&
-           up_to_date(&footer, &html) &&
-           up_to_date(&favicon, &html) &&
-           up_to_date(&full_toc, &html) &&
-           up_to_date(&version_info, &html) &&
-           up_to_date(&rustdoc, &html) {
-            continue
+        for file in t!(fs::read_dir(build.src.join("src/doc"))) {
+            let file = t!(file);
+            let path = file.path();
+            let filename = path.file_name().unwrap().to_str().unwrap();
+            if !filename.ends_with(".md") || filename == "README.md" {
+                continue
+            }
+
+            let html = out.join(filename).with_extension("html");
+            let rustdoc = builder.rustdoc(compiler);
+            if up_to_date(&path, &html) &&
+               up_to_date(&footer, &html) &&
+               up_to_date(&favicon, &html) &&
+               up_to_date(&full_toc, &html) &&
+               up_to_date(&version_info, &html) &&
+               up_to_date(&rustdoc, &html) {
+                continue
+            }
+
+            let mut cmd = Command::new(&rustdoc);
+            builder.add_rustc_lib_path(compiler, &mut cmd);
+            cmd.arg("--html-after-content").arg(&footer)
+               .arg("--html-before-content").arg(&version_info)
+               .arg("--html-in-header").arg(&favicon)
+               .arg("--markdown-playground-url")
+               .arg("https://play.rust-lang.org/")
+               .arg("-o").arg(&out)
+               .arg(&path);
+
+            if filename == "not_found.md" {
+                cmd.arg("--markdown-no-toc")
+                   .arg("--markdown-css")
+                   .arg("https://doc.rust-lang.org/rust.css");
+            } else {
+                cmd.arg("--markdown-css").arg("rust.css");
+            }
+            build.run(&mut cmd);
         }
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Std {
+    stage: u32,
+    target: Interned<String>,
+}
+
+impl Step for Std {
+    type Output = ();
+    const DEFAULT: bool = true;
 
-        let mut cmd = Command::new(&rustdoc);
-        build.add_rustc_lib_path(&compiler, &mut cmd);
-        cmd.arg("--html-after-content").arg(&footer)
-           .arg("--html-before-content").arg(&version_info)
-           .arg("--html-in-header").arg(&favicon)
-           .arg("--markdown-playground-url")
-           .arg("https://play.rust-lang.org/")
-           .arg("-o").arg(&out)
-           .arg(&path);
-
-        if filename == "not_found.md" {
-            cmd.arg("--markdown-no-toc")
-               .arg("--markdown-css")
-               .arg("https://doc.rust-lang.org/rust.css");
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.krate("std").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Std {
+            stage: run.builder.top_stage,
+            target: run.target
+        });
+    }
+
+    /// Compile all standard library documentation.
+    ///
+    /// This will generate all documentation for the standard library and its
+    /// dependencies. This is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} std ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = builder.compiler(stage, build.build);
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
         } else {
-            cmd.arg("--markdown-css").arg("rust.css");
+            compiler
+        };
+
+        builder.ensure(compile::Std { compiler, target });
+        let out_dir = build.stage_out(compiler, Mode::Libstd)
+                           .join(target).join("doc");
+        let rustdoc = builder.rustdoc(compiler);
+
+        // Here what we're doing is creating a *symlink* (directory junction on
+        // Windows) to the final output location. This is not done as an
+        // optimization but rather for correctness. We've got three trees of
+        // documentation, one for std, one for test, and one for rustc. It's then
+        // our job to merge them all together.
+        //
+        // Unfortunately rustbuild doesn't know nearly as well how to merge doc
+        // trees as rustdoc does itself, so instead of actually having three
+        // separate trees we just have rustdoc output to the same location across
+        // all of them.
+        //
+        // This way rustdoc generates output directly into the output directory,
+        // and rustdoc will also directly handle merging.
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = builder.cargo(compiler, Mode::Libstd, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/libstd/Cargo.toml"))
+             .arg("--features").arg(build.std_features());
+
+        // We don't want to build docs for internal std dependencies unless
+        // in compiler-docs mode. When not in that mode, we whitelist the crates
+        // for which docs must be built.
+        if !build.config.compiler_docs {
+            cargo.arg("--no-deps");
+            for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
+                cargo.arg("-p").arg(krate);
+                // Create all crate output directories first to make sure rustdoc uses
+                // relative links.
+                // FIXME: Cargo should probably do this itself.
+                t!(fs::create_dir_all(out_dir.join(krate)));
+            }
         }
-        build.run(&mut cmd);
+
+
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
     }
 }
 
-/// Compile all standard library documentation.
-///
-/// This will generate all documentation for the standard library and its
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn std(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} std ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Libstd)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    // Here what we're doing is creating a *symlink* (directory junction on
-    // Windows) to the final output location. This is not done as an
-    // optimization but rather for correctness. We've got three trees of
-    // documentation, one for std, one for test, and one for rustc. It's then
-    // our job to merge them all together.
-    //
-    // Unfortunately rustbuild doesn't know nearly as well how to merge doc
-    // trees as rustdoc does itself, so instead of actually having three
-    // separate trees we just have rustdoc output to the same location across
-    // all of them.
-    //
-    // This way rustdoc generates output directly into the output, and rustdoc
-    // will also directly handle merging.
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Libstd, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libstd/Cargo.toml"))
-         .arg("--features").arg(build.std_features());
-
-    // We don't want to build docs for internal std dependencies unless
-    // in compiler-docs mode. When not in that mode, we whitelist the crates
-    // for which docs must be built.
-    if !build.config.compiler_docs {
-        cargo.arg("--no-deps");
-        for krate in &["alloc", "collections", "core", "std", "std_unicode"] {
-            cargo.arg("-p").arg(krate);
-            // Create all crate output directories first to make sure rustdoc uses
-            // relative links.
-            // FIXME: Cargo should probably do this itself.
-            t!(fs::create_dir_all(out_dir.join(krate)));
-        }
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Test {
+    stage: u32,
+    target: Interned<String>,
+}
+
+impl Step for Test {
+    type Output = ();
+    const DEFAULT: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.krate("test").default_condition(builder.config.compiler_docs)
     }
 
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Test {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
 
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+    /// Compile all libtest documentation.
+    ///
+    /// This will generate all documentation for libtest and its dependencies. This
+    /// is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} test ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = builder.compiler(stage, build.build);
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
+        } else {
+            compiler
+        };
+
+        // Build libstd docs so that we generate relative links
+        builder.ensure(Std { stage, target });
+
+        builder.ensure(compile::Test { compiler, target });
+        let out_dir = build.stage_out(compiler, Mode::Libtest)
+                           .join(target).join("doc");
+        let rustdoc = builder.rustdoc(compiler);
+
+        // See docs in std above for why we symlink
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/libtest/Cargo.toml"));
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
+    }
 }
 
-/// Compile all libtest documentation.
-///
-/// This will generate all documentation for libtest and its dependencies. This
-/// is largely just a wrapper around `cargo doc`.
-pub fn test(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} test ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Libtest)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    // See docs in std above for why we symlink
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Libtest, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/libtest/Cargo.toml"));
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Rustc {
+    stage: u32,
+    target: Interned<String>,
 }
 
-/// Generate all compiler documentation.
-///
-/// This will generate all documentation for the compiler libraries and their
-/// dependencies. This is largely just a wrapper around `cargo doc`.
-pub fn rustc(build: &Build, stage: u32, target: &str) {
-    println!("Documenting stage{} compiler ({})", stage, target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(stage, &build.build);
-    let compiler = if build.force_use_stage1(&compiler, target) {
-        Compiler::new(1, compiler.host)
-    } else {
-        compiler
-    };
-    let out_dir = build.stage_out(&compiler, Mode::Librustc)
-                       .join(target).join("doc");
-    let rustdoc = build.rustdoc(&compiler);
-
-    // See docs in std above for why we symlink
-    let my_out = build.crate_doc_out(target);
-    build.clear_if_dirty(&my_out, &rustdoc);
-    t!(symlink_dir_force(&my_out, &out_dir));
-
-    let mut cargo = build.cargo(&compiler, Mode::Librustc, target, "doc");
-    cargo.arg("--manifest-path")
-         .arg(build.src.join("src/rustc/Cargo.toml"))
-         .arg("--features").arg(build.rustc_features());
-
-    if build.config.compiler_docs {
-        // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
-        // which would otherwise overwrite the docs for the real rustc and
-        // rustdoc lib crates.
-        cargo.arg("-p").arg("rustc_driver")
-             .arg("-p").arg("rustdoc");
-    } else {
-        // Like with libstd above if compiler docs aren't enabled then we're not
-        // documenting internal dependencies, so we have a whitelist.
-        cargo.arg("--no-deps");
-        for krate in &["proc_macro"] {
-            cargo.arg("-p").arg(krate);
+impl Step for Rustc {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.krate("rustc-main").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rustc {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    /// Generate all compiler documentation.
+    ///
+    /// This will generate all documentation for the compiler libraries and their
+    /// dependencies. This is largely just a wrapper around `cargo doc`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        println!("Documenting stage{} compiler ({})", stage, target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let compiler = builder.compiler(stage, build.build);
+        let compiler = if build.force_use_stage1(compiler, target) {
+            builder.compiler(1, compiler.host)
+        } else {
+            compiler
+        };
+
+        // Build libstd docs so that we generate relative links
+        builder.ensure(Std { stage, target });
+
+        builder.ensure(compile::Rustc { compiler, target });
+        let out_dir = build.stage_out(compiler, Mode::Librustc)
+                           .join(target).join("doc");
+        let rustdoc = builder.rustdoc(compiler);
+
+        // See docs in std above for why we symlink
+        let my_out = build.crate_doc_out(target);
+        build.clear_if_dirty(&my_out, &rustdoc);
+        t!(symlink_dir_force(&my_out, &out_dir));
+
+        let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
+        cargo.arg("--manifest-path")
+             .arg(build.src.join("src/rustc/Cargo.toml"))
+             .arg("--features").arg(build.rustc_features());
+
+        if build.config.compiler_docs {
+            // src/rustc/Cargo.toml contains bin crates called rustc and rustdoc
+            // which would otherwise overwrite the docs for the real rustc and
+            // rustdoc lib crates.
+            cargo.arg("-p").arg("rustc_driver")
+                 .arg("-p").arg("rustdoc");
+        } else {
+            // Like with libstd above if compiler docs aren't enabled then we're not
+            // documenting internal dependencies, so we have a whitelist.
+            cargo.arg("--no-deps");
+            for krate in &["proc_macro"] {
+                cargo.arg("-p").arg(krate);
+            }
         }
+
+        build.run(&mut cargo);
+        cp_r(&my_out, &out);
     }
+}
 
-    build.run(&mut cargo);
-    cp_r(&my_out, &out);
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct ErrorIndex {
+    target: Interned<String>,
 }
 
-/// Generates the HTML rendered error-index by running the
-/// `error_index_generator` tool.
-pub fn error_index(build: &Build, target: &str) {
-    println!("Documenting error index ({})", target);
-    let out = build.doc_out(target);
-    t!(fs::create_dir_all(&out));
-    let compiler = Compiler::new(0, &build.build);
-    let mut index = build.tool_cmd(&compiler, "error_index_generator");
-    index.arg("html");
-    index.arg(out.join("error-index.html"));
+impl Step for ErrorIndex {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/tools/error_index_generator").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(ErrorIndex {
+            target: run.target,
+        });
+    }
+
+    /// Generates the HTML rendered error-index by running the
+    /// `error_index_generator` tool.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+
+        builder.ensure(compile::Rustc {
+            compiler: builder.compiler(0, build.build),
+            target,
+        });
+
+        println!("Documenting error index ({})", target);
+        let out = build.doc_out(target);
+        t!(fs::create_dir_all(&out));
+        let mut index = builder.tool_cmd(Tool::ErrorIndex);
+        index.arg("html");
+        index.arg(out.join("error-index.html"));
 
-    // FIXME: shouldn't have to pass this env var
-    index.env("CFG_BUILD", &build.build);
+        // FIXME: shouldn't have to pass this env var
+        index.env("CFG_BUILD", &build.build);
 
-    build.run(&mut index);
+        build.run(&mut index);
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct UnstableBookGen {
+    target: Interned<String>,
 }
 
-pub fn unstable_book_gen(build: &Build, target: &str) {
-    println!("Generating unstable book md files ({})", target);
-    let out = build.md_doc_out(target).join("unstable-book");
-    t!(fs::create_dir_all(&out));
-    t!(fs::remove_dir_all(&out));
-    let compiler = Compiler::new(0, &build.build);
-    let mut cmd = build.tool_cmd(&compiler, "unstable-book-gen");
-    cmd.arg(build.src.join("src"));
-    cmd.arg(out);
+impl Step for UnstableBookGen {
+    type Output = ();
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
 
-    build.run(&mut cmd);
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/tools/unstable-book-gen").default_condition(builder.build.config.docs)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(UnstableBookGen {
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+
+        builder.ensure(compile::Std {
+            compiler: builder.compiler(builder.top_stage, build.build),
+            target,
+        });
+
+        println!("Generating unstable book md files ({})", target);
+        let out = build.md_doc_out(target).join("unstable-book");
+        t!(fs::create_dir_all(&out));
+        t!(fs::remove_dir_all(&out));
+        let mut cmd = builder.tool_cmd(Tool::UnstableBookGen);
+        cmd.arg(build.src.join("src"));
+        cmd.arg(out);
+
+        build.run(&mut cmd);
+    }
 }
 
 fn symlink_dir_force(src: &Path, dst: &Path) -> io::Result<()> {
diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs
index 5804df34e8b..1a3a008ed26 100644
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -23,7 +23,9 @@ use getopts::Options;
 use Build;
 use config::Config;
 use metadata;
-use step;
+use builder::Builder;
+
+use cache::{Interned, INTERNER};
 
 /// Deserialized version of all flags for this compile.
 pub struct Flags {
@@ -31,9 +33,9 @@ pub struct Flags {
     pub on_fail: Option<String>,
     pub stage: Option<u32>,
     pub keep_stage: Option<u32>,
-    pub build: String,
-    pub host: Vec<String>,
-    pub target: Vec<String>,
+    pub build: Interned<String>,
+    pub host: Vec<Interned<String>>,
+    pub target: Vec<Interned<String>>,
     pub config: Option<PathBuf>,
     pub src: PathBuf,
     pub jobs: Option<u32>,
@@ -246,10 +248,9 @@ Arguments:
             config.build = flags.build.clone();
             let mut build = Build::new(flags, config);
             metadata::build(&mut build);
-            let maybe_rules_help = step::build_rules(&build).get_help(subcommand);
-            if maybe_rules_help.is_some() {
-                extra_help.push_str(maybe_rules_help.unwrap().as_str());
-            }
+
+            let maybe_rules_help = Builder::get_help(&build, subcommand.as_str());
+            extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str());
         } else {
             extra_help.push_str(format!("Run `./x.py {} -h -v` to see a list of available paths.",
                      subcommand).as_str());
@@ -319,11 +320,13 @@ Arguments:
             stage: stage,
             on_fail: matches.opt_str("on-fail"),
             keep_stage: matches.opt_str("keep-stage").map(|j| j.parse().unwrap()),
-            build: matches.opt_str("build").unwrap_or_else(|| {
+            build: INTERNER.intern_string(matches.opt_str("build").unwrap_or_else(|| {
                 env::var("BUILD").unwrap()
-            }),
-            host: split(matches.opt_strs("host")),
-            target: split(matches.opt_strs("target")),
+            })),
+            host: split(matches.opt_strs("host"))
+                .into_iter().map(|x| INTERNER.intern_string(x)).collect::<Vec<_>>(),
+            target: split(matches.opt_strs("target"))
+                .into_iter().map(|x| INTERNER.intern_string(x)).collect::<Vec<_>>(),
             config: cfg_file,
             src: src,
             jobs: matches.opt_str("jobs").map(|j| j.parse().unwrap()),
diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs
index 8e2ef527b16..85402e875d9 100644
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -18,121 +18,99 @@ use std::fs;
 use std::path::{Path, PathBuf, Component};
 use std::process::Command;
 
-use Build;
-use dist::{pkgname, sanitize_sh, tmpdir};
-
-pub struct Installer<'a> {
-    build: &'a Build,
-    prefix: PathBuf,
-    sysconfdir: PathBuf,
-    docdir: PathBuf,
-    bindir: PathBuf,
-    libdir: PathBuf,
-    mandir: PathBuf,
-    empty_dir: PathBuf,
-}
+use dist::{self, pkgname, sanitize_sh, tmpdir};
 
-impl<'a> Drop for Installer<'a> {
-    fn drop(&mut self) {
-        t!(fs::remove_dir_all(&self.empty_dir));
-    }
-}
+use builder::{Builder, RunConfig, ShouldRun, Step};
+use cache::Interned;
 
-impl<'a> Installer<'a> {
-    pub fn new(build: &'a Build) -> Installer<'a> {
-        let prefix_default = PathBuf::from("/usr/local");
-        let sysconfdir_default = PathBuf::from("/etc");
-        let docdir_default = PathBuf::from("share/doc/rust");
-        let bindir_default = PathBuf::from("bin");
-        let libdir_default = PathBuf::from("lib");
-        let mandir_default = PathBuf::from("share/man");
-        let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
-        let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
-        let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
-        let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
-        let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
-        let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
-
-        let sysconfdir = prefix.join(sysconfdir);
-        let docdir = prefix.join(docdir);
-        let bindir = prefix.join(bindir);
-        let libdir = prefix.join(libdir);
-        let mandir = prefix.join(mandir);
-
-        let destdir = env::var_os("DESTDIR").map(PathBuf::from);
-
-        let prefix = add_destdir(&prefix, &destdir);
-        let sysconfdir = add_destdir(&sysconfdir, &destdir);
-        let docdir = add_destdir(&docdir, &destdir);
-        let bindir = add_destdir(&bindir, &destdir);
-        let libdir = add_destdir(&libdir, &destdir);
-        let mandir = add_destdir(&mandir, &destdir);
-
-        let empty_dir = build.out.join("tmp/empty_dir");
-
-        t!(fs::create_dir_all(&empty_dir));
-
-        Installer {
-            build,
-            prefix,
-            sysconfdir,
-            docdir,
-            bindir,
-            libdir,
-            mandir,
-            empty_dir,
-        }
-    }
+pub fn install_docs(builder: &Builder, stage: u32, host: Interned<String>) {
+    install_sh(builder, "docs", "rust-docs", stage, Some(host));
+}
 
-    pub fn install_docs(&self, stage: u32, host: &str) {
-        self.install_sh("docs", "rust-docs", stage, Some(host));
+pub fn install_std(builder: &Builder, stage: u32) {
+    for target in builder.build.config.target.iter() {
+        install_sh(builder, "std", "rust-std", stage, Some(*target));
     }
+}
 
-    pub fn install_std(&self, stage: u32) {
-        for target in self.build.config.target.iter() {
-            self.install_sh("std", "rust-std", stage, Some(target));
-        }
-    }
+pub fn install_cargo(builder: &Builder, stage: u32, host: Interned<String>) {
+    install_sh(builder, "cargo", "cargo", stage, Some(host));
+}
 
-    pub fn install_cargo(&self, stage: u32, host: &str) {
-        self.install_sh("cargo", "cargo", stage, Some(host));
-    }
+pub fn install_rls(builder: &Builder, stage: u32, host: Interned<String>) {
+    install_sh(builder, "rls", "rls", stage, Some(host));
+}
 
-    pub fn install_rls(&self, stage: u32, host: &str) {
-        self.install_sh("rls", "rls", stage, Some(host));
-    }
+pub fn install_analysis(builder: &Builder, stage: u32, host: Interned<String>) {
+    install_sh(builder, "analysis", "rust-analysis", stage, Some(host));
+}
 
-    pub fn install_analysis(&self, stage: u32, host: &str) {
-        self.install_sh("analysis", "rust-analysis", stage, Some(host));
-    }
+pub fn install_src(builder: &Builder, stage: u32) {
+    install_sh(builder, "src", "rust-src", stage, None);
+}
+pub fn install_rustc(builder: &Builder, stage: u32, host: Interned<String>) {
+    install_sh(builder, "rustc", "rustc", stage, Some(host));
+}
 
-    pub fn install_src(&self, stage: u32) {
-        self.install_sh("src", "rust-src", stage, None);
-    }
-    pub fn install_rustc(&self, stage: u32, host: &str) {
-        self.install_sh("rustc", "rustc", stage, Some(host));
-    }
+fn install_sh(
+    builder: &Builder,
+    package: &str,
+    name: &str,
+    stage: u32,
+    host: Option<Interned<String>>
+) {
+    let build = builder.build;
+    println!("Install {} stage{} ({:?})", package, stage, host);
+
+    let prefix_default = PathBuf::from("/usr/local");
+    let sysconfdir_default = PathBuf::from("/etc");
+    let docdir_default = PathBuf::from("share/doc/rust");
+    let bindir_default = PathBuf::from("bin");
+    let libdir_default = PathBuf::from("lib");
+    let mandir_default = PathBuf::from("share/man");
+    let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+    let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
+    let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+    let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
+    let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
+    let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+
+    let sysconfdir = prefix.join(sysconfdir);
+    let docdir = prefix.join(docdir);
+    let bindir = prefix.join(bindir);
+    let libdir = prefix.join(libdir);
+    let mandir = prefix.join(mandir);
+
+    let destdir = env::var_os("DESTDIR").map(PathBuf::from);
+
+    let prefix = add_destdir(&prefix, &destdir);
+    let sysconfdir = add_destdir(&sysconfdir, &destdir);
+    let docdir = add_destdir(&docdir, &destdir);
+    let bindir = add_destdir(&bindir, &destdir);
+    let libdir = add_destdir(&libdir, &destdir);
+    let mandir = add_destdir(&mandir, &destdir);
+
+    let empty_dir = build.out.join("tmp/empty_dir");
+
+    t!(fs::create_dir_all(&empty_dir));
+    let package_name = if let Some(host) = host {
+        format!("{}-{}", pkgname(build, name), host)
+    } else {
+        pkgname(build, name)
+    };
 
-    fn install_sh(&self, package: &str, name: &str, stage: u32, host: Option<&str>) {
-        println!("Install {} stage{} ({:?})", package, stage, host);
-        let package_name = if let Some(host) = host {
-            format!("{}-{}", pkgname(self.build, name), host)
-        } else {
-            pkgname(self.build, name)
-        };
-
-        let mut cmd = Command::new("sh");
-        cmd.current_dir(&self.empty_dir)
-           .arg(sanitize_sh(&tmpdir(self.build).join(&package_name).join("install.sh")))
-           .arg(format!("--prefix={}", sanitize_sh(&self.prefix)))
-           .arg(format!("--sysconfdir={}", sanitize_sh(&self.sysconfdir)))
-           .arg(format!("--docdir={}", sanitize_sh(&self.docdir)))
-           .arg(format!("--bindir={}", sanitize_sh(&self.bindir)))
-           .arg(format!("--libdir={}", sanitize_sh(&self.libdir)))
-           .arg(format!("--mandir={}", sanitize_sh(&self.mandir)))
-           .arg("--disable-ldconfig");
-        self.build.run(&mut cmd);
-    }
+    let mut cmd = Command::new("sh");
+    cmd.current_dir(&empty_dir)
+        .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
+        .arg(format!("--prefix={}", sanitize_sh(&prefix)))
+        .arg(format!("--sysconfdir={}", sanitize_sh(&sysconfdir)))
+        .arg(format!("--docdir={}", sanitize_sh(&docdir)))
+        .arg(format!("--bindir={}", sanitize_sh(&bindir)))
+        .arg(format!("--libdir={}", sanitize_sh(&libdir)))
+        .arg(format!("--mandir={}", sanitize_sh(&mandir)))
+        .arg("--disable-ldconfig");
+    build.run(&mut cmd);
+    t!(fs::remove_dir_all(&empty_dir));
 }
 
 fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
@@ -148,3 +126,82 @@ fn add_destdir(path: &Path, destdir: &Option<PathBuf>) -> PathBuf {
     }
     ret
 }
+
+macro_rules! install {
+    (($sel:ident, $builder:ident, $_config:ident),
+       $($name:ident,
+       $path:expr,
+       $default_cond:expr,
+       only_hosts: $only_hosts:expr,
+       $run_item:block $(, $c:ident)*;)+) => {
+        $(
+            #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+        pub struct $name {
+            pub stage: u32,
+            pub target: Interned<String>,
+            pub host: Interned<String>,
+        }
+
+        impl Step for $name {
+            type Output = ();
+            const DEFAULT: bool = true;
+            const ONLY_BUILD_TARGETS: bool = true;
+            const ONLY_HOSTS: bool = $only_hosts;
+            $(const $c: bool = true;)*
+
+            fn should_run(run: ShouldRun) -> ShouldRun {
+                let $_config = &run.builder.config;
+                run.path($path).default_condition($default_cond)
+            }
+
+            fn make_run(run: RunConfig) {
+                run.builder.ensure($name {
+                    stage: run.builder.top_stage,
+                    target: run.target,
+                    host: run.host,
+                });
+            }
+
+            fn run($sel, $builder: &Builder) {
+                $run_item
+            }
+        })+
+    }
+}
+
+install!((self, builder, _config),
+    Docs, "src/doc", _config.docs, only_hosts: false, {
+        builder.ensure(dist::Docs { stage: self.stage, target: self.target });
+        install_docs(builder, self.stage, self.target);
+    };
+    Std, "src/libstd", true, only_hosts: true, {
+        builder.ensure(dist::Std {
+            compiler: builder.compiler(self.stage, self.host),
+            target: self.target
+        });
+        install_std(builder, self.stage);
+    };
+    Cargo, "cargo", _config.extended, only_hosts: true, {
+        builder.ensure(dist::Cargo { stage: self.stage, target: self.target });
+        install_cargo(builder, self.stage, self.target);
+    };
+    Rls, "rls", _config.extended, only_hosts: true, {
+        builder.ensure(dist::Rls { stage: self.stage, target: self.target });
+        install_rls(builder, self.stage, self.target);
+    };
+    Analysis, "analysis", _config.extended, only_hosts: false, {
+        builder.ensure(dist::Analysis {
+            compiler: builder.compiler(self.stage, self.host),
+            target: self.target
+        });
+        install_analysis(builder, self.stage, self.target);
+    };
+    Src, "src", _config.extended, only_hosts: true, {
+        builder.ensure(dist::Src);
+        install_src(builder, self.stage);
+    }, ONLY_BUILD;
+    Rustc, "src/librustc", _config.extended, only_hosts: true, {
+        builder.ensure(dist::Rustc { stage: self.stage, target: self.target });
+        install_rustc(builder, self.stage, self.target);
+    };
+);
diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index e06de5bac4b..5b5ef3f07f9 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -23,38 +23,87 @@
 //!
 //! ## Architecture
 //!
-//! Although this build system defers most of the complicated logic to Cargo
-//! itself, it still needs to maintain a list of targets and dependencies which
-//! it can itself perform. Rustbuild is made up of a list of rules with
-//! dependencies amongst them (created in the `step` module) and then knows how
-//! to execute each in sequence. Each time rustbuild is invoked, it will simply
-//! iterate through this list of steps and execute each serially in turn.  For
-//! each step rustbuild relies on the step internally being incremental and
+//! The build system defers most of the complicated logic managing invocations
+//! of rustc and rustdoc to Cargo itself. However, moving through various stages
+//! and copying artifacts is still necessary for it to do. Each time rustbuild
+//! is invoked, it will iterate through the list of predefined steps and execute
+//! each serially in turn if it matches the paths passed or is a default rule.
+//! For each step rustbuild relies on the step internally being incremental and
 //! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded
 //! to appropriate test harnesses and such.
 //!
 //! Most of the "meaty" steps that matter are backed by Cargo, which does indeed
 //! have its own parallelism and incremental management. Later steps, like
 //! tests, aren't incremental and simply run the entire suite currently.
+//! However, compiletest itself tries to avoid running tests when the artifacts
+//! that are involved (mainly the compiler) haven't changed.
 //!
 //! When you execute `x.py build`, the steps which are executed are:
 //!
 //! * First, the python script is run. This will automatically download the
-//!   stage0 rustc and cargo according to `src/stage0.txt`, or using the cached
+//!   stage0 rustc and cargo according to `src/stage0.txt`, or use the cached
 //!   versions if they're available. These are then used to compile rustbuild
 //!   itself (using Cargo). Finally, control is then transferred to rustbuild.
 //!
 //! * Rustbuild takes over, performs sanity checks, probes the environment,
-//!   reads configuration, builds up a list of steps, and then starts executing
-//!   them.
+//!   reads configuration, and starts executing steps as it reads the command
+//!   line arguments (paths) or goes through the default rules.
 //!
-//! * The stage0 libstd is compiled
-//! * The stage0 libtest is compiled
-//! * The stage0 librustc is compiled
-//! * The stage1 compiler is assembled
-//! * The stage1 libstd, libtest, librustc are compiled
-//! * The stage2 compiler is assembled
-//! * The stage2 libstd, libtest, librustc are compiled
+//!   The build output will be something like the following:
+//!
+//!   Building stage0 std artifacts
+//!   Copying stage0 std
+//!   Building stage0 test artifacts
+//!   Copying stage0 test
+//!   Building stage0 compiler artifacts
+//!   Copying stage0 rustc
+//!   Assembling stage1 compiler
+//!   Building stage1 std artifacts
+//!   Copying stage1 std
+//!   Building stage1 test artifacts
+//!   Copying stage1 test
+//!   Building stage1 compiler artifacts
+//!   Copying stage1 rustc
+//!   Assembling stage2 compiler
+//!   Uplifting stage1 std
+//!   Uplifting stage1 test
+//!   Uplifting stage1 rustc
+//!
+//! Let's dissect that a little:
+//!
+//! ## Building stage0 {std,test,compiler} artifacts
+//!
+//! These steps use the provided (downloaded, usually) compiler to compile the
+//! local Rust source into libraries we can use.
+//!
+//! ## Copying stage0 {std,test,rustc}
+//!
+//! This copies the build output from Cargo into
+//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: This step's
+//! documentation should be expanded -- the information already here may be
+//! incorrect.
+//!
+//! ## Assembling stage1 compiler
+//!
+//! This copies the libraries we built in "building stage0 ... artifacts" into
+//! the stage1 compiler's lib directory. These are the host libraries that the
+//! compiler itself uses to run. These aren't actually used by artifacts the new
+//! compiler generates. This step also copies the rustc and rustdoc binaries we
+//! generated into build/$HOST/stage/bin.
+//!
+//! The stage1/bin/rustc is a fully functional compiler, but it doesn't yet have
+//! any libraries to link built binaries or libraries to. The next 3 steps will
+//! provide those libraries for it; they are mostly equivalent to constructing
+//! the stage1/bin compiler so we don't go through them individually.
+//!
+//! ## Uplifting stage1 {std,test,rustc}
+//!
+//! This step copies the libraries from the stage1 compiler sysroot into the
+//! stage2 compiler. This is done to avoid rebuilding the compiler; libraries
+//! we'd build in this step should be identical (in function, if not necessarily
+//! identical on disk) so there's no need to recompile the compiler again. Note
+//! that if you want to, you can enable the full-bootstrap option to change this
+//! behavior.
 //!
 //! Each step is driven by a separate Cargo project and rustbuild orchestrates
 //! copying files between steps and otherwise preparing for Cargo to run.
@@ -65,15 +114,22 @@
 //! also check out the `src/bootstrap/README.md` file for more information.
 
 #![deny(warnings)]
+#![allow(stable_features)]
+#![feature(associated_consts)]
 
 #[macro_use]
 extern crate build_helper;
+#[macro_use]
+extern crate serde_derive;
+#[macro_use]
+extern crate lazy_static;
+extern crate serde;
+extern crate serde_json;
 extern crate cmake;
 extern crate filetime;
 extern crate gcc;
 extern crate getopts;
 extern crate num_cpus;
-extern crate rustc_serialize;
 extern crate toml;
 
 #[cfg(unix)]
@@ -81,9 +137,8 @@ extern crate libc;
 
 use std::cell::Cell;
 use std::cmp;
-use std::collections::HashMap;
+use std::collections::{HashSet, HashMap};
 use std::env;
-use std::ffi::OsString;
 use std::fs::{self, File};
 use std::io::Read;
 use std::path::{PathBuf, Path};
@@ -91,7 +146,7 @@ use std::process::Command;
 
 use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime};
 
-use util::{exe, libdir, add_lib_path, OutputFolder, CiEnv};
+use util::{exe, libdir, OutputFolder, CiEnv};
 
 mod cc;
 mod channel;
@@ -106,8 +161,10 @@ mod flags;
 mod install;
 mod native;
 mod sanity;
-mod step;
 pub mod util;
+mod builder;
+mod cache;
+mod tool;
 
 #[cfg(windows)]
 mod job;
@@ -131,6 +188,7 @@ mod job {
 
 pub use config::Config;
 pub use flags::{Flags, Subcommand};
+use cache::{Interned, INTERNER};
 
 /// A structure representing a Rust compiler.
 ///
@@ -138,9 +196,9 @@ pub use flags::{Flags, Subcommand};
 /// corresponds to the platform the compiler runs on. This structure is used as
 /// a parameter to many methods below.
 #[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)]
-pub struct Compiler<'a> {
+pub struct Compiler {
     stage: u32,
-    host: &'a str,
+    host: Interned<String>,
 }
 
 /// Global configuration for the build system.
@@ -171,9 +229,9 @@ pub struct Build {
     verbosity: usize,
 
     // Targets for which to build.
-    build: String,
-    hosts: Vec<String>,
-    targets: Vec<String>,
+    build: Interned<String>,
+    hosts: Vec<Interned<String>>,
+    targets: Vec<Interned<String>>,
 
     // Stage 0 (downloaded) compiler and cargo or their local rust equivalents.
     initial_rustc: PathBuf,
@@ -185,10 +243,10 @@ pub struct Build {
 
     // Runtime state filled in later on
     // target -> (cc, ar)
-    cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>,
+    cc: HashMap<Interned<String>, (gcc::Tool, Option<PathBuf>)>,
     // host -> (cc, ar)
-    cxx: HashMap<String, gcc::Tool>,
-    crates: HashMap<String, Crate>,
+    cxx: HashMap<Interned<String>, gcc::Tool>,
+    crates: HashMap<Interned<String>, Crate>,
     is_sudo: bool,
     ci_env: CiEnv,
     delayed_failures: Cell<usize>,
@@ -196,9 +254,9 @@ pub struct Build {
 
 #[derive(Debug)]
 struct Crate {
-    name: String,
+    name: Interned<String>,
     version: String,
-    deps: Vec<String>,
+    deps: Vec<Interned<String>>,
     path: PathBuf,
     doc_step: String,
     build_step: String,
@@ -210,7 +268,7 @@ struct Crate {
 ///
 /// These entries currently correspond to the various output directories of the
 /// build system, with each mod generating output in a different directory.
-#[derive(Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
 pub enum Mode {
     /// Build the standard library, placing output in the "stageN-std" directory.
     Libstd,
@@ -299,12 +357,6 @@ impl Build {
         }
     }
 
-    fn build_slice(&self) -> &[String] {
-        unsafe {
-            std::slice::from_raw_parts(&self.build, 1)
-        }
-    }
-
     /// Executes the entire build, as configured by the flags and configuration.
     pub fn build(&mut self) {
         unsafe {
@@ -333,7 +385,7 @@ impl Build {
         self.verbose("learning about cargo");
         metadata::build(self);
 
-        step::run(self);
+        builder::Builder::run(&self);
     }
 
     /// Clear out `dir` if `input` is newer.
@@ -351,242 +403,6 @@ impl Build {
         t!(File::create(stamp));
     }
 
-    /// Prepares an invocation of `cargo` to be run.
-    ///
-    /// This will create a `Command` that represents a pending execution of
-    /// Cargo. This cargo will be configured to use `compiler` as the actual
-    /// rustc compiler, its output will be scoped by `mode`'s output directory,
-    /// it will pass the `--target` flag for the specified `target`, and will be
-    /// executing the Cargo command `cmd`.
-    fn cargo(&self,
-             compiler: &Compiler,
-             mode: Mode,
-             target: &str,
-             cmd: &str) -> Command {
-        let mut cargo = Command::new(&self.initial_cargo);
-        let out_dir = self.stage_out(compiler, mode);
-        cargo.env("CARGO_TARGET_DIR", out_dir)
-             .arg(cmd)
-             .arg("-j").arg(self.jobs().to_string())
-             .arg("--target").arg(target);
-
-        // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005
-        // Force cargo to output binaries with disambiguating hashes in the name
-        cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel);
-
-        let stage;
-        if compiler.stage == 0 && self.local_rebuild {
-            // Assume the local-rebuild rustc already has stage1 features.
-            stage = 1;
-        } else {
-            stage = compiler.stage;
-        }
-
-        // Customize the compiler we're running. Specify the compiler to cargo
-        // as our shim and then pass it some various options used to configure
-        // how the actual compiler itself is called.
-        //
-        // These variables are primarily all read by
-        // src/bootstrap/bin/{rustc.rs,rustdoc.rs}
-        cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
-             .env("RUSTC", self.out.join("bootstrap/debug/rustc"))
-             .env("RUSTC_REAL", self.compiler_path(compiler))
-             .env("RUSTC_STAGE", stage.to_string())
-             .env("RUSTC_CODEGEN_UNITS",
-                  self.config.rust_codegen_units.to_string())
-             .env("RUSTC_DEBUG_ASSERTIONS",
-                  self.config.rust_debug_assertions.to_string())
-             .env("RUSTC_SYSROOT", self.sysroot(compiler))
-             .env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
-             .env("RUSTC_RPATH", self.config.rust_rpath.to_string())
-             .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
-             .env("RUSTDOC_REAL", self.rustdoc(compiler))
-             .env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
-
-        if mode != Mode::Tool {
-            // Tools don't get debuginfo right now, e.g. cargo and rls don't
-            // get compiled with debuginfo.
-            cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
-                 .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
-                 .env("RUSTC_FORCE_UNSTABLE", "1");
-
-            // Currently the compiler depends on crates from crates.io, and
-            // then other crates can depend on the compiler (e.g. proc-macro
-            // crates). Let's say, for example that rustc itself depends on the
-            // bitflags crate. If an external crate then depends on the
-            // bitflags crate as well, we need to make sure they don't
-            // conflict, even if they pick the same verison of bitflags. We'll
-            // want to make sure that e.g. a plugin and rustc each get their
-            // own copy of bitflags.
-
-            // Cargo ensures that this works in general through the -C metadata
-            // flag. This flag will frob the symbols in the binary to make sure
-            // they're different, even though the source code is the exact
-            // same. To solve this problem for the compiler we extend Cargo's
-            // already-passed -C metadata flag with our own. Our rustc.rs
-            // wrapper around the actual rustc will detect -C metadata being
-            // passed and frob it with this extra string we're passing in.
-            cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
-        }
-
-        // Enable usage of unstable features
-        cargo.env("RUSTC_BOOTSTRAP", "1");
-        self.add_rust_test_threads(&mut cargo);
-
-        // Almost all of the crates that we compile as part of the bootstrap may
-        // have a build script, including the standard library. To compile a
-        // build script, however, it itself needs a standard library! This
-        // introduces a bit of a pickle when we're compiling the standard
-        // library itself.
-        //
-        // To work around this we actually end up using the snapshot compiler
-        // (stage0) for compiling build scripts of the standard library itself.
-        // The stage0 compiler is guaranteed to have a libstd available for use.
-        //
-        // For other crates, however, we know that we've already got a standard
-        // library up and running, so we can use the normal compiler to compile
-        // build scripts in that situation.
-        if mode == Mode::Libstd {
-            cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc)
-                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir());
-        } else {
-            cargo.env("RUSTC_SNAPSHOT", self.compiler_path(compiler))
-                 .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler));
-        }
-
-        // Ignore incremental modes except for stage0, since we're
-        // not guaranteeing correctness across builds if the compiler
-        // is changing under your feet.`
-        if self.flags.incremental && compiler.stage == 0 {
-            let incr_dir = self.incremental_dir(compiler);
-            cargo.env("RUSTC_INCREMENTAL", incr_dir);
-        }
-
-        if let Some(ref on_fail) = self.flags.on_fail {
-            cargo.env("RUSTC_ON_FAIL", on_fail);
-        }
-
-        cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
-
-        // Specify some various options for build scripts used throughout
-        // the build.
-        //
-        // FIXME: the guard against msvc shouldn't need to be here
-        if !target.contains("msvc") {
-            cargo.env(format!("CC_{}", target), self.cc(target))
-                 .env(format!("AR_{}", target), self.ar(target).unwrap()) // only msvc is None
-                 .env(format!("CFLAGS_{}", target), self.cflags(target).join(" "));
-
-            if let Ok(cxx) = self.cxx(target) {
-                 cargo.env(format!("CXX_{}", target), cxx);
-            }
-        }
-
-        if mode == Mode::Libstd &&
-           self.config.extended &&
-           compiler.is_final_stage(self) {
-            cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
-        }
-
-        // When being built Cargo will at some point call `nmake.exe` on Windows
-        // MSVC. Unfortunately `nmake` will read these two environment variables
-        // below and try to intepret them. We're likely being run, however, from
-        // MSYS `make` which uses the same variables.
-        //
-        // As a result, to prevent confusion and errors, we remove these
-        // variables from our environment to prevent passing MSYS make flags to
-        // nmake, causing it to blow up.
-        if cfg!(target_env = "msvc") {
-            cargo.env_remove("MAKE");
-            cargo.env_remove("MAKEFLAGS");
-        }
-
-        // Environment variables *required* throughout the build
-        //
-        // FIXME: should update code to not require this env var
-        cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
-
-        if self.is_verbose() {
-            cargo.arg("-v");
-        }
-        // FIXME: cargo bench does not accept `--release`
-        if self.config.rust_optimize && cmd != "bench" {
-            cargo.arg("--release");
-        }
-        if self.config.locked_deps {
-            cargo.arg("--locked");
-        }
-        if self.config.vendor || self.is_sudo {
-            cargo.arg("--frozen");
-        }
-
-        self.ci_env.force_coloring_in_ci(&mut cargo);
-
-        cargo
-    }
-
-    /// Get a path to the compiler specified.
-    fn compiler_path(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.initial_rustc.clone()
-        } else {
-            self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
-        }
-    }
-
-    /// Get the specified tool built by the specified compiler
-    fn tool(&self, compiler: &Compiler, tool: &str) -> PathBuf {
-        self.cargo_out(compiler, Mode::Tool, compiler.host)
-            .join(exe(tool, compiler.host))
-    }
-
-    /// Get the `rustdoc` executable next to the specified compiler
-    fn rustdoc(&self, compiler: &Compiler) -> PathBuf {
-        let mut rustdoc = self.compiler_path(compiler);
-        rustdoc.pop();
-        rustdoc.push(exe("rustdoc", compiler.host));
-        rustdoc
-    }
-
-    /// Get a `Command` which is ready to run `tool` in `stage` built for
-    /// `host`.
-    fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command {
-        let mut cmd = Command::new(self.tool(&compiler, tool));
-        self.prepare_tool_cmd(compiler, &mut cmd);
-        cmd
-    }
-
-    /// Prepares the `cmd` provided to be able to run the `compiler` provided.
-    ///
-    /// Notably this munges the dynamic library lookup path to point to the
-    /// right location to run `compiler`.
-    fn prepare_tool_cmd(&self, compiler: &Compiler, cmd: &mut Command) {
-        let host = compiler.host;
-        let mut paths = vec![
-            self.sysroot_libdir(compiler, compiler.host),
-            self.cargo_out(compiler, Mode::Tool, host).join("deps"),
-        ];
-
-        // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
-        // mode) and that C compiler may need some extra PATH modification. Do
-        // so here.
-        if compiler.host.contains("msvc") {
-            let curpaths = env::var_os("PATH").unwrap_or(OsString::new());
-            let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
-            for &(ref k, ref v) in self.cc[compiler.host].0.env() {
-                if k != "PATH" {
-                    continue
-                }
-                for path in env::split_paths(v) {
-                    if !curpaths.contains(&path) {
-                        paths.push(path);
-                    }
-                }
-            }
-        }
-        add_lib_path(paths, cmd);
-    }
-
     /// Get the space-separated set of activated features for the standard
     /// library.
     fn std_features(&self) -> String {
@@ -622,51 +438,24 @@ impl Build {
         if self.config.rust_optimize {"release"} else {"debug"}
     }
 
-    /// Returns the sysroot for the `compiler` specified that *this build system
-    /// generates*.
-    ///
-    /// That is, the sysroot for the stage0 compiler is not what the compiler
-    /// thinks it is by default, but it's the same as the default for stages
-    /// 1-3.
-    fn sysroot(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.stage == 0 {
-            self.out.join(compiler.host).join("stage0-sysroot")
-        } else {
-            self.out.join(compiler.host).join(format!("stage{}", compiler.stage))
-        }
-    }
-
     /// Get the directory for incremental by-products when using the
     /// given compiler.
-    fn incremental_dir(&self, compiler: &Compiler) -> PathBuf {
-        self.out.join(compiler.host).join(format!("stage{}-incremental", compiler.stage))
-    }
-
-    /// Returns the libdir where the standard library and other artifacts are
-    /// found for a compiler's sysroot.
-    fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
-        if compiler.stage >= 2 {
-            if let Some(ref libdir_relative) = self.config.libdir_relative {
-                return self.sysroot(compiler).join(libdir_relative)
-                    .join("rustlib").join(target).join("lib")
-            }
-        }
-       self.sysroot(compiler).join("lib").join("rustlib")
-           .join(target).join("lib")
+    fn incremental_dir(&self, compiler: Compiler) -> PathBuf {
+        self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage))
     }
 
     /// Returns the root directory for all output generated in a particular
     /// stage when running with a particular host compiler.
     ///
     /// The mode indicates what the root directory is for.
-    fn stage_out(&self, compiler: &Compiler, mode: Mode) -> PathBuf {
+    fn stage_out(&self, compiler: Compiler, mode: Mode) -> PathBuf {
         let suffix = match mode {
             Mode::Libstd => "-std",
             Mode::Libtest => "-test",
             Mode::Tool => "-tools",
             Mode::Librustc => "-rustc",
         };
-        self.out.join(compiler.host)
+        self.out.join(&*compiler.host)
                 .join(format!("stage{}{}", compiler.stage, suffix))
     }
 
@@ -674,42 +463,42 @@ impl Build {
     /// running a particular compiler, wehther or not we're building the
     /// standard library, and targeting the specified architecture.
     fn cargo_out(&self,
-                 compiler: &Compiler,
+                 compiler: Compiler,
                  mode: Mode,
-                 target: &str) -> PathBuf {
-        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
+                 target: Interned<String>) -> PathBuf {
+        self.stage_out(compiler, mode).join(&*target).join(self.cargo_dir())
     }
 
     /// Root output directory for LLVM compiled for `target`
     ///
     /// Note that if LLVM is configured externally then the directory returned
     /// will likely be empty.
-    fn llvm_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("llvm")
+    fn llvm_out(&self, target: Interned<String>) -> PathBuf {
+        self.out.join(&*target).join("llvm")
     }
 
     /// Output directory for all documentation for a target
-    fn doc_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("doc")
+    fn doc_out(&self, target: Interned<String>) -> PathBuf {
+        self.out.join(&*target).join("doc")
     }
 
     /// Output directory for some generated md crate documentation for a target (temporary)
-    fn md_doc_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("md-doc")
+    fn md_doc_out(&self, target: Interned<String>) -> Interned<PathBuf> {
+        INTERNER.intern_path(self.out.join(&*target).join("md-doc"))
     }
 
     /// Output directory for all crate documentation for a target (temporary)
     ///
     /// The artifacts here are then copied into `doc_out` above.
-    fn crate_doc_out(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("crate-docs")
+    fn crate_doc_out(&self, target: Interned<String>) -> PathBuf {
+        self.out.join(&*target).join("crate-docs")
     }
 
     /// Returns true if no custom `llvm-config` is set for the specified target.
     ///
     /// If no custom `llvm-config` was specified then Rust's llvm will be used.
-    fn is_rust_llvm(&self, target: &str) -> bool {
-        match self.config.target_config.get(target) {
+    fn is_rust_llvm(&self, target: Interned<String>) -> bool {
+        match self.config.target_config.get(&target) {
             Some(ref c) => c.llvm_config.is_none(),
             None => true
         }
@@ -719,25 +508,25 @@ impl Build {
     ///
     /// If a custom `llvm-config` was specified for target then that's returned
     /// instead.
-    fn llvm_config(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
+    fn llvm_config(&self, target: Interned<String>) -> PathBuf {
+        let target_config = self.config.target_config.get(&target);
         if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
             s.clone()
         } else {
-            self.llvm_out(&self.config.build).join("bin")
-                .join(exe("llvm-config", target))
+            self.llvm_out(self.config.build).join("bin")
+                .join(exe("llvm-config", &*target))
         }
     }
 
     /// Returns the path to `FileCheck` binary for the specified target
-    fn llvm_filecheck(&self, target: &str) -> PathBuf {
-        let target_config = self.config.target_config.get(target);
+    fn llvm_filecheck(&self, target: Interned<String>) -> PathBuf {
+        let target_config = self.config.target_config.get(&target);
         if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
             let llvm_bindir = output(Command::new(s).arg("--bindir"));
-            Path::new(llvm_bindir.trim()).join(exe("FileCheck", target))
+            Path::new(llvm_bindir.trim()).join(exe("FileCheck", &*target))
         } else {
-            let base = self.llvm_out(&self.config.build).join("build");
-            let exe = exe("FileCheck", target);
+            let base = self.llvm_out(self.config.build).join("build");
+            let exe = exe("FileCheck", &*target);
             if !self.config.ninja && self.config.build.contains("msvc") {
                 base.join("Release/bin").join(exe)
             } else {
@@ -747,29 +536,16 @@ impl Build {
     }
 
     /// Directory for libraries built from C/C++ code and shared between stages.
-    fn native_dir(&self, target: &str) -> PathBuf {
-        self.out.join(target).join("native")
+    fn native_dir(&self, target: Interned<String>) -> PathBuf {
+        self.out.join(&*target).join("native")
     }
 
     /// Root output directory for rust_test_helpers library compiled for
     /// `target`
-    fn test_helpers_out(&self, target: &str) -> PathBuf {
+    fn test_helpers_out(&self, target: Interned<String>) -> PathBuf {
         self.native_dir(target).join("rust-test-helpers")
     }
 
-    /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic
-    /// library lookup path.
-    fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) {
-        // Windows doesn't need dylib path munging because the dlls for the
-        // compiler live next to the compiler and the system will find them
-        // automatically.
-        if cfg!(windows) {
-            return
-        }
-
-        add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
-    }
-
     /// Adds the `RUST_TEST_THREADS` env var if necessary
     fn add_rust_test_threads(&self, cmd: &mut Command) {
         if env::var_os("RUST_TEST_THREADS").is_none() {
@@ -777,19 +553,6 @@ impl Build {
         }
     }
 
-    /// Returns the compiler's libdir where it stores the dynamic libraries that
-    /// it itself links against.
-    ///
-    /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on
-    /// Windows.
-    fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf {
-        if compiler.is_snapshot(self) {
-            self.rustc_snapshot_libdir()
-        } else {
-            self.sysroot(compiler).join(libdir(compiler.host))
-        }
-    }
-
     /// Returns the libdir of the snapshot compiler.
     fn rustc_snapshot_libdir(&self) -> PathBuf {
         self.initial_rustc.parent().unwrap().parent().unwrap()
@@ -846,16 +609,16 @@ impl Build {
     }
 
     /// Returns the path to the C compiler for the target specified.
-    fn cc(&self, target: &str) -> &Path {
-        self.cc[target].0.path()
+    fn cc(&self, target: Interned<String>) -> &Path {
+        self.cc[&target].0.path()
     }
 
     /// Returns a list of flags to pass to the C compiler for the target
     /// specified.
-    fn cflags(&self, target: &str) -> Vec<String> {
+    fn cflags(&self, target: Interned<String>) -> Vec<String> {
         // Filter out -O and /O (the optimization flags) that we picked up from
         // gcc-rs because the build scripts will determine that for themselves.
-        let mut base = self.cc[target].0.args().iter()
+        let mut base = self.cc[&target].0.args().iter()
                            .map(|s| s.to_string_lossy().into_owned())
                            .filter(|s| !s.starts_with("-O") && !s.starts_with("/O"))
                            .collect::<Vec<_>>();
@@ -871,20 +634,20 @@ impl Build {
         // Work around an apparently bad MinGW / GCC optimization,
         // See: http://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
         // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
-        if target == "i686-pc-windows-gnu" {
+        if &*target == "i686-pc-windows-gnu" {
             base.push("-fno-omit-frame-pointer".into());
         }
         base
     }
 
     /// Returns the path to the `ar` archive utility for the target specified.
-    fn ar(&self, target: &str) -> Option<&Path> {
-        self.cc[target].1.as_ref().map(|p| &**p)
+    fn ar(&self, target: Interned<String>) -> Option<&Path> {
+        self.cc[&target].1.as_ref().map(|p| &**p)
     }
 
     /// Returns the path to the C++ compiler for the target specified.
-    fn cxx(&self, target: &str) -> Result<&Path, String> {
-        match self.cxx.get(target) {
+    fn cxx(&self, target: Interned<String>) -> Result<&Path, String> {
+        match self.cxx.get(&target) {
             Some(p) => Ok(p.path()),
             None => Err(format!(
                     "target `{}` is not configured as a host, only as a target",
@@ -893,7 +656,7 @@ impl Build {
     }
 
     /// Returns flags to pass to the compiler to generate code for `target`.
-    fn rustc_flags(&self, target: &str) -> Vec<String> {
+    fn rustc_flags(&self, target: Interned<String>) -> Vec<String> {
         // New flags should be added here with great caution!
         //
         // It's quite unfortunate to **require** flags to generate code for a
@@ -910,8 +673,8 @@ impl Build {
     }
 
     /// Returns the "musl root" for this `target`, if defined
-    fn musl_root(&self, target: &str) -> Option<&Path> {
-        self.config.target_config.get(target)
+    fn musl_root(&self, target: Interned<String>) -> Option<&Path> {
+        self.config.target_config.get(&target)
             .and_then(|t| t.musl_root.as_ref())
             .or(self.config.musl_root.as_ref())
             .map(|p| &**p)
@@ -919,7 +682,7 @@ impl Build {
 
     /// Returns whether the target will be tested using the `remote-test-client`
     /// and `remote-test-server` binaries.
-    fn remote_tested(&self, target: &str) -> bool {
+    fn remote_tested(&self, target: Interned<String>) -> bool {
         self.qemu_rootfs(target).is_some() || target.contains("android") ||
         env::var_os("TEST_DEVICE_ADDR").is_some()
     }
@@ -929,8 +692,8 @@ impl Build {
     ///
     /// If `Some` is returned then that means that tests for this target are
     /// emulated with QEMU and binaries will need to be shipped to the emulator.
-    fn qemu_rootfs(&self, target: &str) -> Option<&Path> {
-        self.config.target_config.get(target)
+    fn qemu_rootfs(&self, target: Interned<String>) -> Option<&Path> {
+        self.config.target_config.get(&target)
             .and_then(|t| t.qemu_rootfs.as_ref())
             .map(|p| &**p)
     }
@@ -958,20 +721,20 @@ impl Build {
     ///
     /// When all of these conditions are met the build will lift artifacts from
     /// the previous stage forward.
-    fn force_use_stage1(&self, compiler: &Compiler, target: &str) -> bool {
+    fn force_use_stage1(&self, compiler: Compiler, target: Interned<String>) -> bool {
         !self.config.full_bootstrap &&
             compiler.stage >= 2 &&
-            self.config.host.iter().any(|h| h == target)
+            self.config.host.iter().any(|h| *h == target)
     }
 
     /// Returns the directory that OpenSSL artifacts are compiled into if
     /// configured to do so.
-    fn openssl_dir(&self, target: &str) -> Option<PathBuf> {
+    fn openssl_dir(&self, target: Interned<String>) -> Option<PathBuf> {
         // OpenSSL not used on Windows
         if target.contains("windows") {
             None
         } else if self.config.openssl_static {
-            Some(self.out.join(target).join("openssl"))
+            Some(self.out.join(&*target).join("openssl"))
         } else {
             None
         }
@@ -979,7 +742,7 @@ impl Build {
 
     /// Returns the directory that OpenSSL artifacts are installed into if
     /// configured as such.
-    fn openssl_install_dir(&self, target: &str) -> Option<PathBuf> {
+    fn openssl_install_dir(&self, target: Interned<String>) -> Option<PathBuf> {
         self.openssl_dir(target).map(|p| p.join("install"))
     }
 
@@ -1078,16 +841,38 @@ impl Build {
             None
         }
     }
+
+    /// Get a list of crates from a root crate.
+    ///
+    /// Returns Vec<(crate, path to crate, is_root_crate)>
+    fn crates(&self, root: &str) -> Vec<(Interned<String>, &Path)> {
+        let interned = INTERNER.intern_string(root.to_owned());
+        let mut ret = Vec::new();
+        let mut list = vec![interned];
+        let mut visited = HashSet::new();
+        while let Some(krate) = list.pop() {
+            let krate = &self.crates[&krate];
+            // If we can't strip prefix, then out-of-tree path
+            let path = krate.path.strip_prefix(&self.src).unwrap_or(&krate.path);
+            ret.push((krate.name, path));
+            for dep in &krate.deps {
+                if visited.insert(dep) && dep != "build_helper" {
+                    list.push(*dep);
+                }
+            }
+        }
+        ret
+    }
 }
 
-impl<'a> Compiler<'a> {
-    /// Creates a new complier for the specified stage/host
-    fn new(stage: u32, host: &'a str) -> Compiler<'a> {
-        Compiler { stage: stage, host: host }
+impl<'a> Compiler {
+    pub fn with_stage(mut self, stage: u32) -> Compiler {
+        self.stage = stage;
+        self
     }
 
     /// Returns whether this is a snapshot compiler for `build`'s configuration
-    fn is_snapshot(&self, build: &Build) -> bool {
+    pub fn is_snapshot(&self, build: &Build) -> bool {
         self.stage == 0 && self.host == build.build
     }
 
@@ -1095,7 +880,7 @@ impl<'a> Compiler<'a> {
     /// current build session.
     /// This takes into account whether we're performing a full bootstrap or
     /// not; don't directly compare the stage with `2`!
-    fn is_final_stage(&self, build: &Build) -> bool {
+    pub fn is_final_stage(&self, build: &Build) -> bool {
         let final_stage = if build.config.full_bootstrap { 2 } else { 1 };
         self.stage >= final_stage
     }
diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs
index 9326bb7129a..ad555be877a 100644
--- a/src/bootstrap/metadata.rs
+++ b/src/bootstrap/metadata.rs
@@ -13,17 +13,18 @@ use std::process::Command;
 use std::path::PathBuf;
 
 use build_helper::output;
-use rustc_serialize::json;
+use serde_json;
 
 use {Build, Crate};
+use cache::INTERNER;
 
-#[derive(RustcDecodable)]
+#[derive(Deserialize)]
 struct Output {
     packages: Vec<Package>,
     resolve: Resolve,
 }
 
-#[derive(RustcDecodable)]
+#[derive(Deserialize)]
 struct Package {
     id: String,
     name: String,
@@ -32,12 +33,12 @@ struct Package {
     manifest_path: String,
 }
 
-#[derive(RustcDecodable)]
+#[derive(Deserialize)]
 struct Resolve {
     nodes: Vec<ResolveNode>,
 }
 
-#[derive(RustcDecodable)]
+#[derive(Deserialize)]
 struct ResolveNode {
     id: String,
     dependencies: Vec<String>,
@@ -61,19 +62,20 @@ fn build_krate(build: &mut Build, krate: &str) {
          .arg("--format-version").arg("1")
          .arg("--manifest-path").arg(build.src.join(krate).join("Cargo.toml"));
     let output = output(&mut cargo);
-    let output: Output = json::decode(&output).unwrap();
+    let output: Output = serde_json::from_str(&output).unwrap();
     let mut id2name = HashMap::new();
     for package in output.packages {
         if package.source.is_none() {
-            id2name.insert(package.id, package.name.clone());
+            let name = INTERNER.intern_string(package.name);
+            id2name.insert(package.id, name);
             let mut path = PathBuf::from(package.manifest_path);
             path.pop();
-            build.crates.insert(package.name.clone(), Crate {
-                build_step: format!("build-crate-{}", package.name),
-                doc_step: format!("doc-crate-{}", package.name),
-                test_step: format!("test-crate-{}", package.name),
-                bench_step: format!("bench-crate-{}", package.name),
-                name: package.name,
+            build.crates.insert(name, Crate {
+                build_step: format!("build-crate-{}", name),
+                doc_step: format!("doc-crate-{}", name),
+                test_step: format!("test-crate-{}", name),
+                bench_step: format!("bench-crate-{}", name),
+                name: name,
                 version: package.version,
                 deps: Vec::new(),
                 path: path,
@@ -93,7 +95,7 @@ fn build_krate(build: &mut Build, krate: &str) {
                 Some(dep) => dep,
                 None => continue,
             };
-            krate.deps.push(dep.clone());
+            krate.deps.push(*dep);
         }
     }
 }
diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs
index 20eec97d8e5..f0dfd857ab6 100644
--- a/src/bootstrap/native.rs
+++ b/src/bootstrap/native.rs
@@ -32,174 +32,193 @@ use gcc;
 use Build;
 use util;
 use build_helper::up_to_date;
+use builder::{Builder, RunConfig, ShouldRun, Step};
+use cache::Interned;
 
-/// Compile LLVM for `target`.
-pub fn llvm(build: &Build, target: &str) {
-    // If we're using a custom LLVM bail out here, but we can only use a
-    // custom LLVM for the build triple.
-    if let Some(config) = build.config.target_config.get(target) {
-        if let Some(ref s) = config.llvm_config {
-            return check_llvm_version(build, s);
-        }
-    }
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Llvm {
+    pub target: Interned<String>,
+}
 
-    let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
-    let mut rebuild_trigger_contents = String::new();
-    t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
+impl Step for Llvm {
+    type Output = ();
+    const ONLY_HOSTS: bool = true;
 
-    let out_dir = build.llvm_out(target);
-    let done_stamp = out_dir.join("llvm-finished-building");
-    if done_stamp.exists() {
-        let mut done_contents = String::new();
-        t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/llvm")
+    }
 
-        // If LLVM was already built previously and contents of the rebuild-trigger file
-        // didn't change from the previous build, then no action is required.
-        if done_contents == rebuild_trigger_contents {
-            return
+    /// Compile LLVM for `target`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        // If we're using a custom LLVM bail out here, but we can only use a
+        // custom LLVM for the build triple.
+        if let Some(config) = build.config.target_config.get(&target) {
+            if let Some(ref s) = config.llvm_config {
+                return check_llvm_version(build, s);
+            }
         }
-    }
-    if build.config.llvm_clean_rebuild {
-        drop(fs::remove_dir_all(&out_dir));
-    }
 
-    let _folder = build.fold_output(|| "llvm");
-    println!("Building LLVM for {}", target);
-    let _time = util::timeit();
-    t!(fs::create_dir_all(&out_dir));
+        let rebuild_trigger = build.src.join("src/rustllvm/llvm-rebuild-trigger");
+        let mut rebuild_trigger_contents = String::new();
+        t!(t!(File::open(&rebuild_trigger)).read_to_string(&mut rebuild_trigger_contents));
 
-    // http://llvm.org/docs/CMake.html
-    let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
-    if build.config.ninja {
-        cfg.generator("Ninja");
-    }
+        let out_dir = build.llvm_out(target);
+        let done_stamp = out_dir.join("llvm-finished-building");
+        if done_stamp.exists() {
+            let mut done_contents = String::new();
+            t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents));
 
-    let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
-        (false, _) => "Debug",
-        (true, false) => "Release",
-        (true, true) => "RelWithDebInfo",
-    };
-
-    // NOTE: remember to also update `config.toml.example` when changing the defaults!
-    let llvm_targets = match build.config.llvm_targets {
-        Some(ref s) => s,
-        None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
-    };
-
-    let llvm_exp_targets = match build.config.llvm_experimental_targets {
-        Some(ref s) => s,
-        None => "",
-    };
-
-    let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
-
-    cfg.target(target)
-       .host(&build.build)
-       .out_dir(&out_dir)
-       .profile(profile)
-       .define("LLVM_ENABLE_ASSERTIONS", assertions)
-       .define("LLVM_TARGETS_TO_BUILD", llvm_targets)
-       .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
-       .define("LLVM_INCLUDE_EXAMPLES", "OFF")
-       .define("LLVM_INCLUDE_TESTS", "OFF")
-       .define("LLVM_INCLUDE_DOCS", "OFF")
-       .define("LLVM_ENABLE_ZLIB", "OFF")
-       .define("WITH_POLLY", "OFF")
-       .define("LLVM_ENABLE_TERMINFO", "OFF")
-       .define("LLVM_ENABLE_LIBEDIT", "OFF")
-       .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
-       .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
-       .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
-
-    if target.contains("msvc") {
-        cfg.define("LLVM_USE_CRT_DEBUG", "MT");
-        cfg.define("LLVM_USE_CRT_RELEASE", "MT");
-        cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
-        cfg.static_crt(true);
-    }
+            // If LLVM was already built previously and contents of the rebuild-trigger file
+            // didn't change from the previous build, then no action is required.
+            if done_contents == rebuild_trigger_contents {
+                return
+            }
+        }
+        if build.config.llvm_clean_rebuild {
+            drop(fs::remove_dir_all(&out_dir));
+        }
 
-    if target.starts_with("i686") {
-        cfg.define("LLVM_BUILD_32_BITS", "ON");
-    }
+        let _folder = build.fold_output(|| "llvm");
+        println!("Building LLVM for {}", target);
+        let _time = util::timeit();
+        t!(fs::create_dir_all(&out_dir));
 
-    if let Some(num_linkers) = build.config.llvm_link_jobs {
-        if num_linkers > 0 {
-            cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
+        // http://llvm.org/docs/CMake.html
+        let mut cfg = cmake::Config::new(build.src.join("src/llvm"));
+        if build.config.ninja {
+            cfg.generator("Ninja");
         }
-    }
 
-    // http://llvm.org/docs/HowToCrossCompileLLVM.html
-    if target != build.build {
-        // FIXME: if the llvm root for the build triple is overridden then we
-        //        should use llvm-tblgen from there, also should verify that it
-        //        actually exists most of the time in normal installs of LLVM.
-        let host = build.llvm_out(&build.build).join("bin/llvm-tblgen");
-        cfg.define("CMAKE_CROSSCOMPILING", "True")
-           .define("LLVM_TABLEGEN", &host);
-    }
+        let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
+            (false, _) => "Debug",
+            (true, false) => "Release",
+            (true, true) => "RelWithDebInfo",
+        };
+
+        // NOTE: remember to also update `config.toml.example` when changing the defaults!
+        let llvm_targets = match build.config.llvm_targets {
+            Some(ref s) => s,
+            None => "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc;NVPTX;Hexagon",
+        };
+
+        let llvm_exp_targets = match build.config.llvm_experimental_targets {
+            Some(ref s) => s,
+            None => "",
+        };
+
+        let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
+
+        cfg.target(&target)
+           .host(&build.build)
+           .out_dir(&out_dir)
+           .profile(profile)
+           .define("LLVM_ENABLE_ASSERTIONS", assertions)
+           .define("LLVM_TARGETS_TO_BUILD", llvm_targets)
+           .define("LLVM_EXPERIMENTAL_TARGETS_TO_BUILD", llvm_exp_targets)
+           .define("LLVM_INCLUDE_EXAMPLES", "OFF")
+           .define("LLVM_INCLUDE_TESTS", "OFF")
+           .define("LLVM_INCLUDE_DOCS", "OFF")
+           .define("LLVM_ENABLE_ZLIB", "OFF")
+           .define("WITH_POLLY", "OFF")
+           .define("LLVM_ENABLE_TERMINFO", "OFF")
+           .define("LLVM_ENABLE_LIBEDIT", "OFF")
+           .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string())
+           .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
+           .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
 
-    let sanitize_cc = |cc: &Path| {
         if target.contains("msvc") {
-            OsString::from(cc.to_str().unwrap().replace("\\", "/"))
-        } else {
-            cc.as_os_str().to_owned()
+            cfg.define("LLVM_USE_CRT_DEBUG", "MT");
+            cfg.define("LLVM_USE_CRT_RELEASE", "MT");
+            cfg.define("LLVM_USE_CRT_RELWITHDEBINFO", "MT");
+            cfg.static_crt(true);
         }
-    };
 
-    let configure_compilers = |cfg: &mut cmake::Config| {
-        // MSVC with CMake uses msbuild by default which doesn't respect these
-        // vars that we'd otherwise configure. In that case we just skip this
-        // entirely.
-        if target.contains("msvc") && !build.config.ninja {
-            return
+        if target.starts_with("i686") {
+            cfg.define("LLVM_BUILD_32_BITS", "ON");
+        }
+
+        if let Some(num_linkers) = build.config.llvm_link_jobs {
+            if num_linkers > 0 {
+                cfg.define("LLVM_PARALLEL_LINK_JOBS", num_linkers.to_string());
+            }
         }
 
-        let cc = build.cc(target);
-        let cxx = build.cxx(target).unwrap();
-
-        // Handle msvc + ninja + ccache specially (this is what the bots use)
-        if target.contains("msvc") &&
-           build.config.ninja &&
-           build.config.ccache.is_some() {
-            let mut cc = env::current_exe().expect("failed to get cwd");
-            cc.set_file_name("sccache-plus-cl.exe");
-
-           cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
-              .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
-           cfg.env("SCCACHE_PATH",
-                   build.config.ccache.as_ref().unwrap())
-              .env("SCCACHE_TARGET", target);
-
-        // If ccache is configured we inform the build a little differently hwo
-        // to invoke ccache while also invoking our compilers.
-        } else if let Some(ref ccache) = build.config.ccache {
-           cfg.define("CMAKE_C_COMPILER", ccache)
-              .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
-              .define("CMAKE_CXX_COMPILER", ccache)
-              .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
-        } else {
-           cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
-              .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
+        // http://llvm.org/docs/HowToCrossCompileLLVM.html
+        if target != build.build {
+            builder.ensure(Llvm { target: build.build });
+            // FIXME: if the llvm root for the build triple is overridden then we
+            //        should use llvm-tblgen from there, also should verify that it
+            //        actually exists most of the time in normal installs of LLVM.
+            let host = build.llvm_out(build.build).join("bin/llvm-tblgen");
+            cfg.define("CMAKE_CROSSCOMPILING", "True")
+               .define("LLVM_TABLEGEN", &host);
         }
 
-        cfg.build_arg("-j").build_arg(build.jobs().to_string());
-        cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
-        cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
-    };
+        let sanitize_cc = |cc: &Path| {
+            if target.contains("msvc") {
+                OsString::from(cc.to_str().unwrap().replace("\\", "/"))
+            } else {
+                cc.as_os_str().to_owned()
+            }
+        };
+
+        let configure_compilers = |cfg: &mut cmake::Config| {
+            // MSVC with CMake uses msbuild by default which doesn't respect these
+            // vars that we'd otherwise configure. In that case we just skip this
+            // entirely.
+            if target.contains("msvc") && !build.config.ninja {
+                return
+            }
 
-    configure_compilers(&mut cfg);
+            let cc = build.cc(target);
+            let cxx = build.cxx(target).unwrap();
+
+            // Handle msvc + ninja + ccache specially (this is what the bots use)
+            if target.contains("msvc") &&
+               build.config.ninja &&
+               build.config.ccache.is_some() {
+                let mut cc = env::current_exe().expect("failed to get cwd");
+                cc.set_file_name("sccache-plus-cl.exe");
+
+               cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
+                  .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
+               cfg.env("SCCACHE_PATH",
+                       build.config.ccache.as_ref().unwrap())
+                  .env("SCCACHE_TARGET", target);
+
+            // If ccache is configured we inform the build a little differently hwo
+            // to invoke ccache while also invoking our compilers.
+            } else if let Some(ref ccache) = build.config.ccache {
+               cfg.define("CMAKE_C_COMPILER", ccache)
+                  .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
+                  .define("CMAKE_CXX_COMPILER", ccache)
+                  .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
+            } else {
+               cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
+                  .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
+            }
 
-    if env::var_os("SCCACHE_ERROR_LOG").is_some() {
-        cfg.env("RUST_LOG", "sccache=warn");
-    }
+            cfg.build_arg("-j").build_arg(build.jobs().to_string());
+            cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+            cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
+        };
 
-    // FIXME: we don't actually need to build all LLVM tools and all LLVM
-    //        libraries here, e.g. we just want a few components and a few
-    //        tools. Figure out how to filter them down and only build the right
-    //        tools and libs on all platforms.
-    cfg.build();
+        configure_compilers(&mut cfg);
 
-    t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
+        if env::var_os("SCCACHE_ERROR_LOG").is_some() {
+            cfg.env("RUST_LOG", "sccache=warn");
+        }
+
+        // FIXME: we don't actually need to build all LLVM tools and all LLVM
+        //        libraries here, e.g. we just want a few components and a few
+        //        tools. Figure out how to filter them down and only build the right
+        //        tools and libs on all platforms.
+        cfg.build();
+
+        t!(t!(File::create(&done_stamp)).write_all(rebuild_trigger_contents.as_bytes()));
+    }
 }
 
 fn check_llvm_version(build: &Build, llvm_config: &Path) {
@@ -216,161 +235,196 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) {
     panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version)
 }
 
-/// Compiles the `rust_test_helpers.c` library which we used in various
-/// `run-pass` test suites for ABI testing.
-pub fn test_helpers(build: &Build, target: &str) {
-    let dst = build.test_helpers_out(target);
-    let src = build.src.join("src/rt/rust_test_helpers.c");
-    if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
-        return
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct TestHelpers {
+    pub target: Interned<String>,
+}
+
+impl Step for TestHelpers {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/rt/rust_test_helpers.c")
     }
 
-    let _folder = build.fold_output(|| "build_test_helpers");
-    println!("Building test helpers");
-    t!(fs::create_dir_all(&dst));
-    let mut cfg = gcc::Config::new();
-
-    // We may have found various cross-compilers a little differently due to our
-    // extra configuration, so inform gcc of these compilers. Note, though, that
-    // on MSVC we still need gcc's detection of env vars (ugh).
-    if !target.contains("msvc") {
-        if let Some(ar) = build.ar(target) {
-            cfg.archiver(ar);
-        }
-        cfg.compiler(build.cc(target));
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(TestHelpers { target: run.target })
     }
 
-    cfg.cargo_metadata(false)
-       .out_dir(&dst)
-       .target(target)
-       .host(&build.build)
-       .opt_level(0)
-       .debug(false)
-       .file(build.src.join("src/rt/rust_test_helpers.c"))
-       .compile("librust_test_helpers.a");
+    /// Compiles the `rust_test_helpers.c` library which we used in various
+    /// `run-pass` test suites for ABI testing.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let dst = build.test_helpers_out(target);
+        let src = build.src.join("src/rt/rust_test_helpers.c");
+        if up_to_date(&src, &dst.join("librust_test_helpers.a")) {
+            return
+        }
+
+        let _folder = build.fold_output(|| "build_test_helpers");
+        println!("Building test helpers");
+        t!(fs::create_dir_all(&dst));
+        let mut cfg = gcc::Config::new();
+
+        // We may have found various cross-compilers a little differently due to our
+        // extra configuration, so inform gcc of these compilers. Note, though, that
+        // on MSVC we still need gcc's detection of env vars (ugh).
+        if !target.contains("msvc") {
+            if let Some(ar) = build.ar(target) {
+                cfg.archiver(ar);
+            }
+            cfg.compiler(build.cc(target));
+        }
+
+        cfg.cargo_metadata(false)
+           .out_dir(&dst)
+           .target(&target)
+           .host(&build.build)
+           .opt_level(0)
+           .debug(false)
+           .file(build.src.join("src/rt/rust_test_helpers.c"))
+           .compile("librust_test_helpers.a");
+    }
 }
+
 const OPENSSL_VERS: &'static str = "1.0.2k";
 const OPENSSL_SHA256: &'static str =
     "6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0";
 
-pub fn openssl(build: &Build, target: &str) {
-    let out = match build.openssl_dir(target) {
-        Some(dir) => dir,
-        None => return,
-    };
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Openssl {
+    pub target: Interned<String>,
+}
 
-    let stamp = out.join(".stamp");
-    let mut contents = String::new();
-    drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
-    if contents == OPENSSL_VERS {
-        return
+impl Step for Openssl {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
     }
-    t!(fs::create_dir_all(&out));
-
-    let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
-    let tarball = out.join(&name);
-    if !tarball.exists() {
-        let tmp = tarball.with_extension("tmp");
-        // originally from https://www.openssl.org/source/...
-        let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
-                          name);
-        let mut ok = false;
-        for _ in 0..3 {
-            let status = Command::new("curl")
-                            .arg("-o").arg(&tmp)
-                            .arg(&url)
-                            .status()
-                            .expect("failed to spawn curl");
-            if status.success() {
-                ok = true;
-                break
-            }
+
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let target = self.target;
+        let out = match build.openssl_dir(target) {
+            Some(dir) => dir,
+            None => return,
+        };
+
+        let stamp = out.join(".stamp");
+        let mut contents = String::new();
+        drop(File::open(&stamp).and_then(|mut f| f.read_to_string(&mut contents)));
+        if contents == OPENSSL_VERS {
+            return
         }
-        if !ok {
-            panic!("failed to download openssl source")
+        t!(fs::create_dir_all(&out));
+
+        let name = format!("openssl-{}.tar.gz", OPENSSL_VERS);
+        let tarball = out.join(&name);
+        if !tarball.exists() {
+            let tmp = tarball.with_extension("tmp");
+            // originally from https://www.openssl.org/source/...
+            let url = format!("https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/{}",
+                              name);
+            let mut ok = false;
+            for _ in 0..3 {
+                let status = Command::new("curl")
+                                .arg("-o").arg(&tmp)
+                                .arg(&url)
+                                .status()
+                                .expect("failed to spawn curl");
+                if status.success() {
+                    ok = true;
+                    break
+                }
+            }
+            if !ok {
+                panic!("failed to download openssl source")
+            }
+            let mut shasum = if target.contains("apple") {
+                let mut cmd = Command::new("shasum");
+                cmd.arg("-a").arg("256");
+                cmd
+            } else {
+                Command::new("sha256sum")
+            };
+            let output = output(&mut shasum.arg(&tmp));
+            let found = output.split_whitespace().next().unwrap();
+            if found != OPENSSL_SHA256 {
+                panic!("downloaded openssl sha256 different\n\
+                        expected: {}\n\
+                        found:    {}\n", OPENSSL_SHA256, found);
+            }
+            t!(fs::rename(&tmp, &tarball));
         }
-        let mut shasum = if target.contains("apple") {
-            let mut cmd = Command::new("shasum");
-            cmd.arg("-a").arg("256");
-            cmd
-        } else {
-            Command::new("sha256sum")
+        let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
+        let dst = build.openssl_install_dir(target).unwrap();
+        drop(fs::remove_dir_all(&obj));
+        drop(fs::remove_dir_all(&dst));
+        build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
+
+        let mut configure = Command::new(obj.join("Configure"));
+        configure.arg(format!("--prefix={}", dst.display()));
+        configure.arg("no-dso");
+        configure.arg("no-ssl2");
+        configure.arg("no-ssl3");
+
+        let os = match &*target {
+            "aarch64-linux-android" => "linux-aarch64",
+            "aarch64-unknown-linux-gnu" => "linux-aarch64",
+            "arm-linux-androideabi" => "android",
+            "arm-unknown-linux-gnueabi" => "linux-armv4",
+            "arm-unknown-linux-gnueabihf" => "linux-armv4",
+            "armv7-linux-androideabi" => "android-armv7",
+            "armv7-unknown-linux-gnueabihf" => "linux-armv4",
+            "i686-apple-darwin" => "darwin-i386-cc",
+            "i686-linux-android" => "android-x86",
+            "i686-unknown-freebsd" => "BSD-x86-elf",
+            "i686-unknown-linux-gnu" => "linux-elf",
+            "i686-unknown-linux-musl" => "linux-elf",
+            "mips-unknown-linux-gnu" => "linux-mips32",
+            "mips64-unknown-linux-gnuabi64" => "linux64-mips64",
+            "mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
+            "mipsel-unknown-linux-gnu" => "linux-mips32",
+            "powerpc-unknown-linux-gnu" => "linux-ppc",
+            "powerpc64-unknown-linux-gnu" => "linux-ppc64",
+            "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
+            "s390x-unknown-linux-gnu" => "linux64-s390x",
+            "x86_64-apple-darwin" => "darwin64-x86_64-cc",
+            "x86_64-linux-android" => "linux-x86_64",
+            "x86_64-unknown-freebsd" => "BSD-x86_64",
+            "x86_64-unknown-linux-gnu" => "linux-x86_64",
+            "x86_64-unknown-linux-musl" => "linux-x86_64",
+            "x86_64-unknown-netbsd" => "BSD-x86_64",
+            _ => panic!("don't know how to configure OpenSSL for {}", target),
         };
-        let output = output(&mut shasum.arg(&tmp));
-        let found = output.split_whitespace().next().unwrap();
-        if found != OPENSSL_SHA256 {
-            panic!("downloaded openssl sha256 different\n\
-                    expected: {}\n\
-                    found:    {}\n", OPENSSL_SHA256, found);
+        configure.arg(os);
+        configure.env("CC", build.cc(target));
+        for flag in build.cflags(target) {
+            configure.arg(flag);
         }
-        t!(fs::rename(&tmp, &tarball));
-    }
-    let obj = out.join(format!("openssl-{}", OPENSSL_VERS));
-    let dst = build.openssl_install_dir(target).unwrap();
-    drop(fs::remove_dir_all(&obj));
-    drop(fs::remove_dir_all(&dst));
-    build.run(Command::new("tar").arg("xf").arg(&tarball).current_dir(&out));
-
-    let mut configure = Command::new(obj.join("Configure"));
-    configure.arg(format!("--prefix={}", dst.display()));
-    configure.arg("no-dso");
-    configure.arg("no-ssl2");
-    configure.arg("no-ssl3");
-
-    let os = match target {
-        "aarch64-linux-android" => "linux-aarch64",
-        "aarch64-unknown-linux-gnu" => "linux-aarch64",
-        "arm-linux-androideabi" => "android",
-        "arm-unknown-linux-gnueabi" => "linux-armv4",
-        "arm-unknown-linux-gnueabihf" => "linux-armv4",
-        "armv7-linux-androideabi" => "android-armv7",
-        "armv7-unknown-linux-gnueabihf" => "linux-armv4",
-        "i686-apple-darwin" => "darwin-i386-cc",
-        "i686-linux-android" => "android-x86",
-        "i686-unknown-freebsd" => "BSD-x86-elf",
-        "i686-unknown-linux-gnu" => "linux-elf",
-        "i686-unknown-linux-musl" => "linux-elf",
-        "mips-unknown-linux-gnu" => "linux-mips32",
-        "mips64-unknown-linux-gnuabi64" => "linux64-mips64",
-        "mips64el-unknown-linux-gnuabi64" => "linux64-mips64",
-        "mipsel-unknown-linux-gnu" => "linux-mips32",
-        "powerpc-unknown-linux-gnu" => "linux-ppc",
-        "powerpc64-unknown-linux-gnu" => "linux-ppc64",
-        "powerpc64le-unknown-linux-gnu" => "linux-ppc64le",
-        "s390x-unknown-linux-gnu" => "linux64-s390x",
-        "x86_64-apple-darwin" => "darwin64-x86_64-cc",
-        "x86_64-linux-android" => "linux-x86_64",
-        "x86_64-unknown-freebsd" => "BSD-x86_64",
-        "x86_64-unknown-linux-gnu" => "linux-x86_64",
-        "x86_64-unknown-linux-musl" => "linux-x86_64",
-        "x86_64-unknown-netbsd" => "BSD-x86_64",
-        _ => panic!("don't know how to configure OpenSSL for {}", target),
-    };
-    configure.arg(os);
-    configure.env("CC", build.cc(target));
-    for flag in build.cflags(target) {
-        configure.arg(flag);
-    }
-    // There is no specific os target for android aarch64 or x86_64,
-    // so we need to pass some extra cflags
-    if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
-        configure.arg("-mandroid");
-        configure.arg("-fomit-frame-pointer");
-    }
-    // Make PIE binaries
-    // Non-PIE linker support was removed in Lollipop
-    // https://source.android.com/security/enhancements/enhancements50
-    if target == "i686-linux-android" {
-        configure.arg("no-asm");
+        // There is no specific os target for android aarch64 or x86_64,
+        // so we need to pass some extra cflags
+        if target == "aarch64-linux-android" || target == "x86_64-linux-android" {
+            configure.arg("-mandroid");
+            configure.arg("-fomit-frame-pointer");
+        }
+        // Make PIE binaries
+        // Non-PIE linker support was removed in Lollipop
+        // https://source.android.com/security/enhancements/enhancements50
+        if target == "i686-linux-android" {
+            configure.arg("no-asm");
+        }
+        configure.current_dir(&obj);
+        println!("Configuring openssl for {}", target);
+        build.run_quiet(&mut configure);
+        println!("Building openssl for {}", target);
+        build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
+        println!("Installing openssl for {}", target);
+        build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
+
+        let mut f = t!(File::create(&stamp));
+        t!(f.write_all(OPENSSL_VERS.as_bytes()));
     }
-    configure.current_dir(&obj);
-    println!("Configuring openssl for {}", target);
-    build.run_quiet(&mut configure);
-    println!("Building openssl for {}", target);
-    build.run_quiet(Command::new("make").arg("-j1").current_dir(&obj));
-    println!("Installing openssl for {}", target);
-    build.run_quiet(Command::new("make").arg("install").current_dir(&obj));
-
-    let mut f = t!(File::create(&stamp));
-    t!(f.write_all(OPENSSL_VERS.as_bytes()));
 }
diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs
index a9c1b023dd4..7063b28f19d 100644
--- a/src/bootstrap/sanity.rs
+++ b/src/bootstrap/sanity.rs
@@ -122,14 +122,14 @@ pub fn check(build: &mut Build) {
             continue;
         }
 
-        cmd_finder.must_have(build.cc(target));
-        if let Some(ar) = build.ar(target) {
+        cmd_finder.must_have(build.cc(*target));
+        if let Some(ar) = build.ar(*target) {
             cmd_finder.must_have(ar);
         }
     }
 
     for host in build.config.host.iter() {
-        cmd_finder.must_have(build.cxx(host).unwrap());
+        cmd_finder.must_have(build.cxx(*host).unwrap());
 
         // The msvc hosts don't use jemalloc, turn it off globally to
         // avoid packaging the dummy liballoc_jemalloc on that platform.
@@ -139,7 +139,7 @@ pub fn check(build: &mut Build) {
     }
 
     // Externally configured LLVM requires FileCheck to exist
-    let filecheck = build.llvm_filecheck(&build.build);
+    let filecheck = build.llvm_filecheck(build.build);
     if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {
         panic!("FileCheck executable {:?} does not exist", filecheck);
     }
@@ -153,7 +153,7 @@ pub fn check(build: &mut Build) {
 
         // Make sure musl-root is valid if specified
         if target.contains("musl") && !target.contains("mips") {
-            match build.musl_root(target) {
+            match build.musl_root(*target) {
                 Some(root) => {
                     if fs::metadata(root.join("lib/libc.a")).is_err() {
                         panic!("couldn't find libc.a in musl dir: {}",
diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs
deleted file mode 100644
index a1b26f44b7d..00000000000
--- a/src/bootstrap/step.rs
+++ /dev/null
@@ -1,1820 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Definition of steps of the build system.
-//!
-//! This is where some of the real meat of rustbuild is located, in how we
-//! define targets and the dependencies amongst them. This file can sort of be
-//! viewed as just defining targets in a makefile which shell out to predefined
-//! functions elsewhere about how to execute the target.
-//!
-//! The primary function here you're likely interested in is the `build_rules`
-//! function. This will create a `Rules` structure which basically just lists
-//! everything that rustbuild can do. Each rule has a human-readable name, a
-//! path associated with it, some dependencies, and then a closure of how to
-//! actually perform the rule.
-//!
-//! All steps below are defined in self-contained units, so adding a new target
-//! to the build system should just involve adding the meta information here
-//! along with the actual implementation elsewhere. You can find more comments
-//! about how to define rules themselves below.
-
-use std::collections::{BTreeMap, HashSet, HashMap};
-use std::mem;
-use std::path::PathBuf;
-use std::process;
-
-use check::{self, TestKind};
-use compile;
-use dist;
-use doc;
-use flags::Subcommand;
-use install;
-use native;
-use {Compiler, Build, Mode};
-
-pub fn run(build: &Build) {
-    let rules = build_rules(build);
-    let steps = rules.plan();
-    rules.run(&steps);
-}
-
-pub fn build_rules<'a>(build: &'a Build) -> Rules {
-    let mut rules = Rules::new(build);
-
-    // This is the first rule that we're going to define for rustbuild, which is
-    // used to compile LLVM itself. All rules are added through the `rules`
-    // structure created above and are configured through a builder-style
-    // interface.
-    //
-    // First up we see the `build` method. This represents a rule that's part of
-    // the top-level `build` subcommand. For example `./x.py build` is what this
-    // is associating with. Note that this is normally only relevant if you flag
-    // a rule as `default`, which we'll talk about later.
-    //
-    // Next up we'll see two arguments to this method:
-    //
-    // * `llvm` - this is the "human readable" name of this target. This name is
-    //            not accessed anywhere outside this file itself (e.g. not in
-    //            the CLI nor elsewhere in rustbuild). The purpose of this is to
-    //            easily define dependencies between rules. That is, other rules
-    //            will depend on this with the name "llvm".
-    // * `src/llvm` - this is the relevant path to the rule that we're working
-    //                with. This path is the engine behind how commands like
-    //                `./x.py build src/llvm` work. This should typically point
-    //                to the relevant component, but if there's not really a
-    //                path to be assigned here you can pass something like
-    //                `path/to/nowhere` to ignore it.
-    //
-    // After we create the rule with the `build` method we can then configure
-    // various aspects of it. For example this LLVM rule uses `.host(true)` to
-    // flag that it's a rule only for host targets. In other words, LLVM isn't
-    // compiled for targets configured through `--target` (e.g. those we're just
-    // building a standard library for).
-    //
-    // Next up the `dep` method will add a dependency to this rule. The closure
-    // is yielded the step that represents executing the `llvm` rule itself
-    // (containing information like stage, host, target, ...) and then it must
-    // return a target that the step depends on. Here LLVM is actually
-    // interesting where a cross-compiled LLVM depends on the host LLVM, but
-    // otherwise it has no dependencies.
-    //
-    // To handle this we do a bit of dynamic dispatch to see what the dependency
-    // is. If we're building a LLVM for the build triple, then we don't actually
-    // have any dependencies! To do that we return a dependency on the `Step::noop()`
-    // target which does nothing.
-    //
-    // If we're build a cross-compiled LLVM, however, we need to assemble the
-    // libraries from the previous compiler. This step has the same name as
-    // ours (llvm) but we want it for a different target, so we use the
-    // builder-style methods on `Step` to configure this target to the build
-    // triple.
-    //
-    // Finally, to finish off this rule, we define how to actually execute it.
-    // That logic is all defined in the `native` module so we just delegate to
-    // the relevant function there. The argument to the closure passed to `run`
-    // is a `Step` (defined below) which encapsulates information like the
-    // stage, target, host, etc.
-    rules.build("llvm", "src/llvm")
-         .host(true)
-         .dep(move |s| {
-             if s.target == build.build {
-                 Step::noop()
-             } else {
-                 s.target(&build.build)
-             }
-         })
-         .run(move |s| native::llvm(build, s.target));
-
-    // Ok! After that example rule  that's hopefully enough to explain what's
-    // going on here. You can check out the API docs below and also see a bunch
-    // more examples of rules directly below as well.
-
-    // the compiler with no target libraries ready to go
-    rules.build("rustc", "src/rustc")
-         .dep(|s| s.name("create-sysroot").target(s.host))
-         .dep(move |s| {
-             if s.stage == 0 {
-                 Step::noop()
-             } else {
-                 s.name("librustc")
-                  .host(&build.build)
-                  .stage(s.stage - 1)
-             }
-         })
-         .run(move |s| compile::assemble_rustc(build, s.stage, s.target));
-
-    // Helper for loading an entire DAG of crates, rooted at `name`
-    let krates = |name: &str| {
-        let mut ret = Vec::new();
-        let mut list = vec![name];
-        let mut visited = HashSet::new();
-        while let Some(krate) = list.pop() {
-            let default = krate == name;
-            let krate = &build.crates[krate];
-            let path = krate.path.strip_prefix(&build.src)
-                // This handles out of tree paths
-                .unwrap_or(&krate.path);
-            ret.push((krate, path.to_str().unwrap(), default));
-            for dep in krate.deps.iter() {
-                if visited.insert(dep) && dep != "build_helper" {
-                    list.push(dep);
-                }
-            }
-        }
-        ret
-    };
-
-    // ========================================================================
-    // Crate compilations
-    //
-    // Tools used during the build system but not shipped
-    rules.build("create-sysroot", "path/to/nowhere")
-         .run(move |s| compile::create_sysroot(build, &s.compiler()));
-
-    // These rules are "pseudo rules" that don't actually do any work
-    // themselves, but represent a complete sysroot with the relevant compiler
-    // linked into place.
-    //
-    // That is, depending on "libstd" means that when the rule is completed then
-    // the `stage` sysroot for the compiler `host` will be available with a
-    // standard library built for `target` linked in place. Not all rules need
-    // the compiler itself to be available, just the standard library, so
-    // there's a distinction between the two.
-    rules.build("libstd", "src/libstd")
-         .dep(|s| s.name("rustc").target(s.host))
-         .dep(|s| s.name("libstd-link"));
-    rules.build("libtest", "src/libtest")
-         .dep(|s| s.name("libstd"))
-         .dep(|s| s.name("libtest-link"))
-         .default(true);
-    rules.build("librustc", "src/librustc")
-         .dep(|s| s.name("libtest"))
-         .dep(|s| s.name("librustc-link"))
-         .host(true)
-         .default(true);
-
-    // Helper method to define the rules to link a crate into its place in the
-    // sysroot.
-    //
-    // The logic here is a little subtle as there's a few cases to consider.
-    // Not all combinations of (stage, host, target) actually require something
-    // to be compiled, but rather libraries could get propagated from a
-    // different location. For example:
-    //
-    // * Any crate with a `host` that's not the build triple will not actually
-    //   compile something. A different `host` means that the build triple will
-    //   actually compile the libraries, and then we'll copy them over from the
-    //   build triple to the `host` directory.
-    //
-    // * Some crates aren't even compiled by the build triple, but may be copied
-    //   from previous stages. For example if we're not doing a full bootstrap
-    //   then we may just depend on the stage1 versions of libraries to be
-    //   available to get linked forward.
-    //
-    // * Finally, there are some cases, however, which do indeed comiple crates
-    //   and link them into place afterwards.
-    //
-    // The rule definition below mirrors these three cases. The `dep` method
-    // calculates the correct dependency which either comes from stage1, a
-    // different compiler, or from actually building the crate itself (the `dep`
-    // rule). The `run` rule then mirrors these three cases and links the cases
-    // forward into the compiler sysroot specified from the correct location.
-    fn crate_rule<'a, 'b>(build: &'a Build,
-                          rules: &'b mut Rules<'a>,
-                          krate: &'a str,
-                          dep: &'a str,
-                          link: fn(&Build, &Compiler, &Compiler, &str))
-                          -> RuleBuilder<'a, 'b> {
-        let mut rule = rules.build(&krate, "path/to/nowhere");
-        rule.dep(move |s| {
-                if build.force_use_stage1(&s.compiler(), s.target) {
-                    s.host(&build.build).stage(1)
-                } else if s.host == build.build {
-                    s.name(dep)
-                } else {
-                    s.host(&build.build)
-                }
-            })
-            .run(move |s| {
-                if build.force_use_stage1(&s.compiler(), s.target) {
-                    link(build,
-                         &s.stage(1).host(&build.build).compiler(),
-                         &s.compiler(),
-                         s.target)
-                } else if s.host == build.build {
-                    link(build, &s.compiler(), &s.compiler(), s.target)
-                } else {
-                    link(build,
-                         &s.host(&build.build).compiler(),
-                         &s.compiler(),
-                         s.target)
-                }
-            });
-            rule
-    }
-
-    // Similar to the `libstd`, `libtest`, and `librustc` rules above, except
-    // these rules only represent the libraries being available in the sysroot,
-    // not the compiler itself. This is done as not all rules need a compiler in
-    // the sysroot, but may just need the libraries.
-    //
-    // All of these rules use the helper definition above.
-    crate_rule(build,
-               &mut rules,
-               "libstd-link",
-               "build-crate-std",
-               compile::std_link)
-        .dep(|s| s.name("startup-objects"))
-        .dep(|s| s.name("create-sysroot").target(s.host));
-    crate_rule(build,
-               &mut rules,
-               "libtest-link",
-               "build-crate-test",
-               compile::test_link)
-        .dep(|s| s.name("libstd-link"));
-    crate_rule(build,
-               &mut rules,
-               "librustc-link",
-               "build-crate-rustc-main",
-               compile::rustc_link)
-        .dep(|s| s.name("libtest-link"));
-
-    for (krate, path, _default) in krates("std") {
-        rules.build(&krate.build_step, path)
-             .dep(|s| s.name("startup-objects"))
-             .dep(move |s| s.name("rustc").host(&build.build).target(s.host))
-             .run(move |s| compile::std(build, s.target, &s.compiler()));
-    }
-    for (krate, path, _default) in krates("test") {
-        rules.build(&krate.build_step, path)
-             .dep(|s| s.name("libstd-link"))
-             .run(move |s| compile::test(build, s.target, &s.compiler()));
-    }
-    for (krate, path, _default) in krates("rustc-main") {
-        rules.build(&krate.build_step, path)
-             .dep(|s| s.name("libtest-link"))
-             .dep(move |s| s.name("llvm").host(&build.build).stage(0))
-             .dep(|s| s.name("may-run-build-script"))
-             .run(move |s| compile::rustc(build, s.target, &s.compiler()));
-    }
-
-    // Crates which have build scripts need to rely on this rule to ensure that
-    // the necessary prerequisites for a build script are linked and located in
-    // place.
-    rules.build("may-run-build-script", "path/to/nowhere")
-         .dep(move |s| {
-             s.name("libstd-link")
-              .host(&build.build)
-              .target(&build.build)
-         });
-    rules.build("startup-objects", "src/rtstartup")
-         .dep(|s| s.name("create-sysroot").target(s.host))
-         .run(move |s| compile::build_startup_objects(build, &s.compiler(), s.target));
-
-    // ========================================================================
-    // Test targets
-    //
-    // Various unit tests and tests suites we can run
-    {
-        let mut suite = |name, path, mode, dir| {
-            rules.test(name, path)
-                 .dep(|s| s.name("libtest"))
-                 .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
-                 .dep(|s| s.name("test-helpers"))
-                 .dep(|s| s.name("remote-copy-libs"))
-                 .default(mode != "pretty") // pretty tests don't run everywhere
-                 .run(move |s| {
-                     check::compiletest(build, &s.compiler(), s.target, mode, dir)
-                 });
-        };
-
-        suite("check-ui", "src/test/ui", "ui", "ui");
-        suite("check-rpass", "src/test/run-pass", "run-pass", "run-pass");
-        suite("check-cfail", "src/test/compile-fail", "compile-fail", "compile-fail");
-        suite("check-pfail", "src/test/parse-fail", "parse-fail", "parse-fail");
-        suite("check-rfail", "src/test/run-fail", "run-fail", "run-fail");
-        suite("check-rpass-valgrind", "src/test/run-pass-valgrind",
-              "run-pass-valgrind", "run-pass-valgrind");
-        suite("check-mir-opt", "src/test/mir-opt", "mir-opt", "mir-opt");
-        if build.config.codegen_tests {
-            suite("check-codegen", "src/test/codegen", "codegen", "codegen");
-        }
-        suite("check-codegen-units", "src/test/codegen-units", "codegen-units",
-              "codegen-units");
-        suite("check-incremental", "src/test/incremental", "incremental",
-              "incremental");
-    }
-
-    if build.build.contains("msvc") {
-        // nothing to do for debuginfo tests
-    } else {
-        rules.test("check-debuginfo-lldb", "src/test/debuginfo-lldb")
-             .dep(|s| s.name("libtest"))
-             .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
-             .dep(|s| s.name("test-helpers"))
-             .dep(|s| s.name("debugger-scripts"))
-             .run(move |s| check::compiletest(build, &s.compiler(), s.target,
-                                         "debuginfo-lldb", "debuginfo"));
-        rules.test("check-debuginfo-gdb", "src/test/debuginfo-gdb")
-             .dep(|s| s.name("libtest"))
-             .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
-             .dep(|s| s.name("test-helpers"))
-             .dep(|s| s.name("debugger-scripts"))
-             .dep(|s| s.name("remote-copy-libs"))
-             .run(move |s| check::compiletest(build, &s.compiler(), s.target,
-                                         "debuginfo-gdb", "debuginfo"));
-        let mut rule = rules.test("check-debuginfo", "src/test/debuginfo");
-        rule.default(true);
-        if build.build.contains("apple") {
-            rule.dep(|s| s.name("check-debuginfo-lldb"));
-        } else {
-            rule.dep(|s| s.name("check-debuginfo-gdb"));
-        }
-    }
-
-    rules.test("debugger-scripts", "src/etc/lldb_batchmode.py")
-         .run(move |s| dist::debugger_scripts(build, &build.sysroot(&s.compiler()),
-                                         s.target));
-
-    {
-        let mut suite = |name, path, mode, dir| {
-            rules.test(name, path)
-                 .dep(|s| s.name("librustc"))
-                 .dep(|s| s.name("test-helpers"))
-                 .dep(|s| s.name("tool-compiletest").target(s.host).stage(0))
-                 .default(mode != "pretty")
-                 .host(true)
-                 .run(move |s| {
-                     check::compiletest(build, &s.compiler(), s.target, mode, dir)
-                 });
-        };
-
-        suite("check-ui-full", "src/test/ui-fulldeps", "ui", "ui-fulldeps");
-        suite("check-rpass-full", "src/test/run-pass-fulldeps",
-              "run-pass", "run-pass-fulldeps");
-        suite("check-rfail-full", "src/test/run-fail-fulldeps",
-              "run-fail", "run-fail-fulldeps");
-        suite("check-cfail-full", "src/test/compile-fail-fulldeps",
-              "compile-fail", "compile-fail-fulldeps");
-        suite("check-rmake", "src/test/run-make", "run-make", "run-make");
-        suite("check-rustdoc", "src/test/rustdoc", "rustdoc", "rustdoc");
-        suite("check-pretty", "src/test/pretty", "pretty", "pretty");
-        suite("check-pretty-rpass", "src/test/run-pass/pretty", "pretty",
-              "run-pass");
-        suite("check-pretty-rfail", "src/test/run-fail/pretty", "pretty",
-              "run-fail");
-        suite("check-pretty-valgrind", "src/test/run-pass-valgrind/pretty", "pretty",
-              "run-pass-valgrind");
-        suite("check-pretty-rpass-full", "src/test/run-pass-fulldeps/pretty",
-              "pretty", "run-pass-fulldeps");
-        suite("check-pretty-rfail-full", "src/test/run-fail-fulldeps/pretty",
-              "pretty", "run-fail-fulldeps");
-    }
-
-    for (krate, path, _default) in krates("std") {
-        rules.test(&krate.test_step, path)
-             .dep(|s| s.name("libtest"))
-             .dep(|s| s.name("remote-copy-libs"))
-             .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                        Mode::Libstd, TestKind::Test,
-                                        Some(&krate.name)));
-    }
-    rules.test("check-std-all", "path/to/nowhere")
-         .dep(|s| s.name("libtest"))
-         .dep(|s| s.name("remote-copy-libs"))
-         .default(true)
-         .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                    Mode::Libstd, TestKind::Test, None));
-
-    // std benchmarks
-    for (krate, path, _default) in krates("std") {
-        rules.bench(&krate.bench_step, path)
-             .dep(|s| s.name("libtest"))
-             .dep(|s| s.name("remote-copy-libs"))
-             .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                        Mode::Libstd, TestKind::Bench,
-                                        Some(&krate.name)));
-    }
-    rules.bench("bench-std-all", "path/to/nowhere")
-         .dep(|s| s.name("libtest"))
-         .dep(|s| s.name("remote-copy-libs"))
-         .default(true)
-         .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                    Mode::Libstd, TestKind::Bench, None));
-
-    for (krate, path, _default) in krates("test") {
-        rules.test(&krate.test_step, path)
-             .dep(|s| s.name("libtest"))
-             .dep(|s| s.name("remote-copy-libs"))
-             .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                        Mode::Libtest, TestKind::Test,
-                                        Some(&krate.name)));
-    }
-    rules.test("check-test-all", "path/to/nowhere")
-         .dep(|s| s.name("libtest"))
-         .dep(|s| s.name("remote-copy-libs"))
-         .default(true)
-         .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                    Mode::Libtest, TestKind::Test, None));
-    for (krate, path, _default) in krates("rustc-main") {
-        rules.test(&krate.test_step, path)
-             .dep(|s| s.name("librustc"))
-             .dep(|s| s.name("remote-copy-libs"))
-             .host(true)
-             .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                        Mode::Librustc, TestKind::Test,
-                                        Some(&krate.name)));
-    }
-    rules.test("check-rustc-all", "path/to/nowhere")
-         .dep(|s| s.name("librustc"))
-         .dep(|s| s.name("remote-copy-libs"))
-         .default(true)
-         .host(true)
-         .run(move |s| check::krate(build, &s.compiler(), s.target,
-                                    Mode::Librustc, TestKind::Test, None));
-
-    rules.test("check-linkchecker", "src/tools/linkchecker")
-         .dep(|s| s.name("tool-linkchecker").stage(0))
-         .dep(|s| s.name("default:doc"))
-         .default(build.config.docs)
-         .host(true)
-         .run(move |s| check::linkcheck(build, s.target));
-    rules.test("check-cargotest", "src/tools/cargotest")
-         .dep(|s| s.name("tool-cargotest").stage(0))
-         .dep(|s| s.name("librustc"))
-         .host(true)
-         .run(move |s| check::cargotest(build, s.stage, s.target));
-    rules.test("check-cargo", "src/tools/cargo")
-         .dep(|s| s.name("tool-cargo"))
-         .host(true)
-         .run(move |s| check::cargo(build, s.stage, s.target));
-    rules.test("check-rls", "src/tools/rls")
-         .dep(|s| s.name("tool-rls"))
-         .host(true)
-         .run(move |s| check::rls(build, s.stage, s.target));
-    rules.test("check-tidy", "src/tools/tidy")
-         .dep(|s| s.name("tool-tidy").stage(0))
-         .default(true)
-         .host(true)
-         .only_build(true)
-         .run(move |s| check::tidy(build, s.target));
-    rules.test("check-error-index", "src/tools/error_index_generator")
-         .dep(|s| s.name("libstd"))
-         .dep(|s| s.name("tool-error-index").host(s.host).stage(0))
-         .default(true)
-         .host(true)
-         .run(move |s| check::error_index(build, &s.compiler()));
-    rules.test("check-docs", "src/doc")
-         .dep(|s| s.name("libtest"))
-         .default(true)
-         .host(true)
-         .run(move |s| check::docs(build, &s.compiler()));
-    rules.test("check-distcheck", "distcheck")
-         .dep(|s| s.name("dist-plain-source-tarball"))
-         .dep(|s| s.name("dist-src"))
-         .run(move |_| check::distcheck(build));
-
-    rules.build("test-helpers", "src/rt/rust_test_helpers.c")
-         .run(move |s| native::test_helpers(build, s.target));
-    rules.build("openssl", "path/to/nowhere")
-         .run(move |s| native::openssl(build, s.target));
-
-    // Some test suites are run inside emulators or on remote devices, and most
-    // of our test binaries are linked dynamically which means we need to ship
-    // the standard library and such to the emulator ahead of time. This step
-    // represents this and is a dependency of all test suites.
-    //
-    // Most of the time this step is a noop (the `check::emulator_copy_libs`
-    // only does work if necessary). For some steps such as shipping data to
-    // QEMU we have to build our own tools so we've got conditional dependencies
-    // on those programs as well. Note that the remote test client is built for
-    // the build target (us) and the server is built for the target.
-    rules.test("remote-copy-libs", "path/to/nowhere")
-         .dep(|s| s.name("libtest"))
-         .dep(move |s| {
-             if build.remote_tested(s.target) {
-                s.name("tool-remote-test-client").target(s.host).stage(0)
-             } else {
-                 Step::noop()
-             }
-         })
-         .dep(move |s| {
-             if build.remote_tested(s.target) {
-                s.name("tool-remote-test-server")
-             } else {
-                 Step::noop()
-             }
-         })
-         .run(move |s| check::remote_copy_libs(build, &s.compiler(), s.target));
-
-    rules.test("check-bootstrap", "src/bootstrap")
-         .default(true)
-         .host(true)
-         .only_build(true)
-         .run(move |_| check::bootstrap(build));
-
-    // ========================================================================
-    // Build tools
-    //
-    // Tools used during the build system but not shipped
-    rules.build("tool-rustbook", "src/tools/rustbook")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("librustc-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "rustbook"));
-    rules.build("tool-error-index", "src/tools/error_index_generator")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("librustc-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "error_index_generator"));
-    rules.build("tool-unstable-book-gen", "src/tools/unstable-book-gen")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "unstable-book-gen"));
-    rules.build("tool-tidy", "src/tools/tidy")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "tidy"));
-    rules.build("tool-linkchecker", "src/tools/linkchecker")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "linkchecker"));
-    rules.build("tool-cargotest", "src/tools/cargotest")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "cargotest"));
-    rules.build("tool-compiletest", "src/tools/compiletest")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libtest-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
-    rules.build("tool-build-manifest", "src/tools/build-manifest")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
-    rules.build("tool-remote-test-server", "src/tools/remote-test-server")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
-    rules.build("tool-remote-test-client", "src/tools/remote-test-client")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
-    rules.build("tool-rust-installer", "src/tools/rust-installer")
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
-    rules.build("tool-cargo", "src/tools/cargo")
-         .host(true)
-         .default(build.config.extended)
-         .dep(|s| s.name("maybe-clean-tools"))
-         .dep(|s| s.name("libstd-tool"))
-         .dep(|s| s.stage(0).host(s.target).name("openssl"))
-         .dep(move |s| {
-             // Cargo depends on procedural macros, which requires a full host
-             // compiler to be available, so we need to depend on that.
-             s.name("librustc-link")
-              .target(&build.build)
-              .host(&build.build)
-         })
-         .run(move |s| compile::tool(build, s.stage, s.target, "cargo"));
-    rules.build("tool-rls", "src/tools/rls")
-         .host(true)
-         .default(build.config.extended)
-         .dep(|s| s.name("librustc-tool"))
-         .dep(|s| s.stage(0).host(s.target).name("openssl"))
-         .dep(move |s| {
-             // rls, like cargo, uses procedural macros
-             s.name("librustc-link")
-              .target(&build.build)
-              .host(&build.build)
-         })
-         .run(move |s| compile::tool(build, s.stage, s.target, "rls"));
-
-    // "pseudo rule" which represents completely cleaning out the tools dir in
-    // one stage. This needs to happen whenever a dependency changes (e.g.
-    // libstd, libtest, librustc) and all of the tool compilations above will
-    // be sequenced after this rule.
-    rules.build("maybe-clean-tools", "path/to/nowhere")
-         .after("librustc-tool")
-         .after("libtest-tool")
-         .after("libstd-tool");
-
-    rules.build("librustc-tool", "path/to/nowhere")
-         .dep(|s| s.name("librustc"))
-         .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Librustc));
-    rules.build("libtest-tool", "path/to/nowhere")
-         .dep(|s| s.name("libtest"))
-         .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libtest));
-    rules.build("libstd-tool", "path/to/nowhere")
-         .dep(|s| s.name("libstd"))
-         .run(move |s| compile::maybe_clean_tools(build, s.stage, s.target, Mode::Libstd));
-
-    // ========================================================================
-    // Documentation targets
-    rules.doc("doc-book", "src/doc/book")
-         .dep(move |s| {
-             s.name("tool-rustbook")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .default(build.config.docs)
-         .run(move |s| doc::book(build, s.target, "book"));
-    rules.doc("doc-nomicon", "src/doc/nomicon")
-         .dep(move |s| {
-             s.name("tool-rustbook")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .default(build.config.docs)
-         .run(move |s| doc::rustbook(build, s.target, "nomicon"));
-    rules.doc("doc-reference", "src/doc/reference")
-         .dep(move |s| {
-             s.name("tool-rustbook")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .default(build.config.docs)
-         .run(move |s| doc::rustbook(build, s.target, "reference"));
-    rules.doc("doc-unstable-book", "src/doc/unstable-book")
-         .dep(move |s| {
-             s.name("tool-rustbook")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .dep(move |s| s.name("doc-unstable-book-gen"))
-         .default(build.config.docs)
-         .run(move |s| doc::rustbook_src(build,
-                                         s.target,
-                                         "unstable-book",
-                                         &build.md_doc_out(s.target)));
-    rules.doc("doc-standalone", "src/doc")
-         .dep(move |s| {
-             s.name("rustc")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .default(build.config.docs)
-         .run(move |s| doc::standalone(build, s.target));
-    rules.doc("doc-error-index", "src/tools/error_index_generator")
-         .dep(move |s| s.name("tool-error-index").target(&build.build).stage(0))
-         .dep(move |s| s.name("librustc-link"))
-         .default(build.config.docs)
-         .host(true)
-         .run(move |s| doc::error_index(build, s.target));
-    rules.doc("doc-unstable-book-gen", "src/tools/unstable-book-gen")
-         .dep(move |s| {
-             s.name("tool-unstable-book-gen")
-              .host(&build.build)
-              .target(&build.build)
-              .stage(0)
-         })
-         .dep(move |s| s.name("libstd-link"))
-         .default(build.config.docs)
-         .host(true)
-         .run(move |s| doc::unstable_book_gen(build, s.target));
-    for (krate, path, default) in krates("std") {
-        rules.doc(&krate.doc_step, path)
-             .dep(|s| s.name("libstd-link"))
-             .default(default && build.config.docs)
-             .run(move |s| doc::std(build, s.stage, s.target));
-    }
-    for (krate, path, default) in krates("test") {
-        rules.doc(&krate.doc_step, path)
-             .dep(|s| s.name("libtest-link"))
-             // Needed so rustdoc generates relative links to std.
-             .dep(|s| s.name("doc-crate-std"))
-             .default(default && build.config.compiler_docs)
-             .run(move |s| doc::test(build, s.stage, s.target));
-    }
-    for (krate, path, default) in krates("rustc-main") {
-        rules.doc(&krate.doc_step, path)
-             .dep(|s| s.name("librustc-link"))
-             // Needed so rustdoc generates relative links to std.
-             .dep(|s| s.name("doc-crate-std"))
-             .host(true)
-             .default(default && build.config.docs)
-             .run(move |s| doc::rustc(build, s.stage, s.target));
-    }
-
-    // ========================================================================
-    // Distribution targets
-    rules.dist("dist-rustc", "src/librustc")
-         .dep(move |s| s.name("rustc").host(&build.build))
-         .host(true)
-         .only_host_build(true)
-         .default(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::rustc(build, s.stage, s.target));
-    rules.dist("dist-std", "src/libstd")
-         .dep(move |s| {
-             // We want to package up as many target libraries as possible
-             // for the `rust-std` package, so if this is a host target we
-             // depend on librustc and otherwise we just depend on libtest.
-             if build.config.host.iter().any(|t| t == s.target) {
-                 s.name("librustc-link")
-             } else {
-                 s.name("libtest-link")
-             }
-         })
-         .default(true)
-         .only_host_build(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::std(build, &s.compiler(), s.target));
-    rules.dist("dist-mingw", "path/to/nowhere")
-         .default(true)
-         .only_host_build(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| {
-             if s.target.contains("pc-windows-gnu") {
-                 dist::mingw(build, s.target)
-             }
-         });
-    rules.dist("dist-plain-source-tarball", "src")
-         .default(build.config.rust_dist_src)
-         .host(true)
-         .only_build(true)
-         .only_host_build(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |_| dist::plain_source_tarball(build));
-    rules.dist("dist-src", "src")
-         .default(true)
-         .host(true)
-         .only_build(true)
-         .only_host_build(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |_| dist::rust_src(build));
-    rules.dist("dist-docs", "src/doc")
-         .default(true)
-         .only_host_build(true)
-         .dep(|s| s.name("default:doc"))
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::docs(build, s.stage, s.target));
-    rules.dist("dist-analysis", "analysis")
-         .default(build.config.extended)
-         .dep(|s| s.name("dist-std"))
-         .only_host_build(true)
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::analysis(build, &s.compiler(), s.target));
-    rules.dist("dist-rls", "rls")
-         .host(true)
-         .only_host_build(true)
-         .dep(|s| s.name("tool-rls"))
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::rls(build, s.stage, s.target));
-    rules.dist("dist-cargo", "cargo")
-         .host(true)
-         .only_host_build(true)
-         .dep(|s| s.name("tool-cargo"))
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::cargo(build, s.stage, s.target));
-    rules.dist("dist-extended", "extended")
-         .default(build.config.extended)
-         .host(true)
-         .only_host_build(true)
-         .dep(|d| d.name("dist-std"))
-         .dep(|d| d.name("dist-rustc"))
-         .dep(|d| d.name("dist-mingw"))
-         .dep(|d| d.name("dist-docs"))
-         .dep(|d| d.name("dist-cargo"))
-         .dep(|d| d.name("dist-rls"))
-         .dep(|d| d.name("dist-analysis"))
-         .dep(move |s| tool_rust_installer(build, s))
-         .run(move |s| dist::extended(build, s.stage, s.target));
-
-    rules.dist("dist-sign", "hash-and-sign")
-         .host(true)
-         .only_build(true)
-         .only_host_build(true)
-         .dep(move |s| s.name("tool-build-manifest").target(&build.build).stage(0))
-         .run(move |_| dist::hash_and_sign(build));
-
-    rules.install("install-docs", "src/doc")
-         .default(build.config.docs)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-docs"))
-         .run(move |s| install::Installer::new(build).install_docs(s.stage, s.target));
-    rules.install("install-std", "src/libstd")
-         .default(true)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-std"))
-         .run(move |s| install::Installer::new(build).install_std(s.stage));
-    rules.install("install-cargo", "cargo")
-         .default(build.config.extended)
-         .host(true)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-cargo"))
-         .run(move |s| install::Installer::new(build).install_cargo(s.stage, s.target));
-    rules.install("install-rls", "rls")
-         .default(build.config.extended)
-         .host(true)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-rls"))
-         .run(move |s| install::Installer::new(build).install_rls(s.stage, s.target));
-    rules.install("install-analysis", "analysis")
-         .default(build.config.extended)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-analysis"))
-         .run(move |s| install::Installer::new(build).install_analysis(s.stage, s.target));
-    rules.install("install-src", "src")
-         .default(build.config.extended)
-         .host(true)
-         .only_build(true)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-src"))
-         .run(move |s| install::Installer::new(build).install_src(s.stage));
-    rules.install("install-rustc", "src/librustc")
-         .default(true)
-         .host(true)
-         .only_host_build(true)
-         .dep(|s| s.name("dist-rustc"))
-         .run(move |s| install::Installer::new(build).install_rustc(s.stage, s.target));
-
-    rules.verify();
-    return rules;
-
-    /// Helper to depend on a stage0 build-only rust-installer tool.
-    fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
-        step.name("tool-rust-installer")
-            .host(&build.build)
-            .target(&build.build)
-            .stage(0)
-    }
-}
-
-#[derive(PartialEq, Eq, Hash, Clone, Debug)]
-struct Step<'a> {
-    /// Human readable name of the rule this step is executing. Possible names
-    /// are all defined above in `build_rules`.
-    name: &'a str,
-
-    /// The stage this step is executing in. This is typically 0, 1, or 2.
-    stage: u32,
-
-    /// This step will likely involve a compiler, and the target that compiler
-    /// itself is built for is called the host, this variable. Typically this is
-    /// the target of the build machine itself.
-    host: &'a str,
-
-    /// The target that this step represents generating. If you're building a
-    /// standard library for a new suite of targets, for example, this'll be set
-    /// to those targets.
-    target: &'a str,
-}
-
-impl<'a> Step<'a> {
-    fn noop() -> Step<'a> {
-        Step { name: "", stage: 0, host: "", target: "" }
-    }
-
-    /// Creates a new step which is the same as this, except has a new name.
-    fn name(&self, name: &'a str) -> Step<'a> {
-        Step { name: name, ..*self }
-    }
-
-    /// Creates a new step which is the same as this, except has a new stage.
-    fn stage(&self, stage: u32) -> Step<'a> {
-        Step { stage: stage, ..*self }
-    }
-
-    /// Creates a new step which is the same as this, except has a new host.
-    fn host(&self, host: &'a str) -> Step<'a> {
-        Step { host: host, ..*self }
-    }
-
-    /// Creates a new step which is the same as this, except has a new target.
-    fn target(&self, target: &'a str) -> Step<'a> {
-        Step { target: target, ..*self }
-    }
-
-    /// Returns the `Compiler` structure that this step corresponds to.
-    fn compiler(&self) -> Compiler<'a> {
-        Compiler::new(self.stage, self.host)
-    }
-}
-
-struct Rule<'a> {
-    /// The human readable name of this target, defined in `build_rules`.
-    name: &'a str,
-
-    /// The path associated with this target, used in the `./x.py` driver for
-    /// easy and ergonomic specification of what to do.
-    path: &'a str,
-
-    /// The "kind" of top-level command that this rule is associated with, only
-    /// relevant if this is a default rule.
-    kind: Kind,
-
-    /// List of dependencies this rule has. Each dependency is a function from a
-    /// step that's being executed to another step that should be executed.
-    deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>,
-
-    /// How to actually execute this rule. Takes a step with contextual
-    /// information and then executes it.
-    run: Box<Fn(&Step<'a>) + 'a>,
-
-    /// Whether or not this is a "default" rule. That basically means that if
-    /// you run, for example, `./x.py test` whether it's included or not.
-    default: bool,
-
-    /// Whether or not this is a "host" rule, or in other words whether this is
-    /// only intended for compiler hosts and not for targets that are being
-    /// generated.
-    host: bool,
-
-    /// Whether this rule is only for steps where the host is the build triple,
-    /// not anything in hosts or targets.
-    only_host_build: bool,
-
-    /// Whether this rule is only for the build triple, not anything in hosts or
-    /// targets.
-    only_build: bool,
-
-    /// A list of "order only" dependencies. This rules does not actually
-    /// depend on these rules, but if they show up in the dependency graph then
-    /// this rule must be executed after all these rules.
-    after: Vec<&'a str>,
-}
-
-#[derive(PartialEq)]
-enum Kind {
-    Build,
-    Test,
-    Bench,
-    Dist,
-    Doc,
-    Install,
-}
-
-impl<'a> Rule<'a> {
-    fn new(name: &'a str, path: &'a str, kind: Kind) -> Rule<'a> {
-        Rule {
-            name: name,
-            deps: Vec::new(),
-            run: Box::new(|_| ()),
-            path: path,
-            kind: kind,
-            default: false,
-            host: false,
-            only_host_build: false,
-            only_build: false,
-            after: Vec::new(),
-        }
-    }
-}
-
-/// Builder pattern returned from the various methods on `Rules` which will add
-/// the rule to the internal list on `Drop`.
-struct RuleBuilder<'a: 'b, 'b> {
-    rules: &'b mut Rules<'a>,
-    rule: Rule<'a>,
-}
-
-impl<'a, 'b> RuleBuilder<'a, 'b> {
-    fn dep<F>(&mut self, f: F) -> &mut Self
-        where F: Fn(&Step<'a>) -> Step<'a> + 'a,
-    {
-        self.rule.deps.push(Box::new(f));
-        self
-    }
-
-    fn after(&mut self, step: &'a str) -> &mut Self {
-        self.rule.after.push(step);
-        self
-    }
-
-    fn run<F>(&mut self, f: F) -> &mut Self
-        where F: Fn(&Step<'a>) + 'a,
-    {
-        self.rule.run = Box::new(f);
-        self
-    }
-
-    fn default(&mut self, default: bool) -> &mut Self {
-        self.rule.default = default;
-        self
-    }
-
-    fn host(&mut self, host: bool) -> &mut Self {
-        self.rule.host = host;
-        self
-    }
-
-    fn only_build(&mut self, only_build: bool) -> &mut Self {
-        self.rule.only_build = only_build;
-        self
-    }
-
-    fn only_host_build(&mut self, only_host_build: bool) -> &mut Self {
-        self.rule.only_host_build = only_host_build;
-        self
-    }
-}
-
-impl<'a, 'b> Drop for RuleBuilder<'a, 'b> {
-    fn drop(&mut self) {
-        let rule = mem::replace(&mut self.rule, Rule::new("", "", Kind::Build));
-        let prev = self.rules.rules.insert(rule.name, rule);
-        if let Some(prev) = prev {
-            panic!("duplicate rule named: {}", prev.name);
-        }
-    }
-}
-
-pub struct Rules<'a> {
-    build: &'a Build,
-    sbuild: Step<'a>,
-    rules: BTreeMap<&'a str, Rule<'a>>,
-}
-
-impl<'a> Rules<'a> {
-    fn new(build: &'a Build) -> Rules<'a> {
-        Rules {
-            build: build,
-            sbuild: Step {
-                stage: build.flags.stage.unwrap_or(2),
-                target: &build.build,
-                host: &build.build,
-                name: "",
-            },
-            rules: BTreeMap::new(),
-        }
-    }
-
-    /// Creates a new rule of `Kind::Build` with the specified human readable
-    /// name and path associated with it.
-    ///
-    /// The builder returned should be configured further with information such
-    /// as how to actually run this rule.
-    fn build<'b>(&'b mut self, name: &'a str, path: &'a str)
-                 -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Build)
-    }
-
-    /// Same as `build`, but for `Kind::Test`.
-    fn test<'b>(&'b mut self, name: &'a str, path: &'a str)
-                -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Test)
-    }
-
-    /// Same as `build`, but for `Kind::Bench`.
-    fn bench<'b>(&'b mut self, name: &'a str, path: &'a str)
-                -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Bench)
-    }
-
-    /// Same as `build`, but for `Kind::Doc`.
-    fn doc<'b>(&'b mut self, name: &'a str, path: &'a str)
-               -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Doc)
-    }
-
-    /// Same as `build`, but for `Kind::Dist`.
-    fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
-                -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Dist)
-    }
-
-    /// Same as `build`, but for `Kind::Install`.
-    fn install<'b>(&'b mut self, name: &'a str, path: &'a str)
-                -> RuleBuilder<'a, 'b> {
-        self.rule(name, path, Kind::Install)
-    }
-
-    fn rule<'b>(&'b mut self,
-                name: &'a str,
-                path: &'a str,
-                kind: Kind) -> RuleBuilder<'a, 'b> {
-        RuleBuilder {
-            rules: self,
-            rule: Rule::new(name, path, kind),
-        }
-    }
-
-    /// Verify the dependency graph defined by all our rules are correct, e.g.
-    /// everything points to a valid something else.
-    fn verify(&self) {
-        for rule in self.rules.values() {
-            for dep in rule.deps.iter() {
-                let dep = dep(&self.sbuild.name(rule.name));
-                if self.rules.contains_key(&dep.name) || dep.name.starts_with("default:") {
-                    continue
-                }
-                if dep == Step::noop() {
-                    continue
-                }
-                panic!("\
-
-invalid rule dependency graph detected, was a rule added and maybe typo'd?
-
-    `{}` depends on `{}` which does not exist
-
-", rule.name, dep.name);
-            }
-        }
-    }
-
-    pub fn get_help(&self, command: &str) -> Option<String> {
-        let kind = match command {
-            "build" => Kind::Build,
-            "doc" => Kind::Doc,
-            "test" => Kind::Test,
-            "bench" => Kind::Bench,
-            "dist" => Kind::Dist,
-            "install" => Kind::Install,
-            _ => return None,
-        };
-        let rules = self.rules.values().filter(|r| r.kind == kind);
-        let rules = rules.filter(|r| !r.path.contains("nowhere"));
-        let mut rules = rules.collect::<Vec<_>>();
-        rules.sort_by_key(|r| r.path);
-
-        let mut help_string = String::from("Available paths:\n");
-        for rule in rules {
-            help_string.push_str(format!("    ./x.py {} {}\n", command, rule.path).as_str());
-        }
-        Some(help_string)
-    }
-
-    /// Construct the top-level build steps that we're going to be executing,
-    /// given the subcommand that our build is performing.
-    fn plan(&self) -> Vec<Step<'a>> {
-        // Ok, the logic here is pretty subtle, and involves quite a few
-        // conditionals. The basic idea here is to:
-        //
-        // 1. First, filter all our rules to the relevant ones. This means that
-        //    the command specified corresponds to one of our `Kind` variants,
-        //    and we filter all rules based on that.
-        //
-        // 2. Next, we determine which rules we're actually executing. If a
-        //    number of path filters were specified on the command line we look
-        //    for those, otherwise we look for anything tagged `default`.
-        //    Here we also compute the priority of each rule based on how early
-        //    in the command line the matching path filter showed up.
-        //
-        // 3. Finally, we generate some steps with host and target information.
-        //
-        // The last step is by far the most complicated and subtle. The basic
-        // thinking here is that we want to take the cartesian product of
-        // specified hosts and targets and build rules with that. The list of
-        // hosts and targets, if not specified, come from the how this build was
-        // configured. If the rule we're looking at is a host-only rule the we
-        // ignore the list of targets and instead consider the list of hosts
-        // also the list of targets.
-        //
-        // Once the host and target lists are generated we take the cartesian
-        // product of the two and then create a step based off them. Note that
-        // the stage each step is associated was specified with the `--step`
-        // flag on the command line.
-        let (kind, paths) = match self.build.flags.cmd {
-            Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
-            Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
-            Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
-            Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
-            Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
-            Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
-            Subcommand::Clean => panic!(),
-        };
-
-        let mut rules: Vec<_> = self.rules.values().filter_map(|rule| {
-            if rule.kind != kind {
-                return None;
-            }
-
-            if paths.len() == 0 && rule.default {
-                Some((rule, 0))
-            } else {
-                paths.iter()
-                     .position(|path| path.ends_with(rule.path))
-                     .map(|priority| (rule, priority))
-            }
-        }).collect();
-
-        if rules.is_empty() &&
-           !paths.get(0).unwrap_or(&PathBuf::new())
-                 .ends_with("nonexistent/path/to/trigger/cargo/metadata") {
-            println!("\nNothing to run...\n");
-            process::exit(1);
-        }
-
-        rules.sort_by_key(|&(_, priority)| priority);
-
-        rules.into_iter().flat_map(|(rule, _)| {
-            let hosts = if rule.only_host_build || rule.only_build {
-                self.build.build_slice()
-            } else {
-                &self.build.hosts
-            };
-            // Determine the actual targets participating in this rule.
-            // NOTE: We should keep the full projection from build triple to
-            // the hosts for the dist steps, now that the hosts array above is
-            // truncated to avoid duplication of work in that case. Therefore
-            // the original non-shadowed hosts array is used below.
-            let arr = if rule.host {
-                // If --target was specified but --host wasn't specified,
-                // don't run any host-only tests.
-                if self.build.flags.host.len() > 0 {
-                    &self.build.hosts
-                } else if self.build.flags.target.len() > 0 {
-                    &[]
-                } else if rule.only_build {
-                    self.build.build_slice()
-                } else {
-                    &self.build.hosts
-                }
-            } else {
-                &self.build.targets
-            };
-
-            hosts.iter().flat_map(move |host| {
-                arr.iter().map(move |target| {
-                    self.sbuild.name(rule.name).target(target).host(host)
-                })
-            })
-        }).collect()
-    }
-
-    /// Execute all top-level targets indicated by `steps`.
-    ///
-    /// This will take the list returned by `plan` and then execute each step
-    /// along with all required dependencies as it goes up the chain.
-    fn run(&self, steps: &[Step<'a>]) {
-        self.build.verbose("bootstrap top targets:");
-        for step in steps.iter() {
-            self.build.verbose(&format!("\t{:?}", step));
-        }
-
-        // Using `steps` as the top-level targets, make a topological ordering
-        // of what we need to do.
-        let order = self.expand(steps);
-
-        // Print out what we're doing for debugging
-        self.build.verbose("bootstrap build plan:");
-        for step in order.iter() {
-            self.build.verbose(&format!("\t{:?}", step));
-        }
-
-        // And finally, iterate over everything and execute it.
-        for step in order.iter() {
-            if self.build.flags.keep_stage.map_or(false, |s| step.stage <= s) {
-                self.build.verbose(&format!("keeping step {:?}", step));
-                continue;
-            }
-            self.build.verbose(&format!("executing step {:?}", step));
-            (self.rules[step.name].run)(step);
-        }
-
-        // Check for postponed failures from `test --no-fail-fast`.
-        let failures = self.build.delayed_failures.get();
-        if failures > 0 {
-            println!("\n{} command(s) did not execute successfully.\n", failures);
-            process::exit(1);
-        }
-    }
-
-    /// From the top level targets `steps` generate a topological ordering of
-    /// all steps needed to run those steps.
-    fn expand(&self, steps: &[Step<'a>]) -> Vec<Step<'a>> {
-        // First up build a graph of steps and their dependencies. The `nodes`
-        // map is a map from step to a unique number. The `edges` map is a
-        // map from these unique numbers to a list of other numbers,
-        // representing dependencies.
-        let mut nodes = HashMap::new();
-        nodes.insert(Step::noop(), 0);
-        let mut edges = HashMap::new();
-        edges.insert(0, HashSet::new());
-        for step in steps {
-            self.build_graph(step.clone(), &mut nodes, &mut edges);
-        }
-
-        // Now that we've built up the actual dependency graph, draw more
-        // dependency edges to satisfy the `after` dependencies field for each
-        // rule.
-        self.satisfy_after_deps(&nodes, &mut edges);
-
-        // And finally, perform a topological sort to return a list of steps to
-        // execute.
-        let mut order = Vec::new();
-        let mut visited = HashSet::new();
-        visited.insert(0);
-        let idx_to_node = nodes.iter().map(|p| (*p.1, p.0)).collect::<HashMap<_, _>>();
-        for idx in 0..nodes.len() {
-            self.topo_sort(idx, &idx_to_node, &edges, &mut visited, &mut order);
-        }
-        order
-    }
-
-    /// Builds the dependency graph rooted at `step`.
-    ///
-    /// The `nodes` and `edges` maps are filled out according to the rule
-    /// described by `step.name`.
-    fn build_graph(&self,
-                   step: Step<'a>,
-                   nodes: &mut HashMap<Step<'a>, usize>,
-                   edges: &mut HashMap<usize, HashSet<usize>>) -> usize {
-        use std::collections::hash_map::Entry;
-
-        let idx = nodes.len();
-        match nodes.entry(step.clone()) {
-            Entry::Vacant(e) => { e.insert(idx); }
-            Entry::Occupied(e) => return *e.get(),
-        }
-
-        let mut deps = Vec::new();
-        for dep in self.rules[step.name].deps.iter() {
-            let dep = dep(&step);
-            if dep.name.starts_with("default:") {
-                let kind = match &dep.name[8..] {
-                    "doc" => Kind::Doc,
-                    "dist" => Kind::Dist,
-                    kind => panic!("unknown kind: `{}`", kind),
-                };
-                let host = self.build.config.host.iter().any(|h| h == dep.target);
-                let rules = self.rules.values().filter(|r| r.default);
-                for rule in rules.filter(|r| r.kind == kind && (!r.host || host)) {
-                    deps.push(self.build_graph(dep.name(rule.name), nodes, edges));
-                }
-            } else {
-                deps.push(self.build_graph(dep, nodes, edges));
-            }
-        }
-
-        edges.entry(idx).or_insert(HashSet::new()).extend(deps);
-        idx
-    }
-
-    /// Given a dependency graph with a finished list of `nodes`, fill out more
-    /// dependency `edges`.
-    ///
-    /// This is the step which satisfies all `after` listed dependencies in
-    /// `Rule` above.
-    fn satisfy_after_deps(&self,
-                          nodes: &HashMap<Step<'a>, usize>,
-                          edges: &mut HashMap<usize, HashSet<usize>>) {
-        // Reverse map from the name of a step to the node indices that it
-        // appears at.
-        let mut name_to_idx = HashMap::new();
-        for (step, &idx) in nodes {
-            name_to_idx.entry(step.name).or_insert(Vec::new()).push(idx);
-        }
-
-        for (step, idx) in nodes {
-            if *step == Step::noop() {
-                continue
-            }
-            for after in self.rules[step.name].after.iter() {
-                // This is the critical piece of an `after` dependency. If the
-                // dependency isn't actually in our graph then no edge is drawn,
-                // only if it's already present do we draw the edges.
-                if let Some(idxs) = name_to_idx.get(after) {
-                    edges.get_mut(idx).unwrap()
-                         .extend(idxs.iter().cloned());
-                }
-            }
-        }
-    }
-
-    fn topo_sort(&self,
-                 cur: usize,
-                 nodes: &HashMap<usize, &Step<'a>>,
-                 edges: &HashMap<usize, HashSet<usize>>,
-                 visited: &mut HashSet<usize>,
-                 order: &mut Vec<Step<'a>>) {
-        if !visited.insert(cur) {
-            return
-        }
-        for dep in edges[&cur].iter() {
-            self.topo_sort(*dep, nodes, edges, visited, order);
-        }
-        order.push(nodes[&cur].clone());
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use std::env;
-
-    use Build;
-    use config::Config;
-    use flags::Flags;
-
-    fn build(args: &[&str],
-             extra_host: &[&str],
-             extra_target: &[&str]) -> Build {
-        build_(args, extra_host, extra_target, true)
-    }
-
-    fn build_(args: &[&str],
-              extra_host: &[&str],
-              extra_target: &[&str],
-              docs: bool) -> Build {
-        let mut args = args.iter().map(|s| s.to_string()).collect::<Vec<_>>();
-        args.push("--build".to_string());
-        args.push("A".to_string());
-        let flags = Flags::parse(&args);
-
-        let mut config = Config::default();
-        config.docs = docs;
-        config.build = "A".to_string();
-        config.host = vec![config.build.clone()];
-        config.host.extend(extra_host.iter().map(|s| s.to_string()));
-        config.target = config.host.clone();
-        config.target.extend(extra_target.iter().map(|s| s.to_string()));
-
-        let mut build = Build::new(flags, config);
-        let cwd = env::current_dir().unwrap();
-        build.crates.insert("std".to_string(), ::Crate {
-            name: "std".to_string(),
-            deps: Vec::new(),
-            path: cwd.join("src/std"),
-            doc_step: "doc-crate-std".to_string(),
-            build_step: "build-crate-std".to_string(),
-            test_step: "test-crate-std".to_string(),
-            bench_step: "bench-crate-std".to_string(),
-            version: String::new(),
-        });
-        build.crates.insert("test".to_string(), ::Crate {
-            name: "test".to_string(),
-            deps: Vec::new(),
-            path: cwd.join("src/test"),
-            doc_step: "doc-crate-test".to_string(),
-            build_step: "build-crate-test".to_string(),
-            test_step: "test-crate-test".to_string(),
-            bench_step: "bench-crate-test".to_string(),
-            version: String::new(),
-        });
-        build.crates.insert("rustc-main".to_string(), ::Crate {
-            name: "rustc-main".to_string(),
-            deps: Vec::new(),
-            version: String::new(),
-            path: cwd.join("src/rustc-main"),
-            doc_step: "doc-crate-rustc-main".to_string(),
-            build_step: "build-crate-rustc-main".to_string(),
-            test_step: "test-crate-rustc-main".to_string(),
-            bench_step: "bench-crate-rustc-main".to_string(),
-        });
-        return build
-    }
-
-    #[test]
-    fn dist_baseline() {
-        let build = build(&["dist"], &[], &[]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-        assert!(plan.iter().all(|s| s.host == "A" ));
-        assert!(plan.iter().all(|s| s.target == "A" ));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(plan.contains(&step.name("dist-docs")));
-        assert!(plan.contains(&step.name("dist-mingw")));
-        assert!(plan.contains(&step.name("dist-rustc")));
-        assert!(plan.contains(&step.name("dist-std")));
-        assert!(plan.contains(&step.name("dist-src")));
-    }
-
-    #[test]
-    fn dist_with_targets() {
-        let build = build(&["dist"], &[], &["B"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-        assert!(plan.iter().all(|s| s.host == "A" ));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(plan.contains(&step.name("dist-docs")));
-        assert!(plan.contains(&step.name("dist-mingw")));
-        assert!(plan.contains(&step.name("dist-rustc")));
-        assert!(plan.contains(&step.name("dist-std")));
-        assert!(plan.contains(&step.name("dist-src")));
-
-        assert!(plan.contains(&step.target("B").name("dist-docs")));
-        assert!(plan.contains(&step.target("B").name("dist-mingw")));
-        assert!(!plan.contains(&step.target("B").name("dist-rustc")));
-        assert!(plan.contains(&step.target("B").name("dist-std")));
-        assert!(!plan.contains(&step.target("B").name("dist-src")));
-    }
-
-    #[test]
-    fn dist_with_hosts() {
-        let build = build(&["dist"], &["B"], &[]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(!plan.iter().any(|s| s.host == "B"));
-
-        assert!(plan.contains(&step.name("dist-docs")));
-        assert!(plan.contains(&step.name("dist-mingw")));
-        assert!(plan.contains(&step.name("dist-rustc")));
-        assert!(plan.contains(&step.name("dist-std")));
-        assert!(plan.contains(&step.name("dist-src")));
-
-        assert!(plan.contains(&step.target("B").name("dist-docs")));
-        assert!(plan.contains(&step.target("B").name("dist-mingw")));
-        assert!(plan.contains(&step.target("B").name("dist-rustc")));
-        assert!(plan.contains(&step.target("B").name("dist-std")));
-        assert!(!plan.contains(&step.target("B").name("dist-src")));
-    }
-
-    #[test]
-    fn dist_with_targets_and_hosts() {
-        let build = build(&["dist"], &["B"], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(!plan.iter().any(|s| s.host == "B"));
-        assert!(!plan.iter().any(|s| s.host == "C"));
-
-        assert!(plan.contains(&step.name("dist-docs")));
-        assert!(plan.contains(&step.name("dist-mingw")));
-        assert!(plan.contains(&step.name("dist-rustc")));
-        assert!(plan.contains(&step.name("dist-std")));
-        assert!(plan.contains(&step.name("dist-src")));
-
-        assert!(plan.contains(&step.target("B").name("dist-docs")));
-        assert!(plan.contains(&step.target("B").name("dist-mingw")));
-        assert!(plan.contains(&step.target("B").name("dist-rustc")));
-        assert!(plan.contains(&step.target("B").name("dist-std")));
-        assert!(!plan.contains(&step.target("B").name("dist-src")));
-
-        assert!(plan.contains(&step.target("C").name("dist-docs")));
-        assert!(plan.contains(&step.target("C").name("dist-mingw")));
-        assert!(!plan.contains(&step.target("C").name("dist-rustc")));
-        assert!(plan.contains(&step.target("C").name("dist-std")));
-        assert!(!plan.contains(&step.target("C").name("dist-src")));
-    }
-
-    #[test]
-    fn dist_target_with_target_flag() {
-        let build = build(&["dist", "--target=C"], &["B"], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(!plan.iter().any(|s| s.target == "A"));
-        assert!(!plan.iter().any(|s| s.target == "B"));
-        assert!(!plan.iter().any(|s| s.host == "B"));
-        assert!(!plan.iter().any(|s| s.host == "C"));
-
-        assert!(plan.contains(&step.target("C").name("dist-docs")));
-        assert!(plan.contains(&step.target("C").name("dist-mingw")));
-        assert!(!plan.contains(&step.target("C").name("dist-rustc")));
-        assert!(plan.contains(&step.target("C").name("dist-std")));
-        assert!(!plan.contains(&step.target("C").name("dist-src")));
-    }
-
-    #[test]
-    fn dist_host_with_target_flag() {
-        let build = build(&["dist", "--host=B", "--target=B"], &["B"], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        assert!(!plan.iter().any(|s| s.target == "A"));
-        assert!(!plan.iter().any(|s| s.target == "C"));
-        assert!(!plan.iter().any(|s| s.host == "B"));
-        assert!(!plan.iter().any(|s| s.host == "C"));
-
-        assert!(plan.contains(&step.target("B").name("dist-docs")));
-        assert!(plan.contains(&step.target("B").name("dist-mingw")));
-        assert!(plan.contains(&step.target("B").name("dist-rustc")));
-        assert!(plan.contains(&step.target("B").name("dist-std")));
-        assert!(plan.contains(&step.target("B").name("dist-src")));
-
-        let all = rules.expand(&plan);
-        println!("all rules: {:#?}", all);
-        assert!(!all.contains(&step.name("rustc")));
-        assert!(!all.contains(&step.name("build-crate-test").stage(1)));
-
-        // all stage0 compiles should be for the build target, A
-        for step in all.iter().filter(|s| s.stage == 0) {
-            if !step.name.contains("build-crate") {
-                continue
-            }
-            println!("step: {:?}", step);
-            assert!(step.host != "B");
-            assert!(step.target != "B");
-            assert!(step.host != "C");
-            assert!(step.target != "C");
-        }
-    }
-
-    #[test]
-    fn build_default() {
-        let build = build(&["build"], &["B"], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        let step = super::Step {
-            name: "",
-            stage: 2,
-            host: &build.build,
-            target: &build.build,
-        };
-
-        // rustc built for all for of (A, B) x (A, B)
-        assert!(plan.contains(&step.name("librustc")));
-        assert!(plan.contains(&step.target("B").name("librustc")));
-        assert!(plan.contains(&step.host("B").target("A").name("librustc")));
-        assert!(plan.contains(&step.host("B").target("B").name("librustc")));
-
-        // rustc never built for C
-        assert!(!plan.iter().any(|s| {
-            s.name.contains("rustc") && (s.host == "C" || s.target == "C")
-        }));
-
-        // test built for everything
-        assert!(plan.contains(&step.name("libtest")));
-        assert!(plan.contains(&step.target("B").name("libtest")));
-        assert!(plan.contains(&step.host("B").target("A").name("libtest")));
-        assert!(plan.contains(&step.host("B").target("B").name("libtest")));
-        assert!(plan.contains(&step.host("A").target("C").name("libtest")));
-        assert!(plan.contains(&step.host("B").target("C").name("libtest")));
-
-        let all = rules.expand(&plan);
-        println!("all rules: {:#?}", all);
-        assert!(all.contains(&step.name("rustc")));
-        assert!(all.contains(&step.name("libstd")));
-    }
-
-    #[test]
-    fn build_filtered() {
-        let build = build(&["build", "--target=C"], &["B"], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-
-        assert!(!plan.iter().any(|s| s.name.contains("rustc")));
-        assert!(plan.iter().all(|s| {
-            !s.name.contains("test") || s.target == "C"
-        }));
-    }
-
-    #[test]
-    fn test_default() {
-        let build = build(&["test"], &[], &[]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-        assert!(plan.iter().all(|s| s.host == "A"));
-        assert!(plan.iter().all(|s| s.target == "A"));
-
-        assert!(plan.iter().any(|s| s.name.contains("-ui")));
-        assert!(plan.iter().any(|s| s.name.contains("cfail")));
-        assert!(plan.iter().any(|s| s.name.contains("cfail-full")));
-        assert!(plan.iter().any(|s| s.name.contains("codegen-units")));
-        assert!(plan.iter().any(|s| s.name.contains("debuginfo")));
-        assert!(plan.iter().any(|s| s.name.contains("docs")));
-        assert!(plan.iter().any(|s| s.name.contains("error-index")));
-        assert!(plan.iter().any(|s| s.name.contains("incremental")));
-        assert!(plan.iter().any(|s| s.name.contains("linkchecker")));
-        assert!(plan.iter().any(|s| s.name.contains("mir-opt")));
-        assert!(plan.iter().any(|s| s.name.contains("pfail")));
-        assert!(plan.iter().any(|s| s.name.contains("rfail")));
-        assert!(plan.iter().any(|s| s.name.contains("rfail-full")));
-        assert!(plan.iter().any(|s| s.name.contains("rmake")));
-        assert!(plan.iter().any(|s| s.name.contains("rpass")));
-        assert!(plan.iter().any(|s| s.name.contains("rpass-full")));
-        assert!(plan.iter().any(|s| s.name.contains("rustc-all")));
-        assert!(plan.iter().any(|s| s.name.contains("rustdoc")));
-        assert!(plan.iter().any(|s| s.name.contains("std-all")));
-        assert!(plan.iter().any(|s| s.name.contains("test-all")));
-        assert!(plan.iter().any(|s| s.name.contains("tidy")));
-        assert!(plan.iter().any(|s| s.name.contains("valgrind")));
-    }
-
-    #[test]
-    fn test_with_a_target() {
-        let build = build(&["test", "--target=C"], &[], &["C"]);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(plan.iter().all(|s| s.stage == 2));
-        assert!(plan.iter().all(|s| s.host == "A"));
-        assert!(plan.iter().all(|s| s.target == "C"));
-
-        assert!(plan.iter().any(|s| s.name.contains("-ui")));
-        assert!(!plan.iter().any(|s| s.name.contains("ui-full")));
-        assert!(plan.iter().any(|s| s.name.contains("cfail")));
-        assert!(!plan.iter().any(|s| s.name.contains("cfail-full")));
-        assert!(plan.iter().any(|s| s.name.contains("codegen-units")));
-        assert!(plan.iter().any(|s| s.name.contains("debuginfo")));
-        assert!(!plan.iter().any(|s| s.name.contains("docs")));
-        assert!(!plan.iter().any(|s| s.name.contains("error-index")));
-        assert!(plan.iter().any(|s| s.name.contains("incremental")));
-        assert!(!plan.iter().any(|s| s.name.contains("linkchecker")));
-        assert!(plan.iter().any(|s| s.name.contains("mir-opt")));
-        assert!(plan.iter().any(|s| s.name.contains("pfail")));
-        assert!(plan.iter().any(|s| s.name.contains("rfail")));
-        assert!(!plan.iter().any(|s| s.name.contains("rfail-full")));
-        assert!(!plan.iter().any(|s| s.name.contains("rmake")));
-        assert!(plan.iter().any(|s| s.name.contains("rpass")));
-        assert!(!plan.iter().any(|s| s.name.contains("rpass-full")));
-        assert!(!plan.iter().any(|s| s.name.contains("rustc-all")));
-        assert!(!plan.iter().any(|s| s.name.contains("rustdoc")));
-        assert!(plan.iter().any(|s| s.name.contains("std-all")));
-        assert!(plan.iter().any(|s| s.name.contains("test-all")));
-        assert!(!plan.iter().any(|s| s.name.contains("tidy")));
-        assert!(plan.iter().any(|s| s.name.contains("valgrind")));
-    }
-
-    #[test]
-    fn test_disable_docs() {
-        let build = build_(&["test"], &[], &[], false);
-        let rules = super::build_rules(&build);
-        let plan = rules.plan();
-        println!("rules: {:#?}", plan);
-        assert!(!plan.iter().any(|s| {
-            s.name.contains("doc-") || s.name.contains("default:doc")
-        }));
-        // none of the dependencies should be a doc rule either
-        assert!(!plan.iter().any(|s| {
-            rules.rules[s.name].deps.iter().any(|dep| {
-                let dep = dep(&rules.sbuild.name(s.name));
-                dep.name.contains("doc-") || dep.name.contains("default:doc")
-            })
-        }));
-    }
-}
diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs
new file mode 100644
index 00000000000..b31d891051c
--- /dev/null
+++ b/src/bootstrap/tool.rs
@@ -0,0 +1,353 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::env;
+use std::path::PathBuf;
+use std::process::Command;
+
+use Mode;
+use Compiler;
+use builder::{Step, RunConfig, ShouldRun, Builder};
+use util::{exe, add_lib_path};
+use compile::{self, libtest_stamp, libstd_stamp, librustc_stamp};
+use native;
+use channel::GitInfo;
+use cache::Interned;
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct CleanTools {
+    pub stage: u32,
+    pub target: Interned<String>,
+    pub mode: Mode,
+}
+
+impl Step for CleanTools {
+    type Output = ();
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Clear out the tool cargo output directory (`Mode::Tool`) if it is
+    /// out of date relative to the stamp file of the library `mode`
+    /// (std/test/rustc) the tools link against, forcing a rebuild of tools
+    /// whenever those libraries change. Panics if `mode` is `Mode::Tool`.
+    fn run(self, builder: &Builder) {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        let mode = self.mode;
+
+        let compiler = builder.compiler(stage, build.build);
+
+        let stamp = match mode {
+            Mode::Libstd => libstd_stamp(build, compiler, target),
+            Mode::Libtest => libtest_stamp(build, compiler, target),
+            Mode::Librustc => librustc_stamp(build, compiler, target),
+            _ => panic!(),
+        };
+        let out_dir = build.cargo_out(compiler, Mode::Tool, target);
+        build.clear_if_dirty(&out_dir, &stamp);
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct ToolBuild {
+    pub stage: u32,
+    pub target: Interned<String>,
+    pub tool: &'static str,
+    pub mode: Mode,
+}
+
+impl Step for ToolBuild {
+    type Output = PathBuf;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.never()
+    }
+
+    /// Build a tool in `src/tools`
+    ///
+    /// This will build the specified tool with the specified `host` compiler in
+    /// `stage` into the normal cargo output directory.
+    fn run(self, builder: &Builder) -> PathBuf {
+        let build = builder.build;
+        let stage = self.stage;
+        let target = self.target;
+        let tool = self.tool;
+
+        let compiler = builder.compiler(stage, build.build);
+        builder.ensure(CleanTools { stage, target, mode: self.mode });
+        match self.mode {
+            Mode::Libstd => builder.ensure(compile::Std { compiler, target }),
+            Mode::Libtest => builder.ensure(compile::Test { compiler, target }),
+            Mode::Librustc => builder.ensure(compile::Rustc { compiler, target }),
+            Mode::Tool => panic!("unexpected Mode::Tool for tool build")
+        }
+
+        let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
+        println!("Building stage{} tool {} ({})", stage, tool, target);
+
+        let mut cargo = builder.cargo(compiler, Mode::Tool, target, "build");
+        let dir = build.src.join("src/tools").join(tool);
+        cargo.arg("--manifest-path").arg(dir.join("Cargo.toml"));
+
+        // We don't want to build tools dynamically as they'll be running across
+        // stages and such and it's just easier if they're not dynamically linked.
+        cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
+
+        if let Some(dir) = build.openssl_install_dir(target) {
+            cargo.env("OPENSSL_STATIC", "1");
+            cargo.env("OPENSSL_DIR", dir);
+            cargo.env("LIBZ_SYS_STATIC", "1");
+        }
+
+        cargo.env("CFG_RELEASE_CHANNEL", &build.config.channel);
+
+        let info = GitInfo::new(&dir);
+        if let Some(sha) = info.sha() {
+            cargo.env("CFG_COMMIT_HASH", sha);
+        }
+        if let Some(sha_short) = info.sha_short() {
+            cargo.env("CFG_SHORT_COMMIT_HASH", sha_short);
+        }
+        if let Some(date) = info.commit_date() {
+            cargo.env("CFG_COMMIT_DATE", date);
+        }
+
+        build.run(&mut cargo);
+        build.cargo_out(compiler, Mode::Tool, target).join(exe(tool, &compiler.host))
+    }
+}
+
+macro_rules! tool {
+    ($($name:ident, $path:expr, $tool_name:expr, $mode:expr;)+) => {
+        #[derive(Copy, Clone)]
+        pub enum Tool {
+            $(
+                $name,
+            )+
+        }
+
+        impl<'a> Builder<'a> {
+            pub fn tool_exe(&self, tool: Tool) -> PathBuf {
+                match tool {
+                    $(Tool::$name =>
+                        self.ensure($name {
+                            stage: 0,
+                            target: self.build.build,
+                        }),
+                    )+
+                }
+            }
+        }
+
+        $(
+            #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+        pub struct $name {
+            pub stage: u32,
+            pub target: Interned<String>,
+        }
+
+        impl Step for $name {
+            type Output = PathBuf;
+
+            fn should_run(run: ShouldRun) -> ShouldRun {
+                run.path($path)
+            }
+
+            fn make_run(run: RunConfig) {
+                run.builder.ensure($name {
+                    stage: run.builder.top_stage,
+                    target: run.target,
+                });
+            }
+
+            fn run(self, builder: &Builder) -> PathBuf {
+                builder.ensure(ToolBuild {
+                    stage: self.stage,
+                    target: self.target,
+                    tool: $tool_name,
+                    mode: $mode,
+                })
+            }
+        }
+        )+
+    }
+}
+
+tool!(
+    Rustbook, "src/tools/rustbook", "rustbook", Mode::Librustc;
+    ErrorIndex, "src/tools/error_index_generator", "error_index_generator", Mode::Librustc;
+    UnstableBookGen, "src/tools/unstable-book-gen", "unstable-book-gen", Mode::Libstd;
+    Tidy, "src/tools/tidy", "tidy", Mode::Libstd;
+    Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::Libstd;
+    CargoTest, "src/tools/cargotest", "cargotest", Mode::Libstd;
+    Compiletest, "src/tools/compiletest", "compiletest", Mode::Libtest;
+    BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::Librustc;
+    RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::Libstd;
+    RustInstaller, "src/tools/rust-installer", "rust-installer", Mode::Libstd;
+);
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct RemoteTestServer {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for RemoteTestServer {
+    type Output = PathBuf;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        run.path("src/tools/remote-test-server")
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(RemoteTestServer {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) -> PathBuf {
+        builder.ensure(ToolBuild {
+            stage: self.stage,
+            target: self.target,
+            tool: "remote-test-server",
+            mode: Mode::Libstd,
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Cargo {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for Cargo {
+    type Output = PathBuf;
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/tools/cargo").default_condition(builder.build.config.extended)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Cargo {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) -> PathBuf {
+        builder.ensure(native::Openssl {
+            target: self.target,
+        });
+        // Cargo depends on procedural macros, which requires a full host
+        // compiler to be available, so we need to depend on that.
+        builder.ensure(compile::Rustc {
+            compiler: builder.compiler(self.stage, builder.build.build),
+            target: builder.build.build,
+        });
+        builder.ensure(ToolBuild {
+            stage: self.stage,
+            target: self.target,
+            tool: "cargo",
+            mode: Mode::Libstd,
+        })
+    }
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Rls {
+    pub stage: u32,
+    pub target: Interned<String>,
+}
+
+impl Step for Rls {
+    type Output = PathBuf;
+    const DEFAULT: bool = true;
+    const ONLY_HOSTS: bool = true;
+
+    fn should_run(run: ShouldRun) -> ShouldRun {
+        let builder = run.builder;
+        run.path("src/tools/rls").default_condition(builder.build.config.extended)
+    }
+
+    fn make_run(run: RunConfig) {
+        run.builder.ensure(Rls {
+            stage: run.builder.top_stage,
+            target: run.target,
+        });
+    }
+
+    fn run(self, builder: &Builder) -> PathBuf {
+        builder.ensure(native::Openssl {
+            target: self.target,
+        });
+        // RLS depends on procedural macros, which requires a full host
+        // compiler to be available, so we need to depend on that.
+        builder.ensure(compile::Rustc {
+            compiler: builder.compiler(self.stage, builder.build.build),
+            target: builder.build.build,
+        });
+        builder.ensure(ToolBuild {
+            stage: self.stage,
+            target: self.target,
+            tool: "rls",
+            mode: Mode::Librustc,
+        })
+    }
+}
+
+impl<'a> Builder<'a> {
+    /// Get a `Command` ready to run the given `tool`, built with the stage 0
+    /// compiler for the build host (see `tool_exe` and `prepare_tool_cmd`).
+    pub fn tool_cmd(&self, tool: Tool) -> Command {
+        let mut cmd = Command::new(self.tool_exe(tool));
+        let compiler = self.compiler(0, self.build.build);
+        self.prepare_tool_cmd(compiler, &mut cmd);
+        cmd
+    }
+
+    /// Prepares the `cmd` provided to be able to run the `compiler` provided.
+    ///
+    /// Notably this munges the dynamic library lookup path to point to the
+    /// right location to run `compiler`.
+    fn prepare_tool_cmd(&self, compiler: Compiler, cmd: &mut Command) {
+        let host = &compiler.host;
+        let mut paths: Vec<PathBuf> = vec![
+            PathBuf::from(&self.sysroot_libdir(compiler, compiler.host)),
+            self.cargo_out(compiler, Mode::Tool, *host).join("deps"),
+        ];
+
+        // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make
+        // mode) and that C compiler may need some extra PATH modification. Do
+        // so here.
+        if compiler.host.contains("msvc") {
+            let curpaths = env::var_os("PATH").unwrap_or_default();
+            let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
+            for &(ref k, ref v) in self.cc[&compiler.host].0.env() {
+                if k != "PATH" {
+                    continue
+                }
+                for path in env::split_paths(v) {
+                    if !curpaths.contains(&path) {
+                        paths.push(path);
+                    }
+                }
+            }
+        }
+        add_lib_path(paths, cmd);
+    }
+}
diff --git a/src/tools/build-manifest/Cargo.toml b/src/tools/build-manifest/Cargo.toml
index 4b876753b1f..844b7aad72f 100644
--- a/src/tools/build-manifest/Cargo.toml
+++ b/src/tools/build-manifest/Cargo.toml
@@ -4,5 +4,6 @@ version = "0.1.0"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 
 [dependencies]
-toml = "0.1"
-rustc-serialize = "0.3"
+toml = "0.4"
+serde = "1.0"
+serde_derive = "1.0"
diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs
index ba37863b1f6..b8efb88acfb 100644
--- a/src/tools/build-manifest/src/main.rs
+++ b/src/tools/build-manifest/src/main.rs
@@ -9,7 +9,9 @@
 // except according to those terms.
 
 extern crate toml;
-extern crate rustc_serialize;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde;
 
 use std::collections::BTreeMap;
 use std::env;
@@ -99,19 +101,21 @@ static MINGW: &'static [&'static str] = &[
     "x86_64-pc-windows-gnu",
 ];
 
+#[derive(Serialize)]
+#[serde(rename_all = "kebab-case")]
 struct Manifest {
     manifest_version: String,
     date: String,
     pkg: BTreeMap<String, Package>,
 }
 
-#[derive(RustcEncodable)]
+#[derive(Serialize)]
 struct Package {
     version: String,
     target: BTreeMap<String, Target>,
 }
 
-#[derive(RustcEncodable)]
+#[derive(Serialize)]
 struct Target {
     available: bool,
     url: Option<String>,
@@ -136,7 +140,7 @@ impl Target {
     }
 }
 
-#[derive(RustcEncodable)]
+#[derive(Serialize)]
 struct Component {
     pkg: String,
     target: String,
@@ -199,28 +203,16 @@ impl Builder {
         self.rls_version = self.version("rls", "x86_64-unknown-linux-gnu");
 
         self.digest_and_sign();
-        let Manifest { manifest_version, date, pkg } = self.build_manifest();
-
-        // Unfortunately we can't use derive(RustcEncodable) here because the
-        // version field is called `manifest-version`, not `manifest_version`.
-        // In lieu of that just create the table directly here with a `BTreeMap`
-        // and wrap it up in a `Value::Table`.
-        let mut manifest = BTreeMap::new();
-        manifest.insert("manifest-version".to_string(),
-                        toml::Value::String(manifest_version));
-        manifest.insert("date".to_string(), toml::Value::String(date.clone()));
-        manifest.insert("pkg".to_string(), toml::encode(&pkg));
-        let manifest = toml::Value::Table(manifest).to_string();
-
+        let manifest = self.build_manifest();
         let filename = format!("channel-rust-{}.toml", self.rust_release);
-        self.write_manifest(&manifest, &filename);
+        self.write_manifest(&toml::to_string(&manifest).unwrap(), &filename);
 
         let filename = format!("channel-rust-{}-date.txt", self.rust_release);
-        self.write_date_stamp(&date, &filename);
+        self.write_date_stamp(&manifest.date, &filename);
 
         if self.rust_release != "beta" && self.rust_release != "nightly" {
-            self.write_manifest(&manifest, "channel-rust-stable.toml");
-            self.write_date_stamp(&date, "channel-rust-stable-date.txt");
+            self.write_manifest(&toml::to_string(&manifest).unwrap(), "channel-rust-stable.toml");
+            self.write_date_stamp(&manifest.date, "channel-rust-stable-date.txt");
         }
     }