path: root/src/bootstrap/bootstrap.py
Diffstat (limited to 'src/bootstrap/bootstrap.py')
-rw-r--r--  src/bootstrap/bootstrap.py  663
1 file changed, 393 insertions(+), 270 deletions(-)
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 9ef9ccec193..89415afbe3b 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -19,14 +19,17 @@ try:
 except ImportError:
     lzma = None
 
+
 def platform_is_win32():
-    return sys.platform == 'win32'
+    return sys.platform == "win32"
+
 
 if platform_is_win32():
     EXE_SUFFIX = ".exe"
 else:
     EXE_SUFFIX = ""
 
+
 def get_cpus():
     if hasattr(os, "sched_getaffinity"):
         return len(os.sched_getaffinity(0))
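
Only the first lines of get_cpus() appear in this hunk. As a minimal sketch of the same idea, assuming a fallback to os.cpu_count() for platforms without sched_getaffinity (the helper name available_cpus is illustrative, not from bootstrap.py):

    import os

    def available_cpus():
        # Prefer the set of CPUs this process may actually run on (Linux only),
        # then fall back to the machine-wide count, then to 1.
        if hasattr(os, "sched_getaffinity"):
            return len(os.sched_getaffinity(0))
        return os.cpu_count() or 1
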
@@ -51,11 +54,14 @@ def get(base, url, path, checksums, verbose=False):
 
     try:
         if url not in checksums:
-            raise RuntimeError(("src/stage0 doesn't contain a checksum for {}. "
-                                "Pre-built artifacts might not be available for this "
-                                "target at this time, see https://doc.rust-lang.org/nightly"
-                                "/rustc/platform-support.html for more information.")
-                                .format(url))
+            raise RuntimeError(
+                (
+                    "src/stage0 doesn't contain a checksum for {}. "
+                    "Pre-built artifacts might not be available for this "
+                    "target at this time, see https://doc.rust-lang.org/nightly"
+                    "/rustc/platform-support.html for more information."
+                ).format(url)
+            )
         sha256 = checksums[url]
         if os.path.exists(path):
             if verify(path, sha256, False):
@@ -64,8 +70,11 @@ def get(base, url, path, checksums, verbose=False):
                 return
             else:
                 if verbose:
-                    eprint("ignoring already-downloaded file",
-                        path, "due to failed verification")
+                    eprint(
+                        "ignoring already-downloaded file",
+                        path,
+                        "due to failed verification",
+                    )
                 os.unlink(path)
         download(temp_path, "{}/{}".format(base, url), True, verbose)
         if not verify(temp_path, sha256, verbose):
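
The surrounding get() helper downloads into a temporary path, verifies the sha256, and only then moves the file into place, deleting the temporary file on failure. A self-contained sketch of that pattern; fetch_verified and the caller-supplied fetch callback are illustrative names, not part of bootstrap.py.

    import hashlib
    import os
    import shutil
    import tempfile

    def fetch_verified(fetch, dest, expected_sha256):
        # Download into a temporary file so a failed or corrupted transfer
        # never clobbers an existing, verified artifact at `dest`.
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(dest) or ".")
        os.close(fd)
        try:
            fetch(tmp)  # caller-supplied download into `tmp`
            with open(tmp, "rb") as f:
                digest = hashlib.sha256(f.read()).hexdigest()
            if digest != expected_sha256:
                raise RuntimeError("checksum mismatch for {}".format(dest))
            shutil.move(tmp, dest)
        finally:
            if os.path.exists(tmp):
                os.unlink(tmp)
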
@@ -79,12 +88,14 @@ def get(base, url, path, checksums, verbose=False):
                 eprint("removing", temp_path)
             os.unlink(temp_path)
 
+
 def curl_version():
     m = re.match(bytes("^curl ([0-9]+)\\.([0-9]+)", "utf8"), require(["curl", "-V"]))
     if m is None:
         return (0, 0)
     return (int(m[1]), int(m[2]))
 
+
 def download(path, url, probably_big, verbose):
     for _ in range(4):
         try:
@@ -114,32 +125,53 @@ def _download(path, url, probably_big, verbose, exception):
         require(["curl", "--version"], exception=platform_is_win32())
         extra_flags = []
         if curl_version() > (7, 70):
-            extra_flags = [ "--retry-all-errors" ]
+            extra_flags = ["--retry-all-errors"]
         # options should be kept in sync with
         # src/bootstrap/src/core/download.rs
         # for consistency.
         # they are also more comprehensively explained in that file.
-        run(["curl", option] + extra_flags + [
-            # Follow redirect.
-            "--location",
-            # timeout if speed is < 10 bytes/sec for > 30 seconds
-            "--speed-time", "30", "--speed-limit", "10",
-            # timeout if cannot connect within 30 seconds
-            "--connect-timeout", "30",
-            "--output", path,
-            "--continue-at", "-",
-            "--retry", "3", "--show-error", "--remote-time", "--fail", url],
+        run(
+            ["curl", option]
+            + extra_flags
+            + [
+                # Follow redirect.
+                "--location",
+                # timeout if speed is < 10 bytes/sec for > 30 seconds
+                "--speed-time",
+                "30",
+                "--speed-limit",
+                "10",
+                # timeout if cannot connect within 30 seconds
+                "--connect-timeout",
+                "30",
+                "--output",
+                path,
+                "--continue-at",
+                "-",
+                "--retry",
+                "3",
+                "--show-error",
+                "--remote-time",
+                "--fail",
+                url,
+            ],
             verbose=verbose,
-            exception=True, # Will raise RuntimeError on failure
+            exception=True,  # Will raise RuntimeError on failure
         )
     except (subprocess.CalledProcessError, OSError, RuntimeError):
         # see http://serverfault.com/questions/301128/how-to-download
+        script = "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;"
         if platform_is_win32():
-            run_powershell([
-                 "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
-                 "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(url, path)],
+            run_powershell(
+                [
+                    script,
+                    "(New-Object System.Net.WebClient).DownloadFile('{}', '{}')".format(
+                        url, path
+                    ),
+                ],
                 verbose=verbose,
-                exception=exception)
+                exception=exception,
+            )
         # Check if the RuntimeError raised by run(curl) should be silenced
         elif verbose or exception:
             raise
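
To reproduce the transfer by hand, the curl flags assembled above boil down to the invocation below; download_with_curl is an illustrative wrapper, and the silent/progress-bar option handling is omitted.

    import subprocess

    def download_with_curl(url, path):
        # Follow redirects, resume partial downloads, retry transient failures,
        # and give up if the transfer stalls below 10 bytes/sec for 30 seconds.
        subprocess.run(
            [
                "curl", "--location",
                "--speed-time", "30", "--speed-limit", "10",
                "--connect-timeout", "30",
                "--output", path, "--continue-at", "-",
                "--retry", "3", "--show-error", "--remote-time", "--fail",
                url,
            ],
            check=True,
        )
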
@@ -153,9 +185,11 @@ def verify(path, expected, verbose):
         found = hashlib.sha256(source.read()).hexdigest()
     verified = found == expected
     if not verified:
-        eprint("invalid checksum:\n"
-              "    found:    {}\n"
-              "    expected: {}".format(found, expected))
+        eprint(
+            "invalid checksum:\n" "    found:    {}\n" "    expected: {}".format(
+                found, expected
+            )
+        )
     return verified
 
 
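verify() reads the entire artifact into memory before hashing. A chunked variant is sketched below as an alternative, not as what bootstrap.py does; sha256_of is an illustrative name.

    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        # Stream the file through the hash in 1 MiB chunks so large tarballs
        # need not be held in memory all at once.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()
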
@@ -170,7 +204,7 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
             name = member.replace(fname + "/", "", 1)
             if match is not None and not name.startswith(match):
                 continue
-            name = name[len(match) + 1:]
+            name = name[len(match) + 1 :]
 
             dst_path = os.path.join(dst, name)
             if verbose:
@@ -186,18 +220,18 @@ def unpack(tarball, tarball_suffix, dst, verbose=False, match=None):
 def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
     """Run a child program in a new process"""
     if verbose:
-        eprint("running: " + ' '.join(args))
+        eprint("running: " + " ".join(args))
     sys.stdout.flush()
     # Ensure that the .exe is used on Windows just in case a Linux ELF has been
     # compiled in the same directory.
-    if os.name == 'nt' and not args[0].endswith('.exe'):
-        args[0] += '.exe'
+    if os.name == "nt" and not args[0].endswith(".exe"):
+        args[0] += ".exe"
     # Use Popen here instead of call() as it apparently allows powershell on
     # Windows to not lock up waiting for input presumably.
     ret = subprocess.Popen(args, **kwargs)
     code = ret.wait()
     if code != 0:
-        err = "failed to run: " + ' '.join(args)
+        err = "failed to run: " + " ".join(args)
         if verbose or exception:
             raise RuntimeError(err)
         # For most failures, we definitely do want to print this error, or the user will have no
@@ -209,30 +243,30 @@ def run(args, verbose=False, exception=False, is_bootstrap=False, **kwargs):
         else:
             sys.exit(err)
 
+
 def run_powershell(script, *args, **kwargs):
     """Run a powershell script"""
     run(["PowerShell.exe", "/nologo", "-Command"] + script, *args, **kwargs)
 
 
 def require(cmd, exit=True, exception=False):
-    '''Run a command, returning its output.
+    """Run a command, returning its output.
     On error,
         If `exception` is `True`, raise the error
         Otherwise If `exit` is `True`, exit the process
-        Else return None.'''
+        Else return None."""
     try:
         return subprocess.check_output(cmd).strip()
     except (subprocess.CalledProcessError, OSError) as exc:
         if exception:
             raise
         elif exit:
-            eprint("ERROR: unable to run `{}`: {}".format(' '.join(cmd), exc))
+            eprint("ERROR: unable to run `{}`: {}".format(" ".join(cmd), exc))
             eprint("Please make sure it's installed and in the path.")
             sys.exit(1)
         return None
 
 
-
 def format_build_time(duration):
     """Return a nicer format for build time
 
@@ -252,13 +286,16 @@ def default_build_triple(verbose):
 
     if platform_is_win32():
         try:
-            version = subprocess.check_output(["rustc", "--version", "--verbose"],
-                    stderr=subprocess.DEVNULL)
+            version = subprocess.check_output(
+                ["rustc", "--version", "--verbose"], stderr=subprocess.DEVNULL
+            )
             version = version.decode(default_encoding)
-            host = next(x for x in version.split('\n') if x.startswith("host: "))
+            host = next(x for x in version.split("\n") if x.startswith("host: "))
             triple = host.split("host: ")[1]
             if verbose:
-                eprint("detected default triple {} from pre-installed rustc".format(triple))
+                eprint(
+                    "detected default triple {} from pre-installed rustc".format(triple)
+                )
             return triple
         except Exception as e:
             if verbose:
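
On Windows the triple is recovered from `rustc --version --verbose` by picking out the `host:` line, as shown above. A tiny worked example on abbreviated, made-up sample output:

    sample = "rustc 1.77.0 (aaaaaaaaa 2024-01-01)\nhost: x86_64-pc-windows-msvc\nrelease: 1.77.0\n"
    host = next(x for x in sample.split("\n") if x.startswith("host: "))
    assert host.split("host: ")[1] == "x86_64-pc-windows-msvc"
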
@@ -270,148 +307,149 @@ def default_build_triple(verbose):
 
     # If we do not have `uname`, assume Windows.
     if uname is None:
-        return 'x86_64-pc-windows-msvc'
+        return "x86_64-pc-windows-msvc"
 
     kernel, cputype, processor = uname.decode(default_encoding).split(maxsplit=2)
 
     # The goal here is to come up with the same triple as LLVM would,
     # at least for the subset of platforms we're willing to target.
     kerneltype_mapper = {
-        'Darwin': 'apple-darwin',
-        'DragonFly': 'unknown-dragonfly',
-        'FreeBSD': 'unknown-freebsd',
-        'Haiku': 'unknown-haiku',
-        'NetBSD': 'unknown-netbsd',
-        'OpenBSD': 'unknown-openbsd',
-        'GNU': 'unknown-hurd',
+        "Darwin": "apple-darwin",
+        "DragonFly": "unknown-dragonfly",
+        "FreeBSD": "unknown-freebsd",
+        "Haiku": "unknown-haiku",
+        "NetBSD": "unknown-netbsd",
+        "OpenBSD": "unknown-openbsd",
+        "GNU": "unknown-hurd",
     }
 
     # Consider the direct transformation first and then the special cases
     if kernel in kerneltype_mapper:
         kernel = kerneltype_mapper[kernel]
-    elif kernel == 'Linux':
+    elif kernel == "Linux":
         # Apple doesn't support `-o` so this can't be used in the combined
         # uname invocation above
         ostype = require(["uname", "-o"], exit=required).decode(default_encoding)
-        if ostype == 'Android':
-            kernel = 'linux-android'
+        if ostype == "Android":
+            kernel = "linux-android"
         else:
-            kernel = 'unknown-linux-gnu'
-    elif kernel == 'SunOS':
-        kernel = 'pc-solaris'
+            kernel = "unknown-linux-gnu"
+    elif kernel == "SunOS":
+        kernel = "pc-solaris"
         # On Solaris, uname -m will return a machine classification instead
         # of a cpu type, so uname -p is recommended instead.  However, the
         # output from that option is too generic for our purposes (it will
         # always emit 'i386' on x86/amd64 systems).  As such, isainfo -k
         # must be used instead.
-        cputype = require(['isainfo', '-k']).decode(default_encoding)
+        cputype = require(["isainfo", "-k"]).decode(default_encoding)
         # sparc cpus have sun as a target vendor
-        if 'sparc' in cputype:
-            kernel = 'sun-solaris'
-    elif kernel.startswith('MINGW'):
+        if "sparc" in cputype:
+            kernel = "sun-solaris"
+    elif kernel.startswith("MINGW"):
         # msys' `uname` does not print gcc configuration, but prints msys
         # configuration. so we cannot believe `uname -m`:
         # msys1 is always i686 and msys2 is always x86_64.
         # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
         # MINGW64 on x86_64.
-        kernel = 'pc-windows-gnu'
-        cputype = 'i686'
-        if os.environ.get('MSYSTEM') == 'MINGW64':
-            cputype = 'x86_64'
-    elif kernel.startswith('MSYS'):
-        kernel = 'pc-windows-gnu'
-    elif kernel.startswith('CYGWIN_NT'):
-        cputype = 'i686'
-        if kernel.endswith('WOW64'):
-            cputype = 'x86_64'
-        kernel = 'pc-windows-gnu'
+        kernel = "pc-windows-gnu"
+        cputype = "i686"
+        if os.environ.get("MSYSTEM") == "MINGW64":
+            cputype = "x86_64"
+    elif kernel.startswith("MSYS"):
+        kernel = "pc-windows-gnu"
+    elif kernel.startswith("CYGWIN_NT"):
+        cputype = "i686"
+        if kernel.endswith("WOW64"):
+            cputype = "x86_64"
+        kernel = "pc-windows-gnu"
     elif platform_is_win32():
         # Some Windows platforms might have a `uname` command that returns a
         # non-standard string (e.g. gnuwin32 tools returns `windows32`). In
         # these cases, fall back to using sys.platform.
-        return 'x86_64-pc-windows-msvc'
-    elif kernel == 'AIX':
+        return "x86_64-pc-windows-msvc"
+    elif kernel == "AIX":
         # `uname -m` returns the machine ID rather than machine hardware on AIX,
         # so we are unable to use cputype to form triple. AIX 7.2 and
         # above supports 32-bit and 64-bit mode simultaneously and `uname -p`
         # returns `powerpc`, however we only support `powerpc64-ibm-aix` in
         # rust on AIX. For above reasons, kerneltype_mapper and cputype_mapper
         # are not used to infer AIX's triple.
-        return 'powerpc64-ibm-aix'
+        return "powerpc64-ibm-aix"
     else:
         err = "unknown OS type: {}".format(kernel)
         sys.exit(err)
 
-    if cputype in ['powerpc', 'riscv'] and kernel == 'unknown-freebsd':
-        cputype = subprocess.check_output(
-              ['uname', '-p']).strip().decode(default_encoding)
+    if cputype in ["powerpc", "riscv"] and kernel == "unknown-freebsd":
+        cputype = (
+            subprocess.check_output(["uname", "-p"]).strip().decode(default_encoding)
+        )
     cputype_mapper = {
-        'BePC': 'i686',
-        'aarch64': 'aarch64',
-        'aarch64eb': 'aarch64',
-        'amd64': 'x86_64',
-        'arm64': 'aarch64',
-        'i386': 'i686',
-        'i486': 'i686',
-        'i686': 'i686',
-        'i686-AT386': 'i686',
-        'i786': 'i686',
-        'loongarch64': 'loongarch64',
-        'm68k': 'm68k',
-        'csky': 'csky',
-        'powerpc': 'powerpc',
-        'powerpc64': 'powerpc64',
-        'powerpc64le': 'powerpc64le',
-        'ppc': 'powerpc',
-        'ppc64': 'powerpc64',
-        'ppc64le': 'powerpc64le',
-        'riscv64': 'riscv64gc',
-        's390x': 's390x',
-        'x64': 'x86_64',
-        'x86': 'i686',
-        'x86-64': 'x86_64',
-        'x86_64': 'x86_64'
+        "BePC": "i686",
+        "aarch64": "aarch64",
+        "aarch64eb": "aarch64",
+        "amd64": "x86_64",
+        "arm64": "aarch64",
+        "i386": "i686",
+        "i486": "i686",
+        "i686": "i686",
+        "i686-AT386": "i686",
+        "i786": "i686",
+        "loongarch64": "loongarch64",
+        "m68k": "m68k",
+        "csky": "csky",
+        "powerpc": "powerpc",
+        "powerpc64": "powerpc64",
+        "powerpc64le": "powerpc64le",
+        "ppc": "powerpc",
+        "ppc64": "powerpc64",
+        "ppc64le": "powerpc64le",
+        "riscv64": "riscv64gc",
+        "s390x": "s390x",
+        "x64": "x86_64",
+        "x86": "i686",
+        "x86-64": "x86_64",
+        "x86_64": "x86_64",
     }
 
     # Consider the direct transformation first and then the special cases
     if cputype in cputype_mapper:
         cputype = cputype_mapper[cputype]
-    elif cputype in {'xscale', 'arm'}:
-        cputype = 'arm'
-        if kernel == 'linux-android':
-            kernel = 'linux-androideabi'
-        elif kernel == 'unknown-freebsd':
+    elif cputype in {"xscale", "arm"}:
+        cputype = "arm"
+        if kernel == "linux-android":
+            kernel = "linux-androideabi"
+        elif kernel == "unknown-freebsd":
             cputype = processor
-            kernel = 'unknown-freebsd'
-    elif cputype == 'armv6l':
-        cputype = 'arm'
-        if kernel == 'linux-android':
-            kernel = 'linux-androideabi'
+            kernel = "unknown-freebsd"
+    elif cputype == "armv6l":
+        cputype = "arm"
+        if kernel == "linux-android":
+            kernel = "linux-androideabi"
         else:
-            kernel += 'eabihf'
-    elif cputype in {'armv7l', 'armv8l'}:
-        cputype = 'armv7'
-        if kernel == 'linux-android':
-            kernel = 'linux-androideabi'
+            kernel += "eabihf"
+    elif cputype in {"armv7l", "armv8l"}:
+        cputype = "armv7"
+        if kernel == "linux-android":
+            kernel = "linux-androideabi"
         else:
-            kernel += 'eabihf'
-    elif cputype == 'mips':
-        if sys.byteorder == 'big':
-            cputype = 'mips'
-        elif sys.byteorder == 'little':
-            cputype = 'mipsel'
+            kernel += "eabihf"
+    elif cputype == "mips":
+        if sys.byteorder == "big":
+            cputype = "mips"
+        elif sys.byteorder == "little":
+            cputype = "mipsel"
         else:
             raise ValueError("unknown byteorder: {}".format(sys.byteorder))
-    elif cputype == 'mips64':
-        if sys.byteorder == 'big':
-            cputype = 'mips64'
-        elif sys.byteorder == 'little':
-            cputype = 'mips64el'
+    elif cputype == "mips64":
+        if sys.byteorder == "big":
+            cputype = "mips64"
+        elif sys.byteorder == "little":
+            cputype = "mips64el"
         else:
-            raise ValueError('unknown byteorder: {}'.format(sys.byteorder))
+            raise ValueError("unknown byteorder: {}".format(sys.byteorder))
         # only the n64 ABI is supported, indicate it
-        kernel += 'abi64'
-    elif cputype == 'sparc' or cputype == 'sparcv9' or cputype == 'sparc64':
+        kernel += "abi64"
+    elif cputype == "sparc" or cputype == "sparcv9" or cputype == "sparc64":
         pass
     else:
         err = "unknown cpu type: {}".format(cputype)
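
The kerneltype_mapper and cputype_mapper tables above normalize `uname` output into LLVM-style target triples. A compressed sketch of how the two mappings compose, with the tables cut down to two entries each and all of the special cases above skipped; guess_triple is illustrative.

    KERNELS = {"Darwin": "apple-darwin", "FreeBSD": "unknown-freebsd"}
    CPUS = {"arm64": "aarch64", "amd64": "x86_64"}

    def guess_triple(kernel, cputype):
        # Triples read "<arch>-<vendor>-<os>"; the kernel table already folds
        # the vendor into its value (e.g. "apple-darwin").
        return "{}-{}".format(CPUS.get(cputype, cputype), KERNELS.get(kernel, kernel))

    assert guess_triple("Darwin", "arm64") == "aarch64-apple-darwin"
    assert guess_triple("FreeBSD", "amd64") == "x86_64-unknown-freebsd"
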
@@ -422,8 +460,8 @@ def default_build_triple(verbose):
 
 @contextlib.contextmanager
 def output(filepath):
-    tmp = filepath + '.tmp'
-    with open(tmp, 'w') as f:
+    tmp = filepath + ".tmp"
+    with open(tmp, "w") as f:
         yield f
     try:
         if os.path.exists(filepath):
@@ -467,6 +505,7 @@ class DownloadInfo:
         self.pattern = pattern
         self.verbose = verbose
 
+
 def download_component(download_info):
     if not os.path.exists(download_info.tarball_path):
         get(
@@ -477,6 +516,7 @@ def download_component(download_info):
             verbose=download_info.verbose,
         )
 
+
 def unpack_component(download_info):
     unpack(
         download_info.tarball_path,
@@ -486,26 +526,30 @@ def unpack_component(download_info):
         verbose=download_info.verbose,
     )
 
+
 class FakeArgs:
     """Used for unit tests to avoid updating all call sites"""
+
     def __init__(self):
-        self.build = ''
-        self.build_dir = ''
+        self.build = ""
+        self.build_dir = ""
         self.clean = False
         self.verbose = False
         self.json_output = False
-        self.color = 'auto'
-        self.warnings = 'default'
+        self.color = "auto"
+        self.warnings = "default"
+
 
 class RustBuild(object):
     """Provide all the methods required to build Rust"""
+
     def __init__(self, config_toml="", args=None):
         if args is None:
             args = FakeArgs()
         self.git_version = None
         self.nix_deps_dir = None
         self._should_fix_bins_and_dylibs = None
-        self.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
+        self.rust_root = os.path.abspath(os.path.join(__file__, "../../.."))
 
         self.config_toml = config_toml
 
@@ -515,26 +559,28 @@ class RustBuild(object):
         self.color = args.color
         self.warnings = args.warnings
 
-        config_verbose_count = self.get_toml('verbose', 'build')
+        config_verbose_count = self.get_toml("verbose", "build")
         if config_verbose_count is not None:
             self.verbose = max(self.verbose, int(config_verbose_count))
 
-        self.use_vendored_sources = self.get_toml('vendor', 'build') == 'true'
-        self.use_locked_deps = self.get_toml('locked-deps', 'build') == 'true'
+        self.use_vendored_sources = self.get_toml("vendor", "build") == "true"
+        self.use_locked_deps = self.get_toml("locked-deps", "build") == "true"
 
-        build_dir = args.build_dir or self.get_toml('build-dir', 'build') or 'build'
+        build_dir = args.build_dir or self.get_toml("build-dir", "build") or "build"
         self.build_dir = os.path.abspath(build_dir)
 
-        self.stage0_data = parse_stage0_file(os.path.join(self.rust_root, "src", "stage0"))
+        self.stage0_data = parse_stage0_file(
+            os.path.join(self.rust_root, "src", "stage0")
+        )
         self.stage0_compiler = Stage0Toolchain(
-            self.stage0_data["compiler_date"],
-            self.stage0_data["compiler_version"]
+            self.stage0_data["compiler_date"], self.stage0_data["compiler_version"]
+        )
+        self.download_url = (
+            os.getenv("RUSTUP_DIST_SERVER") or self.stage0_data["dist_server"]
         )
-        self.download_url = os.getenv("RUSTUP_DIST_SERVER") or self.stage0_data["dist_server"]
 
         self.build = args.build or self.build_triple()
 
-
     def download_toolchain(self):
         """Fetch the build system for Rust, written in Rust
 
@@ -550,58 +596,73 @@ class RustBuild(object):
 
         key = self.stage0_compiler.date
         is_outdated = self.program_out_of_date(self.rustc_stamp(), key)
-        need_rustc = self.rustc().startswith(bin_root) and (not os.path.exists(self.rustc()) \
-            or is_outdated)
-        need_cargo = self.cargo().startswith(bin_root) and (not os.path.exists(self.cargo()) \
-            or is_outdated)
+        need_rustc = self.rustc().startswith(bin_root) and (
+            not os.path.exists(self.rustc()) or is_outdated
+        )
+        need_cargo = self.cargo().startswith(bin_root) and (
+            not os.path.exists(self.cargo()) or is_outdated
+        )
 
         if need_rustc or need_cargo:
             if os.path.exists(bin_root):
                 # HACK: On Windows, we can't delete rust-analyzer-proc-macro-server while it's
                 # running. Kill it.
                 if platform_is_win32():
-                    print("Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain")
-                    regex =  '{}\\\\(host|{})\\\\stage0\\\\libexec'.format(
-                        os.path.basename(self.build_dir),
-                        self.build
+                    print(
+                        "Killing rust-analyzer-proc-macro-srv before deleting stage0 toolchain"
+                    )
+                    regex = "{}\\\\(host|{})\\\\stage0\\\\libexec".format(
+                        os.path.basename(self.build_dir), self.build
                     )
                     script = (
                         # NOTE: can't use `taskkill` or `Get-Process -Name` because they error if
                         # the server isn't running.
-                        'Get-Process | ' +
-                        'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |' +
-                        'Where-Object {{$_.Path -match "{}"}} |'.format(regex) +
-                        'Stop-Process'
+                        "Get-Process | "
+                        + 'Where-Object {$_.Name -eq "rust-analyzer-proc-macro-srv"} |'
+                        + 'Where-Object {{$_.Path -match "{}"}} |'.format(regex)
+                        + "Stop-Process"
                     )
                     run_powershell([script])
                 shutil.rmtree(bin_root)
 
-            cache_dst = (self.get_toml('bootstrap-cache-path', 'build') or
-                os.path.join(self.build_dir, "cache"))
+            cache_dst = self.get_toml("bootstrap-cache-path", "build") or os.path.join(
+                self.build_dir, "cache"
+            )
 
             rustc_cache = os.path.join(cache_dst, key)
             if not os.path.exists(rustc_cache):
                 os.makedirs(rustc_cache)
 
-            tarball_suffix = '.tar.gz' if lzma is None else '.tar.xz'
+            tarball_suffix = ".tar.gz" if lzma is None else ".tar.xz"
 
-            toolchain_suffix = "{}-{}{}".format(rustc_channel, self.build, tarball_suffix)
+            toolchain_suffix = "{}-{}{}".format(
+                rustc_channel, self.build, tarball_suffix
+            )
 
             tarballs_to_download = []
 
             if need_rustc:
                 tarballs_to_download.append(
-                    ("rust-std-{}".format(toolchain_suffix), "rust-std-{}".format(self.build))
+                    (
+                        "rust-std-{}".format(toolchain_suffix),
+                        "rust-std-{}".format(self.build),
+                    )
+                )
+                tarballs_to_download.append(
+                    ("rustc-{}".format(toolchain_suffix), "rustc")
                 )
-                tarballs_to_download.append(("rustc-{}".format(toolchain_suffix), "rustc"))
 
             if need_cargo:
-                tarballs_to_download.append(("cargo-{}".format(toolchain_suffix), "cargo"))
+                tarballs_to_download.append(
+                    ("cargo-{}".format(toolchain_suffix), "cargo")
+                )
 
             tarballs_download_info = [
                 DownloadInfo(
                     base_download_url=self.download_url,
-                    download_path="dist/{}/{}".format(self.stage0_compiler.date, filename),
+                    download_path="dist/{}/{}".format(
+                        self.stage0_compiler.date, filename
+                    ),
                     bin_root=self.bin_root(),
                     tarball_path=os.path.join(rustc_cache, filename),
                     tarball_suffix=tarball_suffix,
@@ -620,7 +681,11 @@ class RustBuild(object):
             # In Python 2.7, Pool cannot be used as a context manager.
             pool_size = min(len(tarballs_download_info), get_cpus())
             if self.verbose:
-                print('Choosing a pool size of', pool_size, 'for the unpacking of the tarballs')
+                print(
+                    "Choosing a pool size of",
+                    pool_size,
+                    "for the unpacking of the tarballs",
+                )
             p = Pool(pool_size)
             try:
                 # FIXME: A cheap workaround for https://github.com/rust-lang/rust/issues/125578,
@@ -639,7 +704,9 @@ class RustBuild(object):
 
                 self.fix_bin_or_dylib("{}/bin/rustc".format(bin_root))
                 self.fix_bin_or_dylib("{}/bin/rustdoc".format(bin_root))
-                self.fix_bin_or_dylib("{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root))
+                self.fix_bin_or_dylib(
+                    "{}/libexec/rust-analyzer-proc-macro-srv".format(bin_root)
+                )
                 lib_dir = "{}/lib".format(bin_root)
                 rustlib_bin_dir = "{}/rustlib/{}/bin".format(lib_dir, self.build)
                 self.fix_bin_or_dylib("{}/rust-lld".format(rustlib_bin_dir))
@@ -667,12 +734,15 @@ class RustBuild(object):
         def get_answer():
             default_encoding = sys.getdefaultencoding()
             try:
-                ostype = subprocess.check_output(
-                    ['uname', '-s']).strip().decode(default_encoding)
+                ostype = (
+                    subprocess.check_output(["uname", "-s"])
+                    .strip()
+                    .decode(default_encoding)
+                )
             except subprocess.CalledProcessError:
                 return False
             except OSError as reason:
-                if getattr(reason, 'winerror', None) is not None:
+                if getattr(reason, "winerror", None) is not None:
                     return False
                 raise reason
 
@@ -690,17 +760,23 @@ class RustBuild(object):
             # The latter one does not exist on NixOS when using tmpfs as root.
             try:
                 with open("/etc/os-release", "r") as f:
-                    is_nixos = any(ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"')
-                                   for ln in f)
+                    is_nixos = any(
+                        ln.strip() in ("ID=nixos", "ID='nixos'", 'ID="nixos"')
+                        for ln in f
+                    )
             except FileNotFoundError:
                 is_nixos = False
 
             # If not on NixOS, then warn if user seems to be atop Nix shell
             if not is_nixos:
-                in_nix_shell = os.getenv('IN_NIX_SHELL')
+                in_nix_shell = os.getenv("IN_NIX_SHELL")
                 if in_nix_shell:
-                    eprint("The IN_NIX_SHELL environment variable is `{}`;".format(in_nix_shell),
-                          "you may need to set `patch-binaries-for-nix=true` in config.toml")
+                    eprint(
+                        "The IN_NIX_SHELL environment variable is `{}`;".format(
+                            in_nix_shell
+                        ),
+                        "you may need to set `patch-binaries-for-nix=true` in config.toml",
+                    )
 
             return is_nixos
 
@@ -736,7 +812,7 @@ class RustBuild(object):
             # zlib: Needed as a system dependency of `libLLVM-*.so`.
             # patchelf: Needed for patching ELF binaries (see doc comment above).
             nix_deps_dir = "{}/{}".format(self.build_dir, ".nix-deps")
-            nix_expr = '''
+            nix_expr = """
             with (import <nixpkgs> {});
             symlinkJoin {
               name = "rust-stage0-dependencies";
@@ -746,24 +822,30 @@ class RustBuild(object):
                 stdenv.cc.bintools
               ];
             }
-            '''
+            """
             try:
-                subprocess.check_output([
-                    "nix-build", "-E", nix_expr, "-o", nix_deps_dir,
-                ])
+                subprocess.check_output(
+                    [
+                        "nix-build",
+                        "-E",
+                        nix_expr,
+                        "-o",
+                        nix_deps_dir,
+                    ]
+                )
             except subprocess.CalledProcessError as reason:
                 eprint("WARNING: failed to call nix-build:", reason)
                 return
             self.nix_deps_dir = nix_deps_dir
 
         patchelf = "{}/bin/patchelf".format(nix_deps_dir)
-        rpath_entries = [
-            os.path.join(os.path.realpath(nix_deps_dir), "lib")
-        ]
+        rpath_entries = [os.path.join(os.path.realpath(nix_deps_dir), "lib")]
         patchelf_args = ["--add-rpath", ":".join(rpath_entries)]
         if ".so" not in fname:
             # Finally, set the correct .interp for binaries
-            with open("{}/nix-support/dynamic-linker".format(nix_deps_dir)) as dynamic_linker:
+            with open(
+                "{}/nix-support/dynamic-linker".format(nix_deps_dir)
+            ) as dynamic_linker:
                 patchelf_args += ["--set-interpreter", dynamic_linker.read().rstrip()]
 
         try:
@@ -781,13 +863,13 @@ class RustBuild(object):
         >>> expected = os.path.join("build", "host", "stage0", ".rustc-stamp")
         >>> assert rb.rustc_stamp() == expected, rb.rustc_stamp()
         """
-        return os.path.join(self.bin_root(), '.rustc-stamp')
+        return os.path.join(self.bin_root(), ".rustc-stamp")
 
     def program_out_of_date(self, stamp_path, key):
         """Check if the given program stamp is out of date"""
         if not os.path.exists(stamp_path) or self.clean:
             return True
-        with open(stamp_path, 'r') as stamp:
+        with open(stamp_path, "r") as stamp:
             return key != stamp.read()
 
     def bin_root(self):
@@ -834,11 +916,11 @@ class RustBuild(object):
     def get_toml_static(config_toml, key, section=None):
         cur_section = None
         for line in config_toml.splitlines():
-            section_match = re.match(r'^\s*\[(.*)\]\s*$', line)
+            section_match = re.match(r"^\s*\[(.*)\]\s*$", line)
             if section_match is not None:
                 cur_section = section_match.group(1)
 
-            match = re.match(r'^{}\s*=(.*)$'.format(key), line)
+            match = re.match(r"^{}\s*=(.*)$".format(key), line)
             if match is not None:
                 value = match.group(1)
                 if section is None or section == cur_section:
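
get_toml_static() scans config.toml line by line, tracking the current [section] with one regular expression and matching `key = value` with a second. A self-contained sketch on a small config string; toml_lookup is illustrative and only strips surrounding double quotes, whereas the real code defers to get_string().

    import re

    def toml_lookup(config, key, section=None):
        cur_section = None
        for line in config.splitlines():
            section_match = re.match(r"^\s*\[(.*)\]\s*$", line)
            if section_match is not None:
                cur_section = section_match.group(1)
            match = re.match(r"^{}\s*=(.*)$".format(key), line)
            if match is not None and (section is None or section == cur_section):
                return match.group(1).strip().strip('"')
        return None

    cfg = '[build]\nbuild-dir = "custom-build"\nverbose = 1\n'
    assert toml_lookup(cfg, "build-dir", "build") == "custom-build"
    assert toml_lookup(cfg, "verbose", "build") == "1"
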
@@ -847,11 +929,11 @@ class RustBuild(object):
 
     def cargo(self):
         """Return config path for cargo"""
-        return self.program_config('cargo')
+        return self.program_config("cargo")
 
     def rustc(self):
         """Return config path for rustc"""
-        return self.program_config('rustc')
+        return self.program_config("rustc")
 
     def program_config(self, program):
         """Return config path for the given program at the given stage
@@ -886,12 +968,12 @@ class RustBuild(object):
         """
         start = line.find('"')
         if start != -1:
-            end = start + 1 + line[start + 1:].find('"')
-            return line[start + 1:end]
-        start = line.find('\'')
+            end = start + 1 + line[start + 1 :].find('"')
+            return line[start + 1 : end]
+        start = line.find("'")
         if start != -1:
-            end = start + 1 + line[start + 1:].find('\'')
-            return line[start + 1:end]
+            end = start + 1 + line[start + 1 :].find("'")
+            return line[start + 1 : end]
         return None
 
     def bootstrap_out(self):
@@ -941,24 +1023,37 @@ class RustBuild(object):
             del env["CARGO_BUILD_TARGET"]
         env["CARGO_TARGET_DIR"] = build_dir
         env["RUSTC"] = self.rustc()
-        env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
-            (os.pathsep + env["LD_LIBRARY_PATH"]) \
-            if "LD_LIBRARY_PATH" in env else ""
-        env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
-            (os.pathsep + env["DYLD_LIBRARY_PATH"]) \
-            if "DYLD_LIBRARY_PATH" in env else ""
-        env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
-            (os.pathsep + env["LIBRARY_PATH"]) \
-            if "LIBRARY_PATH" in env else ""
-        env["LIBPATH"] = os.path.join(self.bin_root(), "lib") + \
-            (os.pathsep + env["LIBPATH"]) \
-            if "LIBPATH" in env else ""
+        env["LD_LIBRARY_PATH"] = (
+            os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LD_LIBRARY_PATH"])
+            if "LD_LIBRARY_PATH" in env
+            else ""
+        )
+        env["DYLD_LIBRARY_PATH"] = (
+            os.path.join(self.bin_root(), "lib")
+            + (os.pathsep + env["DYLD_LIBRARY_PATH"])
+            if "DYLD_LIBRARY_PATH" in env
+            else ""
+        )
+        env["LIBRARY_PATH"] = (
+            os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LIBRARY_PATH"])
+            if "LIBRARY_PATH" in env
+            else ""
+        )
+        env["LIBPATH"] = (
+            os.path.join(self.bin_root(), "lib") + (os.pathsep + env["LIBPATH"])
+            if "LIBPATH" in env
+            else ""
+        )
 
         # Export Stage0 snapshot compiler related env variables
         build_section = "target.{}".format(self.build)
         host_triple_sanitized = self.build.replace("-", "_")
         var_data = {
-            "CC": "cc", "CXX": "cxx", "LD": "linker", "AR": "ar", "RANLIB": "ranlib"
+            "CC": "cc",
+            "CXX": "cxx",
+            "LD": "linker",
+            "AR": "ar",
+            "RANLIB": "ranlib",
         }
         for var_name, toml_key in var_data.items():
             toml_val = self.get_toml(toml_key, build_section)
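
One subtlety the reformatting above makes explicit: in the LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, LIBRARY_PATH, and LIBPATH assignments, the conditional expression binds more loosely than `+`, so when the variable is not already set the result is the empty string rather than the stage0 lib directory. A minimal sketch, using a hypothetical env dict and a placeholder path:

    import os

    env = {}
    lib = os.path.join("/tmp/stage0", "lib")  # placeholder for bin_root()/lib

    # Parsed as: (lib + (sep + env[...])) if cond else ""
    value = lib + (os.pathsep + env["LD_LIBRARY_PATH"]) if "LD_LIBRARY_PATH" in env else ""
    assert value == ""

    env["LD_LIBRARY_PATH"] = "/usr/lib"
    value = lib + (os.pathsep + env["LD_LIBRARY_PATH"]) if "LD_LIBRARY_PATH" in env else ""
    assert value == lib + os.pathsep + "/usr/lib"
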
@@ -1023,14 +1118,16 @@ class RustBuild(object):
         if "RUSTFLAGS_BOOTSTRAP" in env:
             env["RUSTFLAGS"] += " " + env["RUSTFLAGS_BOOTSTRAP"]
 
-        env["PATH"] = os.path.join(self.bin_root(), "bin") + \
-            os.pathsep + env["PATH"]
+        env["PATH"] = os.path.join(self.bin_root(), "bin") + os.pathsep + env["PATH"]
         if not os.path.isfile(self.cargo()):
-            raise Exception("no cargo executable found at `{}`".format(
-                self.cargo()))
-        args = [self.cargo(), "build", "--manifest-path",
-                os.path.join(self.rust_root, "src/bootstrap/Cargo.toml"),
-                "-Zroot-dir="+self.rust_root]
+            raise Exception("no cargo executable found at `{}`".format(self.cargo()))
+        args = [
+            self.cargo(),
+            "build",
+            "--manifest-path",
+            os.path.join(self.rust_root, "src/bootstrap/Cargo.toml"),
+            "-Zroot-dir=" + self.rust_root,
+        ]
         args.extend("--verbose" for _ in range(self.verbose))
         if self.use_locked_deps:
             args.append("--locked")
@@ -1058,83 +1155,103 @@ class RustBuild(object):
         Note that `default_build_triple` is moderately expensive,
         so use `self.build` where possible.
         """
-        config = self.get_toml('build')
+        config = self.get_toml("build")
         return config or default_build_triple(self.verbose)
 
     def check_vendored_status(self):
         """Check that vendoring is configured properly"""
         # keep this consistent with the equivalent check in bootstrap:
         # https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/lib.rs#L399-L405
-        if 'SUDO_USER' in os.environ and not self.use_vendored_sources:
+        if "SUDO_USER" in os.environ and not self.use_vendored_sources:
             if os.getuid() == 0:
                 self.use_vendored_sources = True
-                eprint('INFO: looks like you\'re trying to run this command as root')
-                eprint('      and so in order to preserve your $HOME this will now')
-                eprint('      use vendored sources by default.')
+                eprint("INFO: looks like you're trying to run this command as root")
+                eprint("      and so in order to preserve your $HOME this will now")
+                eprint("      use vendored sources by default.")
 
-        cargo_dir = os.path.join(self.rust_root, '.cargo')
+        cargo_dir = os.path.join(self.rust_root, ".cargo")
+        url = "https://ci-artifacts.rust-lang.org/rustc-builds/<commit>/rustc-nightly-src.tar.xz"
         if self.use_vendored_sources:
-            vendor_dir = os.path.join(self.rust_root, 'vendor')
+            vendor_dir = os.path.join(self.rust_root, "vendor")
             if not os.path.exists(vendor_dir):
-                eprint('ERROR: vendoring required, but vendor directory does not exist.')
-                eprint('       Run `x.py vendor` to initialize the vendor directory.')
-                eprint('       Alternatively, use the pre-vendored `rustc-src` dist component.')
-                eprint('       To get a stable/beta/nightly version, download it from: ')
-                eprint('       '
-                'https://forge.rust-lang.org/infra/other-installation-methods.html#source-code')
-                eprint('       To get a specific commit version, download it using the below URL,')
-                eprint('       replacing <commit> with a specific commit checksum: ')
-                eprint('       '
-                'https://ci-artifacts.rust-lang.org/rustc-builds/<commit>/rustc-nightly-src.tar.xz')
-                eprint('       Once you have the source downloaded, place the vendor directory')
-                eprint('       from the archive in the root of the rust project.')
+                eprint(
+                    "ERROR: vendoring required, but vendor directory does not exist."
+                )
+                eprint("       Run `x.py vendor` to initialize the vendor directory.")
+                eprint(
+                    "       Alternatively, use the pre-vendored `rustc-src` dist component."
+                )
+                eprint(
+                    "       To get a stable/beta/nightly version, download it from: "
+                )
+                eprint(
+                    "       "
+                    "https://forge.rust-lang.org/infra/other-installation-methods.html#source-code"
+                )
+                eprint(
+                    "       To get a specific commit version, download it using the below URL,"
+                )
+                eprint("       replacing <commit> with a specific commit checksum: ")
+                eprint("       ", url)
+                eprint(
+                    "       Once you have the source downloaded, place the vendor directory"
+                )
+                eprint("       from the archive in the root of the rust project.")
                 raise Exception("{} not found".format(vendor_dir))
 
             if not os.path.exists(cargo_dir):
-                eprint('ERROR: vendoring required, but .cargo/config does not exist.')
+                eprint("ERROR: vendoring required, but .cargo/config does not exist.")
                 raise Exception("{} not found".format(cargo_dir))
 
+
 def parse_args(args):
     """Parse the command line arguments that the python script needs."""
     parser = argparse.ArgumentParser(add_help=False)
-    parser.add_argument('-h', '--help', action='store_true')
-    parser.add_argument('--config')
-    parser.add_argument('--build-dir')
-    parser.add_argument('--build')
-    parser.add_argument('--color', choices=['always', 'never', 'auto'])
-    parser.add_argument('--clean', action='store_true')
-    parser.add_argument('--json-output', action='store_true')
-    parser.add_argument('--warnings', choices=['deny', 'warn', 'default'], default='default')
-    parser.add_argument('-v', '--verbose', action='count', default=0)
+    parser.add_argument("-h", "--help", action="store_true")
+    parser.add_argument("--config")
+    parser.add_argument("--build-dir")
+    parser.add_argument("--build")
+    parser.add_argument("--color", choices=["always", "never", "auto"])
+    parser.add_argument("--clean", action="store_true")
+    parser.add_argument("--json-output", action="store_true")
+    parser.add_argument(
+        "--warnings", choices=["deny", "warn", "default"], default="default"
+    )
+    parser.add_argument("-v", "--verbose", action="count", default=0)
 
     return parser.parse_known_args(args)[0]
 
+
 def parse_stage0_file(path):
     result = {}
-    with open(path, 'r') as file:
+    with open(path, "r") as file:
         for line in file:
             line = line.strip()
-            if line and not line.startswith('#'):
-                key, value = line.split('=', 1)
+            if line and not line.startswith("#"):
+                key, value = line.split("=", 1)
                 result[key.strip()] = value.strip()
     return result
 
+
 def bootstrap(args):
     """Configure, fetch, build and run the initial bootstrap"""
-    rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
+    rust_root = os.path.abspath(os.path.join(__file__, "../../.."))
 
-    if not os.path.exists(os.path.join(rust_root, '.git')) and \
-            os.path.exists(os.path.join(rust_root, '.github')):
-        eprint("warn: Looks like you are trying to bootstrap Rust from a source that is neither a "
-               "git clone nor distributed tarball.\nThis build may fail due to missing submodules "
-               "unless you put them in place manually.")
+    if not os.path.exists(os.path.join(rust_root, ".git")) and os.path.exists(
+        os.path.join(rust_root, ".github")
+    ):
+        eprint(
+            "warn: Looks like you are trying to bootstrap Rust from a source that is neither a "
+            "git clone nor distributed tarball.\nThis build may fail due to missing submodules "
+            "unless you put them in place manually."
+        )
 
     # Read from `--config`, then `RUST_BOOTSTRAP_CONFIG`, then `./config.toml`,
     # then `config.toml` in the root directory.
-    toml_path = args.config or os.getenv('RUST_BOOTSTRAP_CONFIG')
+    toml_path = args.config or os.getenv("RUST_BOOTSTRAP_CONFIG")
     using_default_path = toml_path is None
     if using_default_path:
-        toml_path = 'config.toml'
+        toml_path = "config.toml"
         if not os.path.exists(toml_path):
             toml_path = os.path.join(rust_root, toml_path)
 
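The src/stage0 file read by parse_stage0_file() is a plain key=value listing with `#` comments. A worked example over a string; the keys compiler_date and compiler_version are the ones the script reads elsewhere, but the values here are invented.

    def parse_stage0_text(text):
        # Same parsing as parse_stage0_file(), applied to a string for illustration.
        result = {}
        for line in text.splitlines():
            line = line.strip()
            if line and not line.startswith("#"):
                key, value = line.split("=", 1)
                result[key.strip()] = value.strip()
        return result

    sample = "# stage0 pins\ncompiler_date=2024-01-01\ncompiler_version=1.77.0\n"
    data = parse_stage0_text(sample)
    assert data["compiler_date"] == "2024-01-01"
    assert data["compiler_version"] == "1.77.0"
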
@@ -1144,23 +1261,23 @@ def bootstrap(args):
         with open(toml_path) as config:
             config_toml = config.read()
     else:
-        config_toml = ''
+        config_toml = ""
 
-    profile = RustBuild.get_toml_static(config_toml, 'profile')
+    profile = RustBuild.get_toml_static(config_toml, "profile")
     if profile is not None:
         # Allows creating alias for profile names, allowing
         # profiles to be renamed while maintaining back compatibility
         # Keep in sync with `profile_aliases` in config.rs
-        profile_aliases = {
-            "user": "dist"
-        }
-        include_file = 'config.{}.toml'.format(profile_aliases.get(profile) or profile)
-        include_dir = os.path.join(rust_root, 'src', 'bootstrap', 'defaults')
+        profile_aliases = {"user": "dist"}
+        include_file = "config.{}.toml".format(profile_aliases.get(profile) or profile)
+        include_dir = os.path.join(rust_root, "src", "bootstrap", "defaults")
         include_path = os.path.join(include_dir, include_file)
 
         if not os.path.exists(include_path):
-            raise Exception("Unrecognized config profile '{}'. Check src/bootstrap/defaults"
-            " for available options.".format(profile))
+            raise Exception(
+                "Unrecognized config profile '{}'. Check src/bootstrap/defaults"
+                " for available options.".format(profile)
+            )
 
         # HACK: This works because `self.get_toml()` returns the first match it finds for a
         # specific key, so appending our defaults at the end allows the user to override them
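
The profile handling above maps a `profile` setting in config.toml to an include file under src/bootstrap/defaults, consulting profile_aliases so renamed profiles keep working. A small sketch of that lookup; profile_include_path is an illustrative helper and "/checkout" is a placeholder source root.

    import os

    profile_aliases = {"user": "dist"}

    def profile_include_path(rust_root, profile):
        include_file = "config.{}.toml".format(profile_aliases.get(profile) or profile)
        return os.path.join(rust_root, "src", "bootstrap", "defaults", include_file)

    # The old "user" profile is transparently aliased to "dist".
    assert profile_include_path("/checkout", "user").endswith("config.dist.toml")
    assert profile_include_path("/checkout", "dist").endswith("config.dist.toml")
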
@@ -1196,8 +1313,8 @@ def main():
     start_time = time()
 
     # x.py help <cmd> ...
-    if len(sys.argv) > 1 and sys.argv[1] == 'help':
-        sys.argv[1] = '-h'
+    if len(sys.argv) > 1 and sys.argv[1] == "help":
+        sys.argv[1] = "-h"
 
     args = parse_args(sys.argv)
     help_triggered = args.help or len(sys.argv) == 1
@@ -1207,14 +1324,15 @@ def main():
     if help_triggered:
         eprint(
             "INFO: Downloading and building bootstrap before processing --help command.\n"
-            "      See src/bootstrap/README.md for help with common commands.")
+            "      See src/bootstrap/README.md for help with common commands."
+        )
 
     exit_code = 0
     success_word = "successfully"
     try:
         bootstrap(args)
     except (SystemExit, KeyboardInterrupt) as error:
-        if hasattr(error, 'code') and isinstance(error.code, int):
+        if hasattr(error, "code") and isinstance(error.code, int):
             exit_code = error.code
         else:
             exit_code = 1
@@ -1222,9 +1340,14 @@ def main():
         success_word = "unsuccessfully"
 
     if not help_triggered:
-        eprint("Build completed", success_word, "in", format_build_time(time() - start_time))
+        eprint(
+            "Build completed",
+            success_word,
+            "in",
+            format_build_time(time() - start_time),
+        )
     sys.exit(exit_code)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     main()