about summary refs log tree commit diff
path: root/src/ci/docker/scripts
diff options
context:
space:
mode:
author: Erick Tryzelaar <etryzelaar@google.com> 2024-05-31 20:08:29 +0000
committer: Erick Tryzelaar <etryzelaar@google.com> 2024-06-17 16:50:33 +0000
commit: b7c23761d87c593d239118b647a2cfb8acd3aa28 (patch)
tree: 720b4733c2cfe335598076ae41ff9533057ae17d /src/ci/docker/scripts
parent: 7ac6c2fc685681824fbfc156b38035df743881dd (diff)
download: rust-b7c23761d87c593d239118b647a2cfb8acd3aa28.tar.gz
rust-b7c23761d87c593d239118b647a2cfb8acd3aa28.zip
Sync fuchsia test runner with clang test runner
This synchronizes the fuchsia test running code with the clang test
runner. This brings with it:

* Improved logging
* Uses the fuchsia image from the SDK version
* Caches the product bundle across test runs
* Strips the binaries to reduce the data sent to the emulator
Diffstat (limited to 'src/ci/docker/scripts')
-rwxr-xr-x  src/ci/docker/scripts/fuchsia-test-runner.py  1014
1 file changed, 628 insertions(+), 386 deletions(-)
diff --git a/src/ci/docker/scripts/fuchsia-test-runner.py b/src/ci/docker/scripts/fuchsia-test-runner.py
index d791550a8db..115ee69a589 100755
--- a/src/ci/docker/scripts/fuchsia-test-runner.py
+++ b/src/ci/docker/scripts/fuchsia-test-runner.py
@@ -8,34 +8,137 @@ https://doc.rust-lang.org/stable/rustc/platform-support/fuchsia.html#aarch64-unk
 """
 
 import argparse
+from concurrent.futures import ThreadPoolExecutor
 from dataclasses import dataclass
 import glob
-import hashlib
+import io
 import json
+import logging
 import os
 import platform
+import shlex
 import shutil
 import subprocess
 import sys
-from typing import ClassVar, List
-
-
-@dataclass
+from pathlib import Path
+from typing import ClassVar, List, Optional
+
+
+def check_call_with_logging(
+    args, *, stdout_handler, stderr_handler, check=True, text=True, **kwargs
+):
+    stdout_handler(f"Subprocess: {shlex.join(str(arg) for arg in args)}")
+
+    with subprocess.Popen(
+        args,
+        text=text,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        **kwargs,
+    ) as process:
+        with ThreadPoolExecutor(max_workers=2) as executor:
+
+            def exhaust_pipe(handler, pipe):
+                for line in pipe:
+                    handler(line.rstrip())
+
+            executor_out = executor.submit(
+                exhaust_pipe, stdout_handler, process.stdout
+            )
+            executor_err = executor.submit(
+                exhaust_pipe, stderr_handler, process.stderr
+            )
+            executor_out.result()
+            executor_err.result()
+    retcode = process.poll()
+    if check and retcode:
+        raise subprocess.CalledProcessError(retcode, process.args)
+    return subprocess.CompletedProcess(process.args, retcode)
+
+
+def check_output_with_logging(
+    args, *, stdout_handler, stderr_handler, check=True, text=True, **kwargs
+):
+    stdout_handler(f"Subprocess: {shlex.join(str(arg) for arg in args)}")
+
+    buf = io.StringIO()
+
+    with subprocess.Popen(
+        args,
+        text=text,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        **kwargs,
+    ) as process:
+        with ThreadPoolExecutor(max_workers=2) as executor:
+
+            def exhaust_stdout(handler, buf, pipe):
+                for line in pipe:
+                    handler(line.rstrip())
+                    buf.write(line)
+                    buf.write("\n")
+
+            def exhaust_stderr(handler, pipe):
+                for line in pipe:
+                    handler(line.rstrip())
+
+            executor_out = executor.submit(
+                exhaust_stdout, stdout_handler, buf, process.stdout
+            )
+            executor_err = executor.submit(
+                exhaust_stderr, stderr_handler, process.stderr
+            )
+            executor_out.result()
+            executor_err.result()
+    retcode = process.poll()
+    if check and retcode:
+        raise subprocess.CalledProcessError(retcode, process.args)
+
+    return buf.getvalue()
+
+
+def atomic_link(link: Path, target: Path):
+    link_dir = link.parent
+    os.makedirs(link_dir, exist_ok=True)
+    link_file = link.name
+    tmp_file = link_dir.joinpath(link_file + "_tmp")
+    os.link(target, tmp_file)
+    try:
+        os.rename(tmp_file, link)
+    except Exception as e:
+        raise e
+    finally:
+        if tmp_file.exists():
+            os.remove(tmp_file)
+
+
+@dataclass(kw_only=True)
 class TestEnvironment:
-    rust_build_dir: str
-    sdk_dir: str
+    rust_build_dir: Path
+    sdk_dir: Path
     target: str
+    toolchain_dir: Path
+    local_pb_path: Optional[Path]
+    use_local_pb: bool
     verbose: bool = False
 
+    env_logger = logging.getLogger("env")
+    subprocess_logger = logging.getLogger("env.subprocess")
+    __tmp_dir = None
+
     @staticmethod
-    def tmp_dir():
+    def tmp_dir() -> Path:
+        if TestEnvironment.__tmp_dir:
+            return TestEnvironment.__tmp_dir
         tmp_dir = os.environ.get("TEST_TOOLCHAIN_TMP_DIR")
         if tmp_dir is not None:
-            return os.path.abspath(tmp_dir)
-        return os.path.join(os.path.dirname(__file__), "tmp~")
+            TestEnvironment.__tmp_dir = Path(tmp_dir).absolute()
+        else:
+            TestEnvironment.__tmp_dir = Path(__file__).parent.joinpath("tmp~")
+        return TestEnvironment.__tmp_dir
 
     @staticmethod
-    def triple_to_arch(triple):
+    def triple_to_arch(triple) -> str:
         if "x86_64" in triple:
             return "x64"
         elif "aarch64" in triple:
@@ -44,61 +147,175 @@ class TestEnvironment:
             raise Exception(f"Unrecognized target triple {triple}")
 
     @classmethod
-    def env_file_path(cls):
-        return os.path.join(cls.tmp_dir(), "test_env.json")
+    def env_file_path(cls) -> Path:
+        return cls.tmp_dir().joinpath("test_env.json")
 
     @classmethod
     def from_args(cls, args):
+        local_pb_path = args.local_product_bundle_path
+        if local_pb_path is not None:
+            local_pb_path = Path(local_pb_path).absolute()
+
         return cls(
-            os.path.abspath(args.rust_build),
-            os.path.abspath(args.sdk),
-            args.target,
+            rust_build_dir=Path(args.rust_build).absolute(),
+            sdk_dir=Path(args.sdk).absolute(),
+            target=args.target,
+            toolchain_dir=Path(args.toolchain_dir).absolute(),
+            local_pb_path=local_pb_path,
+            use_local_pb=args.use_local_product_bundle_if_exists,
             verbose=args.verbose,
         )
 
     @classmethod
     def read_from_file(cls):
         with open(cls.env_file_path(), encoding="utf-8") as f:
-            test_env = json.loads(f.read())
+            test_env = json.load(f)
+            local_pb_path = test_env["local_pb_path"]
+            if local_pb_path is not None:
+                local_pb_path = Path(local_pb_path)
+
             return cls(
-                test_env["rust_build_dir"],
-                test_env["sdk_dir"],
-                test_env["target"],
+                rust_build_dir=Path(test_env["rust_build_dir"]),
+                sdk_dir=Path(test_env["sdk_dir"]),
+                target=test_env["target"],
+                toolchain_dir=Path(test_env["toolchain_dir"]),
+                local_pb_path=local_pb_path,
+                use_local_pb=test_env["use_local_pb"],
                 verbose=test_env["verbose"],
             )
 
+    def build_id(self, binary):
+        llvm_readelf = Path(self.toolchain_dir).joinpath("bin", "llvm-readelf")
+        process = subprocess.run(
+            args=[
+                llvm_readelf,
+                "-n",
+                "--elf-output-style=JSON",
+                binary,
+            ],
+            stdout=subprocess.PIPE,
+            stderr=subprocess.STDOUT,
+        )
+        if process.returncode:
+            self.env_logger.error(
+                f"llvm-readelf failed for binary {binary} with output {process.stdout}"
+            )
+            raise Exception(f"Unreadable build-id for binary {binary}")
+        data = json.loads(process.stdout)
+        if len(data) != 1:
+            raise Exception(
+                f"Unreadable output from llvm-readelf for binary {binary}"
+            )
+        notes = data[0]["Notes"]
+        for note in notes:
+            note_section = note["NoteSection"]
+            if note_section["Name"] == ".note.gnu.build-id":
+                return note_section["Note"]["Build ID"]
+        raise Exception(f"Build ID not found for binary {binary}")
+
+    def generate_buildid_dir(
+        self,
+        binary: Path,
+        build_id_dir: Path,
+        build_id: str,
+        log_handler: logging.Logger,
+    ):
+        os.makedirs(build_id_dir, exist_ok=True)
+        suffix = ".debug"
+        # Hardlink the original binary
+        build_id_prefix_dir = build_id_dir.joinpath(build_id[:2])
+        unstripped_binary = build_id_prefix_dir.joinpath(build_id[2:] + suffix)
+        build_id_prefix_dir.mkdir(parents=True, exist_ok=True)
+        atomic_link(unstripped_binary, binary)
+        assert unstripped_binary.exists()
+        stripped_binary = unstripped_binary.with_suffix("")
+        llvm_objcopy = Path(self.toolchain_dir).joinpath("bin", "llvm-objcopy")
+        strip_mode = "--strip-sections"
+        check_call_with_logging(
+            [
+                llvm_objcopy,
+                strip_mode,
+                unstripped_binary,
+                stripped_binary,
+            ],
+            stdout_handler=log_handler.info,
+            stderr_handler=log_handler.error,
+        )
+        return stripped_binary
+
     def write_to_file(self):
         with open(self.env_file_path(), "w", encoding="utf-8") as f:
-            f.write(json.dumps(self.__dict__))
+            local_pb_path = self.local_pb_path
+            if local_pb_path is not None:
+                local_pb_path = str(local_pb_path)
+
+            json.dump(
+                {
+                    "rust_build_dir": str(self.rust_build_dir),
+                    "sdk_dir": str(self.sdk_dir),
+                    "target": self.target,
+                    "toolchain_dir": str(self.toolchain_dir),
+                    "local_pb_path": local_pb_path,
+                    "use_local_pb": self.use_local_pb,
+                    "verbose": self.verbose,
+                },
+                f,
+            )
 
-    def package_server_log_path(self):
-        return os.path.join(self.tmp_dir(), "package_server_log")
+    def setup_logging(self, log_to_file=False):
+        fs = logging.Formatter("%(asctime)s %(levelname)s:%(name)s:%(message)s")
+        if log_to_file:
+            logfile_handler = logging.FileHandler(
+                self.tmp_dir().joinpath("log")
+            )
+            logfile_handler.setLevel(logging.DEBUG)
+            logfile_handler.setFormatter(fs)
+            logging.getLogger().addHandler(logfile_handler)
+        stream_handler = logging.StreamHandler(sys.stdout)
+        stream_handler.setFormatter(fs)
+        if self.verbose:
+            stream_handler.setLevel(logging.DEBUG)
+        else:
+            stream_handler.setLevel(logging.INFO)
+        logging.getLogger().addHandler(stream_handler)
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    @property
+    def package_server_log_path(self) -> Path:
+        return self.tmp_dir().joinpath("package_server_log")
+
+    @property
+    def emulator_log_path(self) -> Path:
+        return self.tmp_dir().joinpath("emulator_log")
 
-    def emulator_log_path(self):
-        return os.path.join(self.tmp_dir(), "emulator_log")
+    @property
+    def packages_dir(self) -> Path:
+        return self.tmp_dir().joinpath("packages")
 
-    def packages_dir(self):
-        return os.path.join(self.tmp_dir(), "packages")
+    @property
+    def output_dir(self) -> Path:
+        return self.tmp_dir().joinpath("output")
 
-    def output_dir(self):
-        return os.path.join(self.tmp_dir(), "output")
+    def read_sdk_version(self):
+        meta_json_path = Path(self.sdk_dir).joinpath("meta", "manifest.json")
+        with open(meta_json_path, encoding="utf-8") as f:
+            meta_json = json.load(f)
+            return meta_json["id"]
 
     TEST_REPO_NAME: ClassVar[str] = "rust-testing"
 
-    def repo_dir(self):
-        return os.path.join(self.tmp_dir(), self.TEST_REPO_NAME)
+    def repo_dir(self) -> Path:
+        return self.tmp_dir().joinpath(self.TEST_REPO_NAME)
 
-    def libs_dir(self):
-        return os.path.join(
-            self.rust_build_dir,
+    def libs_dir(self) -> Path:
+        return self.rust_build_dir.joinpath(
             "host",
             "stage2",
             "lib",
         )
 
-    def rustlibs_dir(self):
-        return os.path.join(
-            self.libs_dir(),
+    def rustlibs_dir(self) -> Path:
+        return self.libs_dir().joinpath(
             "rustlib",
             self.target,
             "lib",
@@ -112,8 +329,8 @@ class TestEnvironment:
             return "a64"
         raise Exception(f"Unrecognized host architecture {machine}")
 
-    def tool_path(self, tool):
-        return os.path.join(self.sdk_dir, "tools", self.sdk_arch(), tool)
+    def tool_path(self, tool) -> Path:
+        return Path(self.sdk_dir).joinpath("tools", self.sdk_arch(), tool)
 
     def host_arch_triple(self):
         machine = platform.machine()
@@ -123,45 +340,25 @@ class TestEnvironment:
             return "aarch64-unknown-linux-gnu"
         raise Exception(f"Unrecognized host architecture {machine}")
 
-    def zxdb_script_path(self):
-        return os.path.join(self.tmp_dir(), "zxdb_script")
-
-    def pm_lockfile_path(self):
-        return os.path.join(self.tmp_dir(), "pm.lock")
-
-    def log_info(self, msg):
-        print(msg)
-
-    def log_debug(self, msg):
-        if self.verbose:
-            print(msg)
-
-    def subprocess_output(self):
-        if self.verbose:
-            return sys.stdout
-        return subprocess.DEVNULL
-
-    def check_call(self, args, **kwargs):
-        self.log_info(f"Running: {' '.join(args)}")
-        return subprocess.check_call(args, **kwargs)
-
-    def check_output(self, args, **kwargs):
-        self.log_info(f"Running: {' '.join(args)}")
-        return subprocess.check_output(args, **kwargs)
+    def zxdb_script_path(self) -> Path:
+        return Path(self.tmp_dir(), "zxdb_script")
 
+    @property
     def ffx_daemon_log_path(self):
-        return os.path.join(self.tmp_dir(), "ffx_daemon_log")
+        return self.tmp_dir().joinpath("ffx_daemon_log")
 
+    @property
     def ffx_isolate_dir(self):
-        return os.path.join(self.tmp_dir(), "ffx_isolate")
+        return self.tmp_dir().joinpath("ffx_isolate")
 
+    @property
     def home_dir(self):
-        return os.path.join(self.tmp_dir(), "user-home")
+        return self.tmp_dir().joinpath("user-home")
 
     def start_ffx_isolation(self):
         # Most of this is translated directly from ffx's isolate library
-        os.mkdir(self.ffx_isolate_dir())
-        os.mkdir(self.home_dir())
+        os.mkdir(self.ffx_isolate_dir)
+        os.mkdir(self.home_dir)
 
         ffx_path = self.tool_path("ffx")
         ffx_env = self.ffx_cmd_env()
@@ -170,7 +367,7 @@ class TestEnvironment:
         # We want this to be a long-running process that persists after the script finishes
         # pylint: disable=consider-using-with
         with open(
-            self.ffx_daemon_log_path(), "w", encoding="utf-8"
+            self.ffx_daemon_log_path, "w", encoding="utf-8"
         ) as ffx_daemon_log_file:
             subprocess.Popen(
                 [
@@ -184,7 +381,7 @@ class TestEnvironment:
             )
 
         # Disable analytics
-        self.check_call(
+        check_call_with_logging(
             [
                 ffx_path,
                 "config",
@@ -192,8 +389,8 @@ class TestEnvironment:
                 "disable",
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
         # Set configs
@@ -203,7 +400,7 @@ class TestEnvironment:
             "test.experimental_structured_output": "true",
         }
         for key, value in configs.items():
-            self.check_call(
+            check_call_with_logging(
                 [
                     ffx_path,
                     "config",
@@ -212,14 +409,14 @@ class TestEnvironment:
                     value,
                 ],
                 env=ffx_env,
-                stdout=self.subprocess_output(),
-                stderr=self.subprocess_output(),
+                stdout_handler=self.subprocess_logger.debug,
+                stderr_handler=self.subprocess_logger.debug,
             )
 
     def ffx_cmd_env(self):
         return {
-            "HOME": self.home_dir(),
-            "FFX_ISOLATE_DIR": self.ffx_isolate_dir(),
+            "HOME": self.home_dir,
+            "FFX_ISOLATE_DIR": self.ffx_isolate_dir,
             # We want to use our own specified temp directory
             "TMP": self.tmp_dir(),
             "TEMP": self.tmp_dir(),
@@ -228,16 +425,15 @@ class TestEnvironment:
         }
 
     def stop_ffx_isolation(self):
-        self.check_call(
+        check_call_with_logging(
             [
                 self.tool_path("ffx"),
                 "daemon",
                 "stop",
-                "-w",
             ],
             env=self.ffx_cmd_env(),
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
     def start(self):
@@ -256,22 +452,23 @@ class TestEnvironment:
         """
 
         # Initialize temp directory
-        if not os.path.exists(self.tmp_dir()):
-            os.mkdir(self.tmp_dir())
-        elif len(os.listdir(self.tmp_dir())) != 0:
-            raise Exception(f"Temp directory is not clean (in {self.tmp_dir()})")
-
-        os.mkdir(self.output_dir())
+        os.makedirs(self.tmp_dir(), exist_ok=True)
+        if len(os.listdir(self.tmp_dir())) != 0:
+            raise Exception(
+                f"Temp directory is not clean (in {self.tmp_dir()})"
+            )
+        self.setup_logging(log_to_file=True)
+        os.mkdir(self.output_dir)
 
         ffx_path = self.tool_path("ffx")
         ffx_env = self.ffx_cmd_env()
 
         # Start ffx isolation
-        self.log_info("Starting ffx isolation...")
+        self.env_logger.info("Starting ffx isolation...")
         self.start_ffx_isolation()
 
         # Stop any running emulators (there shouldn't be any)
-        self.check_call(
+        check_call_with_logging(
             [
                 ffx_path,
                 "emu",
@@ -279,79 +476,95 @@ class TestEnvironment:
                 "--all",
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
-        # Look up the product bundle transfer manifest.
-        self.log_info("Looking up the product bundle transfer manifest...")
-        product_name = "minimal." + self.triple_to_arch(self.target)
-        fuchsia_version = "21.20240610.2.1"
+        if not self.local_pb_path:
+            self.local_pb_path = os.path.join(self.tmp_dir(), "local_pb")
+        else:
+            self.local_pb_path = os.path.abspath(self.local_pb_path)
 
-        out = self.check_output(
-            [
-                ffx_path,
-                "--machine",
-                "json",
-                "product",
-                "lookup",
-                product_name,
-                fuchsia_version,
-                "--base-url",
-                "gs://fuchsia/development/" + fuchsia_version,
-            ],
-            env=ffx_env,
-            stderr=self.subprocess_output(),
-        )
+        if self.use_local_pb and os.path.exists(self.local_pb_path):
+            self.env_logger.info(
+                'Using existing emulator image at "%s"' % self.local_pb_path
+            )
+        else:
+            shutil.rmtree(self.local_pb_path, ignore_errors=True)
 
-        self.log_debug(out)
+            # Look up the product bundle transfer manifest.
+            self.env_logger.info(
+                "Looking up the product bundle transfer manifest..."
+            )
+            product_name = "minimal." + self.triple_to_arch(self.target)
+            sdk_version = self.read_sdk_version()
 
-        try:
-            transfer_manifest_url = json.loads(out)["transfer_manifest_url"]
-        except Exception as e:
-            print(e)
-            raise Exception("Unable to parse transfer manifest") from e
+            output = check_output_with_logging(
+                [
+                    ffx_path,
+                    "--machine",
+                    "json",
+                    "product",
+                    "lookup",
+                    product_name,
+                    sdk_version,
+                    "--base-url",
+                    "gs://fuchsia/development/" + sdk_version,
+                ],
+                env=ffx_env,
+                stdout_handler=self.subprocess_logger.debug,
+                stderr_handler=self.subprocess_logger.debug,
+            )
 
-        # Download the product bundle.
-        product_bundle_dir = os.path.join(self.tmp_dir(), 'product-bundle')
-        self.check_call(
-            [
-                ffx_path,
-                "product",
-                "download",
-                transfer_manifest_url,
-                product_bundle_dir,
-                "--force",
-            ],
-            env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
-        )
+            try:
+                transfer_manifest_url = json.loads(output)[
+                    "transfer_manifest_url"
+                ]
+            except Exception as e:
+                print(e)
+                raise Exception("Unable to parse transfer manifest") from e
+
+            # Download the product bundle.
+            self.env_logger.info("Downloading the product bundle...")
+            check_call_with_logging(
+                [
+                    ffx_path,
+                    "product",
+                    "download",
+                    transfer_manifest_url,
+                    self.local_pb_path,
+                ],
+                env=ffx_env,
+                stdout_handler=self.subprocess_logger.debug,
+                stderr_handler=self.subprocess_logger.debug,
+            )
 
         # Start emulator
+        self.env_logger.info("Starting emulator...")
+
         # FIXME: condition --accel hyper on target arch matching host arch
-        self.check_call(
+        check_call_with_logging(
             [
                 ffx_path,
                 "emu",
                 "start",
-                product_bundle_dir,
+                self.local_pb_path,
                 "--headless",
                 "--log",
-                self.emulator_log_path(),
+                self.emulator_log_path,
                 "--net",
-                "tap",
+                "auto",
                 "--accel",
-                "hyper",
+                "auto",
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
         # Create new package repo
-        self.log_info("Creating package repo...")
-        self.check_call(
+        self.env_logger.info("Creating package repo...")
+        check_call_with_logging(
             [
                 ffx_path,
                 "repository",
@@ -359,11 +572,12 @@ class TestEnvironment:
                 self.repo_dir(),
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
-        self.check_call(
+        # Add repository
+        check_call_with_logging(
             [
                 ffx_path,
                 "repository",
@@ -373,15 +587,12 @@ class TestEnvironment:
                 self.repo_dir(),
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
-        # Write to file
-        self.write_to_file()
-
         # Start repository server
-        self.check_call(
+        check_call_with_logging(
             [
                 ffx_path,
                 "repository",
@@ -391,12 +602,12 @@ class TestEnvironment:
                 "[::]:0",
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
         # Register with newly-started emulator
-        self.check_call(
+        check_call_with_logging(
             [
                 ffx_path,
                 "target",
@@ -406,11 +617,14 @@ class TestEnvironment:
                 self.TEST_REPO_NAME,
             ],
             env=ffx_env,
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
-        self.log_info("Success! Your environment is ready to run tests.")
+        # Write to file
+        self.write_to_file()
+
+        self.env_logger.info("Success! Your environment is ready to run tests.")
 
     # FIXME: shardify this
     # `facet` statement required for TCP testing via
@@ -481,7 +695,7 @@ class TestEnvironment:
         - Forward the test's stdout and stderr as this script's stdout and stderr
         """
 
-        bin_path = os.path.abspath(args.bin_path)
+        bin_path = Path(args.bin_path).absolute()
 
         # Find libstd and libtest
         libstd_paths = glob.glob(os.path.join(self.rustlibs_dir(), "libstd-*.so"))
@@ -490,233 +704,240 @@ class TestEnvironment:
         if not libstd_paths:
             raise Exception(f"Failed to locate libstd (in {self.rustlibs_dir()})")
 
-        # Build a unique, deterministic name for the test using the name of the
-        # binary and the last 6 hex digits of the hash of the full path
-        def path_checksum(path):
-            m = hashlib.sha256()
-            m.update(path.encode("utf-8"))
-            return m.hexdigest()[0:6]
-
         base_name = os.path.basename(os.path.dirname(args.bin_path))
         exe_name = base_name.lower().replace(".", "_")
-        package_name = f"{exe_name}_{path_checksum(bin_path)}"
-
-        package_dir = os.path.join(self.packages_dir(), package_name)
-        cml_path = os.path.join(package_dir, "meta", f"{package_name}.cml")
-        cm_path = os.path.join(package_dir, "meta", f"{package_name}.cm")
-        manifest_path = os.path.join(package_dir, f"{package_name}.manifest")
-        manifest_json_path = os.path.join(package_dir, "package_manifest.json")
-        far_path = os.path.join(package_dir, f"{package_name}-0.far")
+        build_id = self.build_id(bin_path)
+        package_name = f"{exe_name}_{build_id}"
+
+        package_dir = self.packages_dir.joinpath(package_name)
+        package_dir.mkdir(parents=True, exist_ok=True)
+        meta_dir = package_dir.joinpath("meta")
+        meta_dir.mkdir(parents=True, exist_ok=True)
+        meta_package_path = meta_dir.joinpath("package")
+        cml_path = meta_dir.joinpath(f"{package_name}.cml")
+        cm_path = meta_dir.joinpath(f"{package_name}.cm")
+        manifest_path = package_dir.joinpath(f"{package_name}.manifest")
 
         shared_libs = args.shared_libs[: args.n]
         arguments = args.shared_libs[args.n :]
 
-        test_output_dir = os.path.join(self.output_dir(), package_name)
+        test_output_dir = self.output_dir.joinpath(package_name)
 
         # Clean and create temporary output directory
-        if os.path.exists(test_output_dir):
+        if test_output_dir.exists():
             shutil.rmtree(test_output_dir)
-
-        os.mkdir(test_output_dir)
+        test_output_dir.mkdir(parents=True)
 
         # Open log file
-        log_path = os.path.join(test_output_dir, "log")
-        with open(log_path, "w", encoding="utf-8") as log_file:
-
-            def log(msg):
-                print(msg, file=log_file)
-                log_file.flush()
+        runner_logger = logging.getLogger(f"env.package.{package_name}")
+        runner_logger.setLevel(logging.DEBUG)
+        logfile_handler = logging.FileHandler(test_output_dir.joinpath("log"))
+        logfile_handler.setLevel(logging.DEBUG)
+        logfile_handler.setFormatter(
+            logging.Formatter("%(levelname)s:%(name)s:%(message)s")
+        )
+        runner_logger.addHandler(logfile_handler)
+
+        runner_logger.info(f"Bin path: {bin_path}")
+        runner_logger.info("Setting up package...")
+
+        # Link binary to build-id dir and strip it.
+        build_id_dir = self.tmp_dir().joinpath(".build-id")
+        stripped_binary = self.generate_buildid_dir(
+            binary=bin_path,
+            build_id_dir=build_id_dir,
+            build_id=build_id,
+            log_handler=runner_logger,
+        )
+        runner_logger.info(f"Stripped Bin path: {stripped_binary}")
 
-            log(f"Bin path: {bin_path}")
+        runner_logger.info("Writing CML...")
 
-            log("Writing CML...")
+        # Write and compile CML
+        with open(cml_path, "w", encoding="utf-8") as cml:
+            # Collect environment variables
+            env_vars = ""
+            for var_name in self.TEST_ENV_VARS:
+                var_value = os.getenv(var_name)
+                if var_value is not None:
+                    env_vars += f'\n            "{var_name}={var_value}",'
 
-            # Write and compile CML
-            with open(cml_path, "w", encoding="utf-8") as cml:
-                # Collect environment variables
-                env_vars = ""
-                for var_name in self.TEST_ENV_VARS:
-                    var_value = os.getenv(var_name)
-                    if var_value is not None:
-                        env_vars += f'\n            "{var_name}={var_value}",'
+            # Default to no backtrace for test suite
+            if os.getenv("RUST_BACKTRACE") is None:
+                env_vars += '\n            "RUST_BACKTRACE=0",'
 
-                # Default to no backtrace for test suite
-                if os.getenv("RUST_BACKTRACE") is None:
-                    env_vars += '\n            "RUST_BACKTRACE=0",'
+            # Use /tmp as the test temporary directory
+            env_vars += '\n            "RUST_TEST_TMPDIR=/tmp",'
 
-                # Use /tmp as the test temporary directory
-                env_vars += '\n            "RUST_TEST_TMPDIR=/tmp",'
+            cml.write(
+                self.CML_TEMPLATE.format(env_vars=env_vars, exe_name=exe_name)
+            )
 
-                cml.write(
-                    self.CML_TEMPLATE.format(env_vars=env_vars, exe_name=exe_name)
-                )
+        runner_logger.info("Compiling CML...")
 
-            log("Compiling CML...")
+        check_call_with_logging(
+            [
+                self.tool_path("cmc"),
+                "compile",
+                cml_path,
+                "--includepath",
+                ".",
+                "--output",
+                cm_path,
+            ],
+            stdout_handler=runner_logger.info,
+            stderr_handler=runner_logger.warning,
+        )
 
-            self.check_call(
-                [
-                    self.tool_path("cmc"),
-                    "compile",
-                    cml_path,
-                    "--includepath",
-                    ".",
-                    "--output",
-                    cm_path,
-                ],
-                stdout=log_file,
-                stderr=log_file,
+        runner_logger.info("Writing meta/package...")
+        with open(meta_package_path, "w", encoding="utf-8") as f:
+            json.dump({"name": package_name, "version": "0"}, f)
+
+        runner_logger.info("Writing manifest...")
+
+        # Write package manifest
+        with open(manifest_path, "w", encoding="utf-8") as manifest:
+            manifest.write(
+                self.MANIFEST_TEMPLATE.format(
+                    bin_path=stripped_binary,
+                    exe_name=exe_name,
+                    package_dir=package_dir,
+                    package_name=package_name,
+                    target=self.target,
+                    sdk_dir=self.sdk_dir,
+                    libstd_name=os.path.basename(libstd_paths[0]),
+                    libstd_path=libstd_paths[0],
+                    target_arch=self.triple_to_arch(self.target),
+                )
             )
-
-            log("Writing manifest...")
-
-            # Write, build, and archive manifest
-            with open(manifest_path, "w", encoding="utf-8") as manifest:
+            # `libtest` was historically a shared library, but now seems to be (sometimes?)
+            # statically linked. If we find it as a shared library, include it in the manifest.
+            if libtest_paths:
                 manifest.write(
-                    self.MANIFEST_TEMPLATE.format(
-                        bin_path=bin_path,
-                        exe_name=exe_name,
-                        package_dir=package_dir,
-                        package_name=package_name,
-                        target=self.target,
-                        sdk_dir=self.sdk_dir,
-                        libstd_name=os.path.basename(libstd_paths[0]),
-                        libstd_path=libstd_paths[0],
-                        target_arch=self.triple_to_arch(self.target),
-                    )
+                    f"lib/{os.path.basename(libtest_paths[0])}={libtest_paths[0]}\n"
                 )
-                # `libtest`` was historically a shared library, but now seems to be (sometimes?)
-                # statically linked. If we find it as a shared library, include it in the manifest.
-                if libtest_paths:
-                    manifest.write(
-                        f"lib/{os.path.basename(libtest_paths[0])}={libtest_paths[0]}\n"
-                    )
-                for shared_lib in shared_libs:
-                    manifest.write(f"lib/{os.path.basename(shared_lib)}={shared_lib}\n")
-
-            log("Determining API level...")
-            out = self.check_output(
-                [
-                    self.tool_path("ffx"),
-                    "--machine",
-                    "json",
-                    "version",
-                ],
-                env=self.ffx_cmd_env(),
-                stderr=log_file,
-            )
-            api_level = json.loads(out)["tool_version"]["api_level"]
+            for shared_lib in shared_libs:
+                manifest.write(f"lib/{os.path.basename(shared_lib)}={shared_lib}\n")
 
-            log("Compiling and archiving manifest...")
+        runner_logger.info("Determining API level...")
+        out = check_output_with_logging(
+            [
+                self.tool_path("ffx"),
+                "--machine",
+                "json",
+                "version",
+            ],
+            env=self.ffx_cmd_env(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
+        )
+        api_level = json.loads(out)["tool_version"]["api_level"]
 
-            self.check_call(
-                [
-                    self.tool_path("ffx"),
-                    "package",
-                    "build",
-                    manifest_path,
-                    "-o",
-                    package_dir,
-                    "--api-level",
-                    str(api_level),
-                ],
-                env=self.ffx_cmd_env(),
-                stdout=log_file,
-                stderr=log_file,
-            )
+        runner_logger.info("Compiling manifest...")
 
-            self.check_call(
-                [
-                    self.tool_path("ffx"),
-                    "package",
-                    "archive",
-                    "create",
-                    "-o",
-                    far_path,
-                    manifest_json_path,
-                ],
-                env=self.ffx_cmd_env(),
-                stdout=log_file,
-                stderr=log_file,
-            )
+        check_call_with_logging(
+            [
+                self.tool_path("ffx"),
+                "package",
+                "build",
+                manifest_path,
+                "-o",
+                package_dir,
+                "--api-level",
+                str(api_level),
+            ],
+            env=self.ffx_cmd_env(),
+            stdout_handler=runner_logger.info,
+            stderr_handler=runner_logger.warning,
+        )
 
-            log("Publishing package to repo...")
+        runner_logger.info("Publishing package to repo...")
 
-            # Publish package to repo
-            self.check_call(
-                [
-                    self.tool_path("ffx"),
-                    "repository",
-                    "publish",
-                    "--package",
-                    os.path.join(package_dir, "package_manifest.json"),
-                    self.repo_dir(),
-                ],
-                stdout=log_file,
-                stderr=log_file,
-            )
+        # Publish package to repo
+        check_call_with_logging(
+            [
+                self.tool_path("ffx"),
+                "repository",
+                "publish",
+                "--package",
+                os.path.join(package_dir, "package_manifest.json"),
+                self.repo_dir(),
+            ],
+            env=self.ffx_cmd_env(),
+            stdout_handler=runner_logger.info,
+            stderr_handler=runner_logger.warning,
+        )
 
-            log("Running ffx test...")
+        runner_logger.info("Running ffx test...")
 
-            # Run test on emulator
-            subprocess.run(
-                [
-                    self.tool_path("ffx"),
-                    "test",
-                    "run",
-                    f"fuchsia-pkg://{self.TEST_REPO_NAME}/{package_name}#meta/{package_name}.cm",
-                    "--min-severity-logs",
-                    "TRACE",
-                    "--output-directory",
-                    test_output_dir,
-                    "--",
-                ]
-                + arguments,
-                env=self.ffx_cmd_env(),
-                check=False,
-                stdout=log_file,
-                stderr=log_file,
-            )
+        # Run test on emulator
+        check_call_with_logging(
+            [
+                self.tool_path("ffx"),
+                "test",
+                "run",
+                f"fuchsia-pkg://{self.TEST_REPO_NAME}/{package_name}#meta/{package_name}.cm",
+                "--min-severity-logs",
+                "TRACE",
+                "--output-directory",
+                test_output_dir,
+                "--",
+            ]
+            + arguments,
+            env=self.ffx_cmd_env(),
+            check=False,
+            stdout_handler=runner_logger.info,
+            stderr_handler=runner_logger.warning,
+        )
 
-            log("Reporting test suite output...")
+        runner_logger.info("Reporting test suite output...")
 
-            # Read test suite output
-            run_summary_path = os.path.join(test_output_dir, "run_summary.json")
-            if os.path.exists(run_summary_path):
-                with open(run_summary_path, encoding="utf-8") as f:
-                    run_summary = json.loads(f.read())
+        # Read test suite output
+        run_summary_path = test_output_dir.joinpath("run_summary.json")
+        if not run_summary_path.exists():
+            runner_logger.error("Failed to open test run summary")
+            return 254
 
-                suite = run_summary["data"]["suites"][0]
-                case = suite["cases"][0]
+        with open(run_summary_path, encoding="utf-8") as f:
+            run_summary = json.load(f)
 
-                return_code = 0 if case["outcome"] == "PASSED" else 1
+        suite = run_summary["data"]["suites"][0]
+        case = suite["cases"][0]
 
-                artifacts = case["artifacts"]
-                artifact_dir = case["artifact_dir"]
-                stdout_path = None
-                stderr_path = None
+        return_code = 0 if case["outcome"] == "PASSED" else 1
 
-                for path, artifact in artifacts.items():
-                    artifact_path = os.path.join(test_output_dir, artifact_dir, path)
-                    artifact_type = artifact["artifact_type"]
+        artifacts = case["artifacts"]
+        artifact_dir = case["artifact_dir"]
+        stdout_path = None
+        stderr_path = None
 
-                    if artifact_type == "STDERR":
-                        stderr_path = artifact_path
-                    elif artifact_type == "STDOUT":
-                        stdout_path = artifact_path
+        for path, artifact in artifacts.items():
+            artifact_path = os.path.join(test_output_dir, artifact_dir, path)
+            artifact_type = artifact["artifact_type"]
 
-                if stdout_path is not None and os.path.exists(stdout_path):
-                    with open(stdout_path, encoding="utf-8") as f:
-                        print(f.read(), file=sys.stdout, end="")
+            if artifact_type == "STDERR":
+                stderr_path = artifact_path
+            elif artifact_type == "STDOUT":
+                stdout_path = artifact_path
 
-                if stderr_path is not None and os.path.exists(stderr_path):
-                    with open(stderr_path, encoding="utf-8") as f:
-                        print(f.read(), file=sys.stderr, end="")
+        if stdout_path is not None:
+            if not os.path.exists(stdout_path):
+                runner_logger.error(
+                    f"stdout file {stdout_path} does not exist."
+                )
             else:
-                log("Failed to open test run summary")
-                return_code = 254
-
-            log("Done!")
+                with open(stdout_path, encoding="utf-8", errors="ignore") as f:
+                    runner_logger.info(f.read())
+        if stderr_path is not None:
+            if not os.path.exists(stderr_path):
+                runner_logger.error(
+                    f"stderr file {stderr_path} does not exist."
+                )
+            else:
+                with open(stderr_path, encoding="utf-8", errors="ignore") as f:
+                    runner_logger.error(f.read())
 
+        runner_logger.info("Done!")
         return return_code
 
     def stop(self):
@@ -730,65 +951,65 @@ class TestEnvironment:
         During cleanup, this function will stop the emulator, package server, and
         update server, then delete all temporary files. If an error is encountered
         while stopping any running processes, the temporary files will not be deleted.
-        Passing --delete-tmp will force the process to delete the files anyway.
+        Running the cleanup subcommand will force the process to delete the files anyway.
         """
 
-        self.log_debug("Reporting logs...")
+        self.env_logger.debug("Reporting logs...")
 
         # Print test log files
-        for test_dir in os.listdir(self.output_dir()):
-            log_path = os.path.join(self.output_dir(), test_dir, "log")
-            self.log_debug(f"\n---- Logs for test '{test_dir}' ----\n")
+        for test_dir in os.listdir(self.output_dir):
+            log_path = os.path.join(self.output_dir, test_dir, "log")
+            self.env_logger.debug(f"\n---- Logs for test '{test_dir}' ----\n")
             if os.path.exists(log_path):
-                with open(log_path, encoding="utf-8") as log:
-                    self.log_debug(log.read())
+                with open(log_path, encoding="utf-8", errors="ignore") as log:
+                    self.env_logger.debug(log.read())
             else:
-                self.log_debug("No logs found")
+                self.env_logger.debug("No logs found")
 
         # Print the emulator log
-        self.log_debug("\n---- Emulator logs ----\n")
-        if os.path.exists(self.emulator_log_path()):
-            with open(self.emulator_log_path(), encoding="utf-8") as log:
-                self.log_debug(log.read())
+        self.env_logger.debug("\n---- Emulator logs ----\n")
+        if os.path.exists(self.emulator_log_path):
+            with open(self.emulator_log_path, encoding="utf-8") as log:
+                self.env_logger.debug(log.read())
         else:
-            self.log_debug("No emulator logs found")
+            self.env_logger.debug("No emulator logs found")
 
         # Print the package server log
-        self.log_debug("\n---- Package server log ----\n")
-        if os.path.exists(self.package_server_log_path()):
-            with open(self.package_server_log_path(), encoding="utf-8") as log:
-                self.log_debug(log.read())
+        self.env_logger.debug("\n---- Package server log ----\n")
+        if os.path.exists(self.package_server_log_path):
+            with open(self.package_server_log_path, encoding="utf-8") as log:
+                self.env_logger.debug(log.read())
         else:
-            self.log_debug("No package server log found")
+            self.env_logger.debug("No package server log found")
 
         # Print the ffx daemon log
-        self.log_debug("\n---- ffx daemon log ----\n")
-        if os.path.exists(self.ffx_daemon_log_path()):
-            with open(self.ffx_daemon_log_path(), encoding="utf-8") as log:
-                self.log_debug(log.read())
+        self.env_logger.debug("\n---- ffx daemon log ----\n")
+        if os.path.exists(self.ffx_daemon_log_path):
+            with open(self.ffx_daemon_log_path, encoding="utf-8") as log:
+                self.env_logger.debug(log.read())
         else:
-            self.log_debug("No ffx daemon log found")
+            self.env_logger.debug("No ffx daemon log found")
 
         # Shut down the emulator
-        self.log_info("Stopping emulator...")
-        self.check_call(
+        self.env_logger.info("Stopping emulator...")
+        check_call_with_logging(
             [
                 self.tool_path("ffx"),
                 "emu",
                 "stop",
             ],
             env=self.ffx_cmd_env(),
-            stdout=self.subprocess_output(),
-            stderr=self.subprocess_output(),
+            stdout_handler=self.subprocess_logger.debug,
+            stderr_handler=self.subprocess_logger.debug,
         )
 
         # Stop ffx isolation
-        self.log_info("Stopping ffx isolation...")
+        self.env_logger.info("Stopping ffx isolation...")
         self.stop_ffx_isolation()
 
-    def delete_tmp(self):
+    def cleanup(self):
         # Remove temporary files
-        self.log_info("Deleting temporary files...")
+        self.env_logger.info("Deleting temporary files...")
         shutil.rmtree(self.tmp_dir(), ignore_errors=True)
 
     def debug(self, args):
@@ -816,7 +1037,7 @@ class TestEnvironment:
                 f"--symbol-path={self.rust_dir}/lib/rustlib/{self.target}/lib",
             ]
 
-        # Add rust source if it's available
+        # Add rust source if it's available
         rust_src_map = None
         if args.rust_src is not None:
             # This matches the remapped prefix used by compiletest. There's no
@@ -908,21 +1129,24 @@ def start(args):
 
 def run(args):
     test_env = TestEnvironment.read_from_file()
+    test_env.setup_logging(log_to_file=True)
     return test_env.run(args)
 
 
 def stop(args):
     test_env = TestEnvironment.read_from_file()
+    test_env.setup_logging(log_to_file=False)
     test_env.stop()
-    if not args.no_delete:
-        test_env.delete_tmp()
+    if not args.no_cleanup:
+        test_env.cleanup()
     return 0
 
 
-def delete_tmp(args):
+def cleanup(args):
     del args
     test_env = TestEnvironment.read_from_file()
-    test_env.delete_tmp()
+    test_env.setup_logging(log_to_file=False)
+    test_env.cleanup()
     return 0
 
 
@@ -934,6 +1158,7 @@ def debug(args):
 
 def syslog(args):
     test_env = TestEnvironment.read_from_file()
+    test_env.setup_logging(log_to_file=True)
     test_env.syslog(args)
     return 0
 
@@ -973,6 +1198,21 @@ def main():
         help="the target platform to test",
         required=True,
     )
+    start_parser.add_argument(
+        "--toolchain-dir",
+        help="the toolchain directory",
+        required=True,
+    )
+    start_parser.add_argument(
+        "--local-product-bundle-path",
+        help="the path where the product-bundle should be downloaded to",
+    )
+    start_parser.add_argument(
+        "--use-local-product-bundle-if-exists",
+        help="if the product bundle already exists in the local path, use "
+            "it instead of downloading it again",
+        action="store_true",
+    )
     start_parser.set_defaults(func=start)
 
     run_parser = subparsers.add_parser(
@@ -993,18 +1233,23 @@ def main():
         "stop", help="shuts down and cleans up the testing environment"
     )
     stop_parser.add_argument(
-        "--no-delete",
+        "--no-cleanup",
         default=False,
         action="store_true",
         help="don't delete temporary files after stopping",
     )
     stop_parser.set_defaults(func=stop)
 
-    delete_parser = subparsers.add_parser(
-        "delete-tmp",
+    cleanup_parser = subparsers.add_parser(
+        "cleanup",
         help="deletes temporary files after the testing environment has been manually cleaned up",
     )
-    delete_parser.set_defaults(func=delete_tmp)
+    cleanup_parser.set_defaults(func=cleanup)
+
+    syslog_parser = subparsers.add_parser(
+        "syslog", help="prints the device syslog"
+    )
+    syslog_parser.set_defaults(func=syslog)
 
     debug_parser = subparsers.add_parser(
         "debug",
@@ -1033,9 +1278,6 @@ def main():
     )
     debug_parser.set_defaults(func=debug)
 
-    syslog_parser = subparsers.add_parser("syslog", help="prints the device syslog")
-    syslog_parser.set_defaults(func=syslog)
-
     args = parser.parse_args()
     return args.func(args)