author     Jakub Beránek <berykubik@gmail.com>  2025-03-15 09:35:29 +0100
committer  Jakub Beránek <berykubik@gmail.com>  2025-03-15 11:16:11 +0100
commit     30d57576b9040a438adc2414540da3778addf34b
tree       6196f7c1a56843cad21ab1f9add481caf5c65052
parent     6c24c9c088a0eb858976781090a5b1fbb57981bb
Print test diffs into GitHub summary
So that we can also observe them for try builds, before merging a PR.
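For reference, a minimal sketch of what the updated CI step ends up running, assembled from the ci.yml hunk below (CI_JOB_NAME, METRICS and GITHUB_STEP_SUMMARY are environment variables the workflow already provides; quoting and $() substitution are tidied here relative to the actual diff):

    # Closest bors-authored merge commit reachable from the PR's base (HEAD^1),
    # used as the baseline the test diff is computed against.
    PARENT_COMMIT=$(git rev-list --author='bors <bors@rust-lang.org>' -n1 --first-parent HEAD^1)

    # Emit bootstrap stats plus the test diff into the GitHub step summary.
    ./build/citool/debug/citool postprocess-metrics \
        --job-name "${CI_JOB_NAME}" \
        --parent "${PARENT_COMMIT}" \
        "${METRICS}" >> "${GITHUB_STEP_SUMMARY}"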
-rw-r--r--  .github/workflows/ci.yml   |  5
-rw-r--r--  src/ci/citool/src/main.rs  | 41
2 files changed, 40 insertions, 6 deletions
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ffcdc40de3a..aaae67c28bc 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -252,7 +252,12 @@ jobs:
             exit 0
           fi
 
+          # Get closest bors merge commit
+          PARENT_COMMIT=`git rev-list --author='bors <bors@rust-lang.org>' -n1 --first-parent HEAD^1`
+
           ./build/citool/debug/citool postprocess-metrics \
+              --job-name ${CI_JOB_NAME} \
+              --parent ${PARENT_COMMIT} \
               ${METRICS} >> ${GITHUB_STEP_SUMMARY}
 
       - name: upload job metrics to DataDog
diff --git a/src/ci/citool/src/main.rs b/src/ci/citool/src/main.rs
index 5f1932854b5..fb0639367bd 100644
--- a/src/ci/citool/src/main.rs
+++ b/src/ci/citool/src/main.rs
@@ -5,7 +5,7 @@ mod jobs;
 mod metrics;
 mod utils;
 
-use std::collections::BTreeMap;
+use std::collections::{BTreeMap, HashMap};
 use std::path::{Path, PathBuf};
 use std::process::Command;
 
@@ -18,7 +18,7 @@ use crate::analysis::output_test_diffs;
 use crate::cpu_usage::load_cpu_usage;
 use crate::datadog::upload_datadog_metric;
 use crate::jobs::RunType;
-use crate::metrics::{download_auto_job_metrics, load_metrics};
+use crate::metrics::{JobMetrics, download_auto_job_metrics, download_job_metrics, load_metrics};
 use crate::utils::load_env_var;
 use analysis::output_bootstrap_stats;
 
@@ -138,6 +138,27 @@ fn upload_ci_metrics(cpu_usage_csv: &Path) -> anyhow::Result<()> {
     Ok(())
 }
 
+fn postprocess_metrics(
+    metrics_path: PathBuf,
+    parent: Option<String>,
+    job_name: Option<String>,
+) -> anyhow::Result<()> {
+    let metrics = load_metrics(&metrics_path)?;
+    output_bootstrap_stats(&metrics);
+
+    let (Some(parent), Some(job_name)) = (parent, job_name) else {
+        return Ok(());
+    };
+
+    let parent_metrics =
+        download_job_metrics(&job_name, &parent).context("cannot download parent metrics")?;
+    let job_metrics =
+        HashMap::from([(job_name, JobMetrics { parent: Some(parent_metrics), current: metrics })]);
+    output_test_diffs(job_metrics);
+
+    Ok(())
+}
+
 #[derive(clap::Parser)]
 enum Args {
     /// Calculate a list of jobs that should be executed on CI.
@@ -155,10 +176,19 @@ enum Args {
         #[clap(long = "type", default_value = "auto")]
         job_type: JobType,
     },
-    /// Postprocess the metrics.json file generated by bootstrap.
+    /// Postprocess the metrics.json file generated by bootstrap and output
+    /// various statistics.
+    /// If `--parent` and `--job-name` are provided, also display a diff
+    /// against previous metrics that are downloaded from CI.
     PostprocessMetrics {
         /// Path to the metrics.json file
         metrics_path: PathBuf,
+        /// A parent SHA against which to compare.
+        #[clap(long, requires("job_name"))]
+        parent: Option<String>,
+        /// The name of the current job.
+        #[clap(long, requires("parent"))]
+        job_name: Option<String>,
     },
     /// Upload CI metrics to Datadog.
     UploadBuildMetrics {
@@ -209,9 +239,8 @@ fn main() -> anyhow::Result<()> {
         Args::UploadBuildMetrics { cpu_usage_csv } => {
             upload_ci_metrics(&cpu_usage_csv)?;
         }
-        Args::PostprocessMetrics { metrics_path } => {
-            let metrics = load_metrics(&metrics_path)?;
-            output_bootstrap_stats(&metrics);
+        Args::PostprocessMetrics { metrics_path, parent, job_name } => {
+            postprocess_metrics(metrics_path, parent, job_name)?;
         }
         Args::PostMergeReport { current, parent } => {
             let db = load_db(default_jobs_file)?;
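For reference, illustrative invocations of the updated subcommand, following the clap definitions above ("path/to/metrics.json" is a placeholder path and PARENT_COMMIT is the bors SHA resolved as in the ci.yml step; this is a usage sketch, not taken from the commit):

    # Stats only, as before (the new flags are optional):
    citool postprocess-metrics path/to/metrics.json

    # Stats plus a test diff against the downloaded parent metrics;
    # clap's `requires` rejects either flag supplied on its own.
    citool postprocess-metrics \
        --job-name "${CI_JOB_NAME}" \
        --parent "${PARENT_COMMIT}" \
        path/to/metrics.json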