compliance: Initialize compliance checks (#53231)

Finn Evers created

Release Notes:

- N/A

Change summary

.github/workflows/compliance_check.yml                |  55 +
.github/workflows/release.yml                         |  84 +
Cargo.lock                                            | 148 ++
Cargo.toml                                            |   3 
tooling/compliance/Cargo.toml                         |  38 
tooling/compliance/LICENSE-GPL                        |   1 
tooling/compliance/src/checks.rs                      | 647 +++++++++++++
tooling/compliance/src/git.rs                         | 591 +++++++++++
tooling/compliance/src/github.rs                      | 424 ++++++++
tooling/compliance/src/lib.rs                         |   4 
tooling/compliance/src/report.rs                      | 446 ++++++++
tooling/xtask/Cargo.toml                              |   6 
tooling/xtask/src/main.rs                             |   2 
tooling/xtask/src/tasks.rs                            |   1 
tooling/xtask/src/tasks/compliance.rs                 | 135 ++
tooling/xtask/src/tasks/workflows.rs                  |   2 
tooling/xtask/src/tasks/workflows/compliance_check.rs |  66 +
tooling/xtask/src/tasks/workflows/release.rs          | 113 ++
18 files changed, 2,756 insertions(+), 10 deletions(-)

Detailed changes

.github/workflows/compliance_check.yml 🔗

@@ -0,0 +1,55 @@
+# Generated from xtask::workflows::compliance_check
+# Rebuild with `cargo xtask workflows`.
+name: compliance_check
+env:
+  CARGO_TERM_COLOR: always
+on:
+  schedule:
+  - cron: 30 17 * * 2
+jobs:
+  scheduled_compliance_check:
+    if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+    runs-on: namespace-profile-2x4-ubuntu-2404
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+      with:
+        clean: false
+        fetch-depth: 0
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+      with:
+        cache: rust
+        path: ~/.rustup
+    - id: determine-version
+      name: compliance_check::scheduled_compliance_check
+      run: |
+        VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]')
+        if [ -z "$VERSION" ]; then
+            echo "Could not determine version from crates/zed/Cargo.toml"
+            exit 1
+        fi
+        TAG="v${VERSION}-pre"
+        echo "Checking compliance for $TAG"
+        echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+    - id: run-compliance-check
+      name: compliance_check::scheduled_compliance_check::run_compliance_check
+      run: cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report
+      env:
+        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+        GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+        GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+    - name: compliance_check::scheduled_compliance_check::send_failure_slack_notification
+      if: failure()
+      run: |
+        MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: There are PRs with missing reviews."
+
+        curl -X POST -H 'Content-type: application/json' \
+            --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+            "$SLACK_WEBHOOK"
+      env:
+        SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        LATEST_TAG: ${{ steps.determine-version.outputs.tag }}
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}

.github/workflows/release.yml 🔗

@@ -293,6 +293,51 @@ jobs:
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     timeout-minutes: 60
+  compliance_check:
+    if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
+    runs-on: namespace-profile-16x32-ubuntu-2204
+    env:
+      COMPLIANCE_FILE_PATH: compliance.md
+    steps:
+    - name: steps::checkout_repo
+      uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+      with:
+        clean: false
+        fetch-depth: 0
+        ref: ${{ github.ref }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+      with:
+        cache: rust
+        path: ~/.rustup
+    - id: run-compliance-check
+      name: release::compliance_check::run_compliance_check
+      run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_PATH"
+      env:
+        GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+        GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+    - name: release::compliance_check::send_compliance_slack_notification
+      if: always()
+      run: |
+        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+            STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
+        else
+            STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+        fi
+
+        REPORT_CONTENT=""
+        if [ -f "$COMPLIANCE_FILE_PATH" ]; then
+            REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_PATH")
+        fi
+
+        MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+
+        curl -X POST -H 'Content-type: application/json' \
+            --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+            "$SLACK_WEBHOOK"
+      env:
+        SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        COMPLIANCE_OUTCOME: ${{ steps.run-compliance-check.outcome }}
   bundle_linux_aarch64:
     needs:
     - run_tests_linux
@@ -613,6 +658,45 @@ jobs:
         echo "All expected assets are present in the release."
       env:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    - name: steps::checkout_repo
+      uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
+      with:
+        clean: false
+        fetch-depth: 0
+        ref: ${{ github.ref }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@a90bb5d4b27522ce881c6e98eebd7d7e6d1653f9
+      with:
+        cache: rust
+        path: ~/.rustup
+    - id: run-post-upload-compliance-check
+      name: release::validate_release_assets::run_post_upload_compliance_check
+      run: cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report
+      env:
+        GITHUB_APP_ID: ${{ secrets.ZED_ZIPPY_APP_ID }}
+        GITHUB_APP_KEY: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
+    - name: release::validate_release_assets::send_post_upload_compliance_notification
+      if: always()
+      run: |
+        if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
+            echo "Compliance check was skipped, not sending notification"
+            exit 0
+        fi
+
+        TAG="$GITHUB_REF_NAME"
+
+        if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+            MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+        else
+            MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+        fi
+
+        curl -X POST -H 'Content-type: application/json' \
+            --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+            "$SLACK_WEBHOOK"
+      env:
+        SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        COMPLIANCE_OUTCOME: ${{ steps.run-post-upload-compliance-check.outcome }}
   auto_release_preview:
     needs:
     - validate_release_assets

Cargo.lock 🔗

@@ -677,6 +677,15 @@ dependencies = [
  "derive_arbitrary",
 ]
 
+[[package]]
+name = "arc-swap"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a07d1f37ff60921c83bdfc7407723bdefe89b44b98a9b772f225c8f9d67141a6"
+dependencies = [
+ "rustversion",
+]
+
 [[package]]
 name = "arg_enum_proc_macro"
 version = "0.3.4"
@@ -2530,6 +2539,16 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "cargo-platform"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082"
+dependencies = [
+ "serde",
+ "serde_core",
+]
+
 [[package]]
 name = "cargo_metadata"
 version = "0.19.2"
@@ -2537,7 +2556,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba"
 dependencies = [
  "camino",
- "cargo-platform",
+ "cargo-platform 0.1.9",
+ "semver",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.17",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.23.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9"
+dependencies = [
+ "camino",
+ "cargo-platform 0.3.2",
  "semver",
  "serde",
  "serde_json",
@@ -3284,6 +3317,25 @@ dependencies = [
  "workspace",
 ]
 
+[[package]]
+name = "compliance"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "derive_more",
+ "futures 0.3.32",
+ "indoc",
+ "itertools 0.14.0",
+ "jsonwebtoken",
+ "octocrab",
+ "regex",
+ "semver",
+ "serde",
+ "serde_json",
+ "tokio",
+]
+
 [[package]]
 name = "component"
 version = "0.1.0"
@@ -8324,6 +8376,7 @@ dependencies = [
  "http 1.3.1",
  "hyper 1.7.0",
  "hyper-util",
+ "log",
  "rustls 0.23.33",
  "rustls-native-certs 0.8.2",
  "rustls-pki-types",
@@ -8332,6 +8385,19 @@ dependencies = [
  "tower-service",
 ]
 
+[[package]]
+name = "hyper-timeout"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
+dependencies = [
+ "hyper 1.7.0",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
+]
+
 [[package]]
 name = "hyper-tls"
 version = "0.5.0"
@@ -11380,6 +11446,48 @@ dependencies = [
  "memchr",
 ]
 
+[[package]]
+name = "octocrab"
+version = "0.49.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63f6687a23731011d0117f9f4c3cdabaa7b5e42ca671f42b5cc0657c492540e3"
+dependencies = [
+ "arc-swap",
+ "async-trait",
+ "base64 0.22.1",
+ "bytes 1.11.1",
+ "cargo_metadata 0.23.1",
+ "cfg-if",
+ "chrono",
+ "either",
+ "futures 0.3.32",
+ "futures-core",
+ "futures-util",
+ "getrandom 0.2.16",
+ "http 1.3.1",
+ "http-body 1.0.1",
+ "http-body-util",
+ "hyper 1.7.0",
+ "hyper-rustls 0.27.7",
+ "hyper-timeout",
+ "hyper-util",
+ "jsonwebtoken",
+ "once_cell",
+ "percent-encoding",
+ "pin-project",
+ "secrecy",
+ "serde",
+ "serde_json",
+ "serde_path_to_error",
+ "serde_urlencoded",
+ "snafu",
+ "tokio",
+ "tower 0.5.2",
+ "tower-http 0.6.6",
+ "url",
+ "web-time",
+]
+
 [[package]]
 name = "ollama"
 version = "0.1.0"
@@ -15381,6 +15489,15 @@ dependencies = [
  "zeroize",
 ]
 
+[[package]]
+name = "secrecy"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e891af845473308773346dc847b2c23ee78fe442e0472ac50e22a18a93d3ae5a"
+dependencies = [
+ "zeroize",
+]
+
 [[package]]
 name = "security-framework"
 version = "2.11.1"
@@ -16085,6 +16202,27 @@ version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0f7a918bd2a9951d18ee6e48f076843e8e73a9a5d22cf05bcd4b7a81bdd04e17"
 
+[[package]]
+name = "snafu"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e84b3f4eacbf3a1ce05eac6763b4d629d60cbc94d632e4092c54ade71f1e1a2"
+dependencies = [
+ "snafu-derive",
+]
+
+[[package]]
+name = "snafu-derive"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1c97747dbf44bb1ca44a561ece23508e99cb592e862f22222dcf42f51d1e451"
+dependencies = [
+ "heck 0.5.0",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.117",
+]
+
 [[package]]
 name = "snippet"
 version = "0.1.0"
@@ -18089,8 +18227,10 @@ dependencies = [
  "pin-project-lite",
  "sync_wrapper 1.0.2",
  "tokio",
+ "tokio-util",
  "tower-layer",
  "tower-service",
+ "tracing",
 ]
 
 [[package]]
@@ -18128,6 +18268,7 @@ dependencies = [
  "tower 0.5.2",
  "tower-layer",
  "tower-service",
+ "tracing",
 ]
 
 [[package]]
@@ -19974,6 +20115,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
 dependencies = [
  "js-sys",
+ "serde",
  "wasm-bindgen",
 ]
 
@@ -21711,9 +21853,10 @@ dependencies = [
  "annotate-snippets",
  "anyhow",
  "backtrace",
- "cargo_metadata",
+ "cargo_metadata 0.19.2",
  "cargo_toml",
  "clap",
+ "compliance",
  "gh-workflow",
  "indexmap",
  "indoc",
@@ -21723,6 +21866,7 @@ dependencies = [
  "serde_json",
  "serde_yaml",
  "strum 0.27.2",
+ "tokio",
  "toml 0.8.23",
  "toml_edit 0.22.27",
 ]

Cargo.toml 🔗

@@ -242,6 +242,7 @@ members = [
     # Tooling
     #
 
+    "tooling/compliance",
     "tooling/perf",
     "tooling/xtask",
 ]
@@ -289,6 +290,7 @@ collab_ui = { path = "crates/collab_ui" }
 collections = { path = "crates/collections", version = "0.1.0" }
 command_palette = { path = "crates/command_palette" }
 command_palette_hooks = { path = "crates/command_palette_hooks" }
+compliance = { path = "tooling/compliance" }
 component = { path = "crates/component" }
 component_preview = { path = "crates/component_preview" }
 context_server = { path = "crates/context_server" }
@@ -547,6 +549,7 @@ derive_more = { version = "2.1.1", features = [
     "add_assign",
     "deref",
     "deref_mut",
+    "display",
     "from_str",
     "mul",
     "mul_assign",

tooling/compliance/Cargo.toml 🔗

@@ -0,0 +1,38 @@
+[package]
+name = "compliance"
+version = "0.1.0"
+edition.workspace = true
+publish.workspace = true
+license = "GPL-3.0-or-later"
+
+[lints]
+workspace = true
+
+[features]
+octo-client = ["dep:octocrab", "dep:jsonwebtoken", "dep:futures", "dep:tokio"]
+
+[dependencies]
+anyhow.workspace = true
+async-trait.workspace = true
+derive_more.workspace = true
+futures = { workspace = true, optional = true }
+itertools.workspace = true
+jsonwebtoken = { version = "10.2", features = ["use_pem"], optional = true }
+octocrab = { version = "0.49", default-features = false, features = [
+    "default-client",
+    "jwt-aws-lc-rs",
+    "retry",
+    "rustls",
+    "rustls-aws-lc-rs",
+    "stream",
+    "timeout"
+], optional = true }
+regex.workspace = true
+semver.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+tokio = { workspace = true, optional = true }
+
+[dev-dependencies]
+indoc.workspace = true
+tokio = { workspace = true, features = ["rt", "macros"] }

tooling/compliance/src/checks.rs 🔗

@@ -0,0 +1,647 @@
+use std::{fmt, ops::Not as _};
+
+use itertools::Itertools as _;
+
+use crate::{
+    git::{CommitDetails, CommitList},
+    github::{
+        CommitAuthor, GitHubClient, GitHubUser, GithubLogin, PullRequestComment, PullRequestData,
+        PullRequestReview, ReviewState,
+    },
+    report::Report,
+};
+
+const ZED_ZIPPY_COMMENT_APPROVAL_PATTERN: &str = "@zed-zippy approve";
+const ZED_ZIPPY_GROUP_APPROVAL: &str = "@zed-industries/approved";
+
+#[derive(Debug)]
+pub enum ReviewSuccess {
+    ApprovingComment(Vec<PullRequestComment>),
+    CoAuthored(Vec<CommitAuthor>),
+    ExternalMergedContribution { merged_by: GitHubUser },
+    PullRequestReviewed(Vec<PullRequestReview>),
+}
+
+impl ReviewSuccess {
+    pub(crate) fn reviewers(&self) -> anyhow::Result<String> {
+        let reviewers = match self {
+            Self::CoAuthored(authors) => authors.iter().map(ToString::to_string).collect_vec(),
+            Self::PullRequestReviewed(reviews) => reviews
+                .iter()
+                .filter_map(|review| review.user.as_ref())
+                .map(|user| format!("@{}", user.login))
+                .collect_vec(),
+            Self::ApprovingComment(comments) => comments
+                .iter()
+                .map(|comment| format!("@{}", comment.user.login))
+                .collect_vec(),
+            Self::ExternalMergedContribution { merged_by } => {
+                vec![format!("@{}", merged_by.login)]
+            }
+        };
+
+        let reviewers = reviewers.into_iter().unique().collect_vec();
+
+        reviewers
+            .is_empty()
+            .not()
+            .then(|| reviewers.join(", "))
+            .ok_or_else(|| anyhow::anyhow!("Expected at least one reviewer"))
+    }
+}
+
+impl fmt::Display for ReviewSuccess {
+    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::CoAuthored(_) => formatter.write_str("Co-authored by an organization member"),
+            Self::PullRequestReviewed(_) => {
+                formatter.write_str("Approved by an organization review")
+            }
+            Self::ApprovingComment(_) => {
+                formatter.write_str("Approved by an organization approval comment")
+            }
+            Self::ExternalMergedContribution { .. } => {
+                formatter.write_str("External merged contribution")
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum ReviewFailure {
+    // todo: We could still query the GitHub API here to search for one
+    NoPullRequestFound,
+    Unreviewed,
+    UnableToDetermineReviewer,
+    Other(anyhow::Error),
+}
+
+impl fmt::Display for ReviewFailure {
+    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            Self::NoPullRequestFound => formatter.write_str("No pull request found"),
+            Self::Unreviewed => formatter
+                .write_str("No qualifying organization approval found for the pull request"),
+            Self::UnableToDetermineReviewer => formatter.write_str("Could not determine reviewer"),
+            Self::Other(error) => write!(formatter, "Failed to inspect review state: {error}"),
+        }
+    }
+}
+
+pub(crate) type ReviewResult = Result<ReviewSuccess, ReviewFailure>;
+
+impl<E: Into<anyhow::Error>> From<E> for ReviewFailure {
+    fn from(err: E) -> Self {
+        Self::Other(anyhow::anyhow!(err))
+    }
+}
+
+pub struct Reporter<'a> {
+    commits: CommitList,
+    github_client: &'a GitHubClient,
+}
+
+impl<'a> Reporter<'a> {
+    pub fn new(commits: CommitList, github_client: &'a GitHubClient) -> Self {
+        Self {
+            commits,
+            github_client,
+        }
+    }
+
+    /// Method that checks every commit for compliance
+    async fn check_commit(&self, commit: &CommitDetails) -> Result<ReviewSuccess, ReviewFailure> {
+        let Some(pr_number) = commit.pr_number() else {
+            return Err(ReviewFailure::NoPullRequestFound);
+        };
+
+        let pull_request = self.github_client.get_pull_request(pr_number).await?;
+
+        if let Some(approval) = self.check_pull_request_approved(&pull_request).await? {
+            return Ok(approval);
+        }
+
+        if let Some(approval) = self
+            .check_approving_pull_request_comment(&pull_request)
+            .await?
+        {
+            return Ok(approval);
+        }
+
+        if let Some(approval) = self.check_commit_co_authors(commit).await? {
+            return Ok(approval);
+        }
+
+        // if let Some(approval) = self.check_external_merged_pr(pr_number).await? {
+        //     return Ok(approval);
+        // }
+
+        Err(ReviewFailure::Unreviewed)
+    }
+
+    async fn check_commit_co_authors(
+        &self,
+        commit: &CommitDetails,
+    ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+        if commit.co_authors().is_some()
+            && let Some(commit_authors) = self
+                .github_client
+                .get_commit_authors([commit.sha()])
+                .await?
+                .get(commit.sha())
+                .and_then(|authors| authors.co_authors())
+        {
+            let mut org_co_authors = Vec::new();
+            for co_author in commit_authors {
+                if let Some(github_login) = co_author.user()
+                    && self
+                        .github_client
+                        .check_org_membership(github_login)
+                        .await?
+                {
+                    org_co_authors.push(co_author.clone());
+                }
+            }
+
+            Ok(org_co_authors
+                .is_empty()
+                .not()
+                .then_some(ReviewSuccess::CoAuthored(org_co_authors)))
+        } else {
+            Ok(None)
+        }
+    }
+
+    #[allow(unused)]
+    async fn check_external_merged_pr(
+        &self,
+        pull_request: PullRequestData,
+    ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+        if let Some(user) = pull_request.user
+            && self
+                .github_client
+                .check_org_membership(&GithubLogin::new(user.login))
+                .await?
+                .not()
+        {
+            pull_request.merged_by.map_or(
+                Err(ReviewFailure::UnableToDetermineReviewer),
+                |merged_by| {
+                    Ok(Some(ReviewSuccess::ExternalMergedContribution {
+                        merged_by,
+                    }))
+                },
+            )
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn check_pull_request_approved(
+        &self,
+        pull_request: &PullRequestData,
+    ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+        let pr_reviews = self
+            .github_client
+            .get_pull_request_reviews(pull_request.number)
+            .await?;
+
+        if !pr_reviews.is_empty() {
+            let mut org_approving_reviews = Vec::new();
+            for review in pr_reviews {
+                if let Some(github_login) = review.user.as_ref()
+                    && pull_request
+                        .user
+                        .as_ref()
+                        .is_none_or(|pr_user| pr_user.login != github_login.login)
+                    && review
+                        .state
+                        .is_some_and(|state| state == ReviewState::Approved)
+                    && self
+                        .github_client
+                        .check_org_membership(&GithubLogin::new(github_login.login.clone()))
+                        .await?
+                {
+                    org_approving_reviews.push(review);
+                }
+            }
+
+            Ok(org_approving_reviews
+                .is_empty()
+                .not()
+                .then_some(ReviewSuccess::PullRequestReviewed(org_approving_reviews)))
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn check_approving_pull_request_comment(
+        &self,
+        pull_request: &PullRequestData,
+    ) -> Result<Option<ReviewSuccess>, ReviewFailure> {
+        let other_comments = self
+            .github_client
+            .get_pull_request_comments(pull_request.number)
+            .await?;
+
+        if !other_comments.is_empty() {
+            let mut org_approving_comments = Vec::new();
+
+            for comment in other_comments {
+                if pull_request
+                    .user
+                    .as_ref()
+                    .is_some_and(|pr_author| pr_author.login != comment.user.login)
+                    && comment.body.as_ref().is_some_and(|body| {
+                        body.contains(ZED_ZIPPY_COMMENT_APPROVAL_PATTERN)
+                            || body.contains(ZED_ZIPPY_GROUP_APPROVAL)
+                    })
+                    && self
+                        .github_client
+                        .check_org_membership(&GithubLogin::new(comment.user.login.clone()))
+                        .await?
+                {
+                    org_approving_comments.push(comment);
+                }
+            }
+
+            Ok(org_approving_comments
+                .is_empty()
+                .not()
+                .then_some(ReviewSuccess::ApprovingComment(org_approving_comments)))
+        } else {
+            Ok(None)
+        }
+    }
+
+    pub async fn generate_report(mut self) -> anyhow::Result<Report> {
+        let mut report = Report::new();
+
+        let commits_to_check = std::mem::take(&mut self.commits);
+        let total_commits = commits_to_check.len();
+
+        for (i, commit) in commits_to_check.into_iter().enumerate() {
+            println!(
+                "Checking commit {:?} ({current}/{total})",
+                commit.sha().short(),
+                current = i + 1,
+                total = total_commits
+            );
+
+            let review_result = self.check_commit(&commit).await;
+
+            if let Err(err) = &review_result {
+                println!("Commit {:?} failed review: {:?}", commit.sha().short(), err);
+            }
+
+            report.add(commit, review_result);
+        }
+
+        Ok(report)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::rc::Rc;
+    use std::str::FromStr;
+
+    use crate::git::{CommitDetails, CommitList, CommitSha};
+    use crate::github::{
+        AuthorsForCommits, GitHubApiClient, GitHubClient, GitHubUser, GithubLogin,
+        PullRequestComment, PullRequestData, PullRequestReview, ReviewState,
+    };
+
+    use super::{Reporter, ReviewFailure, ReviewSuccess};
+
+    struct MockGitHubApi {
+        pull_request: PullRequestData,
+        reviews: Vec<PullRequestReview>,
+        comments: Vec<PullRequestComment>,
+        commit_authors_json: serde_json::Value,
+        org_members: Vec<String>,
+    }
+
+    #[async_trait::async_trait(?Send)]
+    impl GitHubApiClient for MockGitHubApi {
+        async fn get_pull_request(&self, _pr_number: u64) -> anyhow::Result<PullRequestData> {
+            Ok(self.pull_request.clone())
+        }
+
+        async fn get_pull_request_reviews(
+            &self,
+            _pr_number: u64,
+        ) -> anyhow::Result<Vec<PullRequestReview>> {
+            Ok(self.reviews.clone())
+        }
+
+        async fn get_pull_request_comments(
+            &self,
+            _pr_number: u64,
+        ) -> anyhow::Result<Vec<PullRequestComment>> {
+            Ok(self.comments.clone())
+        }
+
+        async fn get_commit_authors(
+            &self,
+            _commit_shas: &[&CommitSha],
+        ) -> anyhow::Result<AuthorsForCommits> {
+            serde_json::from_value(self.commit_authors_json.clone()).map_err(Into::into)
+        }
+
+        async fn check_org_membership(&self, login: &GithubLogin) -> anyhow::Result<bool> {
+            Ok(self
+                .org_members
+                .iter()
+                .any(|member| member == login.as_str()))
+        }
+
+        async fn ensure_pull_request_has_label(
+            &self,
+            _label: &str,
+            _pr_number: u64,
+        ) -> anyhow::Result<()> {
+            Ok(())
+        }
+    }
+
+    fn make_commit(
+        sha: &str,
+        author_name: &str,
+        author_email: &str,
+        title: &str,
+        body: &str,
+    ) -> CommitDetails {
+        let formatted = format!(
+            "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|\
+             {title}|body-delimiter|{body}|commit-delimiter|"
+        );
+        CommitList::from_str(&formatted)
+            .expect("test commit should parse")
+            .into_iter()
+            .next()
+            .expect("should have one commit")
+    }
+
+    fn review(login: &str, state: ReviewState) -> PullRequestReview {
+        PullRequestReview {
+            user: Some(GitHubUser {
+                login: login.to_owned(),
+            }),
+            state: Some(state),
+        }
+    }
+
+    fn comment(login: &str, body: &str) -> PullRequestComment {
+        PullRequestComment {
+            user: GitHubUser {
+                login: login.to_owned(),
+            },
+            body: Some(body.to_owned()),
+        }
+    }
+
+    struct TestScenario {
+        pull_request: PullRequestData,
+        reviews: Vec<PullRequestReview>,
+        comments: Vec<PullRequestComment>,
+        commit_authors_json: serde_json::Value,
+        org_members: Vec<String>,
+        commit: CommitDetails,
+    }
+
+    impl TestScenario {
+        fn single_commit() -> Self {
+            Self {
+                pull_request: PullRequestData {
+                    number: 1234,
+                    user: Some(GitHubUser {
+                        login: "alice".to_owned(),
+                    }),
+                    merged_by: None,
+                },
+                reviews: vec![],
+                comments: vec![],
+                commit_authors_json: serde_json::json!({}),
+                org_members: vec![],
+                commit: make_commit(
+                    "abc12345abc12345",
+                    "Alice",
+                    "alice@test.com",
+                    "Fix thing (#1234)",
+                    "",
+                ),
+            }
+        }
+
+        fn with_reviews(mut self, reviews: Vec<PullRequestReview>) -> Self {
+            self.reviews = reviews;
+            self
+        }
+
+        fn with_comments(mut self, comments: Vec<PullRequestComment>) -> Self {
+            self.comments = comments;
+            self
+        }
+
+        fn with_org_members(mut self, members: Vec<&str>) -> Self {
+            self.org_members = members.into_iter().map(str::to_owned).collect();
+            self
+        }
+
+        fn with_commit_authors_json(mut self, json: serde_json::Value) -> Self {
+            self.commit_authors_json = json;
+            self
+        }
+
+        fn with_commit(mut self, commit: CommitDetails) -> Self {
+            self.commit = commit;
+            self
+        }
+
+        async fn run_scenario(self) -> Result<ReviewSuccess, ReviewFailure> {
+            let mock = MockGitHubApi {
+                pull_request: self.pull_request,
+                reviews: self.reviews,
+                comments: self.comments,
+                commit_authors_json: self.commit_authors_json,
+                org_members: self.org_members,
+            };
+            let client = GitHubClient::new(Rc::new(mock));
+            let reporter = Reporter::new(CommitList::default(), &client);
+            reporter.check_commit(&self.commit).await
+        }
+    }
+
+    #[tokio::test]
+    async fn approved_review_by_org_member_succeeds() {
+        let result = TestScenario::single_commit()
+            .with_reviews(vec![review("bob", ReviewState::Approved)])
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_))));
+    }
+
+    #[tokio::test]
+    async fn non_approved_review_state_is_not_accepted() {
+        let result = TestScenario::single_commit()
+            .with_reviews(vec![review("bob", ReviewState::Other)])
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+
+    #[tokio::test]
+    async fn review_by_non_org_member_is_not_accepted() {
+        let result = TestScenario::single_commit()
+            .with_reviews(vec![review("bob", ReviewState::Approved)])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+
+    #[tokio::test]
+    async fn pr_author_own_approval_review_is_rejected() {
+        let result = TestScenario::single_commit()
+            .with_reviews(vec![review("alice", ReviewState::Approved)])
+            .with_org_members(vec!["alice"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+
+    #[tokio::test]
+    async fn pr_author_own_approval_comment_is_rejected() {
+        let result = TestScenario::single_commit()
+            .with_comments(vec![comment("alice", "@zed-zippy approve")])
+            .with_org_members(vec!["alice"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+
+    #[tokio::test]
+    async fn approval_comment_by_org_member_succeeds() {
+        let result = TestScenario::single_commit()
+            .with_comments(vec![comment("bob", "@zed-zippy approve")])
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+    }
+
+    #[tokio::test]
+    async fn group_approval_comment_by_org_member_succeeds() {
+        let result = TestScenario::single_commit()
+            .with_comments(vec![comment("bob", "@zed-industries/approved")])
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+    }
+
+    #[tokio::test]
+    async fn comment_without_approval_pattern_is_not_accepted() {
+        let result = TestScenario::single_commit()
+            .with_comments(vec![comment("bob", "looks good")])
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+
+    #[tokio::test]
+    async fn commit_without_pr_number_is_no_pr_found() {
+        let result = TestScenario::single_commit()
+            .with_commit(make_commit(
+                "abc12345abc12345",
+                "Alice",
+                "alice@test.com",
+                "Fix thing without PR number",
+                "",
+            ))
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Err(ReviewFailure::NoPullRequestFound)));
+    }
+
+    #[tokio::test]
+    async fn pr_review_takes_precedence_over_comment() {
+        let result = TestScenario::single_commit()
+            .with_reviews(vec![review("bob", ReviewState::Approved)])
+            .with_comments(vec![comment("charlie", "@zed-zippy approve")])
+            .with_org_members(vec!["bob", "charlie"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::PullRequestReviewed(_))));
+    }
+
+    #[tokio::test]
+    async fn comment_takes_precedence_over_co_author() {
+        let result = TestScenario::single_commit()
+            .with_comments(vec![comment("bob", "@zed-zippy approve")])
+            .with_commit_authors_json(serde_json::json!({
+                "abc12345abc12345": {
+                    "author": {
+                        "name": "Alice",
+                        "email": "alice@test.com",
+                        "user": { "login": "alice" }
+                    },
+                    "authors": [{
+                        "name": "Charlie",
+                        "email": "charlie@test.com",
+                        "user": { "login": "charlie" }
+                    }]
+                }
+            }))
+            .with_commit(make_commit(
+                "abc12345abc12345",
+                "Alice",
+                "alice@test.com",
+                "Fix thing (#1234)",
+                "Co-authored-by: Charlie <charlie@test.com>",
+            ))
+            .with_org_members(vec!["bob", "charlie"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::ApprovingComment(_))));
+    }
+
+    #[tokio::test]
+    async fn co_author_org_member_succeeds() {
+        let result = TestScenario::single_commit()
+            .with_commit_authors_json(serde_json::json!({
+                "abc12345abc12345": {
+                    "author": {
+                        "name": "Alice",
+                        "email": "alice@test.com",
+                        "user": { "login": "alice" }
+                    },
+                    "authors": [{
+                        "name": "Bob",
+                        "email": "bob@test.com",
+                        "user": { "login": "bob" }
+                    }]
+                }
+            }))
+            .with_commit(make_commit(
+                "abc12345abc12345",
+                "Alice",
+                "alice@test.com",
+                "Fix thing (#1234)",
+                "Co-authored-by: Bob <bob@test.com>",
+            ))
+            .with_org_members(vec!["bob"])
+            .run_scenario()
+            .await;
+        assert!(matches!(result, Ok(ReviewSuccess::CoAuthored(_))));
+    }
+
+    #[tokio::test]
+    async fn no_reviews_no_comments_no_coauthors_is_unreviewed() {
+        let result = TestScenario::single_commit().run_scenario().await;
+        assert!(matches!(result, Err(ReviewFailure::Unreviewed)));
+    }
+}

tooling/compliance/src/git.rs 🔗

@@ -0,0 +1,591 @@
+#![allow(clippy::disallowed_methods, reason = "This is only used in xtasks")]
+use std::{
+    fmt::{self, Debug},
+    ops::Not,
+    process::Command,
+    str::FromStr,
+    sync::LazyLock,
+};
+
+use anyhow::{Context, Result, anyhow};
+use derive_more::{Deref, DerefMut, FromStr};
+
+use itertools::Itertools;
+use regex::Regex;
+use semver::Version;
+use serde::Deserialize;
+
/// A `git` subcommand: the arguments to pass to `git` and the type its
/// stdout parses into.
pub trait Subcommand {
    /// Type produced by parsing the command's trimmed stdout.
    type ParsedOutput: FromStr<Err = anyhow::Error>;

    /// The argument list passed to the `git` binary.
    fn args(&self) -> impl IntoIterator<Item = String>;
}

/// Wrapper that executes a [`Subcommand`] by spawning `git`.
#[derive(Deref, DerefMut)]
pub struct GitCommand<G: Subcommand> {
    #[deref]
    #[deref_mut]
    subcommand: G,
}
+
+impl<G: Subcommand> GitCommand<G> {
+    #[must_use]
+    pub fn run(subcommand: G) -> Result<G::ParsedOutput> {
+        Self { subcommand }.run_impl()
+    }
+
+    fn run_impl(self) -> Result<G::ParsedOutput> {
+        let command_output = Command::new("git")
+            .args(self.subcommand.args())
+            .output()
+            .context("Failed to spawn command")?;
+
+        if command_output.status.success() {
+            String::from_utf8(command_output.stdout)
+                .map_err(|_| anyhow!("Invalid UTF8"))
+                .and_then(|s| {
+                    G::ParsedOutput::from_str(s.trim())
+                        .map_err(|e| anyhow!("Failed to parse from string: {e:?}"))
+                })
+        } else {
+            anyhow::bail!(
+                "Command failed with exit code {}, stderr: {}",
+                command_output.status.code().unwrap_or_default(),
+                String::from_utf8(command_output.stderr).unwrap_or_default()
+            )
+        }
+    }
+}
+
/// The release channel a version tag belongs to.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum ReleaseChannel {
    Stable,
    Preview,
}

impl ReleaseChannel {
    /// Suffix appended to version tags on this channel (`-pre` for preview,
    /// nothing for stable).
    pub(crate) fn tag_suffix(&self) -> &'static str {
        if matches!(self, ReleaseChannel::Preview) {
            "-pre"
        } else {
            ""
        }
    }
}
+
+#[derive(Debug, Clone)]
+pub struct VersionTag(Version, ReleaseChannel);
+
+impl VersionTag {
+    pub fn parse(input: &str) -> Result<Self, anyhow::Error> {
+        // Being a bit more lenient for human inputs
+        let version = input.strip_prefix('v').unwrap_or(input);
+
+        let (version_str, channel) = version
+            .strip_suffix("-pre")
+            .map_or((version, ReleaseChannel::Stable), |version_str| {
+                (version_str, ReleaseChannel::Preview)
+            });
+
+        Version::parse(version_str)
+            .map(|version| Self(version, channel))
+            .map_err(|_| anyhow::anyhow!("Failed to parse version from tag!"))
+    }
+
+    pub fn version(&self) -> &Version {
+        &self.0
+    }
+}
+
+impl ToString for VersionTag {
+    fn to_string(&self) -> String {
+        format!(
+            "v{version}{channel_suffix}",
+            version = self.0,
+            channel_suffix = self.1.tag_suffix()
+        )
+    }
+}
+
+#[derive(Debug, Deref, FromStr, PartialEq, Eq, Hash, Deserialize)]
+pub struct CommitSha(pub(crate) String);
+
+impl CommitSha {
+    pub fn short(&self) -> &str {
+        self.0.as_str().split_at(8).0
+    }
+}
+
/// A single commit parsed from delimiter-separated `git log` output.
#[derive(Debug)]
pub struct CommitDetails {
    // Full commit SHA (`%H`).
    sha: CommitSha,
    // Author identity as recorded by git (`%an` / `%ae`).
    author: Committer,
    // Subject line (`%s`).
    title: String,
    // Message body (`%b`); scanned for `Co-authored-by` trailers.
    body: String,
}
+
/// A git author identity: display name plus email address.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Committer {
    name: String,
    email: String,
}

impl Committer {
    /// Creates a committer from anything convertible into owned strings, so
    /// both `&str` and `String` callers work without an extra allocation.
    /// Existing `&str` call sites are unaffected.
    pub fn new(name: impl Into<String>, email: impl Into<String>) -> Self {
        Self {
            name: name.into(),
            email: email.into(),
        }
    }
}

impl fmt::Display for Committer {
    // Rendered as `Name (email)`.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(formatter, "{} ({})", self.name, self.email)
    }
}
+
+impl CommitDetails {
+    const BODY_DELIMITER: &str = "|body-delimiter|";
+    const COMMIT_DELIMITER: &str = "|commit-delimiter|";
+    const FIELD_DELIMITER: &str = "|field-delimiter|";
+    const FORMAT_STRING: &str = "%H|field-delimiter|%an|field-delimiter|%ae|field-delimiter|%s|body-delimiter|%b|commit-delimiter|";
+
+    fn parse(line: &str, body: &str) -> Result<Self, anyhow::Error> {
+        let Some([sha, author_name, author_email, title]) =
+            line.splitn(4, Self::FIELD_DELIMITER).collect_array()
+        else {
+            return Err(anyhow!("Failed to parse commit fields from input {line}"));
+        };
+
+        Ok(CommitDetails {
+            sha: CommitSha(sha.to_owned()),
+            author: Committer::new(author_name, author_email),
+            title: title.to_owned(),
+            body: body.to_owned(),
+        })
+    }
+
+    pub fn pr_number(&self) -> Option<u64> {
+        // Since we use squash merge, all commit titles end with the '(#12345)' pattern.
+        // While we could strictly speaking index into this directly, go for a slightly
+        // less prone approach to errors
+        const PATTERN: &str = " (#";
+        self.title
+            .rfind(PATTERN)
+            .and_then(|location| {
+                self.title[location..]
+                    .find(')')
+                    .map(|relative_end| location + PATTERN.len()..location + relative_end)
+            })
+            .and_then(|range| self.title[range].parse().ok())
+    }
+
+    pub(crate) fn co_authors(&self) -> Option<Vec<Committer>> {
+        static CO_AUTHOR_REGEX: LazyLock<Regex> =
+            LazyLock::new(|| Regex::new(r"Co-authored-by: (.+) <(.+)>").unwrap());
+
+        let mut co_authors = Vec::new();
+
+        for cap in CO_AUTHOR_REGEX.captures_iter(&self.body.as_ref()) {
+            let Some((name, email)) = cap
+                .get(1)
+                .map(|m| m.as_str())
+                .zip(cap.get(2).map(|m| m.as_str()))
+            else {
+                continue;
+            };
+            co_authors.push(Committer::new(name, email));
+        }
+
+        co_authors.is_empty().not().then_some(co_authors)
+    }
+
+    pub(crate) fn author(&self) -> &Committer {
+        &self.author
+    }
+
+    pub(crate) fn title(&self) -> &str {
+        &self.title
+    }
+
+    pub(crate) fn sha(&self) -> &CommitSha {
+        &self.sha
+    }
+}
+
/// An ordered list of commits as produced by `git log`.
#[derive(Debug, Deref, Default, DerefMut)]
pub struct CommitList(Vec<CommitDetails>);

impl CommitList {
    /// Formats a `first..last` revision range over the list, or `None` when
    /// the list is empty.
    // NOTE(review): `git log` emits newest-first, so this renders
    // newest..oldest — confirm callers expect that orientation.
    pub fn range(&self) -> Option<String> {
        self.0
            .first()
            .zip(self.0.last())
            .map(|(first, last)| format!("{}..{}", first.sha().0, last.sha().0))
    }
}
+
+impl IntoIterator for CommitList {
+    type IntoIter = std::vec::IntoIter<CommitDetails>;
+    type Item = CommitDetails;
+
+    fn into_iter(self) -> std::vec::IntoIter<Self::Item> {
+        self.0.into_iter()
+    }
+}
+
+impl FromStr for CommitList {
+    type Err = anyhow::Error;
+
+    fn from_str(input: &str) -> Result<Self, Self::Err> {
+        Ok(CommitList(
+            input
+                .split(CommitDetails::COMMIT_DELIMITER)
+                .filter(|commit_details| !commit_details.is_empty())
+                .map(|commit_details| {
+                    let (line, body) = commit_details
+                        .trim()
+                        .split_once(CommitDetails::BODY_DELIMITER)
+                        .expect("Missing body delimiter");
+                    CommitDetails::parse(line, body)
+                        .expect("Parsing from the output should succeed")
+                })
+                .collect(),
+        ))
+    }
+}
+
+pub struct GetVersionTags;
+
+impl Subcommand for GetVersionTags {
+    type ParsedOutput = VersionTagList;
+
+    fn args(&self) -> impl IntoIterator<Item = String> {
+        ["tag", "-l", "v*"].map(ToOwned::to_owned)
+    }
+}
+
+pub struct VersionTagList(Vec<VersionTag>);
+
+impl VersionTagList {
+    pub fn sorted(mut self) -> Self {
+        self.0.sort_by(|a, b| a.version().cmp(b.version()));
+        self
+    }
+
+    pub fn find_previous_minor_version(&self, version_tag: &VersionTag) -> Option<&VersionTag> {
+        self.0
+            .iter()
+            .take_while(|tag| tag.version() < version_tag.version())
+            .collect_vec()
+            .into_iter()
+            .rev()
+            .find(|tag| {
+                (tag.version().major < version_tag.version().major
+                    || (tag.version().major == version_tag.version().major
+                        && tag.version().minor < version_tag.version().minor))
+                    && tag.version().patch == 0
+            })
+    }
+}
+
+impl FromStr for VersionTagList {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let version_tags = s.lines().flat_map(VersionTag::parse).collect_vec();
+
+        version_tags
+            .is_empty()
+            .not()
+            .then_some(Self(version_tags))
+            .ok_or_else(|| anyhow::anyhow!("No version tags found"))
+    }
+}
+
+pub struct CommitsFromVersionToHead {
+    version_tag: VersionTag,
+    branch: String,
+}
+
+impl CommitsFromVersionToHead {
+    pub fn new(version_tag: VersionTag, branch: String) -> Self {
+        Self {
+            version_tag,
+            branch,
+        }
+    }
+}
+
+impl Subcommand for CommitsFromVersionToHead {
+    type ParsedOutput = CommitList;
+
+    fn args(&self) -> impl IntoIterator<Item = String> {
+        [
+            "log".to_string(),
+            format!("--pretty=format:{}", CommitDetails::FORMAT_STRING),
+            format!(
+                "{version}..{branch}",
+                version = self.version_tag.to_string(),
+                branch = self.branch
+            ),
+        ]
+    }
+}
+
+pub struct NoOutput;
+
+impl FromStr for NoOutput {
+    type Err = anyhow::Error;
+
+    fn from_str(_: &str) -> Result<Self, Self::Err> {
+        Ok(NoOutput)
+    }
+}
+
#[cfg(test)]
mod tests {
    use super::*;
    use indoc::indoc;

    // --- VersionTag parsing and round-tripping ---

    #[test]
    fn parse_stable_version_tag() {
        let tag = VersionTag::parse("v0.172.8").unwrap();
        assert_eq!(tag.version().major, 0);
        assert_eq!(tag.version().minor, 172);
        assert_eq!(tag.version().patch, 8);
        assert_eq!(tag.1, ReleaseChannel::Stable);
    }

    #[test]
    fn parse_preview_version_tag() {
        let tag = VersionTag::parse("v0.172.1-pre").unwrap();
        assert_eq!(tag.version().major, 0);
        assert_eq!(tag.version().minor, 172);
        assert_eq!(tag.version().patch, 1);
        assert_eq!(tag.1, ReleaseChannel::Preview);
    }

    #[test]
    fn parse_version_tag_without_v_prefix() {
        let tag = VersionTag::parse("0.172.8").unwrap();
        assert_eq!(tag.version().major, 0);
        assert_eq!(tag.version().minor, 172);
        assert_eq!(tag.version().patch, 8);
    }

    #[test]
    fn parse_invalid_version_tag() {
        let result = VersionTag::parse("vConradTest");
        assert!(result.is_err());
    }

    #[test]
    fn version_tag_stable_roundtrip() {
        let tag = VersionTag::parse("v0.172.8").unwrap();
        assert_eq!(tag.to_string(), "v0.172.8");
    }

    #[test]
    fn version_tag_preview_roundtrip() {
        let tag = VersionTag::parse("v0.172.1-pre").unwrap();
        assert_eq!(tag.to_string(), "v0.172.1-pre");
    }

    // --- VersionTagList sorting and previous-minor lookup ---

    #[test]
    fn sorted_orders_by_semver() {
        let input = indoc! {"
            v0.172.8
            v0.170.1
            v0.171.4
            v0.170.2
            v0.172.11
            v0.171.3
            v0.172.9
        "};
        let list = VersionTagList::from_str(input).unwrap().sorted();
        for window in list.0.windows(2) {
            assert!(
                window[0].version() <= window[1].version(),
                "{} should come before {}",
                window[0].to_string(),
                window[1].to_string()
            );
        }
        assert_eq!(list.0[0].to_string(), "v0.170.1");
        assert_eq!(list.0[list.0.len() - 1].to_string(), "v0.172.11");
    }

    #[test]
    fn find_previous_minor_for_173_returns_172() {
        let input = indoc! {"
            v0.170.1
            v0.170.2
            v0.171.3
            v0.171.4
            v0.172.0
            v0.172.8
            v0.172.9
            v0.172.11
        "};
        let list = VersionTagList::from_str(input).unwrap().sorted();
        let target = VersionTag::parse("v0.173.0").unwrap();
        let previous = list.find_previous_minor_version(&target).unwrap();
        assert_eq!(previous.version().major, 0);
        assert_eq!(previous.version().minor, 172);
        assert_eq!(previous.version().patch, 0);
    }

    #[test]
    fn find_previous_minor_skips_same_minor() {
        let input = indoc! {"
            v0.172.8
            v0.172.9
            v0.172.11
        "};
        let list = VersionTagList::from_str(input).unwrap().sorted();
        let target = VersionTag::parse("v0.172.8").unwrap();
        assert!(list.find_previous_minor_version(&target).is_none());
    }

    #[test]
    fn find_previous_minor_with_major_version_gap() {
        let input = indoc! {"
            v0.172.0
            v0.172.9
            v0.172.11
        "};
        let list = VersionTagList::from_str(input).unwrap().sorted();
        let target = VersionTag::parse("v1.0.0").unwrap();
        let previous = list.find_previous_minor_version(&target).unwrap();
        assert_eq!(previous.to_string(), "v0.172.0");
    }

    #[test]
    fn find_previous_minor_requires_zero_patch_version() {
        let input = indoc! {"
            v0.172.1
            v0.172.9
            v0.172.11
        "};
        let list = VersionTagList::from_str(input).unwrap().sorted();
        let target = VersionTag::parse("v1.0.0").unwrap();
        assert!(list.find_previous_minor_version(&target).is_none());
    }

    // --- Tag-list parsing tolerates junk lines but rejects empty output ---

    #[test]
    fn parse_tag_list_from_real_tags() {
        let input = indoc! {"
            v0.9999-temporary
            vConradTest
            v0.172.8
        "};
        let list = VersionTagList::from_str(input).unwrap();
        assert_eq!(list.0.len(), 1);
        assert_eq!(list.0[0].to_string(), "v0.172.8");
    }

    #[test]
    fn parse_empty_tag_list_fails() {
        let result = VersionTagList::from_str("");
        assert!(result.is_err());
    }

    // --- PR number extraction from squash-merge titles ---

    #[test]
    fn pr_number_from_squash_merge_title() {
        let line = format!(
            "abc123{d}Author Name{d}author@email.com{d}Add cool feature (#12345)",
            d = CommitDetails::FIELD_DELIMITER
        );
        let commit = CommitDetails::parse(&line, "").unwrap();
        assert_eq!(commit.pr_number(), Some(12345));
    }

    #[test]
    fn pr_number_missing() {
        let line = format!(
            "abc123{d}Author Name{d}author@email.com{d}Some commit without PR ref",
            d = CommitDetails::FIELD_DELIMITER
        );
        let commit = CommitDetails::parse(&line, "").unwrap();
        assert_eq!(commit.pr_number(), None);
    }

    #[test]
    fn pr_number_takes_last_match() {
        let line = format!(
            "abc123{d}Author Name{d}author@email.com{d}Fix (#123) and refactor (#456)",
            d = CommitDetails::FIELD_DELIMITER
        );
        let commit = CommitDetails::parse(&line, "").unwrap();
        assert_eq!(commit.pr_number(), Some(456));
    }

    // --- Co-author trailer parsing ---

    #[test]
    fn co_authors_parsed_from_body() {
        let line = format!(
            "abc123{d}Author Name{d}author@email.com{d}Some title",
            d = CommitDetails::FIELD_DELIMITER
        );
        let body = indoc! {"
            Co-authored-by: Alice Smith <alice@example.com>
            Co-authored-by: Bob Jones <bob@example.com>
        "};
        let commit = CommitDetails::parse(&line, body).unwrap();
        let co_authors = commit.co_authors().unwrap();
        assert_eq!(co_authors.len(), 2);
        assert_eq!(
            co_authors[0],
            Committer::new("Alice Smith", "alice@example.com")
        );
        assert_eq!(
            co_authors[1],
            Committer::new("Bob Jones", "bob@example.com")
        );
    }

    #[test]
    fn no_co_authors_returns_none() {
        let line = format!(
            "abc123{d}Author Name{d}author@email.com{d}Some title",
            d = CommitDetails::FIELD_DELIMITER
        );
        let commit = CommitDetails::parse(&line, "").unwrap();
        assert!(commit.co_authors().is_none());
    }

    // --- CommitSha abbreviation and full git-log parsing ---

    #[test]
    fn commit_sha_short_returns_first_8_chars() {
        let sha = CommitSha("abcdef1234567890abcdef1234567890abcdef12".into());
        assert_eq!(sha.short(), "abcdef12");
    }

    #[test]
    fn parse_commit_list_from_git_log_format() {
        let fd = CommitDetails::FIELD_DELIMITER;
        let bd = CommitDetails::BODY_DELIMITER;
        let cd = CommitDetails::COMMIT_DELIMITER;

        let input = format!(
            "sha111{fd}Alice{fd}alice@test.com{fd}First commit (#100){bd}First body{cd}sha222{fd}Bob{fd}bob@test.com{fd}Second commit (#200){bd}Second body{cd}"
        );

        let list = CommitList::from_str(&input).unwrap();
        assert_eq!(list.0.len(), 2);

        assert_eq!(list.0[0].sha().0, "sha111");
        assert_eq!(
            list.0[0].author(),
            &Committer::new("Alice", "alice@test.com")
        );
        assert_eq!(list.0[0].title(), "First commit (#100)");
        assert_eq!(list.0[0].pr_number(), Some(100));
        assert_eq!(list.0[0].body, "First body");

        assert_eq!(list.0[1].sha().0, "sha222");
        assert_eq!(list.0[1].author(), &Committer::new("Bob", "bob@test.com"));
        assert_eq!(list.0[1].title(), "Second commit (#200)");
        assert_eq!(list.0[1].pr_number(), Some(200));
        assert_eq!(list.0[1].body, "Second body");
    }
}

tooling/compliance/src/github.rs 🔗

@@ -0,0 +1,424 @@
+use std::{collections::HashMap, fmt, ops::Not, rc::Rc};
+
+use anyhow::Result;
+use derive_more::Deref;
+use serde::Deserialize;
+
+use crate::git::CommitSha;
+
// Label name used with `ensure_pull_request_has_label` — presumably applied
// to PRs that still need a compliance review; confirm against the workflow.
pub const PR_REVIEW_LABEL: &str = "PR state:needs review";

/// A GitHub account reference.
#[derive(Debug, Clone)]
pub struct GitHubUser {
    pub login: String,
}

/// The subset of pull request fields the compliance checks consume.
#[derive(Debug, Clone)]
pub struct PullRequestData {
    pub number: u64,
    // PR author; `None` when the API omits the user.
    pub user: Option<GitHubUser>,
    // The user who merged the PR, when available.
    pub merged_by: Option<GitHubUser>,
}

/// Review verdict: only `Approved` is meaningful here; every other state the
/// API reports is collapsed into `Other`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ReviewState {
    Approved,
    Other,
}

/// A single pull request review.
#[derive(Debug, Clone)]
pub struct PullRequestReview {
    pub user: Option<GitHubUser>,
    pub state: Option<ReviewState>,
}

/// A single issue comment on a pull request.
#[derive(Debug, Clone)]
pub struct PullRequestComment {
    pub user: GitHubUser,
    pub body: Option<String>,
}
+
+#[derive(Debug, Deserialize, Clone, Deref, PartialEq, Eq)]
+pub struct GithubLogin {
+    login: String,
+}
+
+impl GithubLogin {
+    pub(crate) fn new(login: String) -> Self {
+        Self { login }
+    }
+}
+
+impl fmt::Display for GithubLogin {
+    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(formatter, "@{}", self.login)
+    }
+}
+
/// A commit author as returned by GitHub's GraphQL commit data.
#[derive(Debug, Deserialize, Clone)]
pub struct CommitAuthor {
    name: String,
    email: String,
    // Linked GitHub account, when GitHub could attribute the identity.
    user: Option<GithubLogin>,
}

impl CommitAuthor {
    pub(crate) fn user(&self) -> Option<&GithubLogin> {
        self.user.as_ref()
    }
}

impl PartialEq for CommitAuthor {
    // Compares by GitHub login when BOTH sides have one; otherwise falls
    // back to a fuzzy email-OR-name match.
    // NOTE(review): this relation is deliberately loose and not transitive
    // (a name-only match and an email-only match can chain through a third
    // value). Fine for de-duplicating co-authors, but never derive
    // `Eq`/`Hash` on top of it.
    fn eq(&self, other: &Self) -> bool {
        self.user.as_ref().zip(other.user.as_ref()).map_or_else(
            || self.email == other.email || self.name == other.name,
            |(l, r)| l == r,
        )
    }
}

impl fmt::Display for CommitAuthor {
    // Prefers the GitHub login for display; falls back to the email address.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.user.as_ref() {
            Some(user) => write!(formatter, "{} ({user})", self.name),
            None => write!(formatter, "{} ({})", self.name, self.email),
        }
    }
}
+
+#[derive(Debug, Deserialize)]
+pub struct CommitAuthors {
+    #[serde(rename = "author")]
+    primary_author: CommitAuthor,
+    #[serde(rename = "authors")]
+    co_authors: Vec<CommitAuthor>,
+}
+
+impl CommitAuthors {
+    pub fn co_authors(&self) -> Option<impl Iterator<Item = &CommitAuthor>> {
+        self.co_authors.is_empty().not().then(|| {
+            self.co_authors
+                .iter()
+                .filter(|co_author| *co_author != &self.primary_author)
+        })
+    }
+}
+
/// Commit-SHA → authors mapping, deserialized from a GraphQL response.
#[derive(Debug, Deserialize, Deref)]
pub struct AuthorsForCommits(HashMap<CommitSha, CommitAuthors>);

/// The GitHub operations the compliance checks depend on, abstracted behind
/// a trait so tests can substitute a mock implementation.
#[async_trait::async_trait(?Send)]
pub trait GitHubApiClient {
    async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData>;
    async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>>;
    async fn get_pull_request_comments(&self, pr_number: u64) -> Result<Vec<PullRequestComment>>;
    async fn get_commit_authors(&self, commit_shas: &[&CommitSha]) -> Result<AuthorsForCommits>;
    async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool>;
    async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()>;
}
+
+pub struct GitHubClient {
+    api: Rc<dyn GitHubApiClient>,
+}
+
+impl GitHubClient {
+    pub fn new(api: Rc<dyn GitHubApiClient>) -> Self {
+        Self { api }
+    }
+
+    #[cfg(feature = "octo-client")]
+    pub async fn for_app(app_id: u64, app_private_key: &str) -> Result<Self> {
+        let client = OctocrabClient::new(app_id, app_private_key).await?;
+        Ok(Self::new(Rc::new(client)))
+    }
+
+    pub async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData> {
+        self.api.get_pull_request(pr_number).await
+    }
+
+    pub async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>> {
+        self.api.get_pull_request_reviews(pr_number).await
+    }
+
+    pub async fn get_pull_request_comments(
+        &self,
+        pr_number: u64,
+    ) -> Result<Vec<PullRequestComment>> {
+        self.api.get_pull_request_comments(pr_number).await
+    }
+
+    pub async fn get_commit_authors<'a>(
+        &self,
+        commit_shas: impl IntoIterator<Item = &'a CommitSha>,
+    ) -> Result<AuthorsForCommits> {
+        let shas: Vec<&CommitSha> = commit_shas.into_iter().collect();
+        self.api.get_commit_authors(&shas).await
+    }
+
+    pub async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool> {
+        self.api.check_org_membership(login).await
+    }
+
+    pub async fn add_label_to_pull_request(&self, label: &str, pr_number: u64) -> Result<()> {
+        self.api
+            .ensure_pull_request_has_label(label, pr_number)
+            .await
+    }
+}
+
+#[cfg(feature = "octo-client")]
+mod octo_client {
+    use anyhow::{Context, Result};
+    use futures::TryStreamExt as _;
+    use itertools::Itertools;
+    use jsonwebtoken::EncodingKey;
+    use octocrab::{
+        Octocrab, Page, models::pulls::ReviewState as OctocrabReviewState,
+        service::middleware::cache::mem::InMemoryCache,
+    };
+    use serde::de::DeserializeOwned;
+    use tokio::pin;
+
+    use crate::git::CommitSha;
+
+    use super::{
+        AuthorsForCommits, GitHubApiClient, GitHubUser, GithubLogin, PullRequestComment,
+        PullRequestData, PullRequestReview, ReviewState,
+    };
+
    // Page size for paginated REST requests.
    const PAGE_SIZE: u8 = 100;
    const ORG: &str = "zed-industries";
    const REPO: &str = "zed";

    /// GitHub client backed by octocrab, scoped to an App installation.
    pub struct OctocrabClient {
        client: Octocrab,
    }
+
+    impl OctocrabClient {
        /// Authenticates as the GitHub App identified by `app_id` /
        /// `app_private_key` (RSA PEM) and scopes the client to the app's
        /// installation on the target organization.
        pub async fn new(app_id: u64, app_private_key: &str) -> Result<Self> {
            let octocrab = Octocrab::builder()
                .cache(InMemoryCache::new())
                .app(
                    app_id.into(),
                    EncodingKey::from_rsa_pem(app_private_key.as_bytes())?,
                )
                .build()?;

            let installations = octocrab
                .apps()
                .installations()
                .send()
                .await
                .context("Failed to fetch installations")?
                .take_items();

            // Pick the installation belonging to the organization we target.
            let installation_id = installations
                .into_iter()
                .find(|installation| installation.account.login == ORG)
                .context("Could not find Zed repository in installations")?
                .id;

            let client = octocrab.installation(installation_id)?;
            Ok(Self { client })
        }
+
        /// Builds a single GraphQL query fetching author + co-author data for
        /// every given SHA, using one `commit<sha>` alias per object so all
        /// commits resolve in a single round trip.
        fn build_co_authors_query<'a>(shas: impl IntoIterator<Item = &'a CommitSha>) -> String {
            // Only the first 10 co-authors per commit are requested.
            const FRAGMENT: &str = r#"
                ... on Commit {
                    author {
                        name
                        email
                        user { login }
                    }
                    authors(first: 10) {
                        nodes {
                            name
                            email
                            user { login }
                        }
                    }
                }
            "#;

            let objects: String = shas
                .into_iter()
                .map(|commit_sha| {
                    format!(
                        "commit{sha}: object(oid: \"{sha}\") {{ {FRAGMENT} }}",
                        sha = **commit_sha
                    )
                })
                .join("\n");

            // Newlines are stripped to flatten the query to one line; the
            // per-line leading indentation inside FRAGMENT keeps adjacent
            // tokens separated afterwards.
            format!("{{  repository(owner: \"{ORG}\", name: \"{REPO}\") {{ {objects}  }} }}")
                .replace("\n", "")
        }
+
        /// Thin typed wrapper around octocrab's GraphQL endpoint.
        async fn graphql<R: octocrab::FromResponse>(
            &self,
            query: &serde_json::Value,
        ) -> octocrab::Result<R> {
            self.client.graphql(query).await
        }

        /// Collects every item across all pages of a paginated response.
        async fn get_all<T: DeserializeOwned + 'static>(
            &self,
            page: Page<T>,
        ) -> octocrab::Result<Vec<T>> {
            self.get_filtered(page, |_| true).await
        }

        /// Streams paginated results, collecting items while `predicate`
        /// holds.
        ///
        /// NOTE(review): this has take-while, not filter, semantics — the
        /// loop stops at the FIRST item failing `predicate`, and later items
        /// (and pages) are never fetched. Confirm callers rely on that early
        /// cutoff; the name `get_filtered` suggests otherwise.
        async fn get_filtered<T: DeserializeOwned + 'static>(
            &self,
            page: Page<T>,
            predicate: impl Fn(&T) -> bool,
        ) -> octocrab::Result<Vec<T>> {
            let stream = page.into_stream(&self.client);
            pin!(stream);

            let mut results = Vec::new();

            while let Some(item) = stream.try_next().await?
                && predicate(&item)
            {
                results.push(item);
            }

            Ok(results)
        }
+
+    #[async_trait::async_trait(?Send)]
+    impl GitHubApiClient for OctocrabClient {
        /// Fetches a PR and maps it onto the trimmed-down [`PullRequestData`].
        async fn get_pull_request(&self, pr_number: u64) -> Result<PullRequestData> {
            let pr = self.client.pulls(ORG, REPO).get(pr_number).await?;
            Ok(PullRequestData {
                number: pr.number,
                user: pr.user.map(|user| GitHubUser { login: user.login }),
                merged_by: pr.merged_by.map(|user| GitHubUser { login: user.login }),
            })
        }

        /// Fetches all reviews for a PR, collapsing every non-approved
        /// review state into [`ReviewState::Other`].
        async fn get_pull_request_reviews(&self, pr_number: u64) -> Result<Vec<PullRequestReview>> {
            let page = self
                .client
                .pulls(ORG, REPO)
                .list_reviews(pr_number)
                .per_page(PAGE_SIZE)
                .send()
                .await?;

            let reviews = self.get_all(page).await?;

            Ok(reviews
                .into_iter()
                .map(|review| PullRequestReview {
                    user: review.user.map(|user| GitHubUser { login: user.login }),
                    state: review.state.map(|state| match state {
                        OctocrabReviewState::Approved => ReviewState::Approved,
                        _ => ReviewState::Other,
                    }),
                })
                .collect())
        }

        /// Fetches all issue comments on the PR (paginated).
        async fn get_pull_request_comments(
            &self,
            pr_number: u64,
        ) -> Result<Vec<PullRequestComment>> {
            let page = self
                .client
                .issues(ORG, REPO)
                .list_comments(pr_number)
                .per_page(PAGE_SIZE)
                .send()
                .await?;

            let comments = self.get_all(page).await?;

            Ok(comments
                .into_iter()
                .map(|comment| PullRequestComment {
                    user: GitHubUser {
                        login: comment.user.login,
                    },
                    body: comment.body,
                })
                .collect())
        }
+
+        async fn get_commit_authors(
+            &self,
+            commit_shas: &[&CommitSha],
+        ) -> Result<AuthorsForCommits> {
+            let query = Self::build_co_authors_query(commit_shas.iter().copied());
+            let query = serde_json::json!({ "query": query });
+            let mut response = self.graphql::<serde_json::Value>(&query).await?;
+
+            response
+                .get_mut("data")
+                .and_then(|data| data.get_mut("repository"))
+                .and_then(|repo| repo.as_object_mut())
+                .ok_or_else(|| anyhow::anyhow!("Unexpected response format!"))
+                .and_then(|commit_data| {
+                    let mut response_map = serde_json::Map::with_capacity(commit_data.len());
+
+                    for (key, value) in commit_data.iter_mut() {
+                        let key_without_prefix = key.strip_prefix("commit").unwrap_or(key);
+                        if let Some(authors) = value.get_mut("authors") {
+                            if let Some(nodes) = authors.get("nodes") {
+                                *authors = nodes.clone();
+                            }
+                        }
+
+                        response_map.insert(key_without_prefix.to_owned(), value.clone());
+                    }
+
+                    serde_json::from_value(serde_json::Value::Object(response_map))
+                        .context("Failed to deserialize commit authors")
+                })
+        }
+
+        async fn check_org_membership(&self, login: &GithubLogin) -> Result<bool> {
+            let page = self
+                .client
+                .orgs(ORG)
+                .list_members()
+                .per_page(PAGE_SIZE)
+                .send()
+                .await?;
+
+            let members = self.get_all(page).await?;
+
+            Ok(members
+                .into_iter()
+                .any(|member| member.login == login.as_str()))
+        }
+
+        async fn ensure_pull_request_has_label(&self, label: &str, pr_number: u64) -> Result<()> {
+            if self
+                .get_filtered(
+                    self.client
+                        .issues(ORG, REPO)
+                        .list_labels_for_issue(pr_number)
+                        .per_page(PAGE_SIZE)
+                        .send()
+                        .await?,
+                    |pr_label| pr_label.name == label,
+                )
+                .await?
+                .is_empty()
+            {
+                self.client
+                    .issues(ORG, REPO)
+                    .add_labels(pr_number, &[label.to_owned()])
+                    .await?;
+            }
+
+            Ok(())
+        }
+    }
+}
+
+#[cfg(feature = "octo-client")]
+pub use octo_client::OctocrabClient;

tooling/compliance/src/report.rs 🔗

@@ -0,0 +1,446 @@
+use std::{
+    fs::{self, File},
+    io::{BufWriter, Write},
+    path::Path,
+};
+
+use anyhow::Context as _;
+use derive_more::Display;
+use itertools::{Either, Itertools};
+
+use crate::{
+    checks::{ReviewFailure, ReviewResult, ReviewSuccess},
+    git::CommitDetails,
+};
+
+const PULL_REQUEST_BASE_URL: &str = "https://github.com/zed-industries/zed/pull";
+
+#[derive(Debug)]
+pub struct ReportEntry<R> {
+    pub commit: CommitDetails,
+    reason: R,
+}
+
+impl<R: ToString> ReportEntry<R> {
+    fn commit_cell(&self) -> String {
+        let title = escape_markdown_link_text(self.commit.title());
+
+        match self.commit.pr_number() {
+            Some(pr_number) => format!("[{title}]({PULL_REQUEST_BASE_URL}/{pr_number})"),
+            None => escape_markdown_table_text(self.commit.title()),
+        }
+    }
+
+    fn pull_request_cell(&self) -> String {
+        self.commit
+            .pr_number()
+            .map(|pr_number| format!("#{pr_number}"))
+            .unwrap_or_else(|| "—".to_owned())
+    }
+
+    fn author_cell(&self) -> String {
+        escape_markdown_table_text(&self.commit.author().to_string())
+    }
+
+    fn reason_cell(&self) -> String {
+        escape_markdown_table_text(&self.reason.to_string())
+    }
+}
+
+impl ReportEntry<ReviewFailure> {
+    fn issue_kind(&self) -> IssueKind {
+        match self.reason {
+            ReviewFailure::Other(_) => IssueKind::Error,
+            _ => IssueKind::NotReviewed,
+        }
+    }
+}
+
+impl ReportEntry<ReviewSuccess> {
+    fn reviewers_cell(&self) -> String {
+        match &self.reason.reviewers() {
+            Ok(reviewers) => escape_markdown_table_text(&reviewers),
+            Err(_) => "—".to_owned(),
+        }
+    }
+}
+
+#[derive(Debug, Default)]
+pub struct ReportSummary {
+    pub pull_requests: usize,
+    pub reviewed: usize,
+    pub not_reviewed: usize,
+    pub errors: usize,
+}
+
+pub enum ReportReviewSummary {
+    MissingReviews,
+    MissingReviewsWithErrors,
+    NoIssuesFound,
+}
+
+impl ReportSummary {
+    fn from_entries(entries: &[ReportEntry<ReviewResult>]) -> Self {
+        Self {
+            pull_requests: entries
+                .iter()
+                .filter_map(|entry| entry.commit.pr_number())
+                .unique()
+                .count(),
+            reviewed: entries.iter().filter(|entry| entry.reason.is_ok()).count(),
+            not_reviewed: entries
+                .iter()
+                .filter(|entry| {
+                    matches!(
+                        entry.reason,
+                        Err(ReviewFailure::NoPullRequestFound | ReviewFailure::Unreviewed)
+                    )
+                })
+                .count(),
+            errors: entries
+                .iter()
+                .filter(|entry| matches!(entry.reason, Err(ReviewFailure::Other(_))))
+                .count(),
+        }
+    }
+
+    pub fn review_summary(&self) -> ReportReviewSummary {
+        match self.not_reviewed {
+            0 if self.errors == 0 => ReportReviewSummary::NoIssuesFound,
+            1.. if self.errors == 0 => ReportReviewSummary::MissingReviews,
+            _ => ReportReviewSummary::MissingReviewsWithErrors,
+        }
+    }
+
+    fn has_errors(&self) -> bool {
+        self.errors > 0
+    }
+}
+
+#[derive(Clone, Copy, Debug, Display, PartialEq, Eq, PartialOrd, Ord)]
+enum IssueKind {
+    #[display("Error")]
+    Error,
+    #[display("Not reviewed")]
+    NotReviewed,
+}
+
+#[derive(Debug, Default)]
+pub struct Report {
+    entries: Vec<ReportEntry<ReviewResult>>,
+}
+
+impl Report {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn add(&mut self, commit: CommitDetails, result: ReviewResult) {
+        self.entries.push(ReportEntry {
+            commit,
+            reason: result,
+        });
+    }
+
+    pub fn errors(&self) -> impl Iterator<Item = &ReportEntry<ReviewResult>> {
+        self.entries.iter().filter(|entry| entry.reason.is_err())
+    }
+
+    pub fn summary(&self) -> ReportSummary {
+        ReportSummary::from_entries(&self.entries)
+    }
+
+    pub fn write_markdown(self, path: impl AsRef<Path>) -> anyhow::Result<()> {
+        let path = path.as_ref();
+
+        if let Some(parent) = path
+            .parent()
+            .filter(|parent| !parent.as_os_str().is_empty())
+        {
+            fs::create_dir_all(parent).with_context(|| {
+                format!(
+                    "Failed to create parent directory for markdown report at {}",
+                    path.display()
+                )
+            })?;
+        }
+
+        let summary = self.summary();
+        let (successes, mut issues): (Vec<_>, Vec<_>) =
+            self.entries
+                .into_iter()
+                .partition_map(|entry| match entry.reason {
+                    Ok(success) => Either::Left(ReportEntry {
+                        reason: success,
+                        commit: entry.commit,
+                    }),
+                    Err(fail) => Either::Right(ReportEntry {
+                        reason: fail,
+                        commit: entry.commit,
+                    }),
+                });
+
+        issues.sort_by_key(|entry| entry.issue_kind());
+
+        let file = File::create(path)
+            .with_context(|| format!("Failed to create markdown report at {}", path.display()))?;
+        let mut writer = BufWriter::new(file);
+
+        writeln!(writer, "# Compliance report")?;
+        writeln!(writer)?;
+        writeln!(writer, "## Overview")?;
+        writeln!(writer)?;
+        writeln!(writer, "- PRs: {}", summary.pull_requests)?;
+        writeln!(writer, "- Reviewed: {}", summary.reviewed)?;
+        writeln!(writer, "- Not reviewed: {}", summary.not_reviewed)?;
+        if summary.has_errors() {
+            writeln!(writer, "- Errors: {}", summary.errors)?;
+        }
+        writeln!(writer)?;
+
+        write_issue_table(&mut writer, &issues, &summary)?;
+        write_success_table(&mut writer, &successes)?;
+
+        writer
+            .flush()
+            .with_context(|| format!("Failed to flush markdown report to {}", path.display()))
+    }
+}
+
+fn write_issue_table(
+    writer: &mut impl Write,
+    issues: &[ReportEntry<ReviewFailure>],
+    summary: &ReportSummary,
+) -> std::io::Result<()> {
+    if summary.has_errors() {
+        writeln!(writer, "## Errors and unreviewed commits")?;
+    } else {
+        writeln!(writer, "## Unreviewed commits")?;
+    }
+    writeln!(writer)?;
+
+    if issues.is_empty() {
+        if summary.has_errors() {
+            writeln!(writer, "No errors or unreviewed commits found.")?;
+        } else {
+            writeln!(writer, "No unreviewed commits found.")?;
+        }
+        writeln!(writer)?;
+        return Ok(());
+    }
+
+    writeln!(writer, "| Commit | PR | Author | Outcome | Reason |")?;
+    writeln!(writer, "| --- | --- | --- | --- | --- |")?;
+
+    for entry in issues {
+        let issue_kind = entry.issue_kind();
+        writeln!(
+            writer,
+            "| {} | {} | {} | {} | {} |",
+            entry.commit_cell(),
+            entry.pull_request_cell(),
+            entry.author_cell(),
+            issue_kind,
+            entry.reason_cell(),
+        )?;
+    }
+
+    writeln!(writer)?;
+    Ok(())
+}
+
+fn write_success_table(
+    writer: &mut impl Write,
+    successful_entries: &[ReportEntry<ReviewSuccess>],
+) -> std::io::Result<()> {
+    writeln!(writer, "## Successful commits")?;
+    writeln!(writer)?;
+
+    if successful_entries.is_empty() {
+        writeln!(writer, "No successful commits found.")?;
+        writeln!(writer)?;
+        return Ok(());
+    }
+
+    writeln!(writer, "| Commit | PR | Author | Reviewers | Reason |")?;
+    writeln!(writer, "| --- | --- | --- | --- | --- |")?;
+
+    for entry in successful_entries {
+        writeln!(
+            writer,
+            "| {} | {} | {} | {} | {} |",
+            entry.commit_cell(),
+            entry.pull_request_cell(),
+            entry.author_cell(),
+            entry.reviewers_cell(),
+            entry.reason_cell(),
+        )?;
+    }
+
+    writeln!(writer)?;
+    Ok(())
+}
+
+fn escape_markdown_link_text(input: &str) -> String {
+    escape_markdown_table_text(input)
+        .replace('[', r"\[")
+        .replace(']', r"\]")
+}
+
+fn escape_markdown_table_text(input: &str) -> String {
+    input
+        .replace('\\', r"\\")
+        .replace('|', r"\|")
+        .replace('\r', "")
+        .replace('\n', "<br>")
+}
+
+#[cfg(test)]
+mod tests {
+    use std::str::FromStr;
+
+    use crate::{
+        checks::{ReviewFailure, ReviewSuccess},
+        git::{CommitDetails, CommitList},
+        github::{GitHubUser, PullRequestReview, ReviewState},
+    };
+
+    use super::{Report, ReportReviewSummary};
+
+    fn make_commit(
+        sha: &str,
+        author_name: &str,
+        author_email: &str,
+        title: &str,
+        body: &str,
+    ) -> CommitDetails {
+        let formatted = format!(
+            "{sha}|field-delimiter|{author_name}|field-delimiter|{author_email}|field-delimiter|{title}|body-delimiter|{body}|commit-delimiter|"
+        );
+        CommitList::from_str(&formatted)
+            .expect("test commit should parse")
+            .into_iter()
+            .next()
+            .expect("should have one commit")
+    }
+
+    fn reviewed() -> ReviewSuccess {
+        ReviewSuccess::PullRequestReviewed(vec![PullRequestReview {
+            user: Some(GitHubUser {
+                login: "reviewer".to_owned(),
+            }),
+            state: Some(ReviewState::Approved),
+        }])
+    }
+
+    #[test]
+    fn report_summary_counts_are_accurate() {
+        let mut report = Report::new();
+
+        report.add(
+            make_commit(
+                "aaa",
+                "Alice",
+                "alice@test.com",
+                "Reviewed commit (#100)",
+                "",
+            ),
+            Ok(reviewed()),
+        );
+        report.add(
+            make_commit("bbb", "Bob", "bob@test.com", "Unreviewed commit (#200)", ""),
+            Err(ReviewFailure::Unreviewed),
+        );
+        report.add(
+            make_commit("ccc", "Carol", "carol@test.com", "No PR commit", ""),
+            Err(ReviewFailure::NoPullRequestFound),
+        );
+        report.add(
+            make_commit("ddd", "Dave", "dave@test.com", "Error commit (#300)", ""),
+            Err(ReviewFailure::Other(anyhow::anyhow!("some error"))),
+        );
+
+        let summary = report.summary();
+        assert_eq!(summary.pull_requests, 3);
+        assert_eq!(summary.reviewed, 1);
+        assert_eq!(summary.not_reviewed, 2);
+        assert_eq!(summary.errors, 1);
+    }
+
+    #[test]
+    fn report_summary_all_reviewed_is_no_issues() {
+        let mut report = Report::new();
+
+        report.add(
+            make_commit("aaa", "Alice", "alice@test.com", "First (#100)", ""),
+            Ok(reviewed()),
+        );
+        report.add(
+            make_commit("bbb", "Bob", "bob@test.com", "Second (#200)", ""),
+            Ok(reviewed()),
+        );
+
+        let summary = report.summary();
+        assert!(matches!(
+            summary.review_summary(),
+            ReportReviewSummary::NoIssuesFound
+        ));
+    }
+
+    #[test]
+    fn report_summary_missing_reviews_only() {
+        let mut report = Report::new();
+
+        report.add(
+            make_commit("aaa", "Alice", "alice@test.com", "Reviewed (#100)", ""),
+            Ok(reviewed()),
+        );
+        report.add(
+            make_commit("bbb", "Bob", "bob@test.com", "Unreviewed (#200)", ""),
+            Err(ReviewFailure::Unreviewed),
+        );
+
+        let summary = report.summary();
+        assert!(matches!(
+            summary.review_summary(),
+            ReportReviewSummary::MissingReviews
+        ));
+    }
+
+    #[test]
+    fn report_summary_errors_and_missing_reviews() {
+        let mut report = Report::new();
+
+        report.add(
+            make_commit("aaa", "Alice", "alice@test.com", "Unreviewed (#100)", ""),
+            Err(ReviewFailure::Unreviewed),
+        );
+        report.add(
+            make_commit("bbb", "Bob", "bob@test.com", "Errored (#200)", ""),
+            Err(ReviewFailure::Other(anyhow::anyhow!("check failed"))),
+        );
+
+        let summary = report.summary();
+        assert!(matches!(
+            summary.review_summary(),
+            ReportReviewSummary::MissingReviewsWithErrors
+        ));
+    }
+
+    #[test]
+    fn report_summary_deduplicates_pull_requests() {
+        let mut report = Report::new();
+
+        report.add(
+            make_commit("aaa", "Alice", "alice@test.com", "First change (#100)", ""),
+            Ok(reviewed()),
+        );
+        report.add(
+            make_commit("bbb", "Bob", "bob@test.com", "Second change (#100)", ""),
+            Ok(reviewed()),
+        );
+
+        let summary = report.summary();
+        assert_eq!(summary.pull_requests, 1);
+    }
+}

tooling/xtask/Cargo.toml 🔗

@@ -15,7 +15,8 @@ backtrace.workspace = true
 cargo_metadata.workspace = true
 cargo_toml.workspace = true
 clap = { workspace = true, features = ["derive"] }
-toml.workspace = true
+compliance = { workspace = true, features = ["octo-client"] }
+gh-workflow.workspace = true
 indoc.workspace = true
 indexmap.workspace = true
 itertools.workspace = true
@@ -24,5 +25,6 @@ serde.workspace = true
 serde_json.workspace = true
 serde_yaml = "0.9.34"
 strum.workspace = true
+tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }
+toml.workspace = true
 toml_edit.workspace = true
-gh-workflow.workspace = true

tooling/xtask/src/main.rs 🔗

@@ -15,6 +15,7 @@ struct Args {
 enum CliCommand {
     /// Runs `cargo clippy`.
     Clippy(tasks::clippy::ClippyArgs),
+    Compliance(tasks::compliance::ComplianceArgs),
     Licenses(tasks::licenses::LicensesArgs),
     /// Checks that packages conform to a set of standards.
     PackageConformity(tasks::package_conformity::PackageConformityArgs),
@@ -31,6 +32,7 @@ fn main() -> Result<()> {
 
     match args.command {
         CliCommand::Clippy(args) => tasks::clippy::run_clippy(args),
+        CliCommand::Compliance(args) => tasks::compliance::check_compliance(args),
         CliCommand::Licenses(args) => tasks::licenses::run_licenses(args),
         CliCommand::PackageConformity(args) => {
             tasks::package_conformity::run_package_conformity(args)

tooling/xtask/src/tasks.rs 🔗

@@ -1,4 +1,5 @@
 pub mod clippy;
+pub mod compliance;
 pub mod licenses;
 pub mod package_conformity;
 pub mod publish_gpui;

tooling/xtask/src/tasks/compliance.rs 🔗

@@ -0,0 +1,135 @@
+use std::path::PathBuf;
+
+use anyhow::{Context, Result};
+use clap::Parser;
+
+use compliance::{
+    checks::Reporter,
+    git::{CommitsFromVersionToHead, GetVersionTags, GitCommand, VersionTag},
+    github::GitHubClient,
+    report::ReportReviewSummary,
+};
+
+#[derive(Parser)]
+pub struct ComplianceArgs {
+    #[arg(value_parser = VersionTag::parse)]
+    // The version to be on the lookout for
+    pub(crate) version_tag: VersionTag,
+    #[arg(long)]
+    // The markdown file to write the compliance report to
+    report_path: PathBuf,
+    #[arg(long)]
+    // An optional branch to use instead of the determined version branch
+    branch: Option<String>,
+}
+
+impl ComplianceArgs {
+    pub(crate) fn version_tag(&self) -> &VersionTag {
+        &self.version_tag
+    }
+
+    fn version_branch(&self) -> String {
+        self.branch.clone().unwrap_or_else(|| {
+            format!(
+                "v{major}.{minor}.x",
+                major = self.version_tag().version().major,
+                minor = self.version_tag().version().minor
+            )
+        })
+    }
+}
+
+async fn check_compliance_impl(args: ComplianceArgs) -> Result<()> {
+    let app_id = std::env::var("GITHUB_APP_ID").context("Missing GITHUB_APP_ID")?;
+    let key = std::env::var("GITHUB_APP_KEY").context("Missing GITHUB_APP_KEY")?;
+
+    let tag = args.version_tag();
+
+    let previous_version = GitCommand::run(GetVersionTags)?
+        .sorted()
+        .find_previous_minor_version(&tag)
+        .cloned()
+        .ok_or_else(|| {
+            anyhow::anyhow!(
+                // `VersionTag` implements `Display`; capture it directly.
+                "Could not find previous version for tag {tag}"
+            )
+        })?;
+
+    println!(
+        "Checking compliance for version {} with version {} as base",
+        tag.version(),
+        previous_version.version()
+    );
+
+    let commits = GitCommand::run(CommitsFromVersionToHead::new(
+        previous_version,
+        args.version_branch(),
+    ))?;
+
+    let Some(range) = commits.range() else {
+        anyhow::bail!("No commits found to check");
+    };
+
+    println!("Checking commit range {range}, {} total", commits.len());
+
+    let client = GitHubClient::for_app(
+        app_id.parse().context("Failed to parse app ID as int")?,
+        key.as_ref(),
+    )
+    .await?;
+
+    println!("Initialized GitHub client for app ID {app_id}");
+
+    let report = Reporter::new(commits, &client).generate_report().await?;
+
+    println!(
+        "Generated report for version {}",
+        args.version_tag()
+    );
+
+    let summary = report.summary();
+
+    println!(
+        "Applying compliance labels to {} pull requests",
+        summary.pull_requests
+    );
+
+    for report in report.errors() {
+        if let Some(pr_number) = report.commit.pr_number() {
+            println!("Adding review label to PR {}...", pr_number);
+
+            client
+                .add_label_to_pull_request(compliance::github::PR_REVIEW_LABEL, pr_number)
+                .await?;
+        }
+    }
+
+    let report_path = args.report_path.with_extension("md");
+
+    report.write_markdown(&report_path)?;
+
+    println!("Wrote compliance report to {}", report_path.display());
+
+    match summary.review_summary() {
+        ReportReviewSummary::MissingReviews => Err(anyhow::anyhow!(
+            "Compliance check failed, found {} commits not reviewed",
+            summary.not_reviewed
+        )),
+        ReportReviewSummary::MissingReviewsWithErrors => Err(anyhow::anyhow!(
+            "Compliance check failed with {} unreviewed commits and {} other issues",
+            summary.not_reviewed,
+            summary.errors
+        )),
+        ReportReviewSummary::NoIssuesFound => {
+            println!("No issues found, compliance check passed.");
+            Ok(())
+        }
+    }
+}
+
+pub fn check_compliance(args: ComplianceArgs) -> Result<()> {
+    tokio::runtime::Runtime::new()
+        .context("Failed to create tokio runtime")
+        .and_then(|handle| handle.block_on(check_compliance_impl(args)))
+}

tooling/xtask/src/tasks/workflows.rs 🔗

@@ -11,6 +11,7 @@ mod autofix_pr;
 mod bump_patch_version;
 mod cherry_pick;
 mod compare_perf;
+mod compliance_check;
 mod danger;
 mod deploy_collab;
 mod extension_auto_bump;
@@ -197,6 +198,7 @@ pub fn run_workflows(args: GenerateWorkflowArgs) -> Result<()> {
         WorkflowFile::zed(bump_patch_version::bump_patch_version),
         WorkflowFile::zed(cherry_pick::cherry_pick),
         WorkflowFile::zed(compare_perf::compare_perf),
+        WorkflowFile::zed(compliance_check::compliance_check),
         WorkflowFile::zed(danger::danger),
         WorkflowFile::zed(deploy_collab::deploy_collab),
         WorkflowFile::zed(extension_bump::extension_bump),

tooling/xtask/src/tasks/workflows/compliance_check.rs 🔗

@@ -0,0 +1,66 @@
+use gh_workflow::{Event, Expression, Job, Run, Schedule, Step, Workflow};
+
+use crate::tasks::workflows::{
+    runners,
+    steps::{self, CommonJobConditions, named},
+    vars::{self, StepOutput},
+};
+
+pub fn compliance_check() -> Workflow {
+    let check = scheduled_compliance_check();
+
+    named::workflow()
+        .on(Event::default().schedule([Schedule::new("30 17 * * 2")]))
+        .add_env(("CARGO_TERM_COLOR", "always"))
+        .add_job(check.name, check.job)
+}
+
+fn scheduled_compliance_check() -> steps::NamedJob {
+    let determine_version_step = named::bash(indoc::indoc! {r#"
+        VERSION=$(sed -n 's/^version = "\(.*\)"/\1/p' crates/zed/Cargo.toml | tr -d '[:space:]')
+        if [ -z "$VERSION" ]; then
+            echo "Could not determine version from crates/zed/Cargo.toml"
+            exit 1
+        fi
+        TAG="v${VERSION}-pre"
+        echo "Checking compliance for $TAG"
+        echo "tag=$TAG" >> "$GITHUB_OUTPUT"
+    "#})
+    .id("determine-version");
+
+    let tag_output = StepOutput::new(&determine_version_step, "tag");
+
+    fn run_compliance_check(tag: &StepOutput) -> Step<Run> {
+        named::bash(
+            r#"cargo xtask compliance "$LATEST_TAG" --branch main --report-path target/compliance-report"#,
+        )
+        .id("run-compliance-check")
+        .add_env(("LATEST_TAG", tag.to_string()))
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+    }
+
+    fn send_failure_slack_notification(tag: &StepOutput) -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            MESSAGE="⚠️ Scheduled compliance check failed for upcoming preview release $LATEST_TAG: there are PRs with missing reviews or the check itself errored."
+
+            curl -X POST -H 'Content-type: application/json' \
+                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+                "$SLACK_WEBHOOK"
+        "#})
+        .if_condition(Expression::new("failure()"))
+        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+        .add_env(("LATEST_TAG", tag.to_string()))
+    }
+
+    named::job(
+        Job::default()
+            .with_repository_owner_guard()
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(steps::checkout_repo().with_full_history())
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .add_step(determine_version_step)
+            .add_step(run_compliance_check(&tag_output))
+            .add_step(send_failure_slack_notification(&tag_output)),
+    )
+}

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -1,11 +1,13 @@
-use gh_workflow::{Event, Expression, Push, Run, Step, Use, Workflow, ctx::Context};
+use gh_workflow::{Event, Expression, Job, Push, Run, Step, Use, Workflow, ctx::Context};
 use indoc::formatdoc;
 
 use crate::tasks::workflows::{
     run_bundling::{bundle_linux, bundle_mac, bundle_windows},
     run_tests,
     runners::{self, Arch, Platform},
-    steps::{self, FluentBuilder, NamedJob, dependant_job, named, release_job},
+    steps::{
+        self, CommonJobConditions, FluentBuilder, NamedJob, dependant_job, named, release_job,
+    },
     vars::{self, StepOutput, assets},
 };
 
@@ -22,6 +24,7 @@ pub(crate) fn release() -> Workflow {
     let check_scripts = run_tests::check_scripts();
 
     let create_draft_release = create_draft_release();
+    let compliance = compliance_check();
 
     let bundle = ReleaseBundleJobs {
         linux_aarch64: bundle_linux(
@@ -92,6 +95,7 @@ pub(crate) fn release() -> Workflow {
         .add_job(windows_clippy.name, windows_clippy.job)
         .add_job(check_scripts.name, check_scripts.job)
         .add_job(create_draft_release.name, create_draft_release.job)
+        .add_job(compliance.name, compliance.job)
         .map(|mut workflow| {
             for job in bundle.into_jobs() {
                 workflow = workflow.add_job(job.name, job.job);
@@ -149,6 +153,59 @@ pub(crate) fn create_sentry_release() -> Step<Use> {
     .add_with(("environment", "production"))
 }
 
+fn compliance_check() -> NamedJob {
+    fn run_compliance_check() -> Step<Run> {
+        named::bash(
+            r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path "$COMPLIANCE_FILE_OUTPUT""#,
+        )
+        .id("run-compliance-check")
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+    }
+
+    fn send_compliance_slack_notification() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+                STATUS="✅ Compliance check passed for $GITHUB_REF_NAME"
+            else
+                STATUS="❌ Compliance check failed for $GITHUB_REF_NAME"
+            fi
+
+            REPORT_CONTENT=""
+            if [ -f "$COMPLIANCE_FILE_OUTPUT" ]; then
+                REPORT_CONTENT=$(cat "$COMPLIANCE_FILE_OUTPUT")
+            fi
+
+            MESSAGE=$(printf "%s\n\n%s" "$STATUS" "$REPORT_CONTENT")
+
+            curl -X POST -H 'Content-type: application/json' \
+                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+                "$SLACK_WEBHOOK"
+        "#})
+        .if_condition(Expression::new("always()"))
+        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+        .add_env((
+            "COMPLIANCE_OUTCOME",
+            "${{ steps.run-compliance-check.outcome }}",
+        ))
+    }
+
+    named::job(
+        Job::default()
+            .add_env(("COMPLIANCE_FILE_OUTPUT", "compliance.md"))
+            .with_repository_owner_guard()
+            .runs_on(runners::LINUX_DEFAULT)
+            .add_step(
+                steps::checkout_repo()
+                    .with_full_history()
+                    .with_ref(Context::github().ref_()),
+            )
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .add_step(run_compliance_check())
+            .add_step(send_compliance_slack_notification()),
+    )
+}
+
 fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
     let expected_assets: Vec<String> = assets::all().iter().map(|a| format!("\"{a}\"")).collect();
     let expected_assets_json = format!("[{}]", expected_assets.join(", "));
@@ -171,10 +228,54 @@ fn validate_release_assets(deps: &[&NamedJob]) -> NamedJob {
         "#,
     };
 
+    fn run_post_upload_compliance_check() -> Step<Run> {
+        named::bash(
+            r#"cargo xtask compliance "$GITHUB_REF_NAME" --report-path target/compliance-report"#,
+        )
+        .id("run-post-upload-compliance-check")
+        .add_env(("GITHUB_APP_ID", vars::ZED_ZIPPY_APP_ID))
+        .add_env(("GITHUB_APP_KEY", vars::ZED_ZIPPY_APP_PRIVATE_KEY))
+    }
+
+    fn send_post_upload_compliance_notification() -> Step<Run> {
+        named::bash(indoc::indoc! {r#"
+            if [ -z "$COMPLIANCE_OUTCOME" ] || [ "$COMPLIANCE_OUTCOME" == "skipped" ]; then
+                echo "Compliance check was skipped, not sending notification"
+                exit 0
+            fi
+
+            TAG="$GITHUB_REF_NAME"
+
+            if [ "$COMPLIANCE_OUTCOME" == "success" ]; then
+                MESSAGE="✅ Post-upload compliance re-check passed for $TAG"
+            else
+                MESSAGE="❌ Post-upload compliance re-check failed for $TAG"
+            fi
+
+            curl -X POST -H 'Content-type: application/json' \
+                --data "$(jq -n --arg text "$MESSAGE" '{"text": $text}')" \
+                "$SLACK_WEBHOOK"
+        "#})
+        .if_condition(Expression::new("always()"))
+        .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+        .add_env((
+            "COMPLIANCE_OUTCOME",
+            "${{ steps.run-post-upload-compliance-check.outcome }}",
+        ))
+    }
+
     named::job(
-        dependant_job(deps).runs_on(runners::LINUX_SMALL).add_step(
-            named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)),
-        ),
+        dependant_job(deps)
+            .runs_on(runners::LINUX_SMALL)
+            .add_step(named::bash(&validation_script).add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN)))
+            .add_step(
+                steps::checkout_repo()
+                    .with_full_history()
+                    .with_ref(Context::github().ref_()),
+            )
+            .add_step(steps::cache_rust_dependencies_namespace())
+            .add_step(run_post_upload_compliance_check())
+            .add_step(send_post_upload_compliance_notification()),
     )
 }
 
@@ -255,7 +356,7 @@ fn create_draft_release() -> NamedJob {
             .add_step(
                 steps::checkout_repo()
                     .with_custom_fetch_depth(25)
-                    .with_ref("${{ github.ref }}"),
+                    .with_ref(Context::github().ref_()),
             )
             .add_step(steps::script("script/determine-release-channel"))
             .add_step(steps::script("mkdir -p target/"))