ci: Clean up some of our workflows (#50499)

Created by Finn Evers

Release Notes:

- N/A

Change summary

.github/workflows/add_commented_closed_issue_to_project.yml      |   9 
.github/workflows/after_release.yml                              |   7 
.github/workflows/autofix_pr.yml                                 |   6 
.github/workflows/catch_blank_issues.yml                         |   4 
.github/workflows/cherry_pick.yml                                |   5 
.github/workflows/community_update_all_top_ranking_issues.yml    |   4 
.github/workflows/community_update_weekly_top_ranking_issues.yml |   4 
.github/workflows/compare_perf.yml                               |  31 
.github/workflows/deploy_collab.yml                              |  11 
.github/workflows/extension_bump.yml                             |   9 
.github/workflows/extension_tests.yml                            |  19 
.github/workflows/extension_workflow_rollout.yml                 |  16 
.github/workflows/publish_extension_cli.yml                      |  10 
.github/workflows/release.yml                                    |  66 
.github/workflows/release_nightly.yml                            |   5 
.github/workflows/run_tests.yml                                  |  58 
.github/workflows/slack_notify_first_responders.yml              |   3 
.github/workflows/update_duplicate_magnets.yml                   |   4 
Cargo.lock                                                       |  16 
tooling/xtask/Cargo.toml                                         |   5 
tooling/xtask/src/main.rs                                        |   2 
tooling/xtask/src/tasks.rs                                       |   1 
tooling/xtask/src/tasks/workflow_checks.rs                       | 118 +
tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs    | 124 ++
tooling/xtask/src/tasks/workflows.rs                             |  10 
tooling/xtask/src/tasks/workflows/after_release.rs               |   2 
tooling/xtask/src/tasks/workflows/autofix_pr.rs                  |   7 
tooling/xtask/src/tasks/workflows/cherry_pick.rs                 |   5 
tooling/xtask/src/tasks/workflows/compare_perf.rs                |  22 
tooling/xtask/src/tasks/workflows/deploy_collab.rs               |  15 
tooling/xtask/src/tasks/workflows/extension_bump.rs              |  10 
tooling/xtask/src/tasks/workflows/extension_tests.rs             |   8 
tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs  |  27 
tooling/xtask/src/tasks/workflows/publish_extension_cli.rs       |   8 
tooling/xtask/src/tasks/workflows/release.rs                     | 102 +
tooling/xtask/src/tasks/workflows/run_tests.rs                   |  40 
tooling/xtask/src/tasks/workflows/steps.rs                       |   5 
37 files changed, 606 insertions(+), 192 deletions(-)

Detailed changes

.github/workflows/add_commented_closed_issue_to_project.yml 🔗

@@ -63,13 +63,18 @@ jobs:
             }
 
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'true'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
         run: |
-          echo "::notice::Skipping issue #${{ github.event.issue.number }} - commenter is staff member"
+          echo "::notice::Skipping issue #$ISSUE_NUMBER - commenter is staff member"
 
       # github-script outputs are JSON strings, so we compare against 'false' (string)
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
+          COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }}
         run: |
-          echo "::notice::Adding issue #${{ github.event.issue.number }} to project (comment by ${{ github.event.comment.user.login }})"
+          echo "::notice::Adding issue #$ISSUE_NUMBER to project (comment by $COMMENT_USER_LOGIN)"
 
       - if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
         uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2

.github/workflows/after_release.yml 🔗

@@ -76,7 +76,7 @@ jobs:
             "X-GitHub-Api-Version" = "2022-11-28"
         }
         $body = @{ branch = "master" } | ConvertTo-Json
-        $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream"
+        $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream"
         try {
             Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json"
             Write-Host "Successfully synced winget-pkgs fork"
@@ -131,11 +131,10 @@ jobs:
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
 defaults:
   run:
     shell: bash -euxo pipefail {0}

.github/workflows/autofix_pr.yml 🔗

@@ -22,8 +22,9 @@ jobs:
       with:
         clean: false
     - name: autofix_pr::run_autofix::checkout_pr
-      run: gh pr checkout ${{ inputs.pr_number }}
+      run: gh pr checkout "$PR_NUMBER"
       env:
+        PR_NUMBER: ${{ inputs.pr_number }}
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     - name: steps::setup_cargo_config
       run: |
@@ -104,8 +105,9 @@ jobs:
         clean: false
         token: ${{ steps.get-app-token.outputs.token }}
     - name: autofix_pr::commit_changes::checkout_pr
-      run: gh pr checkout ${{ inputs.pr_number }}
+      run: gh pr checkout "$PR_NUMBER"
       env:
+        PR_NUMBER: ${{ inputs.pr_number }}
         GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
     - name: autofix_pr::download_patch_artifact
       uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53

.github/workflows/catch_blank_issues.yml 🔗

@@ -42,8 +42,10 @@ jobs:
             }
 
       - if: steps.check-staff.outputs.result == 'true'
+        env:
+          ISSUE_NUMBER: ${{ github.event.issue.number }}
         run: |
-          echo "::notice::Skipping issue #${{ github.event.issue.number }} - actor is staff member"
+          echo "::notice::Skipping issue #$ISSUE_NUMBER - actor is staff member"
 
       - if: steps.check-staff.outputs.result == 'false'
         id: add-label

.github/workflows/cherry_pick.yml 🔗

@@ -36,8 +36,11 @@ jobs:
         app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
         private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
     - name: cherry_pick::run_cherry_pick::cherry_pick
-      run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
+      run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL"
       env:
+        BRANCH: ${{ inputs.branch }}
+        COMMIT: ${{ inputs.commit }}
+        CHANNEL: ${{ inputs.channel }}
         GIT_COMMITTER_NAME: Zed Zippy
         GIT_COMMITTER_EMAIL: hi@zed.dev
         GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}

.github/workflows/community_update_all_top_ranking_issues.yml 🔗

@@ -22,4 +22,6 @@ jobs:
       - name: Install dependencies
         run: uv sync --project script/update_top_ranking_issues -p 3.13
       - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 5393

.github/workflows/community_update_weekly_top_ranking_issues.yml 🔗

@@ -22,4 +22,6 @@ jobs:
       - name: Install dependencies
         run: uv sync --project script/update_top_ranking_issues -p 3.13
       - name: Run script
-        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 6952 --query-day-interval 7

.github/workflows/compare_perf.yml 🔗

@@ -37,27 +37,40 @@ jobs:
     - name: compare_perf::run_perf::install_hyperfine
       uses: taiki-e/install-action@hyperfine
     - name: steps::git_checkout
-      run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
+      run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+      env:
+        REF_NAME: ${{ inputs.base }}
     - name: compare_perf::run_perf::cargo_perf_test
       run: |2-
 
-                    if [ -n "${{ inputs.crate_name }}" ]; then
-                        cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
+                    if [ -n "$CRATE_NAME" ]; then
+                        cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
                     else
-                        cargo perf-test -p vim -- --json=${{ inputs.base }};
+                        cargo perf-test -p vim -- --json="$REF_NAME";
                     fi
+      env:
+        REF_NAME: ${{ inputs.base }}
+        CRATE_NAME: ${{ inputs.crate_name }}
     - name: steps::git_checkout
-      run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
+      run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+      env:
+        REF_NAME: ${{ inputs.head }}
     - name: compare_perf::run_perf::cargo_perf_test
       run: |2-
 
-                    if [ -n "${{ inputs.crate_name }}" ]; then
-                        cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
+                    if [ -n "$CRATE_NAME" ]; then
+                        cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
                     else
-                        cargo perf-test -p vim -- --json=${{ inputs.head }};
+                        cargo perf-test -p vim -- --json="$REF_NAME";
                     fi
+      env:
+        REF_NAME: ${{ inputs.head }}
+        CRATE_NAME: ${{ inputs.crate_name }}
     - name: compare_perf::run_perf::compare_runs
-      run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
+      run: cargo perf-compare --save=results.md "$BASE" "$HEAD"
+      env:
+        BASE: ${{ inputs.base }}
+        HEAD: ${{ inputs.head }}
     - name: '@actions/upload-artifact results.md'
       uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
       with:

.github/workflows/deploy_collab.yml 🔗

@@ -119,8 +119,9 @@ jobs:
       with:
         token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
     - name: deploy_collab::deploy::sign_into_kubernetes
-      run: |
-        doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}
+      run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME"
+      env:
+        CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
     - name: deploy_collab::deploy::start_rollout
       run: |
         set -eu
@@ -140,7 +141,7 @@ jobs:
         echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"
 
         source script/lib/deploy-helpers.sh
-        export_vars_for_environment $ZED_KUBE_NAMESPACE
+        export_vars_for_environment "$ZED_KUBE_NAMESPACE"
 
         ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)"
         export ZED_DO_CERTIFICATE_ID
@@ -150,14 +151,14 @@ jobs:
         export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
         export DATABASE_MAX_CONNECTIONS=850
         envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
         echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 
         export ZED_SERVICE_NAME=api
         export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
         export DATABASE_MAX_CONNECTIONS=60
         envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+        kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
         echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 defaults:
   run:

.github/workflows/extension_bump.yml 🔗

@@ -39,7 +39,7 @@ jobs:
       run: |
         CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
-        if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
             PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
@@ -82,8 +82,6 @@ jobs:
     - id: bump-version
       name: extension_bump::bump_version
       run: |
-        OLD_VERSION="${{ needs.check_version_changed.outputs.current_version }}"
-
         BUMP_FILES=("extension.toml")
         if [[ -f "Cargo.toml" ]]; then
             BUMP_FILES+=("Cargo.toml")
@@ -93,7 +91,7 @@ jobs:
             --search "version = \"{current_version}"\" \
             --replace "version = \"{new_version}"\" \
             --current-version "$OLD_VERSION" \
-            --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}"
+            --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}"
 
         if [[ -f "Cargo.toml" ]]; then
             cargo update --workspace
@@ -102,6 +100,9 @@ jobs:
         NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
         echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
+      env:
+        OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }}
+        BUMP_TYPE: ${{ inputs.bump-type }}
     - name: extension_bump::create_pull_request
       uses: peter-evans/create-pull-request@v7
       with:

.github/workflows/extension_tests.yml 🔗

@@ -32,7 +32,7 @@ jobs:
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -129,7 +129,7 @@ jobs:
       run: |
         CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
 
-        if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
             PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
@@ -147,11 +147,14 @@ jobs:
         echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT"
     - name: extension_tests::verify_version_did_not_change
       run: |
-        if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then
+        if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then
             echo "Version change detected in your change!"
             echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot"
             exit 42
         fi
+      env:
+        VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }}
+        PR_USER_LOGIN: ${{ github.event.pull_request.user.login }}
     timeout-minutes: 6
   tests_pass:
     needs:
@@ -171,11 +174,15 @@ jobs:
           if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
         }
 
-        check_result "orchestrate" "${{ needs.orchestrate.result }}"
-        check_result "check_rust" "${{ needs.check_rust.result }}"
-        check_result "check_extension" "${{ needs.check_extension.result }}"
+        check_result "orchestrate" "$RESULT_ORCHESTRATE"
+        check_result "check_rust" "$RESULT_CHECK_RUST"
+        check_result "check_extension" "$RESULT_CHECK_EXTENSION"
 
         exit $EXIT_CODE
+      env:
+        RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+        RESULT_CHECK_RUST: ${{ needs.check_rust.result }}
+        RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/extension_workflow_rollout.yml 🔗

@@ -80,9 +80,7 @@ jobs:
     - id: calc-changes
       name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files
       run: |
-        PREV_COMMIT="${{ steps.prev-tag.outputs.prev_commit }}"
-
-        if [ "${{ matrix.repo }}" = "workflows" ]; then
+        if [ "$MATRIX_REPO" = "workflows" ]; then
             WORKFLOW_DIR="extensions/workflows"
         else
             WORKFLOW_DIR="extensions/workflows/shared"
@@ -101,11 +99,12 @@ jobs:
 
         echo "Files to remove: $REMOVED_FILES"
         echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
+      env:
+        PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }}
+        MATRIX_REPO: ${{ matrix.repo }}
       working-directory: zed
     - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files
       run: |
-        REMOVED_FILES="${{ steps.calc-changes.outputs.removed_files }}"
-
         mkdir -p extension/.github/workflows
         cd extension/.github/workflows
 
@@ -119,11 +118,14 @@ jobs:
 
         cd - > /dev/null
 
-        if [ "${{ matrix.repo }}" = "workflows" ]; then
+        if [ "$MATRIX_REPO" = "workflows" ]; then
             cp zed/extensions/workflows/*.yml extension/.github/workflows/
         else
             cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
         fi
+      env:
+        REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }}
+        MATRIX_REPO: ${{ matrix.repo }}
     - id: short-sha
       name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha
       run: |
@@ -148,13 +150,13 @@ jobs:
         sign-commits: true
     - name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge
       run: |
-        PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}"
         if [ -n "$PR_NUMBER" ]; then
             cd extension
             gh pr merge "$PR_NUMBER" --auto --squash
         fi
       env:
         GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+        PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }}
     timeout-minutes: 10
   create_rollout_tag:
     needs:

.github/workflows/publish_extension_cli.yml 🔗

@@ -27,7 +27,7 @@ jobs:
     - name: publish_extension_cli::publish_job::build_extension_cli
       run: cargo build --release --package extension_cli
     - name: publish_extension_cli::publish_job::upload_binary
-      run: script/upload-extension-cli ${{ github.sha }}
+      run: script/upload-extension-cli "$GITHUB_SHA"
       env:
         DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
         DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
@@ -55,10 +55,10 @@ jobs:
     - id: short-sha
       name: publish_extension_cli::get_short_sha
       run: |
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     - name: publish_extension_cli::update_sha_in_zed::replace_sha
       run: |
-        sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \
+        sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \
             tooling/xtask/src/tasks/workflows/extension_tests.rs
     - name: publish_extension_cli::update_sha_in_zed::regenerate_workflows
       run: cargo xtask workflows
@@ -97,7 +97,7 @@ jobs:
     - id: short-sha
       name: publish_extension_cli::get_short_sha
       run: |
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     - name: publish_extension_cli::update_sha_in_extensions::checkout_extensions_repo
       uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
       with:
@@ -105,7 +105,7 @@ jobs:
         token: ${{ steps.generate-token.outputs.token }}
     - name: publish_extension_cli::update_sha_in_extensions::replace_sha
       run: |
-        sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \
+        sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \
             .github/workflows/ci.yml
     - name: publish_extension_cli::create_pull_request_extensions
       uses: peter-evans/create-pull-request@v7

.github/workflows/release.yml 🔗

@@ -257,8 +257,14 @@ jobs:
       name: run_tests::check_scripts::download_actionlint
       run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
     - name: run_tests::check_scripts::run_actionlint
-      run: |
-        ${{ steps.get_actionlint.outputs.executable }} -color
+      run: '"$ACTIONLINT_BIN" -color'
+      env:
+        ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+        path: ~/.rustup
     - name: run_tests::check_scripts::check_xtask_workflows
       run: |
         cargo xtask workflows
@@ -654,12 +660,7 @@ jobs:
     - id: generate-webhook-message
       name: release::generate_slack_message
       run: |
-        MESSAGE=$(DRAFT_RESULT="${{ needs.create_draft_release.result }}"
-        UPLOAD_RESULT="${{ needs.upload_release_assets.result }}"
-        VALIDATE_RESULT="${{ needs.validate_release_assets.result }}"
-        AUTO_RELEASE_RESULT="${{ needs.auto_release_preview.result }}"
-        TAG="$GITHUB_REF_NAME"
-        RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+        MESSAGE=$(TAG="$GITHUB_REF_NAME"
 
         if [ "$DRAFT_RESULT" == "failure" ]; then
             echo "❌ Draft release creation failed for $TAG: $RUN_URL"
@@ -669,19 +670,19 @@ jobs:
                 echo "❌ Release asset upload failed for $TAG: $RELEASE_URL"
             elif [ "$UPLOAD_RESULT" == "cancelled" ] || [ "$UPLOAD_RESULT" == "skipped" ]; then
                 FAILED_JOBS=""
-                if [ "${{ needs.run_tests_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
-                if [ "${{ needs.run_tests_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
-                if [ "${{ needs.run_tests_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
-                if [ "${{ needs.clippy_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
-                if [ "${{ needs.clippy_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
-                if [ "${{ needs.clippy_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
-                if [ "${{ needs.check_scripts.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
-                if [ "${{ needs.bundle_linux_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
-                if [ "${{ needs.bundle_linux_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
-                if [ "${{ needs.bundle_mac_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
-                if [ "${{ needs.bundle_mac_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
-                if [ "${{ needs.bundle_windows_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
-                if [ "${{ needs.bundle_windows_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
+                if [ "$RESULT_RUN_TESTS_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
+                if [ "$RESULT_RUN_TESTS_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
+                if [ "$RESULT_RUN_TESTS_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
+                if [ "$RESULT_CLIPPY_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
+                if [ "$RESULT_CLIPPY_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
+                if [ "$RESULT_CLIPPY_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
+                if [ "$RESULT_CHECK_SCRIPTS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
+                if [ "$RESULT_BUNDLE_LINUX_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
+                if [ "$RESULT_BUNDLE_LINUX_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
+                if [ "$RESULT_BUNDLE_MAC_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
+                if [ "$RESULT_BUNDLE_MAC_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
+                if [ "$RESULT_BUNDLE_WINDOWS_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
+                if [ "$RESULT_BUNDLE_WINDOWS_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
                 FAILED_JOBS=$(echo "$FAILED_JOBS" | xargs)
                 if [ "$UPLOAD_RESULT" == "cancelled" ]; then
                     if [ -n "$FAILED_JOBS" ]; then
@@ -710,12 +711,29 @@ jobs:
         echo "message=$MESSAGE" >> "$GITHUB_OUTPUT"
       env:
         GH_TOKEN: ${{ github.token }}
+        DRAFT_RESULT: ${{ needs.create_draft_release.result }}
+        UPLOAD_RESULT: ${{ needs.upload_release_assets.result }}
+        VALIDATE_RESULT: ${{ needs.validate_release_assets.result }}
+        AUTO_RELEASE_RESULT: ${{ needs.auto_release_preview.result }}
+        RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+        RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+        RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+        RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+        RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+        RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+        RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+        RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
+        RESULT_BUNDLE_LINUX_AARCH64: ${{ needs.bundle_linux_aarch64.result }}
+        RESULT_BUNDLE_LINUX_X86_64: ${{ needs.bundle_linux_x86_64.result }}
+        RESULT_BUNDLE_MAC_AARCH64: ${{ needs.bundle_mac_aarch64.result }}
+        RESULT_BUNDLE_MAC_X86_64: ${{ needs.bundle_mac_x86_64.result }}
+        RESULT_BUNDLE_WINDOWS_AARCH64: ${{ needs.bundle_windows_aarch64.result }}
+        RESULT_BUNDLE_WINDOWS_X86_64: ${{ needs.bundle_windows_x86_64.result }}
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"${{ steps.generate-webhook-message.outputs.message }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: ${{ steps.generate-webhook-message.outputs.message }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/release_nightly.yml 🔗

@@ -554,11 +554,10 @@ jobs:
     runs-on: namespace-profile-2x4-ubuntu-2404
     steps:
     - name: release::send_slack_message
-      run: |
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+      run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
       env:
         SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+        SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
 defaults:
   run:
     shell: bash -euxo pipefail {0}

.github/workflows/run_tests.yml 🔗

@@ -35,7 +35,7 @@ jobs:
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -653,8 +653,14 @@ jobs:
       name: run_tests::check_scripts::download_actionlint
       run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
     - name: run_tests::check_scripts::run_actionlint
-      run: |
-        ${{ steps.get_actionlint.outputs.executable }} -color
+      run: '"$ACTIONLINT_BIN" -color'
+      env:
+        ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+    - name: steps::cache_rust_dependencies_namespace
+      uses: namespacelabs/nscloud-cache-action@v1
+      with:
+        cache: rust
+        path: ~/.rustup
     - name: run_tests::check_scripts::check_xtask_workflows
       run: |
         cargo xtask workflows
@@ -735,23 +741,39 @@ jobs:
           if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
         }
 
-        check_result "orchestrate" "${{ needs.orchestrate.result }}"
-        check_result "check_style" "${{ needs.check_style.result }}"
-        check_result "clippy_windows" "${{ needs.clippy_windows.result }}"
-        check_result "clippy_linux" "${{ needs.clippy_linux.result }}"
-        check_result "clippy_mac" "${{ needs.clippy_mac.result }}"
-        check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
-        check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
-        check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
-        check_result "doctests" "${{ needs.doctests.result }}"
-        check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
-        check_result "check_wasm" "${{ needs.check_wasm.result }}"
-        check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
-        check_result "check_docs" "${{ needs.check_docs.result }}"
-        check_result "check_licenses" "${{ needs.check_licenses.result }}"
-        check_result "check_scripts" "${{ needs.check_scripts.result }}"
+        check_result "orchestrate" "$RESULT_ORCHESTRATE"
+        check_result "check_style" "$RESULT_CHECK_STYLE"
+        check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS"
+        check_result "clippy_linux" "$RESULT_CLIPPY_LINUX"
+        check_result "clippy_mac" "$RESULT_CLIPPY_MAC"
+        check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS"
+        check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX"
+        check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC"
+        check_result "doctests" "$RESULT_DOCTESTS"
+        check_result "check_workspace_binaries" "$RESULT_CHECK_WORKSPACE_BINARIES"
+        check_result "check_wasm" "$RESULT_CHECK_WASM"
+        check_result "check_dependencies" "$RESULT_CHECK_DEPENDENCIES"
+        check_result "check_docs" "$RESULT_CHECK_DOCS"
+        check_result "check_licenses" "$RESULT_CHECK_LICENSES"
+        check_result "check_scripts" "$RESULT_CHECK_SCRIPTS"
 
         exit $EXIT_CODE
+      env:
+        RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+        RESULT_CHECK_STYLE: ${{ needs.check_style.result }}
+        RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+        RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+        RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+        RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+        RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+        RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+        RESULT_DOCTESTS: ${{ needs.doctests.result }}
+        RESULT_CHECK_WORKSPACE_BINARIES: ${{ needs.check_workspace_binaries.result }}
+        RESULT_CHECK_WASM: ${{ needs.check_wasm.result }}
+        RESULT_CHECK_DEPENDENCIES: ${{ needs.check_dependencies.result }}
+        RESULT_CHECK_DOCS: ${{ needs.check_docs.result }}
+        RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }}
+        RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
   cancel-in-progress: true

.github/workflows/slack_notify_first_responders.yml 🔗

@@ -17,8 +17,9 @@ jobs:
         id: check-label
         env:
           LABEL_NAME: ${{ github.event.label.name }}
+          FIRST_RESPONDER_LABELS: ${{ env.FIRST_RESPONDER_LABELS }}
         run: |
-          if echo '${{ env.FIRST_RESPONDER_LABELS }}' | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
+          if echo "$FIRST_RESPONDER_LABELS" | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
             echo "should_notify=true" >> "$GITHUB_OUTPUT"
             echo "Label '$LABEL_NAME' requires first responder notification"
           else

.github/workflows/update_duplicate_magnets.yml 🔗

@@ -21,7 +21,9 @@ jobs:
         run: pip install requests
 
       - name: Update duplicate magnets issue
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           python script/github-find-top-duplicated-bugs.py \
-            --github-token ${{ secrets.GITHUB_TOKEN }} \
+            --github-token "$GITHUB_TOKEN" \
             --issue-number 46355

Cargo.lock 🔗

@@ -603,6 +603,17 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
 
+[[package]]
+name = "annotate-snippets"
+version = "0.12.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c86cd1c51b95d71dde52bca69ed225008f6ff4c8cc825b08042aa1ef823e1980"
+dependencies = [
+ "anstyle",
+ "memchr",
+ "unicode-width",
+]
+
 [[package]]
 name = "anstream"
 version = "0.6.21"
@@ -21529,6 +21540,7 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
 name = "xtask"
 version = "0.1.0"
 dependencies = [
+ "annotate-snippets",
  "anyhow",
  "backtrace",
  "cargo_metadata",
@@ -21537,8 +21549,12 @@ dependencies = [
  "gh-workflow",
  "indexmap",
  "indoc",
+ "itertools 0.14.0",
+ "regex",
  "serde",
  "serde_json",
+ "serde_yaml",
+ "strum 0.27.2",
  "toml 0.8.23",
  "toml_edit 0.22.27",
 ]

tooling/xtask/Cargo.toml 🔗

@@ -9,6 +9,7 @@ license = "GPL-3.0-or-later"
 workspace = true
 
 [dependencies]
+annotate-snippets = "0.12.1"
 anyhow.workspace = true
 backtrace.workspace = true
 cargo_metadata.workspace = true
@@ -17,7 +18,11 @@ clap = { workspace = true, features = ["derive"] }
 toml.workspace = true
 indoc.workspace = true
 indexmap.workspace = true
+itertools.workspace = true
+regex.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+serde_yaml = "0.9.34"
+strum.workspace = true
 toml_edit.workspace = true
 gh-workflow.workspace = true

tooling/xtask/src/main.rs 🔗

@@ -23,6 +23,7 @@ enum CliCommand {
     /// Builds GPUI web examples and serves them.
     WebExamples(tasks::web_examples::WebExamplesArgs),
     Workflows(tasks::workflows::GenerateWorkflowArgs),
+    CheckWorkflows(tasks::workflow_checks::WorkflowValidationArgs),
 }
 
 fn main() -> Result<()> {
@@ -37,5 +38,6 @@ fn main() -> Result<()> {
         CliCommand::PublishGpui(args) => tasks::publish_gpui::run_publish_gpui(args),
         CliCommand::WebExamples(args) => tasks::web_examples::run_web_examples(args),
         CliCommand::Workflows(args) => tasks::workflows::run_workflows(args),
+        CliCommand::CheckWorkflows(args) => tasks::workflow_checks::validate(args),
     }
 }

tooling/xtask/src/tasks.rs 🔗

@@ -3,4 +3,5 @@ pub mod licenses;
 pub mod package_conformity;
 pub mod publish_gpui;
 pub mod web_examples;
+pub mod workflow_checks;
 pub mod workflows;

tooling/xtask/src/tasks/workflow_checks.rs 🔗

@@ -0,0 +1,118 @@
+mod check_run_patterns;
+
+use std::{fs, path::PathBuf};
+
+use annotate_snippets::Renderer;
+use anyhow::{Result, anyhow};
+use clap::Parser;
+use itertools::{Either, Itertools};
+use serde_yaml::Value;
+use strum::IntoEnumIterator;
+
+use crate::tasks::{
+    workflow_checks::check_run_patterns::{
+        RunValidationError, WorkflowFile, WorkflowValidationError,
+    },
+    workflows::WorkflowType,
+};
+
+pub use check_run_patterns::validate_run_command;
+
+/// CLI arguments for `cargo xtask check-workflows`. Currently takes no flags;
+/// kept as a struct so options can be added without changing the interface.
+#[derive(Default, Parser)]
+pub struct WorkflowValidationArgs {}
+
+/// Checks every generated workflow file for GitHub-input injection in `run:`
+/// commands.
+///
+/// Files that cannot be read or parsed abort with a hard error; injection
+/// findings are rendered as annotated source diagnostics before failing.
+pub fn validate(_: WorkflowValidationArgs) -> Result<()> {
+    // Split failures into "couldn't even parse the file" vs. "file parsed but
+    // a run command interpolates ${{ … }} directly".
+    let (parse_failures, validation_failures): (Vec<_>, Vec<_>) = get_all_workflow_files()
+        .map(check_workflow)
+        .filter_map(Result::err)
+        .partition_map(|error| match error {
+            WorkflowError::ParseError(inner) => Either::Left(inner),
+            WorkflowError::ValidationError(inner) => Either::Right(inner),
+        });
+
+    if !parse_failures.is_empty() {
+        return Err(anyhow!(
+            "Failed to read or parse some workflow files: {}",
+            parse_failures.into_iter().join("\n")
+        ));
+    }
+
+    if validation_failures.is_empty() {
+        return Ok(());
+    }
+
+    let groups: Vec<_> = validation_failures
+        .iter()
+        .map(|failure| failure.annotation_group())
+        .collect();
+
+    // ASCII decorations keep the output readable in plain CI logs.
+    let renderer =
+        Renderer::styled().decor_style(annotate_snippets::renderer::DecorStyle::Ascii);
+    println!("{}", renderer.render(groups.as_slice()));
+
+    Err(anyhow!("Workflow checks failed!"))
+}
+
+/// Distinguishes hard failures (file unreadable or not valid YAML) from
+/// injection findings, which are collected and rendered as diagnostics.
+enum WorkflowError {
+    ParseError(anyhow::Error),
+    // Boxed: the validation payload carries the whole file contents.
+    ValidationError(Box<WorkflowValidationError>),
+}
+
+/// Yields every `.yml`/`.yaml` file across all known workflow folders.
+/// Folders that cannot be read, and unreadable directory entries, are
+/// silently skipped.
+fn get_all_workflow_files() -> impl Iterator<Item = PathBuf> {
+    WorkflowType::iter().flat_map(|workflow_type| {
+        fs::read_dir(workflow_type.folder_path())
+            .into_iter()
+            .flatten()
+            .filter_map(Result::ok)
+            .map(|entry| entry.path())
+            .filter(|path| {
+                matches!(
+                    path.extension().and_then(|ext| ext.to_str()),
+                    Some("yaml" | "yml")
+                )
+            })
+    })
+}
+
+/// Loads one workflow file and recursively scans every string under a `run:`
+/// key for GitHub input interpolation.
+fn check_workflow(workflow_file_path: PathBuf) -> Result<(), WorkflowError> {
+    // Flattens per-child results; succeeds only when no child produced errors.
+    fn collect_errors(
+        results: impl Iterator<Item = Result<(), Vec<RunValidationError>>>,
+    ) -> Result<(), Vec<RunValidationError>> {
+        let errors: Vec<_> = results.filter_map(Result::err).flatten().collect();
+        if errors.is_empty() { Ok(()) } else { Err(errors) }
+    }
+
+    // Walks the YAML tree, carrying the nearest enclosing mapping key so that
+    // string values can be recognized as `run:` commands.
+    fn check_recursive(key: &Value, value: &Value) -> Result<(), Vec<RunValidationError>> {
+        match value {
+            Value::Mapping(mapping) => collect_errors(
+                mapping
+                    .iter()
+                    .map(|(child_key, child)| check_recursive(child_key, child)),
+            ),
+            Value::Sequence(sequence) => {
+                collect_errors(sequence.iter().map(|child| check_recursive(key, child)))
+            }
+            Value::String(string) => check_string(key, string).map_err(|error| vec![error]),
+            Value::Null | Value::Bool(_) | Value::Number(_) | Value::Tagged(_) => Ok(()),
+        }
+    }
+
+    let file_content =
+        WorkflowFile::load(&workflow_file_path).map_err(WorkflowError::ParseError)?;
+
+    // Start the walk with a Null key: the document root is not a `run:` value.
+    check_recursive(&Value::Null, &file_content.parsed_content).map_err(|errors| {
+        WorkflowError::ValidationError(Box::new(WorkflowValidationError::new(
+            errors,
+            file_content,
+            workflow_file_path,
+        )))
+    })
+}
+
+/// Validates a string value only when its mapping key is exactly `run`;
+/// every other string in the document is accepted as-is.
+fn check_string(key: &Value, value: &str) -> Result<(), RunValidationError> {
+    if matches!(key, Value::String(name) if name == "run") {
+        validate_run_command(value)
+    } else {
+        Ok(())
+    }
+}

tooling/xtask/src/tasks/workflow_checks/check_run_patterns.rs 🔗

@@ -0,0 +1,124 @@
+use annotate_snippets::{AnnotationKind, Group, Level, Snippet};
+use anyhow::{Result, anyhow};
+use regex::Regex;
+use serde_yaml::Value;
+use std::{
+    collections::HashMap,
+    fs,
+    ops::Range,
+    path::{Path, PathBuf},
+    sync::LazyLock,
+};
+
+/// Matches GitHub Actions expressions such as `${{ github.sha }}` so they can
+/// be flagged when interpolated directly into `run:` shell commands (a
+/// script-injection risk; values should be passed via `env:` instead).
+/// NOTE(review): `[[:punct:]]` also matches `{`/`}` themselves; the lazy `+?`
+/// keeps the match anchored to the nearest closing `}}` — confirm behavior on
+/// pathological/nested expressions.
+static GITHUB_INPUT_PATTERN: LazyLock<Regex> = LazyLock::new(|| {
+    Regex::new(r#"\$\{\{[[:blank:]]*([[:alnum:]]|[[:punct:]])+?[[:blank:]]*\}\}"#)
+        .expect("Should compile")
+});
+
+/// A workflow file held both as raw text (for byte-offset diagnostics) and as
+/// a parsed YAML document (for structural checks).
+pub struct WorkflowFile {
+    raw_content: String,
+    pub parsed_content: Value,
+}
+
+impl WorkflowFile {
+    /// Reads and parses the workflow file at `workflow_file_path`.
+    ///
+    /// # Errors
+    /// Returns an error if the file cannot be read or is not valid YAML.
+    /// The underlying I/O error is included so the cause (missing file,
+    /// permissions, …) is not lost.
+    pub fn load(workflow_file_path: &Path) -> Result<Self> {
+        let raw_content = fs::read_to_string(workflow_file_path).map_err(|error| {
+            anyhow!(
+                "Could not read workflow file at {}: {error}",
+                workflow_file_path.display()
+            )
+        })?;
+        let parsed_content = serde_yaml::from_str(&raw_content)
+            .map_err(|error| anyhow!("Failed to parse workflow file: {error:?}"))?;
+        Ok(Self {
+            raw_content,
+            parsed_content,
+        })
+    }
+}
+
+/// All injection findings for a single workflow file, bundled with the file's
+/// contents and path so they can be rendered as an annotated source snippet.
+pub struct WorkflowValidationError {
+    file_path: PathBuf,
+    contents: WorkflowFile,
+    errors: Vec<RunValidationError>,
+}
+
+impl WorkflowValidationError {
+    pub fn new(
+        errors: Vec<RunValidationError>,
+        contents: WorkflowFile,
+        file_path: PathBuf,
+    ) -> Self {
+        Self {
+            file_path,
+            contents,
+            errors,
+        }
+    }
+
+    /// Builds an `annotate-snippets` diagnostic group with one annotation per
+    /// `${{ … }}` match found in a `run:` command.
+    ///
+    /// Match ranges were computed per *line of the parsed run value*, so each
+    /// is re-anchored here by searching for that line's text in the raw file.
+    /// NOTE(review): this assumes the line appears verbatim in the raw YAML;
+    /// quoted/escaped or folded scalars may differ, in which case `find`
+    /// returns `None` and the annotation silently falls back to byte offset 0
+    /// — TODO confirm.
+    pub fn annotation_group<'a>(&'a self) -> Group<'a> {
+        let raw_content = &self.contents.raw_content;
+        // Maps (line text, in-line match start) -> byte offset past the last
+        // anchor, so repeated identical lines advance through the file instead
+        // of all annotating the first occurrence.
+        let mut identical_lines = HashMap::new();
+
+        let ranges = self
+            .errors
+            .iter()
+            .flat_map(|error| error.found_injection_patterns.iter())
+            .map(|(line, pattern_range)| {
+                // Resume searching after the previous identical match, if any.
+                let initial_offset = identical_lines
+                    .get(&(line.as_str(), pattern_range.start))
+                    .copied()
+                    .unwrap_or_default();
+
+                let line_start = raw_content[initial_offset..]
+                    .find(line.as_str())
+                    .map(|offset| offset + initial_offset)
+                    .unwrap_or_default();
+
+                // Translate the in-line range into a whole-file byte range.
+                let pattern_start = line_start + pattern_range.start;
+                let pattern_end = pattern_start + pattern_range.len();
+
+                identical_lines.insert((line.as_str(), pattern_range.start), pattern_end);
+
+                pattern_start..pattern_end
+            });
+
+        Level::ERROR
+            .primary_title("Found GitHub input injection in run command")
+            .element(
+                Snippet::source(&self.contents.raw_content)
+                    .path(self.file_path.display().to_string())
+                    .annotations(ranges.map(|range| {
+                        AnnotationKind::Primary
+                            .span(range)
+                            .label("This should be passed via an environment variable")
+                    })),
+            )
+    }
+}
+
+/// Produced when a `run:` command interpolates `${{ … }}` expressions
+/// directly instead of passing them through environment variables.
+pub struct RunValidationError {
+    /// Each offending line paired with the byte range of the match within it.
+    found_injection_patterns: Vec<(String, Range<usize>)>,
+}
+
+/// Scans a `run:` command for direct `${{ … }}` interpolation.
+///
+/// Returns `Ok(())` when the command is clean; otherwise returns every
+/// offending line together with the in-line byte range of each match.
+pub fn validate_run_command(command: &str) -> Result<(), RunValidationError> {
+    let mut found_injection_patterns = Vec::new();
+    for line in command.lines() {
+        for matched in GITHUB_INPUT_PATTERN.find_iter(line) {
+            found_injection_patterns.push((line.to_owned(), matched.range()));
+        }
+    }
+
+    if found_injection_patterns.is_empty() {
+        Ok(())
+    } else {
+        Err(RunValidationError {
+            found_injection_patterns,
+        })
+    }
+}

tooling/xtask/src/tasks/workflows.rs 🔗

@@ -4,6 +4,8 @@ use gh_workflow::Workflow;
 use std::fs;
 use std::path::{Path, PathBuf};
 
+use crate::tasks::workflow_checks::{self};
+
 mod after_release;
 mod autofix_pr;
 mod bump_patch_version;
@@ -87,8 +89,8 @@ impl WorkflowFile {
     }
 }
 
-#[derive(PartialEq, Eq)]
-enum WorkflowType {
+#[derive(PartialEq, Eq, strum::EnumIter)]
+pub enum WorkflowType {
     /// Workflows living in the Zed repository
     Zed,
     /// Workflows living in the `zed-extensions/workflows` repository that are
@@ -113,7 +115,7 @@ impl WorkflowType {
         )
     }
 
-    fn folder_path(&self) -> PathBuf {
+    pub fn folder_path(&self) -> PathBuf {
         match self {
             WorkflowType::Zed => PathBuf::from(".github/workflows"),
             WorkflowType::ExtensionCi => PathBuf::from("extensions/workflows"),
@@ -155,5 +157,5 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
         workflow_file.generate_file()?;
     }
 
-    Ok(())
+    workflow_checks::validate(Default::default())
 }

tooling/xtask/src/tasks/workflows/after_release.rs 🔗

@@ -123,7 +123,7 @@ fn publish_winget() -> NamedJob {
                 "X-GitHub-Api-Version" = "2022-11-28"
             }
             $body = @{ branch = "master" } | ConvertTo-Json
-            $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream"
+            $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream"
             try {
                 Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json"
                 Write-Host "Successfully synced winget-pkgs fork"

tooling/xtask/src/tasks/workflows/autofix_pr.rs 🔗

@@ -55,7 +55,8 @@ fn download_patch_artifact() -> Step<Use> {
 
 fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJob {
     fn checkout_pr(pr_number: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!("gh pr checkout {pr_number}"))
+        named::bash(r#"gh pr checkout "$PR_NUMBER""#)
+            .add_env(("PR_NUMBER", pr_number.to_string()))
             .add_env(("GITHUB_TOKEN", vars::GITHUB_TOKEN))
     }
 
@@ -133,7 +134,9 @@ fn run_autofix(pr_number: &WorkflowInput, run_clippy: &WorkflowInput) -> NamedJo
 
 fn commit_changes(pr_number: &WorkflowInput, autofix_job: &NamedJob) -> NamedJob {
     fn checkout_pr(pr_number: &WorkflowInput, token: &StepOutput) -> Step<Run> {
-        named::bash(&format!("gh pr checkout {pr_number}")).add_env(("GITHUB_TOKEN", token))
+        named::bash(r#"gh pr checkout "$PR_NUMBER""#)
+            .add_env(("PR_NUMBER", pr_number.to_string()))
+            .add_env(("GITHUB_TOKEN", token))
     }
 
     fn apply_patch() -> Step<Run> {

tooling/xtask/src/tasks/workflows/cherry_pick.rs 🔗

@@ -35,7 +35,10 @@ fn run_cherry_pick(
         channel: &WorkflowInput,
         token: &StepOutput,
     ) -> Step<Run> {
-        named::bash(&format!("./script/cherry-pick {branch} {commit} {channel}"))
+        named::bash(r#"./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL""#)
+            .add_env(("BRANCH", branch.to_string()))
+            .add_env(("COMMIT", commit.to_string()))
+            .add_env(("CHANNEL", channel.to_string()))
             .add_env(("GIT_COMMITTER_NAME", "Zed Zippy"))
             .add_env(("GIT_COMMITTER_EMAIL", "hi@zed.dev"))
             .add_env(("GITHUB_TOKEN", token))

tooling/xtask/src/tasks/workflows/compare_perf.rs 🔗

@@ -29,14 +29,16 @@ pub fn run_perf(
     crate_name: &WorkflowInput,
 ) -> NamedJob {
     fn cargo_perf_test(ref_name: &WorkflowInput, crate_name: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!(
-            "
-            if [ -n \"{crate_name}\" ]; then
-                cargo perf-test -p {crate_name} -- --json={ref_name};
+        named::bash(
+            r#"
+            if [ -n "$CRATE_NAME" ]; then
+                cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
             else
-                cargo perf-test -p vim -- --json={ref_name};
-            fi"
-        ))
+                cargo perf-test -p vim -- --json="$REF_NAME";
+            fi"#,
+        )
+        .add_env(("REF_NAME", ref_name.to_string()))
+        .add_env(("CRATE_NAME", crate_name.to_string()))
     }
 
     fn install_hyperfine() -> Step<Use> {
@@ -44,9 +46,9 @@ pub fn run_perf(
     }
 
     fn compare_runs(head: &WorkflowInput, base: &WorkflowInput) -> Step<Run> {
-        named::bash(&format!(
-            "cargo perf-compare --save=results.md {base} {head}"
-        ))
+        named::bash(r#"cargo perf-compare --save=results.md "$BASE" "$HEAD""#)
+            .add_env(("BASE", base.to_string()))
+            .add_env(("HEAD", head.to_string()))
     }
 
     named::job(

tooling/xtask/src/tasks/workflows/deploy_collab.rs 🔗

@@ -1,5 +1,5 @@
 use gh_workflow::{Container, Event, Port, Push, Run, Step, Use, Workflow};
-use indoc::{formatdoc, indoc};
+use indoc::indoc;
 
 use crate::tasks::workflows::runners::{self, Platform};
 use crate::tasks::workflows::steps::{
@@ -115,9 +115,10 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
     }
 
     fn sign_into_kubernetes() -> Step<Run> {
-        named::bash(formatdoc! {r#"
-            doctl kubernetes cluster kubeconfig save --expiry-seconds 600 {cluster_name}
-        "#, cluster_name = vars::CLUSTER_NAME})
+        named::bash(
+            r#"doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME""#,
+        )
+        .add_env(("CLUSTER_NAME", vars::CLUSTER_NAME))
     }
 
     fn start_rollout() -> Step<Run> {
@@ -139,7 +140,7 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
             echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"
 
             source script/lib/deploy-helpers.sh
-            export_vars_for_environment $ZED_KUBE_NAMESPACE
+            export_vars_for_environment "$ZED_KUBE_NAMESPACE"
 
             ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)"
             export ZED_DO_CERTIFICATE_ID
@@ -149,14 +150,14 @@ fn deploy(deps: &[&NamedJob]) -> NamedJob {
             export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
             export DATABASE_MAX_CONNECTIONS=850
             envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
             echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
 
             export ZED_SERVICE_NAME=api
             export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
             export DATABASE_MAX_CONNECTIONS=60
             envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
-            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+            kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
             echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
         "#})
     }

tooling/xtask/src/tasks/workflows/extension_bump.rs 🔗

@@ -150,7 +150,7 @@ pub(crate) fn compare_versions() -> (Step<Run>, StepOutput, StepOutput) {
     r#"
         CURRENT_VERSION="$({VERSION_CHECK})"
 
-        if [[ "${{{{ github.event_name }}}}" == "pull_request" ]]; then
+        if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
             PR_FORK_POINT="$(git merge-base origin/main HEAD)"
             git checkout "$PR_FORK_POINT"
         elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
@@ -258,8 +258,6 @@ fn install_bump_2_version() -> Step<Run> {
 
 fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step<Run>, StepOutput) {
     let step = named::bash(formatdoc! {r#"
-        OLD_VERSION="{current_version}"
-
         BUMP_FILES=("extension.toml")
         if [[ -f "Cargo.toml" ]]; then
             BUMP_FILES+=("Cargo.toml")
@@ -269,7 +267,7 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step
             --search "version = \"{{current_version}}"\" \
             --replace "version = \"{{new_version}}"\" \
             --current-version "$OLD_VERSION" \
-            --no-configured-files {bump_type} "${{BUMP_FILES[@]}}"
+            --no-configured-files "$BUMP_TYPE" "${{BUMP_FILES[@]}}"
 
         if [[ -f "Cargo.toml" ]]; then
             cargo update --workspace
@@ -280,7 +278,9 @@ fn bump_version(current_version: &JobOutput, bump_type: &WorkflowInput) -> (Step
         echo "new_version=${{NEW_VERSION}}" >> "$GITHUB_OUTPUT"
         "#
     })
-    .id("bump-version");
+    .id("bump-version")
+    .add_env(("OLD_VERSION", current_version.to_string()))
+    .add_env(("BUMP_TYPE", bump_type.to_string()));
 
     let new_version = StepOutput::new(&step, "new_version");
     (step, new_version)

tooling/xtask/src/tasks/workflows/extension_tests.rs 🔗

@@ -1,5 +1,5 @@
 use gh_workflow::*;
-use indoc::{formatdoc, indoc};
+use indoc::indoc;
 
 use crate::tasks::workflows::{
     extension_bump::compare_versions,
@@ -142,12 +142,14 @@ pub fn check() -> Step<Run> {
 }
 
 fn verify_version_did_not_change(version_changed: StepOutput) -> Step<Run> {
-    named::bash(formatdoc! {r#"
-        if [[ {version_changed} == "true" && "${{{{ github.event_name }}}}" == "pull_request" && "${{{{ github.event.pull_request.user.login }}}}" != "zed-zippy[bot]" ]] ; then
+    named::bash(indoc! {r#"
+        if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then
             echo "Version change detected in your change!"
             echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot"
             exit 42
         fi
         "#
     })
+    .add_env(("VERSION_CHANGED", version_changed.to_string()))
+    .add_env(("PR_USER_LOGIN", "${{ github.event.pull_request.user.login }}"))
 }

tooling/xtask/src/tasks/workflows/extension_workflow_rollout.rs 🔗

@@ -105,10 +105,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
     }
 
     fn get_removed_files(prev_commit: &StepOutput) -> (Step<Run>, StepOutput) {
-        let step = named::bash(formatdoc! {r#"
-            PREV_COMMIT="{prev_commit}"
-
-            if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then
+        let step = named::bash(indoc::indoc! {r#"
+            if [ "$MATRIX_REPO" = "workflows" ]; then
                 WORKFLOW_DIR="extensions/workflows"
             else
                 WORKFLOW_DIR="extensions/workflows/shared"
@@ -119,8 +117,8 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
             # Get deleted files (status D) and renamed files (status R - old name needs removal)
             # Using -M to detect renames, then extracting files that are gone from their original location
             REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \
-                awk '/^D/ {{ print $2 }} /^R/ {{ print $2 }}' | \
-                xargs -I{{}} basename {{}} 2>/dev/null | \
+                awk '/^D/ { print $2 } /^R/ { print $2 }' | \
+                xargs -I{} basename {} 2>/dev/null | \
                 tr '\n' ' ' || echo "")
 
             REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs)
@@ -129,7 +127,9 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
             echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
         "#})
         .id("calc-changes")
-        .working_directory("zed");
+        .working_directory("zed")
+        .add_env(("PREV_COMMIT", prev_commit.to_string()))
+        .add_env(("MATRIX_REPO", "${{ matrix.repo }}"));
 
         let removed_files = StepOutput::new(&step, "removed_files");
 
@@ -137,9 +137,7 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
     }
 
     fn sync_workflow_files(removed_files: &StepOutput) -> Step<Run> {
-        named::bash(formatdoc! {r#"
-            REMOVED_FILES="{removed_files}"
-
+        named::bash(indoc::indoc! {r#"
             mkdir -p extension/.github/workflows
             cd extension/.github/workflows
 
@@ -153,12 +151,14 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
 
             cd - > /dev/null
 
-            if [ "${{{{ matrix.repo }}}}" = "workflows" ]; then
+            if [ "$MATRIX_REPO" = "workflows" ]; then
                 cp zed/extensions/workflows/*.yml extension/.github/workflows/
             else
                 cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
             fi
         "#})
+        .add_env(("REMOVED_FILES", removed_files.to_string()))
+        .add_env(("MATRIX_REPO", "${{ matrix.repo }}"))
     }
 
     fn get_short_sha() -> (Step<Run>, StepOutput) {
@@ -205,13 +205,16 @@ fn rollout_workflows_to_extension(fetch_repos_job: &NamedJob) -> NamedJob {
 
     fn enable_auto_merge(token: &StepOutput) -> Step<gh_workflow::Run> {
         named::bash(indoc::indoc! {r#"
-            PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}"
             if [ -n "$PR_NUMBER" ]; then
                 cd extension
                 gh pr merge "$PR_NUMBER" --auto --squash
             fi
         "#})
         .add_env(("GH_TOKEN", token.to_string()))
+        .add_env((
+            "PR_NUMBER",
+            "${{ steps.create-pr.outputs.pull-request-number }}",
+        ))
     }
 
     let (authenticate, token) = generate_token(

tooling/xtask/src/tasks/workflows/publish_extension_cli.rs 🔗

@@ -28,7 +28,7 @@ fn publish_job() -> NamedJob {
     }
 
     fn upload_binary() -> Step<Run> {
-        named::bash("script/upload-extension-cli ${{ github.sha }}")
+        named::bash(r#"script/upload-extension-cli "$GITHUB_SHA""#)
             .add_env((
                 "DIGITALOCEAN_SPACES_ACCESS_KEY",
                 vars::DIGITALOCEAN_SPACES_ACCESS_KEY,
@@ -60,7 +60,7 @@ fn update_sha_in_zed(publish_job: &NamedJob) -> NamedJob {
 
     fn replace_sha() -> Step<Run> {
         named::bash(indoc! {r#"
-            sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \
+            sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \
                 tooling/xtask/src/tasks/workflows/extension_tests.rs
         "#})
     }
@@ -139,7 +139,7 @@ fn update_sha_in_extensions(publish_job: &NamedJob) -> NamedJob {
 
     fn replace_sha() -> Step<Run> {
         named::bash(indoc! {r#"
-            sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \
+            sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \
                 .github/workflows/ci.yml
         "#})
     }
@@ -191,7 +191,7 @@ fn create_pull_request_extensions(
 
 fn get_short_sha() -> (Step<Run>, StepOutput) {
     let step = named::bash(indoc::indoc! {r#"
-        echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+        echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
     "#})
     .id("short-sha");
 

tooling/xtask/src/tasks/workflows/release.rs 🔗

@@ -272,18 +272,55 @@ pub(crate) fn push_release_update_notification(
     test_jobs: &[&NamedJob],
     bundle_jobs: &ReleaseBundleJobs,
 ) -> NamedJob {
-    let all_job_names = test_jobs
-        .into_iter()
+    fn env_name(name: &str) -> String {
+        format!("RESULT_{}", name.to_uppercase())
+    }
+
+    let all_job_names: Vec<&str> = test_jobs
+        .iter()
         .map(|j| j.name.as_ref())
-        .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref()));
+        .chain(bundle_jobs.jobs().into_iter().map(|j| j.name.as_ref()))
+        .collect();
+
+    let env_entries = [
+        (
+            "DRAFT_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", create_draft_release_job.name),
+        ),
+        (
+            "UPLOAD_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", upload_assets_job.name),
+        ),
+        (
+            "VALIDATE_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", validate_assets_job.name),
+        ),
+        (
+            "AUTO_RELEASE_RESULT".into(),
+            format!("${{{{ needs.{}.result }}}}", auto_release_preview.name),
+        ),
+        ("RUN_URL".into(), CURRENT_ACTION_RUN_URL.to_string()),
+    ]
+    .into_iter()
+    .chain(
+        all_job_names
+            .iter()
+            .map(|name| (env_name(name), format!("${{{{ needs.{name}.result }}}}"))),
+    );
+
+    let failure_checks = all_job_names
+        .iter()
+        .map(|name| {
+            format!(
+                "if [ \"${env_name}\" == \"failure\" ];then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi",
+                    env_name = env_name(name)
+            )
+        })
+        .collect::<Vec<_>>()
+        .join("\n        ");
 
     let notification_script = formatdoc! {r#"
-        DRAFT_RESULT="${{{{ needs.{draft_job}.result }}}}"
-        UPLOAD_RESULT="${{{{ needs.{upload_job}.result }}}}"
-        VALIDATE_RESULT="${{{{ needs.{validate_job}.result }}}}"
-        AUTO_RELEASE_RESULT="${{{{ needs.{auto_release_job}.result }}}}"
         TAG="$GITHUB_REF_NAME"
-        RUN_URL="{run_url}"
 
         if [ "$DRAFT_RESULT" == "failure" ]; then
             echo "❌ Draft release creation failed for $TAG: $RUN_URL"
@@ -319,19 +356,6 @@ pub(crate) fn push_release_update_notification(
             fi
         fi
         "#,
-        draft_job = create_draft_release_job.name,
-        upload_job = upload_assets_job.name,
-        validate_job = validate_assets_job.name,
-        auto_release_job = auto_release_preview.name,
-        run_url = CURRENT_ACTION_RUN_URL,
-        failure_checks = all_job_names
-            .into_iter()
-            .map(|name: &str| format!(
-                "if [ \"${{{{ needs.{name}.result }}}}\" == \"failure\" ];\
-                then FAILED_JOBS=\"$FAILED_JOBS {name}\"; fi"
-            ))
-            .collect::<Vec<_>>()
-            .join("\n        "),
     };
 
     let mut all_deps: Vec<&NamedJob> = vec![
@@ -347,7 +371,10 @@ pub(crate) fn push_release_update_notification(
         .runs_on(runners::LINUX_SMALL)
         .cond(Expression::new("always()"));
 
-    for step in notify_slack(MessageType::Evaluated(notification_script)) {
+    for step in notify_slack(MessageType::Evaluated {
+        script: notification_script,
+        env: env_entries.collect(),
+    }) {
         job = job.add_step(step);
     }
     named::job(job)
@@ -368,14 +395,17 @@ pub(crate) fn notify_on_failure(deps: &[&NamedJob]) -> NamedJob {
 
 pub(crate) enum MessageType {
+    // Fixed message text, posted verbatim in a single step.
     Static(String),
-    Evaluated(String),
+    // Message produced by evaluating `script` with bash at workflow runtime.
+    // `env` holds (name, value) pairs exported to the generating step, so
+    // callers can surface `needs.*.result` expressions as env vars instead of
+    // inlining `${{ }}` interpolations into the script body.
+    Evaluated {
+        script: String,
+        env: Vec<(String, String)>,
+    },
 }
 
 fn notify_slack(message: MessageType) -> Vec<Step<Run>> {
     match message {
         MessageType::Static(message) => vec![send_slack_message(message)],
-        MessageType::Evaluated(expression) => {
-            let (generate_step, generated_message) = generate_slack_message(expression);
+        MessageType::Evaluated { script, env } => {
+            let (generate_step, generated_message) = generate_slack_message(script, env);
 
             vec![
                 generate_step,
@@ -385,26 +415,32 @@ fn notify_slack(message: MessageType) -> Vec<Step<Run>> {
     }
 }
 
+/// Builds the step that runs `expression` as a bash script and captures its
+/// stdout as the Slack message, exposed to later steps through the `message`
+/// step output. `env` is exported to the step so the script can read values
+/// (e.g. job results) via plain env vars rather than `${{ }}` interpolation.
-fn generate_slack_message(expression: String) -> (Step<Run>, StepOutput) {
+fn generate_slack_message(
+    expression: String,
+    env: Vec<(String, String)>,
+) -> (Step<Run>, StepOutput) {
     let script = formatdoc! {r#"
         MESSAGE=$({expression})
         echo "message=$MESSAGE" >> "$GITHUB_OUTPUT"
         "#
     };
+    // NOTE(review): a single `echo name=value` write to GITHUB_OUTPUT only
+    // carries the first line of $MESSAGE — presumably messages are one line;
+    // confirm, otherwise a heredoc-style delimiter write is needed.
-    let generate_step = named::bash(&script)
+    let mut generate_step = named::bash(&script)
         .id("generate-webhook-message")
         .add_env(("GH_TOKEN", Context::github().token()));
 
+    // Forward caller-supplied env entries onto the generating step.
+    for (name, value) in env {
+        generate_step = generate_step.add_env((name, value));
+    }
+
     let output = StepOutput::new(&generate_step, "message");
 
     (generate_step, output)
 }
 
 fn send_slack_message(message: String) -> Step<Run> {
+    // Build the JSON payload with `jq` from the SLACK_MESSAGE env var rather
+    // than interpolating `message` into the curl command line: jq performs the
+    // JSON quoting/escaping, and passing the text via env keeps the shell from
+    // ever parsing the message contents (no shell/JSON injection).
-    let script = formatdoc! {r#"
-        curl -X POST -H 'Content-type: application/json'\
-         --data '{{"text":"{message}"}}' "$SLACK_WEBHOOK"
-        "#
-    };
-    named::bash(&script).add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+    named::bash(
+        r#"curl -X POST -H 'Content-type: application/json' --data "$(jq -n --arg text "$SLACK_MESSAGE" '{"text": $text}')" "$SLACK_WEBHOOK""#
+    )
+    .add_env(("SLACK_WEBHOOK", vars::SLACK_WEBHOOK_WORKFLOW_FAILURES))
+    .add_env(("SLACK_MESSAGE", message))
 }

tooling/xtask/src/tasks/workflows/run_tests.rs 🔗

@@ -6,7 +6,10 @@ use indexmap::IndexMap;
 use indoc::formatdoc;
 
 use crate::tasks::workflows::{
-    steps::{CommonJobConditions, repository_owner_guard_expression, use_clang},
+    steps::{
+        CommonJobConditions, cache_rust_dependencies_namespace, repository_owner_guard_expression,
+        use_clang,
+    },
     vars::{self, PathCondition},
 };
 
@@ -116,7 +119,7 @@ fn orchestrate_impl(rules: &[&PathCondition], include_package_filter: bool) -> N
           git fetch origin "$GITHUB_BASE_REF" --depth=350
           COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
         fi
-        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+        CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
 
         check_pattern() {
           local output_name="$1"
@@ -240,15 +243,20 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
 
     "#});
 
+    let env_entries: Vec<_> = jobs
+        .iter()
+        .map(|job| {
+            let env_name = format!("RESULT_{}", job.name.to_uppercase());
+            let env_value = format!("${{{{ needs.{}.result }}}}", job.name);
+            (env_name, env_value)
+        })
+        .collect();
+
     script.push_str(
         &jobs
             .iter()
-            .map(|job| {
-                format!(
-                    "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
-                    job.name, job.name
-                )
-            })
+            .zip(env_entries.iter())
+            .map(|(job, (env_name, _))| format!("check_result \"{}\" \"${}\"", job.name, env_name))
             .collect::<Vec<_>>()
             .join("\n"),
     );
@@ -263,7 +271,13 @@ pub fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
                 .collect::<Vec<String>>(),
         )
         .cond(repository_owner_guard_expression(true))
-        .add_step(named::bash(&script));
+        .add_step(
+            env_entries
+                .into_iter()
+                .fold(named::bash(&script), |step, env_item| {
+                    step.add_env(env_item)
+                }),
+        );
 
     named::job(job)
 }
@@ -646,9 +660,10 @@ pub(crate) fn check_scripts() -> NamedJob {
     }
 
     fn run_actionlint() -> Step<Run> {
+        // Pass the actionlint path through an env var and quote it in the
+        // script, instead of expanding the `${{ }}` expression directly into
+        // the shell text — the shell never parses the interpolated value.
-        named::bash(indoc::indoc! {r#"
-            ${{ steps.get_actionlint.outputs.executable }} -color
-        "#})
+        named::bash(r#""$ACTIONLINT_BIN" -color"#).add_env((
+            "ACTIONLINT_BIN",
+            "${{ steps.get_actionlint.outputs.executable }}",
+        ))
     }
 
     fn run_shellcheck() -> Step<Run> {
@@ -673,6 +688,7 @@ pub(crate) fn check_scripts() -> NamedJob {
             .add_step(run_shellcheck())
             .add_step(download_actionlint().id("get_actionlint"))
             .add_step(run_actionlint())
+            .add_step(cache_rust_dependencies_namespace())
             .add_step(check_xtask_workflows()),
     )
 }

tooling/xtask/src/tasks/workflows/steps.rs 🔗

@@ -503,9 +503,8 @@ pub mod named {
 }
 
 pub fn git_checkout(ref_name: &dyn std::fmt::Display) -> Step<Run> {
+    // Inject the ref via the REF_NAME env var (double-quoted in the script)
+    // instead of formatting it into the command line, so a ref name cannot
+    // smuggle shell syntax into the step (Actions script-injection hardening).
-    named::bash(&format!(
-        "git fetch origin {ref_name} && git checkout {ref_name}"
-    ))
+    named::bash(r#"git fetch origin "$REF_NAME" && git checkout "$REF_NAME""#)
+        .add_env(("REF_NAME", ref_name.to_string()))
 }
 
 pub fn authenticate_as_zippy() -> (Step<Use>, StepOutput) {