Detailed changes
@@ -2,16 +2,9 @@ name: CI
on:
push:
- branches:
- - main
- - "v[0-9]+.[0-9]+.x"
tags:
- "v*"
- pull_request:
- branches:
- - "**"
-
concurrency:
# Allow only one workflow run at a time per non-`main` branch.
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
@@ -273,15 +266,12 @@ jobs:
uses: ./.github/actions/run_tests
- name: Build collab
+ # we should do this on a linux x86 machine
run: cargo build -p collab
- name: Build other binaries and features
run: |
- cargo build --workspace --bins --all-features
- cargo check -p gpui --features "macos-blade"
- cargo check -p workspace
- cargo build -p remote_server
- cargo check -p gpui --examples
+ cargo build --workspace --bins --examples
# Since the macOS runners are stateful, we need to remove the config file to prevent potential bugs.
- name: Clean CI config file
@@ -731,10 +721,6 @@ jobs:
(contains(github.event.pull_request.labels.*.name, 'run-nix') ||
needs.job_spec.outputs.run_nix == 'true')
secrets: inherit
- with:
- flake-output: debug
- # excludes the final package to only cache dependencies
- cachix-filter: "-zed-editor-[0-9.]*-nightly"
bundle-windows-x64:
timeout-minutes: 120
@@ -1,14 +1,31 @@
# Generated from xtask::workflows::nix_build
# Rebuild with `cargo xtask workflows`.
name: nix_build
+env:
+ CARGO_TERM_COLOR: always
+ RUST_BACKTRACE: '1'
+ CARGO_INCREMENTAL: '0'
on:
- workflow_call:
- inputs:
- flake-output:
- type: string
- default: default
- cachix-filter:
- type: string
+ pull_request:
+ branches:
+ - '**'
+ paths:
+ - nix/**
+ - flake.*
+ - Cargo.*
+ - rust-toolchain.toml
+ - .cargo/config.toml
+ push:
+ branches:
+ - main
+ - v[0-9]+.[0-9]+.x
+ paths:
+ - nix/**
+ - flake.*
+ - Cargo.*
+ - rust-toolchain.toml
+ - .cargo/config.toml
+ workflow_call: {}
jobs:
build_nix_linux_x86_64:
if: github.repository_owner == 'zed-industries'
@@ -33,9 +50,9 @@ jobs:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
- pushFilter: ${{ inputs.cachix-filter }}
+ pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
- run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
+ run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
@@ -63,9 +80,9 @@ jobs:
name: zed
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
cachixArgs: -v
- pushFilter: ${{ inputs.cachix-filter }}
+ pushFilter: -zed-editor-[0-9.]*-nightly
- name: nix_build::build
- run: nix build .#${{ inputs.flake-output }} -L --accept-flake-config
+ run: nix build .#debug -L --accept-flake-config
shell: bash -euxo pipefail {0}
- name: nix_build::limit_store
run: |-
@@ -75,3 +92,6 @@ jobs:
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+ cancel-in-progress: true
@@ -33,7 +33,7 @@ jobs:
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_mac:
- if: github.repository_owner == 'zed-industries'
+ if: 'false'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
@@ -49,6 +49,9 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
+ - name: steps::clippy
+ run: ./script/clippy
+ shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
@@ -65,7 +68,7 @@ jobs:
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
- if: github.repository_owner == 'zed-industries'
+ if: 'false'
runs-on: self-32vcpu-windows-2022
steps:
- name: steps::checkout_repo
@@ -81,6 +84,9 @@ jobs:
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
+ - name: steps::clippy
+ run: ./script/clippy.ps1
+ shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
@@ -109,10 +109,10 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- - name: ./script/linux
+ - name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- - name: ./script/install-mold
+ - name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
@@ -143,9 +143,12 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- - name: ./script/linux
+ - name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -0,0 +1,549 @@
+# Generated from xtask::workflows::run_tests
+# Rebuild with `cargo xtask workflows`.
+name: run_tests
+env:
+ CARGO_TERM_COLOR: always
+ RUST_BACKTRACE: '1'
+ CARGO_INCREMENTAL: '0'
+on:
+ pull_request:
+ branches:
+ - '**'
+ push:
+ branches:
+ - main
+ - v[0-9]+.[0-9]+.x
+jobs:
+ orchestrate:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
+ - id: filter
+ name: filter
+ run: |
+ if [ -z "$GITHUB_BASE_REF" ]; then
+ echo "Not in a PR context (i.e., push to main/stable/preview)"
+ COMPARE_REV="$(git rev-parse HEAD~1)"
+ else
+ echo "In a PR context comparing to pull_request.base.ref"
+ git fetch origin "$GITHUB_BASE_REF" --depth=350
+ COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
+ fi
+ CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+
+ check_pattern() {
+ local output_name="$1"
+ local pattern="$2"
+ local grep_arg="$3"
+
+ echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
+ echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
+ echo "${output_name}=false" >> "$GITHUB_OUTPUT"
+ }
+
+ check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask' -qP
+ check_pattern "run_docs" '^docs/' -qP
+ check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
+ check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
+ check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
+ shell: bash -euxo pipefail {0}
+ outputs:
+ run_action_checks: ${{ steps.filter.outputs.run_action_checks }}
+ run_docs: ${{ steps.filter.outputs.run_docs }}
+ run_licenses: ${{ steps.filter.outputs.run_licenses }}
+ run_nix: ${{ steps.filter.outputs.run_nix }}
+ run_tests: ${{ steps.filter.outputs.run_tests }}
+ check_style:
+ if: github.repository_owner == 'zed-industries'
+ runs-on: namespace-profile-4x8-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_pnpm
+ uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
+ with:
+ version: '9'
+ - name: ./script/prettier
+ run: ./script/prettier
+ shell: bash -euxo pipefail {0}
+ - name: ./script/check-todos
+ run: ./script/check-todos
+ shell: bash -euxo pipefail {0}
+ - name: ./script/check-keymaps
+ run: ./script/check-keymaps
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_style::check_for_typos
+ uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
+ with:
+ config: ./typos.toml
+ - name: steps::cargo_fmt
+ run: cargo fmt --all -- --check
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ run_tests_windows:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: self-32vcpu-windows-2022
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ New-Item -ItemType Directory -Path "./../.cargo" -Force
+ Copy-Item -Path "./.cargo/ci-config.toml" -Destination "./../.cargo/config.toml"
+ shell: pwsh
+ - name: steps::setup_node
+ uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+ with:
+ node-version: '20'
+ - name: steps::clippy
+ run: ./script/clippy.ps1
+ shell: pwsh
+ - name: steps::cargo_install_nextest
+ run: cargo install cargo-nextest --locked
+ shell: pwsh
+ - name: steps::clear_target_dir_if_large
+ run: ./script/clear-target-dir-if-larger-than.ps1 250
+ shell: pwsh
+ - name: steps::cargo_nextest
+ run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+ shell: pwsh
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
+ shell: pwsh
+ timeout-minutes: 60
+ run_tests_linux:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: namespace-profile-16x32-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - name: steps::setup_linux
+ run: ./script/linux
+ shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
+ - name: steps::setup_node
+ uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+ with:
+ node-version: '20'
+ - name: steps::clippy
+ run: ./script/clippy
+ shell: bash -euxo pipefail {0}
+ - name: steps::cargo_install_nextest
+ run: cargo install cargo-nextest --locked
+ shell: bash -euxo pipefail {0}
+ - name: steps::clear_target_dir_if_large
+ run: ./script/clear-target-dir-if-larger-than 100
+ shell: bash -euxo pipefail {0}
+ - name: steps::cargo_nextest
+ run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+ shell: bash -euxo pipefail {0}
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ rm -rf ./../.cargo
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ run_tests_mac:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: self-mini-macos
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - name: steps::setup_node
+ uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
+ with:
+ node-version: '20'
+ - name: steps::clippy
+ run: ./script/clippy
+ shell: bash -euxo pipefail {0}
+ - name: steps::cargo_install_nextest
+ run: cargo install cargo-nextest --locked
+ shell: bash -euxo pipefail {0}
+ - name: steps::clear_target_dir_if_large
+ run: ./script/clear-target-dir-if-larger-than 300
+ shell: bash -euxo pipefail {0}
+ - name: steps::cargo_nextest
+ run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
+ shell: bash -euxo pipefail {0}
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ rm -rf ./../.cargo
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ doctests:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: namespace-profile-16x32-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::cache_rust_dependencies
+ uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+ with:
+ save-if: ${{ github.ref == 'refs/heads/main' }}
+ - name: steps::setup_linux
+ run: ./script/linux
+ shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - id: run_doctests
+ name: run_tests::doctests::run_doctests
+ run: |
+ cargo test --workspace --doc --no-fail-fast
+ shell: bash -euxo pipefail {0}
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ rm -rf ./../.cargo
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ check_workspace_binaries:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: namespace-profile-8x16-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - name: steps::setup_linux
+ run: ./script/linux
+ shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
+ - name: cargo build -p collab
+ run: cargo build -p collab
+ shell: bash -euxo pipefail {0}
+ - name: cargo build --workspace --bins --examples
+ run: cargo build --workspace --bins --examples
+ shell: bash -euxo pipefail {0}
+ - name: steps::cleanup_cargo_config
+ if: always()
+ run: |
+ rm -rf ./../.cargo
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ check_postgres_and_protobuf_migrations:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: self-mini-macos
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ fetch-depth: 0
+ - name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
+ run: git clean -df
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
+ run: |
+ if [ -z "$GITHUB_BASE_REF" ];
+ then
+ echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
+ else
+ git checkout -B temp
+ git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
+ echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
+ fi
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
+ uses: bufbuild/buf-setup-action@v1
+ with:
+ version: v1.29.0
+ - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
+ uses: bufbuild/buf-breaking-action@v1
+ with:
+ input: crates/proto/proto/
+ against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
+ timeout-minutes: 60
+ check_dependencies:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_tests == 'true'
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: run_tests::check_dependencies::install_cargo_machete
+ uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+ with:
+ command: install
+ args: cargo-machete@0.7.0
+ - name: run_tests::check_dependencies::run_cargo_machete
+ uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
+ with:
+ command: machete
+ - name: run_tests::check_dependencies::check_cargo_lock
+ run: cargo update --locked --workspace
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_dependencies::check_vulnerable_dependencies
+ if: github.event_name == 'pull_request'
+ uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8
+ with:
+ license-check: false
+ timeout-minutes: 60
+ check_docs:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_docs == 'true'
+ runs-on: namespace-profile-8x16-ubuntu-2204
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: steps::setup_cargo_config
+ run: |
+ mkdir -p ./../.cargo
+ cp ./.cargo/ci-config.toml ./../.cargo/config.toml
+ shell: bash -euxo pipefail {0}
+ - name: steps::cache_rust_dependencies
+ uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
+ with:
+ save-if: ${{ github.ref == 'refs/heads/main' }}
+ - name: run_tests::check_docs::lychee_link_check
+ uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
+ with:
+ args: --no-progress --exclude '^http' './docs/src/**/*'
+ fail: true
+ jobSummary: false
+ - name: steps::setup_linux
+ run: ./script/linux
+ shell: bash -euxo pipefail {0}
+ - name: steps::install_mold
+ run: ./script/install-mold
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_docs::install_mdbook
+ uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
+ with:
+ mdbook-version: 0.4.37
+ - name: run_tests::check_docs::build_docs
+ run: |
+ mkdir -p target/deploy
+ mdbook build ./docs --dest-dir=../target/deploy/docs/
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_docs::lychee_link_check
+ uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
+ with:
+ args: --no-progress --exclude '^http' 'target/deploy/docs'
+ fail: true
+ jobSummary: false
+ timeout-minutes: 60
+ check_licenses:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_licenses == 'true'
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: ./script/check-licenses
+ run: ./script/check-licenses
+ shell: bash -euxo pipefail {0}
+ - name: ./script/generate-licenses
+ run: ./script/generate-licenses
+ shell: bash -euxo pipefail {0}
+ check_scripts:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_action_checks == 'true'
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: run_tests::check_scripts::run_shellcheck
+ run: ./script/shellcheck-scripts error
+ shell: bash -euxo pipefail {0}
+ - id: get_actionlint
+ name: run_tests::check_scripts::download_actionlint
+ run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_scripts::run_actionlint
+ run: |
+ ${{ steps.get_actionlint.outputs.executable }} -color
+ shell: bash -euxo pipefail {0}
+ - name: run_tests::check_scripts::check_xtask_workflows
+ run: |
+ cargo xtask workflows
+ if ! git diff --exit-code .github; then
+ echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
+ echo "Please run 'cargo xtask workflows' locally and commit the changes"
+ exit 1
+ fi
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ build_nix_linux_x86_64:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_nix == 'true'
+ runs-on: namespace-profile-32x64-ubuntu-2004
+ env:
+ ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+ ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+ ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
+ GIT_LFS_SKIP_SMUDGE: '1'
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: nix_build::install_nix
+ uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f
+ with:
+ github_access_token: ${{ secrets.GITHUB_TOKEN }}
+ - name: nix_build::cachix_action
+ uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
+ with:
+ name: zed
+ authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+ cachixArgs: -v
+ pushFilter: -zed-editor-[0-9.]*-nightly
+ - name: nix_build::build
+ run: nix build .#debug -L --accept-flake-config
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ continue-on-error: true
+ build_nix_mac_aarch64:
+ needs:
+ - orchestrate
+ if: needs.orchestrate.outputs.run_nix == 'true'
+ runs-on: self-mini-macos
+ env:
+ ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+ ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
+ ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
+ GIT_LFS_SKIP_SMUDGE: '1'
+ steps:
+ - name: steps::checkout_repo
+ uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+ with:
+ clean: false
+ - name: nix_build::set_path
+ run: |
+ echo "/nix/var/nix/profiles/default/bin" >> "$GITHUB_PATH"
+ echo "/Users/administrator/.nix-profile/bin" >> "$GITHUB_PATH"
+ shell: bash -euxo pipefail {0}
+ - name: nix_build::cachix_action
+ uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad
+ with:
+ name: zed
+ authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
+ cachixArgs: -v
+ pushFilter: -zed-editor-[0-9.]*-nightly
+ - name: nix_build::build
+ run: nix build .#debug -L --accept-flake-config
+ shell: bash -euxo pipefail {0}
+ - name: nix_build::limit_store
+ run: |-
+ if [ "$(du -sm /nix/store | cut -f1)" -gt 50000 ]; then
+ nix-collect-garbage -d || true
+ fi
+ shell: bash -euxo pipefail {0}
+ timeout-minutes: 60
+ continue-on-error: true
+ tests_pass:
+ needs:
+ - orchestrate
+ - check_style
+ - run_tests_windows
+ - run_tests_linux
+ - run_tests_mac
+ - doctests
+ - check_workspace_binaries
+ - check_postgres_and_protobuf_migrations
+ - check_dependencies
+ - check_docs
+ - check_licenses
+ - check_scripts
+ - build_nix_linux_x86_64
+ - build_nix_mac_aarch64
+ if: github.repository_owner == 'zed-industries' && always()
+ runs-on: namespace-profile-2x4-ubuntu-2404
+ steps:
+ - name: run_tests::tests_pass
+ run: |
+ set +x
+ EXIT_CODE=0
+
+ check_result() {
+ echo "* $1: $2"
+ if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
+ }
+
+ check_result "orchestrate" "${{ needs.orchestrate.result }}"
+ check_result "check_style" "${{ needs.check_style.result }}"
+ check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
+ check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
+ check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
+ check_result "doctests" "${{ needs.doctests.result }}"
+ check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
+ check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
+ check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
+ check_result "check_docs" "${{ needs.check_docs.result }}"
+ check_result "check_licenses" "${{ needs.check_licenses.result }}"
+ check_result "check_scripts" "${{ needs.check_scripts.result }}"
+ check_result "build_nix_linux_x86_64" "${{ needs.build_nix_linux_x86_64.result }}"
+ check_result "build_nix_mac_aarch64" "${{ needs.build_nix_mac_aarch64.result }}"
+
+ exit $EXIT_CODE
+ shell: bash -euxo pipefail {0}
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
+ cancel-in-progress: true
@@ -1,21 +0,0 @@
-name: Script
-
-on:
- pull_request:
- paths:
- - "script/**"
- push:
- branches:
- - main
-
-jobs:
- shellcheck:
- name: "ShellCheck Scripts"
- if: github.repository_owner == 'zed-industries'
- runs-on: namespace-profile-2x4-ubuntu-2404
-
- steps:
- - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
- - name: Shellcheck ./scripts
- run: |
- ./script/shellcheck-scripts error
@@ -628,7 +628,7 @@ Here's an example of these settings changed:
// Allow the cursor to reach the edges of the screen
"vertical_scroll_margin": 0,
"gutter": {
- // Disable line numbers completely:
+ // Disable line numbers completely
"line_numbers": false
},
"command_aliases": {
@@ -0,0 +1,17 @@
+#!/bin/bash
+set -euxo pipefail
+
+PRETTIER_VERSION=3.5.0
+
+pnpm dlx "prettier@${PRETTIER_VERSION}" assets/settings/default.json --check || {
+ echo "To fix, run from the root of the Zed repo:"
+ echo " pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
+ false
+}
+
+cd docs
+pnpm dlx "prettier@${PRETTIER_VERSION}" . --check || {
+ echo "To fix, run from the root of the Zed repo:"
+ echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
+ false
+}
@@ -8,6 +8,7 @@ mod nix_build;
mod release_nightly;
mod run_bundling;
+mod run_tests;
mod runners;
mod steps;
mod vars;
@@ -20,11 +21,9 @@ pub fn run_workflows(_: GenerateWorkflowArgs) -> Result<()> {
let workflows = vec![
("danger.yml", danger::danger()),
- ("nix_build.yml", nix_build::nix_build()),
("run_bundling.yml", run_bundling::run_bundling()),
("release_nightly.yml", release_nightly::release_nightly()),
- // ("run_tests.yml", run_tests::run_tests()),
- // ("release.yml", release::release()),
+ ("run_tests.yml", run_tests::run_tests()),
];
fs::create_dir_all(dir)
.with_context(|| format!("Failed to create directory: {}", dir.display()))?;
@@ -21,7 +21,7 @@ pub fn danger() -> Workflow {
.cond(Expression::new(
"github.repository_owner == 'zed-industries'",
))
- .runs_on(runners::LINUX_CHEAP)
+ .runs_on(runners::LINUX_SMALL)
.add_step(steps::checkout_repo())
.add_step(steps::setup_pnpm())
.add_step(
@@ -7,52 +7,6 @@ use super::{runners, steps, steps::named, vars};
use gh_workflow::*;
use indoc::indoc;
-/// Generates the nix.yml workflow
-pub fn nix_build() -> Workflow {
- // todo(ci) instead of having these as optional YAML inputs,
- // should we just generate two copies of the job (one for release-nightly
- // and one for CI?)
- let (input_flake_output, flake_output) = vars::input(
- "flake-output",
- WorkflowCallInput {
- input_type: "string".into(),
- default: Some("default".into()),
- ..Default::default()
- },
- );
- let (input_cachix_filter, cachix_filter) = vars::input(
- "cachix-filter",
- WorkflowCallInput {
- input_type: "string".into(),
- ..Default::default()
- },
- );
-
- let linux_x86 = build_nix(
- Platform::Linux,
- Arch::X86_64,
- &input_flake_output,
- Some(&input_cachix_filter),
- &[],
- );
- let mac_arm = build_nix(
- Platform::Mac,
- Arch::ARM64,
- &input_flake_output,
- Some(&input_cachix_filter),
- &[],
- );
-
- named::workflow()
- .on(Event::default().workflow_call(
- WorkflowCall::default()
- .add_input(flake_output.0, flake_output.1)
- .add_input(cachix_filter.0, cachix_filter.1),
- ))
- .add_job(linux_x86.name, linux_x86.job)
- .add_job(mac_arm.name, mac_arm.job)
-}
-
pub(crate) fn build_nix(
platform: Platform,
arch: Arch,
@@ -1,6 +1,7 @@
use crate::tasks::workflows::{
nix_build::build_nix,
run_bundling::bundle_mac,
+ run_tests::run_platform_tests,
runners::{Arch, Platform},
steps::NamedJob,
vars::{mac_bundle_envs, windows_bundle_envs},
@@ -32,8 +33,8 @@ pub fn release_nightly() -> Workflow {
.collect();
let style = check_style();
- let tests = run_tests(Platform::Mac);
- let windows_tests = run_tests(Platform::Windows);
+ let tests = run_platform_tests(Platform::Mac);
+ let windows_tests = run_platform_tests(Platform::Windows);
let bundle_mac_x86 = bundle_mac_nightly(Arch::X86_64, &[&style, &tests]);
let bundle_mac_arm = bundle_mac_nightly(Arch::ARM64, &[&style, &tests]);
let linux_x86 = bundle_linux_nightly(Arch::X86_64, &[&style, &tests]);
@@ -111,26 +112,6 @@ fn release_job(deps: &[&NamedJob]) -> Job {
}
}
-fn run_tests(platform: Platform) -> NamedJob {
- let runner = match platform {
- Platform::Windows => runners::WINDOWS_DEFAULT,
- Platform::Linux => runners::LINUX_DEFAULT,
- Platform::Mac => runners::MAC_DEFAULT,
- };
- NamedJob {
- name: format!("run_tests_{platform}"),
- job: release_job(&[])
- .runs_on(runner)
- .add_step(steps::checkout_repo())
- .add_step(steps::setup_cargo_config(platform))
- .add_step(steps::setup_node())
- .add_step(steps::cargo_install_nextest(platform))
- .add_step(steps::clear_target_dir_if_large(platform))
- .add_step(steps::cargo_nextest(platform))
- .add_step(steps::cleanup_cargo_config(platform)),
- }
-}
-
fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
let platform = Platform::Mac;
NamedJob {
@@ -150,7 +131,7 @@ fn bundle_mac_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
fn bundle_linux_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
let platform = Platform::Linux;
- let mut job = release_job(deps)
+ let mut job = steps::release_job(deps)
.runs_on(arch.linux_bundler())
.add_step(steps::checkout_repo())
.add_step(steps::setup_sentry())
@@ -176,7 +157,7 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
let platform = Platform::Windows;
NamedJob {
name: format!("bundle_windows_nightly_{arch}"),
- job: release_job(deps)
+ job: steps::release_job(deps)
.runs_on(runners::WINDOWS_DEFAULT)
.envs(windows_bundle_envs())
.add_step(steps::checkout_repo())
@@ -190,8 +171,8 @@ fn bundle_windows_nightly(arch: Arch, deps: &[&NamedJob]) -> NamedJob {
fn update_nightly_tag_job(deps: &[&NamedJob]) -> NamedJob {
NamedJob {
name: "update_nightly_tag".to_owned(),
- job: release_job(deps)
- .runs_on(runners::LINUX_CHEAP)
+ job: steps::release_job(deps)
+ .runs_on(runners::LINUX_SMALL)
.add_step(steps::checkout_repo().add_with(("fetch-depth", 0)))
.add_step(update_nightly_tag())
.add_step(create_sentry_release()),
@@ -1,5 +1,5 @@
use crate::tasks::workflows::{
- steps::named,
+ steps::{FluentBuilder, named},
vars::{mac_bundle_envs, windows_bundle_envs},
};
@@ -76,16 +76,12 @@ fn bundle_linux(arch: runners::Arch) -> Job {
vars::GITHUB_SHA,
arch.triple()
);
- let mut job = bundle_job()
+ bundle_job()
.runs_on(arch.linux_bundler())
.add_step(steps::checkout_repo())
.add_step(steps::setup_sentry())
- .add_step(steps::script("./script/linux"));
- // todo(ci) can we do this on arm too?
- if arch == runners::Arch::X86_64 {
- job = job.add_step(steps::script("./script/install-mold"));
- }
- job.add_step(steps::script("./script/bundle-linux"))
+ .map(steps::install_linux_dependencies)
+ .add_step(steps::script("./script/bundle-linux"))
.add_step(steps::upload_artifact(
&artifact_name,
"target/release/zed-*.tar.gz",
@@ -0,0 +1,474 @@
+use gh_workflow::{
+ Concurrency, Event, Expression, Job, PullRequest, Push, Run, Step, Use, Workflow,
+};
+use indexmap::IndexMap;
+
+use crate::tasks::workflows::{
+ nix_build::build_nix, runners::Arch, steps::BASH_SHELL, vars::PathCondition,
+};
+
+use super::{
+ runners::{self, Platform},
+ steps::{self, FluentBuilder, NamedJob, named, release_job},
+};
+
+pub(crate) fn run_tests() -> Workflow {
+ // Specify anything which should potentially skip full test suite in this regex:
+ // - docs/
+ // - script/update_top_ranking_issues/
+ // - .github/ISSUE_TEMPLATE/
+ // - .github/workflows/ (except .github/workflows/ci.yml)
+ let should_run_tests = PathCondition::inverted(
+ "run_tests",
+ r"^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))",
+ );
+ let should_check_docs = PathCondition::new("run_docs", r"^docs/");
+ let should_check_scripts = PathCondition::new(
+ "run_action_checks",
+ r"^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask",
+ );
+ let should_check_licences =
+ PathCondition::new("run_licenses", r"^(Cargo.lock|script/.*licenses)");
+ let should_build_nix = PathCondition::new(
+ "run_nix",
+ r"^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)",
+ );
+
+ let orchestrate = orchestrate(&[
+ &should_check_scripts,
+ &should_check_docs,
+ &should_check_licences,
+ &should_build_nix,
+ &should_run_tests,
+ ]);
+
+ let jobs = [
+ orchestrate,
+ check_style(),
+ should_run_tests.guard(run_platform_tests(Platform::Windows)),
+ should_run_tests.guard(run_platform_tests(Platform::Linux)),
+ should_run_tests.guard(run_platform_tests(Platform::Mac)),
+ should_run_tests.guard(doctests()),
+ should_run_tests.guard(check_workspace_binaries()),
+ should_run_tests.guard(check_postgres_and_protobuf_migrations()), // could be more specific here?
+ should_run_tests.guard(check_dependencies()), // could be more specific here?
+ should_check_docs.guard(check_docs()),
+ should_check_licences.guard(check_licenses()),
+ should_check_scripts.guard(check_scripts()),
+ should_build_nix.guard(build_nix(
+ Platform::Linux,
+ Arch::X86_64,
+ "debug",
+ // *don't* cache the built output
+ Some("-zed-editor-[0-9.]*-nightly"),
+ &[],
+ )),
+ should_build_nix.guard(build_nix(
+ Platform::Mac,
+ Arch::ARM64,
+ "debug",
+ // *don't* cache the built output
+ Some("-zed-editor-[0-9.]*-nightly"),
+ &[],
+ )),
+ ];
+ let tests_pass = tests_pass(&jobs);
+
+ let mut workflow = named::workflow()
+ .add_event(Event::default()
+ .push(
+ Push::default()
+ .add_branch("main")
+ .add_branch("v[0-9]+.[0-9]+.x")
+ )
+ .pull_request(PullRequest::default().add_branch("**"))
+ )
+ .concurrency(Concurrency::default()
+ .group("${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}")
+ .cancel_in_progress(true)
+ )
+ .add_env(( "CARGO_TERM_COLOR", "always" ))
+ .add_env(( "RUST_BACKTRACE", 1 ))
+ .add_env(( "CARGO_INCREMENTAL", 0 ));
+ for job in jobs {
+ workflow = workflow.add_job(job.name, job.job)
+ }
+ workflow.add_job(tests_pass.name, tests_pass.job)
+}
+
+// Generates a bash script that checks changed files against regex patterns
+// and sets GitHub output variables accordingly
+fn orchestrate(rules: &[&PathCondition]) -> NamedJob {
+ let name = "orchestrate".to_owned();
+ let step_name = "filter".to_owned();
+ let mut script = String::new();
+
+ script.push_str(indoc::indoc! {r#"
+ if [ -z "$GITHUB_BASE_REF" ]; then
+ echo "Not in a PR context (i.e., push to main/stable/preview)"
+ COMPARE_REV="$(git rev-parse HEAD~1)"
+ else
+ echo "In a PR context comparing to pull_request.base.ref"
+ git fetch origin "$GITHUB_BASE_REF" --depth=350
+ COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
+ fi
+ CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+
+ check_pattern() {
+ local output_name="$1"
+ local pattern="$2"
+ local grep_arg="$3"
+
+ echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
+ echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
+ echo "${output_name}=false" >> "$GITHUB_OUTPUT"
+ }
+
+ "#});
+
+ let mut outputs = IndexMap::new();
+
+ for rule in rules {
+ assert!(
+ rule.set_by_step
+ .borrow_mut()
+ .replace(name.clone())
+ .is_none()
+ );
+ assert!(
+ outputs
+ .insert(
+ rule.name.to_owned(),
+ format!("${{{{ steps.{}.outputs.{} }}}}", step_name, rule.name)
+ )
+ .is_none()
+ );
+
+ let grep_arg = if rule.invert { "-qvP" } else { "-qP" };
+ script.push_str(&format!(
+ "check_pattern \"{}\" '{}' {}\n",
+ rule.name, rule.pattern, grep_arg
+ ));
+ }
+
+ let job = Job::default()
+ .runs_on(runners::LINUX_SMALL)
+ .cond(Expression::new(
+ "github.repository_owner == 'zed-industries'",
+ ))
+ .outputs(outputs)
+ .add_step(steps::checkout_repo().add_with((
+ "fetch-depth",
+ "${{ github.ref == 'refs/heads/main' && 2 || 350 }}",
+ )))
+ .add_step(
+ Step::new(step_name.clone())
+ .run(script)
+ .id(step_name)
+ .shell(BASH_SHELL),
+ );
+
+ NamedJob { name, job }
+}
+
+pub(crate) fn tests_pass(jobs: &[NamedJob]) -> NamedJob {
+ let mut script = String::from(indoc::indoc! {r#"
+ set +x
+ EXIT_CODE=0
+
+ check_result() {
+ echo "* $1: $2"
+ if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
+ }
+
+ "#});
+
+ script.push_str(
+ &jobs
+ .iter()
+ .map(|job| {
+ format!(
+ "check_result \"{}\" \"${{{{ needs.{}.result }}}}\"",
+ job.name, job.name
+ )
+ })
+ .collect::<Vec<_>>()
+ .join("\n"),
+ );
+
+ script.push_str("\n\nexit $EXIT_CODE\n");
+
+ let job = Job::default()
+ .runs_on(runners::LINUX_SMALL)
+ .needs(
+ jobs.iter()
+ .map(|j| j.name.to_string())
+ .collect::<Vec<String>>(),
+ )
+ .cond(Expression::new(
+ "github.repository_owner == 'zed-industries' && always()",
+ ))
+ .add_step(named::bash(&script));
+
+ named::job(job)
+}
+
+fn check_style() -> NamedJob {
+ fn check_for_typos() -> Step<Use> {
+ named::uses(
+ "crate-ci",
+ "typos",
+ "80c8a4945eec0f6d464eaf9e65ed98ef085283d1",
+ ) // v1.38.1
+ .with(("config", "./typos.toml"))
+ }
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_MEDIUM)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::setup_pnpm())
+ .add_step(steps::script("./script/prettier"))
+ .add_step(steps::script("./script/check-todos"))
+ .add_step(steps::script("./script/check-keymaps"))
+ .add_step(check_for_typos())
+ .add_step(steps::cargo_fmt()),
+ )
+}
+
+fn check_dependencies() -> NamedJob {
+ fn install_cargo_machete() -> Step<Use> {
+ named::uses(
+ "clechasseur",
+ "rs-cargo",
+ "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+ )
+ .add_with(("command", "install"))
+ .add_with(("args", "cargo-machete@0.7.0"))
+ }
+
+ fn run_cargo_machete() -> Step<Use> {
+ named::uses(
+ "clechasseur",
+ "rs-cargo",
+ "8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386", // v2
+ )
+ .add_with(("command", "machete"))
+ }
+
+ fn check_cargo_lock() -> Step<Run> {
+ named::bash("cargo update --locked --workspace")
+ }
+
+ fn check_vulnerable_dependencies() -> Step<Use> {
+ named::uses(
+ "actions",
+ "dependency-review-action",
+ "67d4f4bd7a9b17a0db54d2a7519187c65e339de8", // v4
+ )
+ .if_condition(Expression::new("github.event_name == 'pull_request'"))
+ .with(("license-check", false))
+ }
+
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_SMALL)
+ .add_step(steps::checkout_repo())
+ .add_step(install_cargo_machete())
+ .add_step(run_cargo_machete())
+ .add_step(check_cargo_lock())
+ .add_step(check_vulnerable_dependencies()),
+ )
+}
+
+fn check_workspace_binaries() -> NamedJob {
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_LARGE)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::setup_cargo_config(Platform::Linux))
+ .map(steps::install_linux_dependencies)
+ .add_step(steps::script("cargo build -p collab"))
+ .add_step(steps::script("cargo build --workspace --bins --examples"))
+ .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+ )
+}
+
+pub(crate) fn run_platform_tests(platform: Platform) -> NamedJob {
+ let runner = match platform {
+ Platform::Windows => runners::WINDOWS_DEFAULT,
+ Platform::Linux => runners::LINUX_DEFAULT,
+ Platform::Mac => runners::MAC_DEFAULT,
+ };
+ NamedJob {
+ name: format!("run_tests_{platform}"),
+ job: release_job(&[])
+ .cond(Expression::new("false"))
+ .runs_on(runner)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::setup_cargo_config(platform))
+ .when(
+ platform == Platform::Linux,
+ steps::install_linux_dependencies,
+ )
+ .add_step(steps::setup_node())
+ .add_step(steps::clippy(platform))
+ .add_step(steps::cargo_install_nextest(platform))
+ .add_step(steps::clear_target_dir_if_large(platform))
+ .add_step(steps::cargo_nextest(platform))
+ .add_step(steps::cleanup_cargo_config(platform)),
+ }
+}
+
+pub(crate) fn check_postgres_and_protobuf_migrations() -> NamedJob {
+ fn remove_untracked_files() -> Step<Run> {
+ named::bash("git clean -df")
+ }
+
+ fn ensure_fresh_merge() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ if [ -z "$GITHUB_BASE_REF" ];
+ then
+ echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
+ else
+ git checkout -B temp
+ git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
+ echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
+ fi
+ "#})
+ }
+
+ fn bufbuild_setup_action() -> Step<Use> {
+ named::uses("bufbuild", "buf-setup-action", "v1").add_with(("version", "v1.29.0"))
+ }
+
+ fn bufbuild_breaking_action() -> Step<Use> {
+ named::uses("bufbuild", "buf-breaking-action", "v1").add_with(("input", "crates/proto/proto/"))
+ .add_with(("against", "https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/"))
+ }
+
+ named::job(
+ release_job(&[])
+ .runs_on(runners::MAC_DEFAULT)
+ .add_step(steps::checkout_repo().with(("fetch-depth", 0))) // fetch full history
+ .add_step(remove_untracked_files())
+ .add_step(ensure_fresh_merge())
+ .add_step(bufbuild_setup_action())
+ .add_step(bufbuild_breaking_action()),
+ )
+}
+
+fn doctests() -> NamedJob {
+ fn run_doctests() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ cargo test --workspace --doc --no-fail-fast
+ "#})
+ .id("run_doctests")
+ }
+
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_DEFAULT)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::cache_rust_dependencies())
+ .map(steps::install_linux_dependencies)
+ .add_step(steps::setup_cargo_config(Platform::Linux))
+ .add_step(run_doctests())
+ .add_step(steps::cleanup_cargo_config(Platform::Linux)),
+ )
+}
+
+fn check_licenses() -> NamedJob {
+ named::job(
+ Job::default()
+ .runs_on(runners::LINUX_SMALL)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::script("./script/check-licenses"))
+ .add_step(steps::script("./script/generate-licenses")),
+ )
+}
+
+fn check_docs() -> NamedJob {
+ fn lychee_link_check(dir: &str) -> Step<Use> {
+ named::uses(
+ "lycheeverse",
+ "lychee-action",
+ "82202e5e9c2f4ef1a55a3d02563e1cb6041e5332",
+ ) // v2.4.1
+ .add_with(("args", format!("--no-progress --exclude '^http' '{dir}'")))
+ .add_with(("fail", true))
+ .add_with(("jobSummary", false))
+ }
+
+ fn install_mdbook() -> Step<Use> {
+ named::uses(
+ "peaceiris",
+ "actions-mdbook",
+ "ee69d230fe19748b7abf22df32acaa93833fad08", // v2
+ )
+ .with(("mdbook-version", "0.4.37"))
+ }
+
+ fn build_docs() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ mkdir -p target/deploy
+ mdbook build ./docs --dest-dir=../target/deploy/docs/
+ "#})
+ }
+
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_LARGE)
+ .add_step(steps::checkout_repo())
+ .add_step(steps::setup_cargo_config(Platform::Linux))
+ // todo(ci): un-inline build_docs/action.yml here
+ .add_step(steps::cache_rust_dependencies())
+ .add_step(
+ lychee_link_check("./docs/src/**/*"), // check markdown links
+ )
+ .map(steps::install_linux_dependencies)
+ .add_step(install_mdbook())
+ .add_step(build_docs())
+ .add_step(
+ lychee_link_check("target/deploy/docs"), // check links in generated html
+ ),
+ )
+}
+
+fn check_scripts() -> NamedJob {
+ fn download_actionlint() -> Step<Run> {
+ named::bash(
+ "bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)",
+ )
+ }
+
+ fn run_actionlint() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ ${{ steps.get_actionlint.outputs.executable }} -color
+ "#})
+ }
+
+ fn run_shellcheck() -> Step<Run> {
+ named::bash("./script/shellcheck-scripts error")
+ }
+
+ fn check_xtask_workflows() -> Step<Run> {
+ named::bash(indoc::indoc! {r#"
+ cargo xtask workflows
+ if ! git diff --exit-code .github; then
+ echo "Error: .github directory has uncommitted changes after running 'cargo xtask workflows'"
+ echo "Please run 'cargo xtask workflows' locally and commit the changes"
+ exit 1
+ fi
+ "#})
+ }
+
+ named::job(
+ release_job(&[])
+ .runs_on(runners::LINUX_SMALL)
+ .add_step(steps::checkout_repo())
+ .add_step(run_shellcheck())
+ .add_step(download_actionlint().id("get_actionlint"))
+ .add_step(run_actionlint())
+ .add_step(check_xtask_workflows()),
+ )
+}
@@ -1,5 +1,8 @@
-pub const LINUX_CHEAP: Runner = Runner("namespace-profile-2x4-ubuntu-2404");
-pub const LINUX_DEFAULT: Runner = Runner("namespace-profile-16x32-ubuntu-2204");
+pub const LINUX_SMALL: Runner = Runner("namespace-profile-2x4-ubuntu-2404");
+pub const LINUX_DEFAULT: Runner = LINUX_XL;
+pub const LINUX_XL: Runner = Runner("namespace-profile-16x32-ubuntu-2204");
+pub const LINUX_LARGE: Runner = Runner("namespace-profile-8x16-ubuntu-2204");
+pub const LINUX_MEDIUM: Runner = Runner("namespace-profile-4x8-ubuntu-2204");
// Using Ubuntu 20.04 for minimal glibc version
pub const LINUX_X86_BUNDLER: Runner = Runner("namespace-profile-32x64-ubuntu-2004");
@@ -2,9 +2,9 @@ use gh_workflow::*;
use crate::tasks::workflows::{runners::Platform, vars};
-const BASH_SHELL: &str = "bash -euxo pipefail {0}";
+pub const BASH_SHELL: &str = "bash -euxo pipefail {0}";
// https://docs.github.com/en/actions/reference/workflows-and-actions/workflow-syntax#jobsjob_idstepsshell
-const PWSH_SHELL: &str = "pwsh";
+pub const PWSH_SHELL: &str = "pwsh";
pub fn checkout_repo() -> Step<Use> {
named::uses(
@@ -105,6 +105,34 @@ pub fn clear_target_dir_if_large(platform: Platform) -> Step<Run> {
}
}
+pub(crate) fn clippy(platform: Platform) -> Step<Run> {
+ match platform {
+ Platform::Windows => named::pwsh("./script/clippy.ps1"),
+ _ => named::bash("./script/clippy"),
+ }
+}
+
+pub(crate) fn cache_rust_dependencies() -> Step<Use> {
+ named::uses(
+ "swatinem",
+ "rust-cache",
+ "9d47c6ad4b02e050fd481d890b2ea34778fd09d6", // v2
+ )
+ .with(("save-if", "${{ github.ref == 'refs/heads/main' }}"))
+}
+
+fn setup_linux() -> Step<Run> {
+ named::bash("./script/linux")
+}
+
+fn install_mold() -> Step<Run> {
+ named::bash("./script/install-mold")
+}
+
+pub(crate) fn install_linux_dependencies(job: Job) -> Job {
+ job.add_step(setup_linux()).add_step(install_mold())
+}
+
pub fn script(name: &str) -> Step<Run> {
if name.ends_with(".ps1") {
Step::new(name).run(name).shell(PWSH_SHELL)
@@ -118,6 +146,87 @@ pub(crate) struct NamedJob {
pub job: Job,
}
+// impl NamedJob {
+// pub fn map(self, f: impl FnOnce(Job) -> Job) -> Self {
+// NamedJob {
+// name: self.name,
+// job: f(self.job),
+// }
+// }
+// }
+
+pub(crate) fn release_job(deps: &[&NamedJob]) -> Job {
+ let job = Job::default()
+ .cond(Expression::new(
+ "github.repository_owner == 'zed-industries'",
+ ))
+ .timeout_minutes(60u32);
+ if deps.len() > 0 {
+ job.needs(deps.iter().map(|j| j.name.clone()).collect::<Vec<_>>())
+ } else {
+ job
+ }
+}
+
+impl FluentBuilder for Job {}
+impl FluentBuilder for Workflow {}
+
+/// A helper trait for building complex objects with imperative conditionals in a fluent style.
+/// Copied from GPUI to avoid adding GPUI as a dependency
+/// todo(ci) just put this in gh-workflow
+#[allow(unused)]
+pub(crate) trait FluentBuilder {
+ /// Imperatively modify self with the given closure.
+ fn map<U>(self, f: impl FnOnce(Self) -> U) -> U
+ where
+ Self: Sized,
+ {
+ f(self)
+ }
+
+ /// Conditionally modify self with the given closure.
+ fn when(self, condition: bool, then: impl FnOnce(Self) -> Self) -> Self
+ where
+ Self: Sized,
+ {
+ self.map(|this| if condition { then(this) } else { this })
+ }
+
+ /// Conditionally modify self with the given closure.
+ fn when_else(
+ self,
+ condition: bool,
+ then: impl FnOnce(Self) -> Self,
+ else_fn: impl FnOnce(Self) -> Self,
+ ) -> Self
+ where
+ Self: Sized,
+ {
+ self.map(|this| if condition { then(this) } else { else_fn(this) })
+ }
+
+ /// Conditionally unwrap and modify self with the given closure, if the given option is Some.
+ fn when_some<T>(self, option: Option<T>, then: impl FnOnce(Self, T) -> Self) -> Self
+ where
+ Self: Sized,
+ {
+ self.map(|this| {
+ if let Some(value) = option {
+ then(this, value)
+ } else {
+ this
+ }
+ })
+ }
+ /// Conditionally unwrap and modify self with the given closure, if the given option is None.
+ fn when_none<T>(self, option: &Option<T>, then: impl FnOnce(Self) -> Self) -> Self
+ where
+ Self: Sized,
+ {
+ self.map(|this| if option.is_some() { this } else { then(this) })
+ }
+}
+
// (janky) helper to generate steps with a name that corresponds
// to the name of the calling function.
pub(crate) mod named {
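
The FluentBuilder helpers above are what let job builders express conditional steps inline (see `.when(...)` in `run_platform_tests` and `.map(steps::install_linux_dependencies)` in `check_workspace_binaries`). A minimal sketch of the call-site shape, reusing names from this diff and assuming the workflows module's usual imports with `FluentBuilder` in scope:

// Illustrative only: build a job, adding the Linux setup steps conditionally.
let platform = Platform::Linux;
let job = steps::release_job(&[])
    .runs_on(runners::LINUX_DEFAULT)
    .add_step(steps::checkout_repo())
    // `map` would apply the closure unconditionally; `when` applies it
    // only if the predicate holds.
    .when(platform == Platform::Linux, steps::install_linux_dependencies)
    .add_step(steps::cargo_nextest(platform));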
@@ -1,4 +1,8 @@
-use gh_workflow::{Env, WorkflowCallInput};
+use std::cell::RefCell;
+
+use gh_workflow::{Env, Expression};
+
+use crate::tasks::workflows::steps::NamedJob;
macro_rules! secret {
($secret_name:ident) => {
@@ -12,10 +16,6 @@ macro_rules! var {
};
}
-pub fn input(name: &str, input: WorkflowCallInput) -> (String, (&str, WorkflowCallInput)) {
- return (format!("${{{{ inputs.{name} }}}}"), (name, input));
-}
-
secret!(APPLE_NOTARIZATION_ISSUER_ID);
secret!(APPLE_NOTARIZATION_KEY);
secret!(APPLE_NOTARIZATION_KEY_ID);
@@ -61,3 +61,46 @@ pub fn windows_bundle_envs() -> Env {
.add("TIMESTAMP_DIGEST", "SHA256")
.add("TIMESTAMP_SERVER", "http://timestamp.acs.microsoft.com")
}
+
+// Represents a pattern to check for changed files and corresponding output variable
+pub(crate) struct PathCondition {
+ pub name: &'static str,
+ pub pattern: &'static str,
+ pub invert: bool,
+ pub set_by_step: RefCell<Option<String>>,
+}
+impl PathCondition {
+ pub fn new(name: &'static str, pattern: &'static str) -> Self {
+ Self {
+ name,
+ pattern,
+ invert: false,
+ set_by_step: Default::default(),
+ }
+ }
+ pub fn inverted(name: &'static str, pattern: &'static str) -> Self {
+ Self {
+ name,
+ pattern,
+ invert: true,
+ set_by_step: Default::default(),
+ }
+ }
+ pub fn guard(&self, job: NamedJob) -> NamedJob {
+ let set_by_step = self
+ .set_by_step
+ .borrow()
+ .clone()
+ .unwrap_or_else(|| panic!("condition {} is never set", self.name));
+ NamedJob {
+ name: job.name,
+ job: job
+ .job
+ .add_needs(set_by_step.clone())
+ .cond(Expression::new(format!(
+ "needs.{}.outputs.{} == 'true'",
+ &set_by_step, self.name
+ ))),
+ }
+ }
+}
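
Putting the pieces together: a `PathCondition` only takes effect once `orchestrate()` has registered it, after which `guard()` can wrap any job. A minimal sketch using the names introduced in this diff (the real wiring lives in `run_tests()` above):

let should_check_docs = PathCondition::new("run_docs", r"^docs/");
// orchestrate() emits the `filter` step and records its job name in
// `set_by_step`; calling guard() before this panics with
// "condition run_docs is never set".
let orchestrate_job = orchestrate(&[&should_check_docs]);
// guard() adds `needs: orchestrate` plus
// `if: needs.orchestrate.outputs.run_docs == 'true'` to the wrapped job.
let docs_job = should_check_docs.guard(check_docs());
// Both NamedJobs are then added to the generated workflow via add_job().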