diff --git a/.config/nextest.toml b/.config/nextest.toml
index ab03abd839600e1a84ebd5eea9709f60cea1c7f0..b18a3f31e4a75af0636b4d8d8fdd81f48d8d93e6 100644
--- a/.config/nextest.toml
+++ b/.config/nextest.toml
@@ -42,3 +42,7 @@ slow-timeout = { period = "300s", terminate-after = 1 }
[[profile.default.overrides]]
filter = 'package(editor) and test(test_random_split_editor)'
slow-timeout = { period = "300s", terminate-after = 1 }
+
+[[profile.default.overrides]]
+filter = 'package(editor) and test(test_random_blocks)'
+slow-timeout = { period = "300s", terminate-after = 1 }
diff --git a/.github/ISSUE_TEMPLATE/10_bug_report.yml b/.github/ISSUE_TEMPLATE/10_bug_report.yml
index 13e43219dd65a78af4afec479330bbc5fd85fe42..5eb8e8a6299c5189384b6d060e12cd61a2249a3c 100644
--- a/.github/ISSUE_TEMPLATE/10_bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/10_bug_report.yml
@@ -100,7 +100,7 @@ body:
label: (for AI issues) Model provider details
placeholder: |
- Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.)
- - Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5)
+ - Model Name: (Claude Sonnet 4.5, Gemini 3.1 Pro, GPT-5)
- Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
- Other details (ACPs, MCPs, other settings, etc.):
validations:
diff --git a/.github/workflows/add_commented_closed_issue_to_project.yml b/.github/workflows/add_commented_closed_issue_to_project.yml
index 5871f5ae0e61f97557ce926c4a2627841f50560d..bd84eaa9446e57c5482ab818df3dbcfe587e040e 100644
--- a/.github/workflows/add_commented_closed_issue_to_project.yml
+++ b/.github/workflows/add_commented_closed_issue_to_project.yml
@@ -63,13 +63,18 @@ jobs:
}
- if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'true'
+ env:
+ ISSUE_NUMBER: ${{ github.event.issue.number }}
run: |
- echo "::notice::Skipping issue #${{ github.event.issue.number }} - commenter is staff member"
+ echo "::notice::Skipping issue #$ISSUE_NUMBER - commenter is staff member"
# github-script outputs are JSON strings, so we compare against 'false' (string)
- if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
+ env:
+ ISSUE_NUMBER: ${{ github.event.issue.number }}
+ COMMENT_USER_LOGIN: ${{ github.event.comment.user.login }}
run: |
- echo "::notice::Adding issue #${{ github.event.issue.number }} to project (comment by ${{ github.event.comment.user.login }})"
+ echo "::notice::Adding issue #$ISSUE_NUMBER to project (comment by $COMMENT_USER_LOGIN)"
- if: steps.is-post-close-comment.outputs.result == 'true' && steps.check-staff.outputs.result == 'false'
uses: actions/add-to-project@244f685bbc3b7adfa8466e08b698b5577571133e # v1.0.2
diff --git a/.github/workflows/after_release.yml b/.github/workflows/after_release.yml
index 9582e3f1956b3ecda383fc03efdb3d7ff67eaa68..95229f9f46bbd34ffe02832114b2b39da1b7e090 100644
--- a/.github/workflows/after_release.yml
+++ b/.github/workflows/after_release.yml
@@ -76,7 +76,7 @@ jobs:
"X-GitHub-Api-Version" = "2022-11-28"
}
$body = @{ branch = "master" } | ConvertTo-Json
- $uri = "https://api.github.com/repos/${{ github.repository_owner }}/winget-pkgs/merge-upstream"
+ $uri = "https://api.github.com/repos/$env:GITHUB_REPOSITORY_OWNER/winget-pkgs/merge-upstream"
try {
Invoke-RestMethod -Uri $uri -Method Post -Headers $headers -Body $body -ContentType "application/json"
Write-Host "Successfully synced winget-pkgs fork"
@@ -131,11 +131,10 @@ jobs:
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::send_slack_message
- run: |
- curl -X POST -H 'Content-type: application/json'\
- --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+ run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
defaults:
run:
shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/autofix_pr.yml b/.github/workflows/autofix_pr.yml
index 60cc66294af2cf65e17aaad530a9df511ec61503..1fa271d168a8c3d1744439647ff50b793a854d1d 100644
--- a/.github/workflows/autofix_pr.yml
+++ b/.github/workflows/autofix_pr.yml
@@ -22,8 +22,9 @@ jobs:
with:
clean: false
- name: autofix_pr::run_autofix::checkout_pr
- run: gh pr checkout ${{ inputs.pr_number }}
+ run: gh pr checkout "$PR_NUMBER"
env:
+ PR_NUMBER: ${{ inputs.pr_number }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: steps::setup_cargo_config
run: |
@@ -104,8 +105,9 @@ jobs:
clean: false
token: ${{ steps.get-app-token.outputs.token }}
- name: autofix_pr::commit_changes::checkout_pr
- run: gh pr checkout ${{ inputs.pr_number }}
+ run: gh pr checkout "$PR_NUMBER"
env:
+ PR_NUMBER: ${{ inputs.pr_number }}
GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
- name: autofix_pr::download_patch_artifact
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
diff --git a/.github/workflows/catch_blank_issues.yml b/.github/workflows/catch_blank_issues.yml
index dd425afc886e86c1217a94e90eabced013f66bf0..c6f595ef2e0890ce107829f3e91490332567368a 100644
--- a/.github/workflows/catch_blank_issues.yml
+++ b/.github/workflows/catch_blank_issues.yml
@@ -42,8 +42,10 @@ jobs:
}
- if: steps.check-staff.outputs.result == 'true'
+ env:
+ ISSUE_NUMBER: ${{ github.event.issue.number }}
run: |
- echo "::notice::Skipping issue #${{ github.event.issue.number }} - actor is staff member"
+ echo "::notice::Skipping issue #$ISSUE_NUMBER - actor is staff member"
- if: steps.check-staff.outputs.result == 'false'
id: add-label
diff --git a/.github/workflows/cherry_pick.yml b/.github/workflows/cherry_pick.yml
index 9d46f300b509347b2853c00575c4e82fd9a2863c..ee0c1d35d0f9825d7c39b81fba0fe35901de2611 100644
--- a/.github/workflows/cherry_pick.yml
+++ b/.github/workflows/cherry_pick.yml
@@ -36,8 +36,11 @@ jobs:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
- name: cherry_pick::run_cherry_pick::cherry_pick
- run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
+ run: ./script/cherry-pick "$BRANCH" "$COMMIT" "$CHANNEL"
env:
+ BRANCH: ${{ inputs.branch }}
+ COMMIT: ${{ inputs.commit }}
+ CHANNEL: ${{ inputs.channel }}
GIT_COMMITTER_NAME: Zed Zippy
GIT_COMMITTER_EMAIL: hi@zed.dev
GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
diff --git a/.github/workflows/community_update_all_top_ranking_issues.yml b/.github/workflows/community_update_all_top_ranking_issues.yml
index 59926f35563a4b21e3486ecbd454a4ccf951461e..ef3b4fc39ddb5f0db9b09c5e861547ae8cd7eb08 100644
--- a/.github/workflows/community_update_all_top_ranking_issues.yml
+++ b/.github/workflows/community_update_all_top_ranking_issues.yml
@@ -22,4 +22,6 @@ jobs:
- name: Install dependencies
run: uv sync --project script/update_top_ranking_issues -p 3.13
- name: Run script
- run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 5393
diff --git a/.github/workflows/community_update_weekly_top_ranking_issues.yml b/.github/workflows/community_update_weekly_top_ranking_issues.yml
index 75ba66b934b5861bd51aef4238a1a4188dddefc3..53b548f2bb4286e5de86d3823e67d75c0413a1cb 100644
--- a/.github/workflows/community_update_weekly_top_ranking_issues.yml
+++ b/.github/workflows/community_update_weekly_top_ranking_issues.yml
@@ -22,4 +22,6 @@ jobs:
- name: Install dependencies
run: uv sync --project script/update_top_ranking_issues -p 3.13
- name: Run script
- run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ run: uv run --project script/update_top_ranking_issues script/update_top_ranking_issues/main.py --github-token "$GITHUB_TOKEN" --issue-reference-number 6952 --query-day-interval 7
diff --git a/.github/workflows/compare_perf.yml b/.github/workflows/compare_perf.yml
index e5a2d4f9c928eac2d1b1cf54ed374f8b0cca5d25..f7d78dbbf6a6d04bc47212b6842f894850288fcc 100644
--- a/.github/workflows/compare_perf.yml
+++ b/.github/workflows/compare_perf.yml
@@ -37,27 +37,40 @@ jobs:
- name: compare_perf::run_perf::install_hyperfine
uses: taiki-e/install-action@hyperfine
- name: steps::git_checkout
- run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
+ run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+ env:
+ REF_NAME: ${{ inputs.base }}
- name: compare_perf::run_perf::cargo_perf_test
run: |2-
- if [ -n "${{ inputs.crate_name }}" ]; then
- cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
+ if [ -n "$CRATE_NAME" ]; then
+ cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
else
- cargo perf-test -p vim -- --json=${{ inputs.base }};
+ cargo perf-test -p vim -- --json="$REF_NAME";
fi
+ env:
+ REF_NAME: ${{ inputs.base }}
+ CRATE_NAME: ${{ inputs.crate_name }}
- name: steps::git_checkout
- run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
+ run: git fetch origin "$REF_NAME" && git checkout "$REF_NAME"
+ env:
+ REF_NAME: ${{ inputs.head }}
- name: compare_perf::run_perf::cargo_perf_test
run: |2-
- if [ -n "${{ inputs.crate_name }}" ]; then
- cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
+ if [ -n "$CRATE_NAME" ]; then
+ cargo perf-test -p "$CRATE_NAME" -- --json="$REF_NAME";
else
- cargo perf-test -p vim -- --json=${{ inputs.head }};
+ cargo perf-test -p vim -- --json="$REF_NAME";
fi
+ env:
+ REF_NAME: ${{ inputs.head }}
+ CRATE_NAME: ${{ inputs.crate_name }}
- name: compare_perf::run_perf::compare_runs
- run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
+ run: cargo perf-compare --save=results.md "$BASE" "$HEAD"
+ env:
+ BASE: ${{ inputs.base }}
+ HEAD: ${{ inputs.head }}
- name: '@actions/upload-artifact results.md'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
diff --git a/.github/workflows/deploy_cloudflare.yml b/.github/workflows/deploy_cloudflare.yml
index cb0dfc2187a06cf62255b049b7e5fe74b10c505a..37f23b20d2825e9f3d26c456903962a10c2d0081 100644
--- a/.github/workflows/deploy_cloudflare.yml
+++ b/.github/workflows/deploy_cloudflare.yml
@@ -26,6 +26,7 @@ jobs:
CC: clang
CXX: clang++
DOCS_AMPLITUDE_API_KEY: ${{ secrets.DOCS_AMPLITUDE_API_KEY }}
+ DOCS_CONSENT_IO_INSTANCE: ${{ secrets.DOCS_CONSENT_IO_INSTANCE }}
- name: Deploy Docs
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
diff --git a/.github/workflows/deploy_collab.yml b/.github/workflows/deploy_collab.yml
index b1bdaf61979452a73380226ce1935b43eb05c32b..89fb6980b65f2d09a6571f140ab016a710be230f 100644
--- a/.github/workflows/deploy_collab.yml
+++ b/.github/workflows/deploy_collab.yml
@@ -119,8 +119,9 @@ jobs:
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: deploy_collab::deploy::sign_into_kubernetes
- run: |
- doctl kubernetes cluster kubeconfig save --expiry-seconds 600 ${{ secrets.CLUSTER_NAME }}
+ run: doctl kubernetes cluster kubeconfig save --expiry-seconds 600 "$CLUSTER_NAME"
+ env:
+ CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
- name: deploy_collab::deploy::start_rollout
run: |
set -eu
@@ -140,7 +141,7 @@ jobs:
echo "Deploying collab:$GITHUB_SHA to $ZED_KUBE_NAMESPACE"
source script/lib/deploy-helpers.sh
- export_vars_for_environment $ZED_KUBE_NAMESPACE
+ export_vars_for_environment "$ZED_KUBE_NAMESPACE"
ZED_DO_CERTIFICATE_ID="$(doctl compute certificate list --format ID --no-header)"
export ZED_DO_CERTIFICATE_ID
@@ -150,14 +151,14 @@ jobs:
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT
export DATABASE_MAX_CONNECTIONS=850
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
- kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+ kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
export ZED_SERVICE_NAME=api
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_API_LOAD_BALANCER_SIZE_UNIT
export DATABASE_MAX_CONNECTIONS=60
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
- kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
+ kubectl -n "$ZED_KUBE_NAMESPACE" rollout status "deployment/$ZED_SERVICE_NAME" --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
defaults:
run:
diff --git a/.github/workflows/extension_bump.yml b/.github/workflows/extension_bump.yml
index b7bb78363ce4ff97680b2a53967938280c3de902..9cc53741e8007a1b3ddd02ad07b191b3ce171cc8 100644
--- a/.github/workflows/extension_bump.yml
+++ b/.github/workflows/extension_bump.yml
@@ -39,7 +39,7 @@ jobs:
run: |
CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
- if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+ if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
PR_FORK_POINT="$(git merge-base origin/main HEAD)"
git checkout "$PR_FORK_POINT"
elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
@@ -82,8 +82,6 @@ jobs:
- id: bump-version
name: extension_bump::bump_version
run: |
- OLD_VERSION="${{ needs.check_version_changed.outputs.current_version }}"
-
BUMP_FILES=("extension.toml")
if [[ -f "Cargo.toml" ]]; then
BUMP_FILES+=("Cargo.toml")
@@ -93,7 +91,7 @@ jobs:
--search "version = \"{current_version}"\" \
--replace "version = \"{new_version}"\" \
--current-version "$OLD_VERSION" \
- --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}"
+ --no-configured-files "$BUMP_TYPE" "${BUMP_FILES[@]}"
if [[ -f "Cargo.toml" ]]; then
cargo update --workspace
@@ -102,6 +100,9 @@ jobs:
NEW_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
+ env:
+ OLD_VERSION: ${{ needs.check_version_changed.outputs.current_version }}
+ BUMP_TYPE: ${{ inputs.bump-type }}
- name: extension_bump::create_pull_request
uses: peter-evans/create-pull-request@v7
with:
diff --git a/.github/workflows/extension_tests.yml b/.github/workflows/extension_tests.yml
index 5160aba2869b1a3234c686a6508460784b0536b1..53de373c1b79dc3ca9a3637642e10998c781580a 100644
--- a/.github/workflows/extension_tests.yml
+++ b/.github/workflows/extension_tests.yml
@@ -32,7 +32,7 @@ jobs:
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
- CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+ CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
check_pattern() {
local output_name="$1"
@@ -129,7 +129,7 @@ jobs:
run: |
CURRENT_VERSION="$(sed -n 's/^version = \"\(.*\)\"/\1/p' < extension.toml | tr -d '[:space:]')"
- if [[ "${{ github.event_name }}" == "pull_request" ]]; then
+ if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
PR_FORK_POINT="$(git merge-base origin/main HEAD)"
git checkout "$PR_FORK_POINT"
elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
@@ -147,11 +147,14 @@ jobs:
echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT"
- name: extension_tests::verify_version_did_not_change
run: |
- if [[ ${{ steps.compare-versions-check.outputs.version_changed }} == "true" && "${{ github.event_name }}" == "pull_request" && "${{ github.event.pull_request.user.login }}" != "zed-zippy[bot]" ]] ; then
+ if [[ "$VERSION_CHANGED" == "true" && "$GITHUB_EVENT_NAME" == "pull_request" && "$PR_USER_LOGIN" != "zed-zippy[bot]" ]] ; then
echo "Version change detected in your change!"
echo "Version changes happen in separate PRs and will be performed by the zed-zippy bot"
exit 42
fi
+ env:
+ VERSION_CHANGED: ${{ steps.compare-versions-check.outputs.version_changed }}
+ PR_USER_LOGIN: ${{ github.event.pull_request.user.login }}
timeout-minutes: 6
tests_pass:
needs:
@@ -171,11 +174,15 @@ jobs:
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}
- check_result "orchestrate" "${{ needs.orchestrate.result }}"
- check_result "check_rust" "${{ needs.check_rust.result }}"
- check_result "check_extension" "${{ needs.check_extension.result }}"
+ check_result "orchestrate" "$RESULT_ORCHESTRATE"
+ check_result "check_rust" "$RESULT_CHECK_RUST"
+ check_result "check_extension" "$RESULT_CHECK_EXTENSION"
exit $EXIT_CODE
+ env:
+ RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+ RESULT_CHECK_RUST: ${{ needs.check_rust.result }}
+ RESULT_CHECK_EXTENSION: ${{ needs.check_extension.result }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
diff --git a/.github/workflows/extension_workflow_rollout.yml b/.github/workflows/extension_workflow_rollout.yml
index 709956fc1bc0b25190638d9f1b5d4cd3cadd7ba2..9bfac06d4527985553ba3d04e64c656ee5bf85e4 100644
--- a/.github/workflows/extension_workflow_rollout.yml
+++ b/.github/workflows/extension_workflow_rollout.yml
@@ -80,9 +80,7 @@ jobs:
- id: calc-changes
name: extension_workflow_rollout::rollout_workflows_to_extension::get_removed_files
run: |
- PREV_COMMIT="${{ steps.prev-tag.outputs.prev_commit }}"
-
- if [ "${{ matrix.repo }}" = "workflows" ]; then
+ if [ "$MATRIX_REPO" = "workflows" ]; then
WORKFLOW_DIR="extensions/workflows"
else
WORKFLOW_DIR="extensions/workflows/shared"
@@ -101,11 +99,12 @@ jobs:
echo "Files to remove: $REMOVED_FILES"
echo "removed_files=$REMOVED_FILES" >> "$GITHUB_OUTPUT"
+ env:
+ PREV_COMMIT: ${{ steps.prev-tag.outputs.prev_commit }}
+ MATRIX_REPO: ${{ matrix.repo }}
working-directory: zed
- name: extension_workflow_rollout::rollout_workflows_to_extension::sync_workflow_files
run: |
- REMOVED_FILES="${{ steps.calc-changes.outputs.removed_files }}"
-
mkdir -p extension/.github/workflows
cd extension/.github/workflows
@@ -119,11 +118,14 @@ jobs:
cd - > /dev/null
- if [ "${{ matrix.repo }}" = "workflows" ]; then
+ if [ "$MATRIX_REPO" = "workflows" ]; then
cp zed/extensions/workflows/*.yml extension/.github/workflows/
else
cp zed/extensions/workflows/shared/*.yml extension/.github/workflows/
fi
+ env:
+ REMOVED_FILES: ${{ steps.calc-changes.outputs.removed_files }}
+ MATRIX_REPO: ${{ matrix.repo }}
- id: short-sha
name: extension_workflow_rollout::rollout_workflows_to_extension::get_short_sha
run: |
@@ -148,13 +150,13 @@ jobs:
sign-commits: true
- name: extension_workflow_rollout::rollout_workflows_to_extension::enable_auto_merge
run: |
- PR_NUMBER="${{ steps.create-pr.outputs.pull-request-number }}"
if [ -n "$PR_NUMBER" ]; then
cd extension
gh pr merge "$PR_NUMBER" --auto --squash
fi
env:
GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ PR_NUMBER: ${{ steps.create-pr.outputs.pull-request-number }}
timeout-minutes: 10
create_rollout_tag:
needs:
diff --git a/.github/workflows/publish_extension_cli.yml b/.github/workflows/publish_extension_cli.yml
index 391baac1cb3aa9da76c4fde39aa6909525541a58..75f1b16b007e33d0c4f346a33a1403648f1cd6c6 100644
--- a/.github/workflows/publish_extension_cli.yml
+++ b/.github/workflows/publish_extension_cli.yml
@@ -27,7 +27,7 @@ jobs:
- name: publish_extension_cli::publish_job::build_extension_cli
run: cargo build --release --package extension_cli
- name: publish_extension_cli::publish_job::upload_binary
- run: script/upload-extension-cli ${{ github.sha }}
+ run: script/upload-extension-cli "$GITHUB_SHA"
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
@@ -55,10 +55,10 @@ jobs:
- id: short-sha
name: publish_extension_cli::get_short_sha
run: |
- echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+ echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
- name: publish_extension_cli::update_sha_in_zed::replace_sha
run: |
- sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"${{ github.sha }}\"/" \
+ sed -i "s/ZED_EXTENSION_CLI_SHA: &str = \"[a-f0-9]*\"/ZED_EXTENSION_CLI_SHA: \&str = \"$GITHUB_SHA\"/" \
tooling/xtask/src/tasks/workflows/extension_tests.rs
- name: publish_extension_cli::update_sha_in_zed::regenerate_workflows
run: cargo xtask workflows
@@ -97,7 +97,7 @@ jobs:
- id: short-sha
name: publish_extension_cli::get_short_sha
run: |
- echo "sha_short=$(echo "${{ github.sha }}" | cut -c1-7)" >> "$GITHUB_OUTPUT"
+ echo "sha_short=$(echo "$GITHUB_SHA" | cut -c1-7)" >> "$GITHUB_OUTPUT"
- name: publish_extension_cli::update_sha_in_extensions::checkout_extensions_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
@@ -105,7 +105,7 @@ jobs:
token: ${{ steps.generate-token.outputs.token }}
- name: publish_extension_cli::update_sha_in_extensions::replace_sha
run: |
- sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: ${{ github.sha }}/" \
+ sed -i "s/ZED_EXTENSION_CLI_SHA: [a-f0-9]*/ZED_EXTENSION_CLI_SHA: $GITHUB_SHA/" \
.github/workflows/ci.yml
- name: publish_extension_cli::create_pull_request_extensions
uses: peter-evans/create-pull-request@v7
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 8ac5eeb998f5102d5af9b2775a82093b6ea29858..8adad5cfba278dc68dd227b86455510278c7a1ae 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -257,8 +257,14 @@ jobs:
name: run_tests::check_scripts::download_actionlint
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
- name: run_tests::check_scripts::run_actionlint
- run: |
- ${{ steps.get_actionlint.outputs.executable }} -color
+ run: '"$ACTIONLINT_BIN" -color'
+ env:
+ ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
+ path: ~/.rustup
- name: run_tests::check_scripts::check_xtask_workflows
run: |
cargo xtask workflows
@@ -654,12 +660,7 @@ jobs:
- id: generate-webhook-message
name: release::generate_slack_message
run: |
- MESSAGE=$(DRAFT_RESULT="${{ needs.create_draft_release.result }}"
- UPLOAD_RESULT="${{ needs.upload_release_assets.result }}"
- VALIDATE_RESULT="${{ needs.validate_release_assets.result }}"
- AUTO_RELEASE_RESULT="${{ needs.auto_release_preview.result }}"
- TAG="$GITHUB_REF_NAME"
- RUN_URL="${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
+ MESSAGE=$(TAG="$GITHUB_REF_NAME"
if [ "$DRAFT_RESULT" == "failure" ]; then
echo "❌ Draft release creation failed for $TAG: $RUN_URL"
@@ -669,19 +670,19 @@ jobs:
echo "❌ Release asset upload failed for $TAG: $RELEASE_URL"
elif [ "$UPLOAD_RESULT" == "cancelled" ] || [ "$UPLOAD_RESULT" == "skipped" ]; then
FAILED_JOBS=""
- if [ "${{ needs.run_tests_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
- if [ "${{ needs.run_tests_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
- if [ "${{ needs.run_tests_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
- if [ "${{ needs.clippy_mac.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
- if [ "${{ needs.clippy_linux.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
- if [ "${{ needs.clippy_windows.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
- if [ "${{ needs.check_scripts.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
- if [ "${{ needs.bundle_linux_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
- if [ "${{ needs.bundle_linux_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
- if [ "${{ needs.bundle_mac_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
- if [ "${{ needs.bundle_mac_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
- if [ "${{ needs.bundle_windows_aarch64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
- if [ "${{ needs.bundle_windows_x86_64.result }}" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
+ if [ "$RESULT_RUN_TESTS_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_mac"; fi
+ if [ "$RESULT_RUN_TESTS_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_linux"; fi
+ if [ "$RESULT_RUN_TESTS_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS run_tests_windows"; fi
+ if [ "$RESULT_CLIPPY_MAC" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_mac"; fi
+ if [ "$RESULT_CLIPPY_LINUX" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_linux"; fi
+ if [ "$RESULT_CLIPPY_WINDOWS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS clippy_windows"; fi
+ if [ "$RESULT_CHECK_SCRIPTS" == "failure" ];then FAILED_JOBS="$FAILED_JOBS check_scripts"; fi
+ if [ "$RESULT_BUNDLE_LINUX_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_aarch64"; fi
+ if [ "$RESULT_BUNDLE_LINUX_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_linux_x86_64"; fi
+ if [ "$RESULT_BUNDLE_MAC_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_aarch64"; fi
+ if [ "$RESULT_BUNDLE_MAC_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_mac_x86_64"; fi
+ if [ "$RESULT_BUNDLE_WINDOWS_AARCH64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_aarch64"; fi
+ if [ "$RESULT_BUNDLE_WINDOWS_X86_64" == "failure" ];then FAILED_JOBS="$FAILED_JOBS bundle_windows_x86_64"; fi
FAILED_JOBS=$(echo "$FAILED_JOBS" | xargs)
if [ "$UPLOAD_RESULT" == "cancelled" ]; then
if [ -n "$FAILED_JOBS" ]; then
@@ -710,12 +711,29 @@ jobs:
echo "message=$MESSAGE" >> "$GITHUB_OUTPUT"
env:
GH_TOKEN: ${{ github.token }}
+ DRAFT_RESULT: ${{ needs.create_draft_release.result }}
+ UPLOAD_RESULT: ${{ needs.upload_release_assets.result }}
+ VALIDATE_RESULT: ${{ needs.validate_release_assets.result }}
+ AUTO_RELEASE_RESULT: ${{ needs.auto_release_preview.result }}
+ RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
+ RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+ RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+ RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+ RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+ RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+ RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+ RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
+ RESULT_BUNDLE_LINUX_AARCH64: ${{ needs.bundle_linux_aarch64.result }}
+ RESULT_BUNDLE_LINUX_X86_64: ${{ needs.bundle_linux_x86_64.result }}
+ RESULT_BUNDLE_MAC_AARCH64: ${{ needs.bundle_mac_aarch64.result }}
+ RESULT_BUNDLE_MAC_X86_64: ${{ needs.bundle_mac_x86_64.result }}
+ RESULT_BUNDLE_WINDOWS_AARCH64: ${{ needs.bundle_windows_aarch64.result }}
+ RESULT_BUNDLE_WINDOWS_X86_64: ${{ needs.bundle_windows_x86_64.result }}
- name: release::send_slack_message
- run: |
- curl -X POST -H 'Content-type: application/json'\
- --data '{"text":"${{ steps.generate-webhook-message.outputs.message }}"}' "$SLACK_WEBHOOK"
+ run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ SLACK_MESSAGE: ${{ steps.generate-webhook-message.outputs.message }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
diff --git a/.github/workflows/release_nightly.yml b/.github/workflows/release_nightly.yml
index 7f243411b4f540d6c7bc611df4883f5341d6a83b..46d8732b08ea658275e1fb21117a09b9e0668933 100644
--- a/.github/workflows/release_nightly.yml
+++ b/.github/workflows/release_nightly.yml
@@ -554,11 +554,10 @@ jobs:
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::send_slack_message
- run: |
- curl -X POST -H 'Content-type: application/json'\
- --data '{"text":"❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
+ run: 'curl -X POST -H ''Content-type: application/json'' --data "$(jq -n --arg text "$SLACK_MESSAGE" ''{"text": $text}'')" "$SLACK_WEBHOOK"'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
+ SLACK_MESSAGE: '❌ ${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}'
defaults:
run:
shell: bash -euxo pipefail {0}
diff --git a/.github/workflows/run_cron_unit_evals.yml b/.github/workflows/run_cron_unit_evals.yml
index e57b54e4f2249b92630b2d3636ce2316a0814625..2a204a9d40d78bf52f38825b4db060216e348a87 100644
--- a/.github/workflows/run_cron_unit_evals.yml
+++ b/.github/workflows/run_cron_unit_evals.yml
@@ -16,7 +16,7 @@ jobs:
model:
- anthropic/claude-sonnet-4-5-latest
- anthropic/claude-opus-4-5-latest
- - google/gemini-3-pro
+ - google/gemini-3.1-pro
- openai/gpt-5
fail-fast: false
steps:
diff --git a/.github/workflows/run_tests.yml b/.github/workflows/run_tests.yml
index 29f888cbb596593052c6adebe2341171eac9055d..00d69639a53868386157e67aeab5ce7383d32426 100644
--- a/.github/workflows/run_tests.yml
+++ b/.github/workflows/run_tests.yml
@@ -35,7 +35,7 @@ jobs:
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
- CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"
+ CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"
check_pattern() {
local output_name="$1"
@@ -653,8 +653,14 @@ jobs:
name: run_tests::check_scripts::download_actionlint
run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash)
- name: run_tests::check_scripts::run_actionlint
- run: |
- ${{ steps.get_actionlint.outputs.executable }} -color
+ run: '"$ACTIONLINT_BIN" -color'
+ env:
+ ACTIONLINT_BIN: ${{ steps.get_actionlint.outputs.executable }}
+ - name: steps::cache_rust_dependencies_namespace
+ uses: namespacelabs/nscloud-cache-action@v1
+ with:
+ cache: rust
+ path: ~/.rustup
- name: run_tests::check_scripts::check_xtask_workflows
run: |
cargo xtask workflows
@@ -735,23 +741,39 @@ jobs:
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}
- check_result "orchestrate" "${{ needs.orchestrate.result }}"
- check_result "check_style" "${{ needs.check_style.result }}"
- check_result "clippy_windows" "${{ needs.clippy_windows.result }}"
- check_result "clippy_linux" "${{ needs.clippy_linux.result }}"
- check_result "clippy_mac" "${{ needs.clippy_mac.result }}"
- check_result "run_tests_windows" "${{ needs.run_tests_windows.result }}"
- check_result "run_tests_linux" "${{ needs.run_tests_linux.result }}"
- check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
- check_result "doctests" "${{ needs.doctests.result }}"
- check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
- check_result "check_wasm" "${{ needs.check_wasm.result }}"
- check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
- check_result "check_docs" "${{ needs.check_docs.result }}"
- check_result "check_licenses" "${{ needs.check_licenses.result }}"
- check_result "check_scripts" "${{ needs.check_scripts.result }}"
+ check_result "orchestrate" "$RESULT_ORCHESTRATE"
+ check_result "check_style" "$RESULT_CHECK_STYLE"
+ check_result "clippy_windows" "$RESULT_CLIPPY_WINDOWS"
+ check_result "clippy_linux" "$RESULT_CLIPPY_LINUX"
+ check_result "clippy_mac" "$RESULT_CLIPPY_MAC"
+ check_result "run_tests_windows" "$RESULT_RUN_TESTS_WINDOWS"
+ check_result "run_tests_linux" "$RESULT_RUN_TESTS_LINUX"
+ check_result "run_tests_mac" "$RESULT_RUN_TESTS_MAC"
+ check_result "doctests" "$RESULT_DOCTESTS"
+ check_result "check_workspace_binaries" "$RESULT_CHECK_WORKSPACE_BINARIES"
+ check_result "check_wasm" "$RESULT_CHECK_WASM"
+ check_result "check_dependencies" "$RESULT_CHECK_DEPENDENCIES"
+ check_result "check_docs" "$RESULT_CHECK_DOCS"
+ check_result "check_licenses" "$RESULT_CHECK_LICENSES"
+ check_result "check_scripts" "$RESULT_CHECK_SCRIPTS"
exit $EXIT_CODE
+ env:
+ RESULT_ORCHESTRATE: ${{ needs.orchestrate.result }}
+ RESULT_CHECK_STYLE: ${{ needs.check_style.result }}
+ RESULT_CLIPPY_WINDOWS: ${{ needs.clippy_windows.result }}
+ RESULT_CLIPPY_LINUX: ${{ needs.clippy_linux.result }}
+ RESULT_CLIPPY_MAC: ${{ needs.clippy_mac.result }}
+ RESULT_RUN_TESTS_WINDOWS: ${{ needs.run_tests_windows.result }}
+ RESULT_RUN_TESTS_LINUX: ${{ needs.run_tests_linux.result }}
+ RESULT_RUN_TESTS_MAC: ${{ needs.run_tests_mac.result }}
+ RESULT_DOCTESTS: ${{ needs.doctests.result }}
+ RESULT_CHECK_WORKSPACE_BINARIES: ${{ needs.check_workspace_binaries.result }}
+ RESULT_CHECK_WASM: ${{ needs.check_wasm.result }}
+ RESULT_CHECK_DEPENDENCIES: ${{ needs.check_dependencies.result }}
+ RESULT_CHECK_DOCS: ${{ needs.check_docs.result }}
+ RESULT_CHECK_LICENSES: ${{ needs.check_licenses.result }}
+ RESULT_CHECK_SCRIPTS: ${{ needs.check_scripts.result }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
diff --git a/.github/workflows/slack_notify_first_responders.yml b/.github/workflows/slack_notify_first_responders.yml
index a6f2d557a574778aea6c2a90f9721b5a41bd0724..538d02b582f18db627693b62e439f4142ea29056 100644
--- a/.github/workflows/slack_notify_first_responders.yml
+++ b/.github/workflows/slack_notify_first_responders.yml
@@ -17,8 +17,9 @@ jobs:
id: check-label
env:
LABEL_NAME: ${{ github.event.label.name }}
+ FIRST_RESPONDER_LABELS: ${{ env.FIRST_RESPONDER_LABELS }}
run: |
- if echo '${{ env.FIRST_RESPONDER_LABELS }}' | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
+ if echo "$FIRST_RESPONDER_LABELS" | jq -e --arg label "$LABEL_NAME" 'index($label) != null' > /dev/null; then
echo "should_notify=true" >> "$GITHUB_OUTPUT"
echo "Label '$LABEL_NAME' requires first responder notification"
else
diff --git a/.github/workflows/update_duplicate_magnets.yml b/.github/workflows/update_duplicate_magnets.yml
index 1c6c5a562532891eb97ceb11f44b81f35612c026..c3832b7bdbec13f74a8136cb1120a682f6e53920 100644
--- a/.github/workflows/update_duplicate_magnets.yml
+++ b/.github/workflows/update_duplicate_magnets.yml
@@ -21,7 +21,9 @@ jobs:
run: pip install requests
- name: Update duplicate magnets issue
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python script/github-find-top-duplicated-bugs.py \
- --github-token ${{ secrets.GITHUB_TOKEN }} \
+ --github-token "$GITHUB_TOKEN" \
--issue-number 46355
diff --git a/Cargo.lock b/Cargo.lock
index c813e6a4f2c9facdc68cc526c7ea8bb33a4ccf14..f5fe136c8f62fb14b5ebd1e29b636e82a3193c38 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -78,6 +78,7 @@ dependencies = [
"clock",
"collections",
"ctor",
+ "fs",
"futures 0.3.31",
"gpui",
"indoc",
@@ -171,7 +172,7 @@ dependencies = [
"context_server",
"ctor",
"db",
- "derive_more 0.99.20",
+ "derive_more",
"editor",
"env_logger 0.11.8",
"eval_utils",
@@ -243,7 +244,7 @@ dependencies = [
"anyhow",
"async-broadcast",
"async-trait",
- "derive_more 2.0.1",
+ "derive_more",
"futures 0.3.31",
"log",
"serde",
@@ -257,7 +258,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44bc1fef9c32f03bce2ab44af35b6f483bfd169bf55cc59beeb2e3b1a00ae4d1"
dependencies = [
"anyhow",
- "derive_more 2.0.1",
+ "derive_more",
"schemars",
"serde",
"serde_json",
@@ -370,6 +371,7 @@ dependencies = [
"fs",
"futures 0.3.31",
"fuzzy",
+ "git",
"gpui",
"gpui_tokio",
"html_to_markdown",
@@ -603,6 +605,17 @@ version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+[[package]]
+name = "annotate-snippets"
+version = "0.12.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c86cd1c51b95d71dde52bca69ed225008f6ff4c8cc825b08042aa1ef823e1980"
+dependencies = [
+ "anstyle",
+ "memchr",
+ "unicode-width",
+]
+
[[package]]
name = "anstream"
version = "0.6.21"
@@ -815,7 +828,7 @@ dependencies = [
"anyhow",
"async-trait",
"collections",
- "derive_more 0.99.20",
+ "derive_more",
"extension",
"futures 0.3.31",
"gpui",
@@ -1353,6 +1366,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"log",
+ "scopeguard",
"simplelog",
"tempfile",
"windows 0.61.3",
@@ -3001,7 +3015,7 @@ dependencies = [
"cloud_llm_client",
"collections",
"credentials_provider",
- "derive_more 0.99.20",
+ "derive_more",
"feature_flags",
"fs",
"futures 0.3.31",
@@ -3439,7 +3453,7 @@ name = "command_palette_hooks"
version = "0.1.0"
dependencies = [
"collections",
- "derive_more 0.99.20",
+ "derive_more",
"gpui",
"workspace",
]
@@ -3615,15 +3629,18 @@ dependencies = [
[[package]]
name = "convert_case"
-version = "0.4.0"
+version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f"
+dependencies = [
+ "unicode-segmentation",
+]
[[package]]
name = "convert_case"
-version = "0.8.0"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
dependencies = [
"unicode-segmentation",
]
@@ -4122,13 +4139,13 @@ dependencies = [
name = "crashes"
version = "0.1.0"
dependencies = [
- "bincode",
"cfg-if",
"crash-handler",
"futures 0.3.31",
"log",
"mach2 0.5.0",
"minidumper",
+ "parking_lot",
"paths",
"release_channel",
"serde",
@@ -4342,6 +4359,20 @@ dependencies = [
"syn 2.0.106",
]
+[[package]]
+name = "csv_preview"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "editor",
+ "feature_flags",
+ "gpui",
+ "log",
+ "text",
+ "ui",
+ "workspace",
+]
+
[[package]]
name = "ctor"
version = "0.4.3"
@@ -4779,34 +4810,23 @@ dependencies = [
[[package]]
name = "derive_more"
-version = "0.99.20"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f"
-dependencies = [
- "convert_case 0.4.0",
- "proc-macro2",
- "quote",
- "rustc_version",
- "syn 2.0.106",
-]
-
-[[package]]
-name = "derive_more"
-version = "2.0.1"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
-version = "2.0.1"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
dependencies = [
+ "convert_case 0.10.0",
"proc-macro2",
"quote",
+ "rustc_version",
"syn 2.0.106",
"unicode-xid",
]
@@ -7115,7 +7135,7 @@ version = "0.8.0"
source = "git+https://github.com/zed-industries/gh-workflow?rev=c9eac0ed361583e1072860d96776fa52775b82ac#c9eac0ed361583e1072860d96776fa52775b82ac"
dependencies = [
"async-trait",
- "derive_more 2.0.1",
+ "derive_more",
"derive_setters",
"gh-workflow-macros",
"indexmap",
@@ -7184,7 +7204,7 @@ dependencies = [
"askpass",
"async-trait",
"collections",
- "derive_more 0.99.20",
+ "derive_more",
"futures 0.3.31",
"git2",
"gpui",
@@ -7563,7 +7583,7 @@ dependencies = [
"core-text",
"core-video",
"ctor",
- "derive_more 0.99.20",
+ "derive_more",
"embed-resource",
"env_logger 0.11.8",
"etagere",
@@ -7584,7 +7604,7 @@ dependencies = [
"mach2 0.5.0",
"media",
"metal",
- "naga",
+ "naga 28.0.0",
"num_cpus",
"objc",
"objc2",
@@ -7691,7 +7711,7 @@ dependencies = [
"core-text",
"core-video",
"ctor",
- "derive_more 0.99.20",
+ "derive_more",
"dispatch2",
"etagere",
"foreign-types 0.5.0",
@@ -8249,7 +8269,7 @@ dependencies = [
"async-fs",
"async-tar",
"bytes 1.11.1",
- "derive_more 0.99.20",
+ "derive_more",
"futures 0.3.31",
"http 1.3.1",
"http-body 1.0.1",
@@ -9126,9 +9146,9 @@ dependencies = [
[[package]]
name = "jupyter-protocol"
-version = "1.2.1"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8c75a69caf8b8e781224badfb76c4a8da4d49856de36ce72ae3cf5d4a1c94e42"
+checksum = "4649647741f9794a7a02e3be976f1b248ba28a37dbfc626d5089316fd4fbf4c8"
dependencies = [
"async-trait",
"bytes 1.11.1",
@@ -10018,6 +10038,7 @@ dependencies = [
"ctor",
"futures 0.3.31",
"gpui",
+ "gpui_util",
"log",
"lsp-types",
"parking_lot",
@@ -10688,6 +10709,30 @@ name = "naga"
version = "28.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "618f667225063219ddfc61251087db8a9aec3c3f0950c916b614e403486f1135"
+dependencies = [
+ "arrayvec",
+ "bit-set",
+ "bitflags 2.10.0",
+ "cfg-if",
+ "cfg_aliases 0.2.1",
+ "codespan-reporting 0.12.0",
+ "half",
+ "hashbrown 0.16.1",
+ "hexf-parse",
+ "indexmap",
+ "libm",
+ "log",
+ "num-traits",
+ "once_cell",
+ "rustc-hash 1.1.0",
+ "thiserror 2.0.17",
+ "unicode-ident",
+]
+
+[[package]]
+name = "naga"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"arrayvec",
"bit-set",
@@ -10746,9 +10791,9 @@ dependencies = [
[[package]]
name = "nbformat"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b10a89a2d910233ec3fca4de359b16ebe95e833c8b2162643ef98c6053a0549d"
+checksum = "d4983a40792c45e8639f77ef8e4461c55679cbc618f4b9e83830e8c7e79c8383"
dependencies = [
"anyhow",
"chrono",
@@ -14609,9 +14654,9 @@ dependencies = [
[[package]]
name = "runtimelib"
-version = "1.2.0"
+version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d80685459e1e5fa5603182058351ae91c98ca458dfef4e85f0a37be4f7cf1e6c"
+checksum = "fa84884e45ed4a1e663120cef3fc11f14d1a2a1933776e1c31599f7bd2dd0c9e"
dependencies = [
"async-dispatcher",
"async-std",
@@ -15517,7 +15562,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "derive_more 0.99.20",
+ "derive_more",
"gpui",
"log",
"schemars",
@@ -17300,7 +17345,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
- "derive_more 0.99.20",
+ "derive_more",
"fs",
"futures 0.3.31",
"gpui",
@@ -19812,6 +19857,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
+ "cloud_api_types",
"cloud_llm_client",
"futures 0.3.31",
"gpui",
@@ -19876,9 +19922,8 @@ checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3"
[[package]]
name = "wgpu"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9cb534d5ffd109c7d1135f34cdae29e60eab94855a625dcfe1705f8bc7ad79f"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"arrayvec",
"bitflags 2.10.0",
@@ -19889,7 +19934,7 @@ dependencies = [
"hashbrown 0.16.1",
"js-sys",
"log",
- "naga",
+ "naga 28.0.1",
"parking_lot",
"portable-atomic",
"profiling",
@@ -19906,9 +19951,8 @@ dependencies = [
[[package]]
name = "wgpu-core"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bb4c8b5db5f00e56f1f08869d870a0dff7c8bc7ebc01091fec140b0cf0211a9"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"arrayvec",
"bit-set",
@@ -19920,7 +19964,7 @@ dependencies = [
"hashbrown 0.16.1",
"indexmap",
"log",
- "naga",
+ "naga 28.0.1",
"once_cell",
"parking_lot",
"portable-atomic",
@@ -19938,36 +19982,32 @@ dependencies = [
[[package]]
name = "wgpu-core-deps-apple"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87b7b696b918f337c486bf93142454080a32a37832ba8a31e4f48221890047da"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-core-deps-emscripten"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34b251c331f84feac147de3c4aa3aa45112622a95dd7ee1b74384fa0458dbd79"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-core-deps-windows-linux-android"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68ca976e72b2c9964eb243e281f6ce7f14a514e409920920dcda12ae40febaae"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"wgpu-hal",
]
[[package]]
name = "wgpu-hal"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "293080d77fdd14d6b08a67c5487dfddbf874534bb7921526db56a7b75d7e3bef"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"android_system_properties",
"arrayvec",
@@ -19990,7 +20030,7 @@ dependencies = [
"libloading",
"log",
"metal",
- "naga",
+ "naga 28.0.1",
"ndk-sys",
"objc",
"once_cell",
@@ -20013,9 +20053,8 @@ dependencies = [
[[package]]
name = "wgpu-types"
-version = "28.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e18308757e594ed2cd27dddbb16a139c42a683819d32a2e0b1b0167552f5840c"
+version = "28.0.1"
+source = "git+https://github.com/zed-industries/wgpu?rev=9459e95113c5bd116b2cc2c87e8424b28059e17c#9459e95113c5bd116b2cc2c87e8424b28059e17c"
dependencies = [
"bitflags 2.10.0",
"bytemuck",
@@ -21239,7 +21278,6 @@ dependencies = [
"any_vec",
"anyhow",
"async-recursion",
- "call",
"chrono",
"client",
"clock",
@@ -21504,6 +21542,7 @@ checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9"
name = "xtask"
version = "0.1.0"
dependencies = [
+ "annotate-snippets",
"anyhow",
"backtrace",
"cargo_metadata",
@@ -21512,8 +21551,12 @@ dependencies = [
"gh-workflow",
"indexmap",
"indoc",
+ "itertools 0.14.0",
+ "regex",
"serde",
"serde_json",
+ "serde_yaml",
+ "strum 0.27.2",
"toml 0.8.23",
"toml_edit 0.22.27",
]
@@ -21692,7 +21735,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.227.0"
+version = "0.228.0"
dependencies = [
"acp_thread",
"acp_tools",
@@ -21710,7 +21753,6 @@ dependencies = [
"audio",
"auto_update",
"auto_update_ui",
- "bincode",
"breadcrumbs",
"call",
"channel",
@@ -21729,6 +21771,7 @@ dependencies = [
"copilot_chat",
"copilot_ui",
"crashes",
+ "csv_preview",
"dap",
"dap_adapters",
"db",
diff --git a/Cargo.toml b/Cargo.toml
index 98fccfaeb21bc6107323378605c8299d5bd5838f..b8e57bda7e46ea45451fedd6759268235c7d71ab 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -45,6 +45,7 @@ members = [
"crates/copilot_chat",
"crates/crashes",
"crates/credentials_provider",
+ "crates/csv_preview",
"crates/dap",
"crates/dap_adapters",
"crates/db",
@@ -298,6 +299,7 @@ copilot_ui = { path = "crates/copilot_ui" }
crashes = { path = "crates/crashes" }
credentials_provider = { path = "crates/credentials_provider" }
crossbeam = "0.8.4"
+csv_preview = { path = "crates/csv_preview"}
dap = { path = "crates/dap" }
dap_adapters = { path = "crates/dap_adapters" }
db = { path = "crates/db" }
@@ -536,7 +538,16 @@ criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "1b461b310481d01e02b2603c16d7144b926339f8" }
dashmap = "6.0"
-derive_more = "0.99.17"
+derive_more = { version = "2.1.1", features = [
+ "add",
+ "add_assign",
+ "deref",
+ "deref_mut",
+ "from_str",
+ "mul",
+ "mul_assign",
+ "not",
+] }
dirs = "4.0"
documented = "0.9.1"
dotenvy = "0.15.0"
@@ -572,7 +583,7 @@ itertools = "0.14.0"
json_dotpath = "1.1"
jsonschema = "0.37.0"
jsonwebtoken = "10.0"
-jupyter-protocol = "1.2.0"
+jupyter-protocol = "1.4.0"
jupyter-websocket-client = "1.0.0"
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
@@ -588,7 +599,7 @@ minidumper = "0.8"
moka = { version = "0.12.10", features = ["sync"] }
naga = { version = "28.0", features = ["wgsl-in"] }
nanoid = "0.4"
-nbformat = "1.1.0"
+nbformat = "1.2.0"
nix = "0.29"
num-format = "0.4.4"
objc = "0.2"
@@ -658,7 +669,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662
"stream",
], package = "zed-reqwest", version = "0.12.15-zed" }
rsa = "0.9.6"
-runtimelib = { version = "1.2.0", default-features = false, features = [
+runtimelib = { version = "1.4.0", default-features = false, features = [
"async-dispatcher-runtime", "aws-lc-rs"
] }
rust-embed = { version = "8.4", features = ["include-exclude"] }
@@ -768,7 +779,7 @@ wax = "0.7"
which = "6.0.0"
wasm-bindgen = "0.2.113"
web-time = "1.1.0"
-wgpu = "28.0"
+wgpu = { git = "https://github.com/zed-industries/wgpu", rev = "9459e95113c5bd116b2cc2c87e8424b28059e17c" }
windows-core = "0.61"
yawc = "0.2.5"
zeroize = "1.8"
@@ -813,6 +824,7 @@ features = [
"Win32_System_Ole",
"Win32_System_Performance",
"Win32_System_Pipes",
+ "Win32_System_RestartManager",
"Win32_System_SystemInformation",
"Win32_System_SystemServices",
"Win32_System_Threading",
diff --git a/assets/icons/file_icons/gitlab.svg b/assets/icons/file_icons/gitlab.svg
new file mode 100644
index 0000000000000000000000000000000000000000..f0faf570b125c7764e769ae60f7a6ce6f7825ceb
--- /dev/null
+++ b/assets/icons/file_icons/gitlab.svg
@@ -0,0 +1 @@
+
diff --git a/assets/icons/file_icons/helm.svg b/assets/icons/file_icons/helm.svg
new file mode 100644
index 0000000000000000000000000000000000000000..03e702f2d5081c4e96ff4db7ba7428817b08748f
--- /dev/null
+++ b/assets/icons/file_icons/helm.svg
@@ -0,0 +1 @@
+
diff --git a/assets/icons/file_icons/yaml.svg b/assets/icons/file_icons/yaml.svg
new file mode 100644
index 0000000000000000000000000000000000000000..2c3efd46cd45ff67d6c46d84476d563dd5ac3a73
--- /dev/null
+++ b/assets/icons/file_icons/yaml.svg
@@ -0,0 +1 @@
+
diff --git a/assets/keymaps/default-linux.json b/assets/keymaps/default-linux.json
index 9b8f2d337b1f1073bca818cf0b9c66773a3ce4e9..7e01245ec62b2590a1c88fef5946b7d06463968d 100644
--- a/assets/keymaps/default-linux.json
+++ b/assets/keymaps/default-linux.json
@@ -204,6 +204,7 @@
{
"context": "Editor && editor_agent_diff",
"bindings": {
+ "alt-y": "agent::Keep",
"ctrl-alt-y": "agent::Keep",
"ctrl-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -214,6 +215,7 @@
{
"context": "AgentDiff",
"bindings": {
+ "alt-y": "agent::Keep",
"ctrl-alt-y": "agent::Keep",
"ctrl-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -1310,6 +1312,7 @@
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault",
+ "ctrl-shift-backspace": "git::DeleteWorktree",
},
},
{
diff --git a/assets/keymaps/default-macos.json b/assets/keymaps/default-macos.json
index 5f210cb4da35f9909767035c941289ee24a2ee3f..43d6419575fc698110cd5a033c01127ac6543f9a 100644
--- a/assets/keymaps/default-macos.json
+++ b/assets/keymaps/default-macos.json
@@ -242,6 +242,7 @@
"context": "AgentDiff",
"use_key_equivalents": true,
"bindings": {
+ "cmd-y": "agent::Keep",
"cmd-alt-y": "agent::Keep",
"cmd-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -252,6 +253,7 @@
"context": "Editor && editor_agent_diff",
"use_key_equivalents": true,
"bindings": {
+ "cmd-y": "agent::Keep",
"cmd-alt-y": "agent::Keep",
"cmd-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -448,6 +450,13 @@
"down": "search::NextHistoryQuery",
},
},
+ {
+ "context": "BufferSearchBar || ProjectSearchBar",
+ "use_key_equivalents": true,
+ "bindings": {
+ "ctrl-enter": "editor::Newline",
+ },
+ },
{
"context": "ProjectSearchBar",
"use_key_equivalents": true,
@@ -1408,6 +1417,7 @@
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault",
+ "cmd-shift-backspace": "git::DeleteWorktree",
},
},
{
diff --git a/assets/keymaps/default-windows.json b/assets/keymaps/default-windows.json
index 19f75f858cd45192c4cf30dd6bd0799046c26268..22541368cecfc6a645e2b8b7ce55a6711491a012 100644
--- a/assets/keymaps/default-windows.json
+++ b/assets/keymaps/default-windows.json
@@ -203,6 +203,7 @@
"context": "Editor && editor_agent_diff",
"use_key_equivalents": true,
"bindings": {
+ "alt-y": "agent::Keep",
"ctrl-alt-y": "agent::Keep",
"ctrl-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -214,6 +215,7 @@
"context": "AgentDiff",
"use_key_equivalents": true,
"bindings": {
+ "alt-y": "agent::Keep",
"ctrl-alt-y": "agent::Keep",
"ctrl-alt-z": "agent::Reject",
"shift-alt-y": "agent::KeepAll",
@@ -1331,6 +1333,7 @@
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault",
+ "ctrl-shift-backspace": "git::DeleteWorktree",
},
},
{
diff --git a/assets/settings/default.json b/assets/settings/default.json
index b193c0f60d0087972381f4f85f2b864b52fdbc7d..0a824bbe93a0d68a23d934a63eb1fdab1e2f1b02 100644
--- a/assets/settings/default.json
+++ b/assets/settings/default.json
@@ -361,8 +361,11 @@
// bracket, brace, single or double quote characters.
// For example, when you select text and type '(', Zed will surround the text with ().
"use_auto_surround": true,
- // Whether indentation should be adjusted based on the context whilst typing.
- "auto_indent": true,
+ // Controls automatic indentation behavior when typing.
+ // - "syntax_aware": Adjusts indentation based on syntax context (default)
+ // - "preserve_indent": Preserves current line's indentation on new lines
+ // - "none": No automatic indentation
+ "auto_indent": "syntax_aware",
// Whether indentation of pasted content should be adjusted based on the context.
"auto_indent_on_paste": true,
// Controls how the editor handles the autoclosed characters.
@@ -1831,8 +1834,8 @@
" (",
" # multi-char path: first char (not opening delimiter, space, or box drawing char)",
" [^({\\[<\"'`\\ \\u2500-\\u257F]",
- " # middle chars: non-space, and colon/paren only if not followed by digit/paren",
- " ([^\\ :(]|[:(][^0-9()])*",
+ " # middle chars: non-space, and colon/paren only if not followed by digit/paren/space",
+ " ([^\\ :(]|[:(][^0-9()\\ ])*",
" # last char: not closing delimiter or colon",
" [^()}\\]>\"'`.,;:\\ ]",
" |",
diff --git a/crates/acp_thread/src/acp_thread.rs b/crates/acp_thread/src/acp_thread.rs
index f57ce1f4d188e260624bd90187a21890379fe6b6..1b9271918884dc020986577926d9578e3a6f049c 100644
--- a/crates/acp_thread/src/acp_thread.rs
+++ b/crates/acp_thread/src/acp_thread.rs
@@ -972,6 +972,8 @@ pub struct AcpThread {
had_error: bool,
/// The user's unsent prompt text, persisted so it can be restored when reloading the thread.
draft_prompt: Option>,
+ /// The initial scroll position for the thread view, set during session registration.
+ ui_scroll_position: Option,
}
impl From<&AcpThread> for ActionLogTelemetry {
@@ -1210,6 +1212,7 @@ impl AcpThread {
pending_terminal_exit: HashMap::default(),
had_error: false,
draft_prompt: None,
+ ui_scroll_position: None,
}
}
@@ -1229,6 +1232,14 @@ impl AcpThread {
self.draft_prompt = prompt;
}
+ pub fn ui_scroll_position(&self) -> Option {
+ self.ui_scroll_position
+ }
+
+ pub fn set_ui_scroll_position(&mut self, position: Option) {
+ self.ui_scroll_position = position;
+ }
+
pub fn connection(&self) -> &Rc {
&self.connection
}
diff --git a/crates/action_log/Cargo.toml b/crates/action_log/Cargo.toml
index 8488df691e40ea3bcfc04f4f6f74964fba7863dd..b1a1bf824fb770b8378e596fd0c799a7cf98b13d 100644
--- a/crates/action_log/Cargo.toml
+++ b/crates/action_log/Cargo.toml
@@ -20,6 +20,7 @@ buffer_diff.workspace = true
log.workspace = true
clock.workspace = true
collections.workspace = true
+fs.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
diff --git a/crates/action_log/src/action_log.rs b/crates/action_log/src/action_log.rs
index 5f8a639c0559c10546fc5640dc240aeba9dde487..5679f3c58fe52057f7a4a0faa24d5b5db2b5e497 100644
--- a/crates/action_log/src/action_log.rs
+++ b/crates/action_log/src/action_log.rs
@@ -1,14 +1,20 @@
use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use clock;
-use collections::BTreeMap;
+use collections::{BTreeMap, HashMap};
+use fs::MTime;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferEvent, Point, ToOffset, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
-use std::{cmp, ops::Range, sync::Arc};
+use std::{
+ cmp,
+ ops::Range,
+ path::{Path, PathBuf},
+ sync::Arc,
+};
use text::{Edit, Patch, Rope};
use util::{RangeExt, ResultExt as _};
@@ -54,6 +60,8 @@ pub struct ActionLog {
linked_action_log: Option>,
/// Stores undo information for the most recent reject operation
last_reject_undo: Option,
+ /// Tracks the last time files were read by the agent, to detect external modifications
+ file_read_times: HashMap,
}
impl ActionLog {
@@ -64,6 +72,7 @@ impl ActionLog {
project,
linked_action_log: None,
last_reject_undo: None,
+ file_read_times: HashMap::default(),
}
}
@@ -76,6 +85,32 @@ impl ActionLog {
&self.project
}
+ pub fn file_read_time(&self, path: &Path) -> Option {
+ self.file_read_times.get(path).copied()
+ }
+
+ fn update_file_read_time(&mut self, buffer: &Entity, cx: &App) {
+ let buffer = buffer.read(cx);
+ if let Some(file) = buffer.file() {
+ if let Some(local_file) = file.as_local() {
+ if let Some(mtime) = file.disk_state().mtime() {
+ let abs_path = local_file.abs_path(cx);
+ self.file_read_times.insert(abs_path, mtime);
+ }
+ }
+ }
+ }
+
+ fn remove_file_read_time(&mut self, buffer: &Entity, cx: &App) {
+ let buffer = buffer.read(cx);
+ if let Some(file) = buffer.file() {
+ if let Some(local_file) = file.as_local() {
+ let abs_path = local_file.abs_path(cx);
+ self.file_read_times.remove(&abs_path);
+ }
+ }
+ }
+
fn track_buffer_internal(
&mut self,
buffer: Entity,
@@ -506,24 +541,69 @@ impl ActionLog {
/// Track a buffer as read by agent, so we can notify the model about user edits.
pub fn buffer_read(&mut self, buffer: Entity, cx: &mut Context) {
- if let Some(linked_action_log) = &mut self.linked_action_log {
- linked_action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+ self.buffer_read_impl(buffer, true, cx);
+ }
+
+ fn buffer_read_impl(
+ &mut self,
+ buffer: Entity,
+ record_file_read_time: bool,
+ cx: &mut Context,
+ ) {
+ if let Some(linked_action_log) = &self.linked_action_log {
+ // We don't want to share read times since the other agent hasn't read it necessarily
+ linked_action_log.update(cx, |log, cx| {
+ log.buffer_read_impl(buffer.clone(), false, cx);
+ });
+ }
+ if record_file_read_time {
+ self.update_file_read_time(&buffer, cx);
}
self.track_buffer_internal(buffer, false, cx);
}
/// Mark a buffer as created by agent, so we can refresh it in the context
pub fn buffer_created(&mut self, buffer: Entity, cx: &mut Context) {
- if let Some(linked_action_log) = &mut self.linked_action_log {
- linked_action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+ self.buffer_created_impl(buffer, true, cx);
+ }
+
+ fn buffer_created_impl(
+ &mut self,
+ buffer: Entity,
+ record_file_read_time: bool,
+ cx: &mut Context,
+ ) {
+ if let Some(linked_action_log) = &self.linked_action_log {
+ // We don't want to share read times since the other agent hasn't read it necessarily
+ linked_action_log.update(cx, |log, cx| {
+ log.buffer_created_impl(buffer.clone(), false, cx);
+ });
+ }
+ if record_file_read_time {
+ self.update_file_read_time(&buffer, cx);
}
self.track_buffer_internal(buffer, true, cx);
}
/// Mark a buffer as edited by agent, so we can refresh it in the context
pub fn buffer_edited(&mut self, buffer: Entity, cx: &mut Context) {
- if let Some(linked_action_log) = &mut self.linked_action_log {
- linked_action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+ self.buffer_edited_impl(buffer, true, cx);
+ }
+
+ fn buffer_edited_impl(
+ &mut self,
+ buffer: Entity,
+ record_file_read_time: bool,
+ cx: &mut Context,
+ ) {
+ if let Some(linked_action_log) = &self.linked_action_log {
+ // We don't want to share read times since the other agent hasn't read it necessarily
+ linked_action_log.update(cx, |log, cx| {
+ log.buffer_edited_impl(buffer.clone(), false, cx);
+ });
+ }
+ if record_file_read_time {
+ self.update_file_read_time(&buffer, cx);
}
let new_version = buffer.read(cx).version();
let tracked_buffer = self.track_buffer_internal(buffer, false, cx);
@@ -536,6 +616,8 @@ impl ActionLog {
}
pub fn will_delete_buffer(&mut self, buffer: Entity, cx: &mut Context) {
+ // Ok to propagate file read time removal to linked action log
+ self.remove_file_read_time(&buffer, cx);
let has_linked_action_log = self.linked_action_log.is_some();
let tracked_buffer = self.track_buffer_internal(buffer.clone(), false, cx);
match tracked_buffer.status {
@@ -2976,6 +3058,196 @@ mod tests {
);
}
+ #[gpui::test]
+ async fn test_file_read_time_recorded_on_buffer_read(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+ .await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+ let file_path = project
+ .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+ .unwrap();
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(file_path, cx))
+ .await
+ .unwrap();
+
+ let abs_path = PathBuf::from(path!("/dir/file"));
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "file_read_time should be None before buffer_read"
+ );
+
+ cx.update(|cx| {
+ action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+ });
+
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+ "file_read_time should be recorded after buffer_read"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_file_read_time_recorded_on_buffer_edited(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+ .await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+ let file_path = project
+ .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+ .unwrap();
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(file_path, cx))
+ .await
+ .unwrap();
+
+ let abs_path = PathBuf::from(path!("/dir/file"));
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "file_read_time should be None before buffer_edited"
+ );
+
+ cx.update(|cx| {
+ action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+ });
+
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+ "file_read_time should be recorded after buffer_edited"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_file_read_time_recorded_on_buffer_created(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/dir"), json!({"file": "existing content"}))
+ .await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+ let file_path = project
+ .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+ .unwrap();
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(file_path, cx))
+ .await
+ .unwrap();
+
+ let abs_path = PathBuf::from(path!("/dir/file"));
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "file_read_time should be None before buffer_created"
+ );
+
+ cx.update(|cx| {
+ action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+ });
+
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+ "file_read_time should be recorded after buffer_created"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_file_read_time_removed_on_delete(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+ .await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+
+ let file_path = project
+ .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+ .unwrap();
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(file_path, cx))
+ .await
+ .unwrap();
+
+ let abs_path = PathBuf::from(path!("/dir/file"));
+
+ cx.update(|cx| {
+ action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+ });
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+ "file_read_time should exist after buffer_read"
+ );
+
+ cx.update(|cx| {
+ action_log.update(cx, |log, cx| log.will_delete_buffer(buffer.clone(), cx));
+ });
+ assert!(
+ action_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "file_read_time should be removed after will_delete_buffer"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_file_read_time_not_forwarded_to_linked_action_log(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(path!("/dir"), json!({"file": "hello world"}))
+ .await;
+ let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
+ let parent_log = cx.new(|_| ActionLog::new(project.clone()));
+ let child_log =
+ cx.new(|_| ActionLog::new(project.clone()).with_linked_action_log(parent_log.clone()));
+
+ let file_path = project
+ .read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
+ .unwrap();
+ let buffer = project
+ .update(cx, |project, cx| project.open_buffer(file_path, cx))
+ .await
+ .unwrap();
+
+ let abs_path = PathBuf::from(path!("/dir/file"));
+
+ cx.update(|cx| {
+ child_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
+ });
+ assert!(
+ child_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_some()),
+ "child should record file_read_time on buffer_read"
+ );
+ assert!(
+ parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "parent should NOT get file_read_time from child's buffer_read"
+ );
+
+ cx.update(|cx| {
+ child_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
+ });
+ assert!(
+ parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "parent should NOT get file_read_time from child's buffer_edited"
+ );
+
+ cx.update(|cx| {
+ child_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
+ });
+ assert!(
+ parent_log.read_with(cx, |log, _| log.file_read_time(&abs_path).is_none()),
+ "parent should NOT get file_read_time from child's buffer_created"
+ );
+ }
+
#[derive(Debug, PartialEq)]
struct HunkStatus {
range: Range,
diff --git a/crates/agent/src/agent.rs b/crates/agent/src/agent.rs
index 7cf9416840a6bd2870327c9c68135857c01f7c9b..5421538ca736028a4ea7290c09ef81036e055b81 100644
--- a/crates/agent/src/agent.rs
+++ b/crates/agent/src/agent.rs
@@ -352,6 +352,8 @@ impl NativeAgent {
let parent_session_id = thread.parent_thread_id();
let title = thread.title();
let draft_prompt = thread.draft_prompt().map(Vec::from);
+ let scroll_position = thread.ui_scroll_position();
+ let token_usage = thread.latest_token_usage();
let project = thread.project.clone();
let action_log = thread.action_log.clone();
let prompt_capabilities_rx = thread.prompt_capabilities_rx.clone();
@@ -367,6 +369,8 @@ impl NativeAgent {
cx,
);
acp_thread.set_draft_prompt(draft_prompt);
+ acp_thread.set_ui_scroll_position(scroll_position);
+ acp_thread.update_token_usage(token_usage, cx);
acp_thread
});
@@ -1917,7 +1921,9 @@ mod internal_tests {
use gpui::TestAppContext;
use indoc::formatdoc;
use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
- use language_model::{LanguageModelProviderId, LanguageModelProviderName};
+ use language_model::{
+ LanguageModelCompletionEvent, LanguageModelProviderId, LanguageModelProviderName,
+ };
use serde_json::json;
use settings::SettingsStore;
use util::{path, rel_path::rel_path};
@@ -2549,6 +2555,13 @@ mod internal_tests {
cx.run_until_parked();
model.send_last_completion_stream_text_chunk("Lorem.");
+ model.send_last_completion_stream_event(LanguageModelCompletionEvent::UsageUpdate(
+ language_model::TokenUsage {
+ input_tokens: 150,
+ output_tokens: 75,
+ ..Default::default()
+ },
+ ));
model.end_last_completion_stream();
cx.run_until_parked();
summary_model
@@ -2587,6 +2600,12 @@ mod internal_tests {
acp_thread.update(cx, |thread, _cx| {
thread.set_draft_prompt(Some(draft_blocks.clone()));
});
+ thread.update(cx, |thread, _cx| {
+ thread.set_ui_scroll_position(Some(gpui::ListOffset {
+ item_ix: 5,
+ offset_in_item: gpui::px(12.5),
+ }));
+ });
thread.update(cx, |_thread, cx| cx.notify());
cx.run_until_parked();
@@ -2632,6 +2651,24 @@ mod internal_tests {
acp_thread.read_with(cx, |thread, _| {
assert_eq!(thread.draft_prompt(), Some(draft_blocks.as_slice()));
});
+
+ // Ensure token usage survived the round-trip.
+ acp_thread.read_with(cx, |thread, _| {
+ let usage = thread
+ .token_usage()
+ .expect("token usage should be restored after reload");
+ assert_eq!(usage.input_tokens, 150);
+ assert_eq!(usage.output_tokens, 75);
+ });
+
+ // Ensure scroll position survived the round-trip.
+ acp_thread.read_with(cx, |thread, _| {
+ let scroll = thread
+ .ui_scroll_position()
+ .expect("scroll position should be restored after reload");
+ assert_eq!(scroll.item_ix, 5);
+ assert_eq!(scroll.offset_in_item, gpui::px(12.5));
+ });
}
fn thread_entries(
diff --git a/crates/agent/src/db.rs b/crates/agent/src/db.rs
index 3a7af37cac85065d8853fbb5332093ef3fd20592..10ecb643b9a17dd6b02b47a416c526a662d12632 100644
--- a/crates/agent/src/db.rs
+++ b/crates/agent/src/db.rs
@@ -66,6 +66,14 @@ pub struct DbThread {
pub thinking_effort: Option,
#[serde(default)]
pub draft_prompt: Option>,
+ #[serde(default)]
+ pub ui_scroll_position: Option<SerializedScrollPosition>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
+pub struct SerializedScrollPosition {
+ pub item_ix: usize,
+ pub offset_in_item: f32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -108,6 +116,7 @@ impl SharedThread {
thinking_enabled: false,
thinking_effort: None,
draft_prompt: None,
+ ui_scroll_position: None,
}
}
@@ -286,6 +295,7 @@ impl DbThread {
thinking_enabled: false,
thinking_effort: None,
draft_prompt: None,
+ ui_scroll_position: None,
})
}
}
@@ -637,6 +647,7 @@ mod tests {
thinking_enabled: false,
thinking_effort: None,
draft_prompt: None,
+ ui_scroll_position: None,
}
}
@@ -841,4 +852,53 @@ mod tests {
assert_eq!(threads.len(), 1);
assert!(threads[0].folder_paths.is_empty());
}
+
+ #[test]
+ fn test_scroll_position_defaults_to_none() {
+ let json = r#"{
+ "title": "Old Thread",
+ "messages": [],
+ "updated_at": "2024-01-01T00:00:00Z"
+ }"#;
+
+ let db_thread: DbThread = serde_json::from_str(json).expect("Failed to deserialize");
+
+ assert!(
+ db_thread.ui_scroll_position.is_none(),
+ "Legacy threads without scroll_position field should default to None"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_scroll_position_roundtrips_through_save_load(cx: &mut TestAppContext) {
+ let database = ThreadsDatabase::new(cx.executor()).unwrap();
+
+ let thread_id = session_id("thread-with-scroll");
+
+ let mut thread = make_thread(
+ "Thread With Scroll",
+ Utc.with_ymd_and_hms(2024, 1, 1, 0, 0, 0).unwrap(),
+ );
+ thread.ui_scroll_position = Some(SerializedScrollPosition {
+ item_ix: 42,
+ offset_in_item: 13.5,
+ });
+
+ database
+ .save_thread(thread_id.clone(), thread, PathList::default())
+ .await
+ .unwrap();
+
+ let loaded = database
+ .load_thread(thread_id)
+ .await
+ .unwrap()
+ .expect("thread should exist");
+
+ let scroll = loaded
+ .ui_scroll_position
+ .expect("scroll_position should be restored");
+ assert_eq!(scroll.item_ix, 42);
+ assert!((scroll.offset_in_item - 13.5).abs() < f32::EPSILON);
+ }
}
diff --git a/crates/agent/src/tests/edit_file_thread_test.rs b/crates/agent/src/tests/edit_file_thread_test.rs
index 069bf0349299e6f4952f673cbf7607e52d48d9c5..3beb5cb0d51abc55fbf3cf0849ced248a9d1fa5c 100644
--- a/crates/agent/src/tests/edit_file_thread_test.rs
+++ b/crates/agent/src/tests/edit_file_thread_test.rs
@@ -50,9 +50,9 @@ async fn test_edit_file_tool_in_thread_context(cx: &mut TestAppContext) {
// Add just the tools we need for this test
let language_registry = project.read(cx).languages().clone();
thread.add_tool(crate::ReadFileTool::new(
- cx.weak_entity(),
project.clone(),
thread.action_log().clone(),
+ true,
));
thread.add_tool(crate::EditFileTool::new(
project.clone(),
diff --git a/crates/agent/src/tests/mod.rs b/crates/agent/src/tests/mod.rs
index 8d75aae7e2948ef9c0934a72da112b926f633941..23ebe41d3c42654cb8fcdc0266009416686858aa 100644
--- a/crates/agent/src/tests/mod.rs
+++ b/crates/agent/src/tests/mod.rs
@@ -2631,6 +2631,84 @@ async fn test_in_progress_send_canceled_by_next_send(cx: &mut TestAppContext) {
assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]);
}
+#[gpui::test]
+async fn test_retry_cancelled_promptly_on_new_send(cx: &mut TestAppContext) {
+ // Regression test: when a completion fails with a retryable error (e.g. upstream 500),
+ // the retry loop waits on a timer. If the user switches models and sends a new message
+ // during that delay, the old turn should exit immediately instead of retrying with the
+ // stale model.
+ let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
+ let model_a = model.as_fake();
+
+ // Start a turn with model_a.
+ let events_1 = thread
+ .update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Hello"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+ assert_eq!(model_a.completion_count(), 1);
+
+ // Model returns a retryable upstream 500. The turn enters the retry delay.
+ model_a.send_last_completion_stream_error(
+ LanguageModelCompletionError::UpstreamProviderError {
+ message: "Internal server error".to_string(),
+ status: http_client::StatusCode::INTERNAL_SERVER_ERROR,
+ retry_after: None,
+ },
+ );
+ model_a.end_last_completion_stream();
+ cx.run_until_parked();
+
+ // The old completion was consumed; model_a has no pending requests yet because the
+ // retry timer hasn't fired.
+ assert_eq!(model_a.completion_count(), 0);
+
+ // Switch to model_b and send a new message. This cancels the old turn.
+ let model_b = Arc::new(FakeLanguageModel::with_id_and_thinking(
+ "fake", "model-b", "Model B", false,
+ ));
+ thread.update(cx, |thread, cx| {
+ thread.set_model(model_b.clone(), cx);
+ });
+ let events_2 = thread
+ .update(cx, |thread, cx| {
+ thread.send(UserMessageId::new(), ["Continue"], cx)
+ })
+ .unwrap();
+ cx.run_until_parked();
+
+ // model_b should have received its completion request.
+ assert_eq!(model_b.as_fake().completion_count(), 1);
+
+ // Advance the clock well past the retry delay (BASE_RETRY_DELAY = 5s).
+ cx.executor().advance_clock(Duration::from_secs(10));
+ cx.run_until_parked();
+
+ // model_a must NOT have received another completion request — the cancelled turn
+ // should have exited during the retry delay rather than retrying with the old model.
+ assert_eq!(
+ model_a.completion_count(),
+ 0,
+ "old model should not receive a retry request after cancellation"
+ );
+
+ // Complete model_b's turn.
+ model_b
+ .as_fake()
+ .send_last_completion_stream_text_chunk("Done!");
+ model_b
+ .as_fake()
+ .send_last_completion_stream_event(LanguageModelCompletionEvent::Stop(StopReason::EndTurn));
+ model_b.as_fake().end_last_completion_stream();
+
+ let events_1 = events_1.collect::<Vec<_>>().await;
+ assert_eq!(stop_events(events_1), vec![acp::StopReason::Cancelled]);
+
+ let events_2 = events_2.collect::<Vec<_>>().await;
+ assert_eq!(stop_events(events_2), vec![acp::StopReason::EndTurn]);
+}
+
#[gpui::test]
async fn test_subsequent_successful_sends_dont_cancel(cx: &mut TestAppContext) {
let ThreadTest { model, thread, .. } = setup(cx, TestModel::Fake).await;
diff --git a/crates/agent/src/thread.rs b/crates/agent/src/thread.rs
index c5ca1118ace28b66d555d67aa40c718da292f644..148702e1bafeae05ac67c6127d8259581aff93dd 100644
--- a/crates/agent/src/thread.rs
+++ b/crates/agent/src/thread.rs
@@ -893,14 +893,13 @@ pub struct Thread {
pub(crate) prompt_capabilities_rx: watch::Receiver,
pub(crate) project: Entity,
pub(crate) action_log: Entity,
- /// Tracks the last time files were read by the agent, to detect external modifications
- pub(crate) file_read_times: HashMap,
/// True if this thread was imported from a shared thread and can be synced.
imported: bool,
/// If this is a subagent thread, contains context about the parent
subagent_context: Option,
/// The user's unsent prompt text, persisted so it can be restored when reloading the thread.
draft_prompt: Option>,
+ ui_scroll_position: Option<gpui::ListOffset>,
/// Weak references to running subagent threads for cancellation propagation
running_subagents: Vec<WeakEntity<Thread>>,
}
@@ -1013,10 +1012,10 @@ impl Thread {
prompt_capabilities_rx,
project,
action_log,
- file_read_times: HashMap::default(),
imported: false,
subagent_context: None,
draft_prompt: None,
+ ui_scroll_position: None,
running_subagents: Vec::new(),
}
}
@@ -1229,10 +1228,13 @@ impl Thread {
updated_at: db_thread.updated_at,
prompt_capabilities_tx,
prompt_capabilities_rx,
- file_read_times: HashMap::default(),
imported: db_thread.imported,
subagent_context: db_thread.subagent_context,
draft_prompt: db_thread.draft_prompt,
+ ui_scroll_position: db_thread.ui_scroll_position.map(|sp| gpui::ListOffset {
+ item_ix: sp.item_ix,
+ offset_in_item: gpui::px(sp.offset_in_item),
+ }),
running_subagents: Vec::new(),
}
}
@@ -1258,6 +1260,12 @@ impl Thread {
thinking_enabled: self.thinking_enabled,
thinking_effort: self.thinking_effort.clone(),
draft_prompt: self.draft_prompt.clone(),
+ ui_scroll_position: self.ui_scroll_position.map(|lo| {
+ crate::db::SerializedScrollPosition {
+ item_ix: lo.item_ix,
+ offset_in_item: lo.offset_in_item.as_f32(),
+ }
+ }),
};
cx.background_spawn(async move {
@@ -1307,6 +1315,14 @@ impl Thread {
self.draft_prompt = prompt;
}
+ pub fn ui_scroll_position(&self) -> Option<gpui::ListOffset> {
+ self.ui_scroll_position
+ }
+
+ pub fn set_ui_scroll_position(&mut self, position: Option<gpui::ListOffset>) {
+ self.ui_scroll_position = position;
+ }
+
pub fn model(&self) -> Option<&Arc> {
self.model.as_ref()
}
@@ -1416,6 +1432,9 @@ impl Thread {
environment: Rc,
cx: &mut Context,
) {
+ // Only update the agent location for the root thread, not for subagents.
+ let update_agent_location = self.parent_thread_id().is_none();
+
let language_registry = self.project.read(cx).languages().clone();
self.add_tool(CopyPathTool::new(self.project.clone()));
self.add_tool(CreateDirectoryTool::new(self.project.clone()));
@@ -1433,6 +1452,7 @@ impl Thread {
self.add_tool(StreamingEditFileTool::new(
self.project.clone(),
cx.weak_entity(),
+ self.action_log.clone(),
language_registry,
));
self.add_tool(FetchTool::new(self.project.read(cx).client().http_client()));
@@ -1443,9 +1463,9 @@ impl Thread {
self.add_tool(NowTool);
self.add_tool(OpenTool::new(self.project.clone()));
self.add_tool(ReadFileTool::new(
- cx.weak_entity(),
self.project.clone(),
self.action_log.clone(),
+ update_agent_location,
));
self.add_tool(SaveFileTool::new(self.project.clone()));
self.add_tool(RestoreFileFromDiskTool::new(self.project.clone()));
@@ -1940,7 +1960,15 @@ impl Thread {
})??;
let timer = cx.background_executor().timer(retry.duration);
event_stream.send_retry(retry);
- timer.await;
+ futures::select! {
+ _ = timer.fuse() => {}
+ _ = cancellation_rx.changed().fuse() => {
+ if *cancellation_rx.borrow() {
+ log::debug!("Turn cancelled during retry delay, exiting");
+ return Ok(());
+ }
+ }
+ }
this.update(cx, |this, _cx| {
if let Some(Message::Agent(message)) = this.messages.last() {
if message.tool_results.is_empty() {
@@ -2308,20 +2336,18 @@ impl Thread {
) {
// Ensure the last message ends in the current tool use
let last_message = self.pending_message();
- let push_new_tool_use = last_message.content.last_mut().is_none_or(|content| {
+
+ let has_tool_use = last_message.content.iter_mut().rev().any(|content| {
if let AgentMessageContent::ToolUse(last_tool_use) = content {
if last_tool_use.id == tool_use.id {
*last_tool_use = tool_use.clone();
- false
- } else {
- true
+ return true;
}
- } else {
- true
}
+ false
});
- if push_new_tool_use {
+ if !has_tool_use {
event_stream.send_tool_call(
&tool_use.id,
&tool_use.name,
@@ -2609,7 +2635,8 @@ impl Thread {
}
}
- let use_streaming_edit_tool = cx.has_flag::();
+ let use_streaming_edit_tool =
+ cx.has_flag::() && model.supports_streaming_tools();
let mut tools = self
.tools
diff --git a/crates/agent/src/thread_store.rs b/crates/agent/src/thread_store.rs
index f944377e489a88ac0fa6dbb802edf9702e86f5f2..e26820ddacc3132d42946de3b27d25f4424fae02 100644
--- a/crates/agent/src/thread_store.rs
+++ b/crates/agent/src/thread_store.rs
@@ -146,6 +146,7 @@ mod tests {
thinking_enabled: false,
thinking_effort: None,
draft_prompt: None,
+ ui_scroll_position: None,
}
}
diff --git a/crates/agent/src/tools/edit_file_tool.rs b/crates/agent/src/tools/edit_file_tool.rs
index d8c380eba326d089b848563cca04557e903ba0f4..29b08ac09db4417123403fd3915b8575791b2a4e 100644
--- a/crates/agent/src/tools/edit_file_tool.rs
+++ b/crates/agent/src/tools/edit_file_tool.rs
@@ -305,13 +305,13 @@ impl AgentTool for EditFileTool {
// Check if the file has been modified since the agent last read it
if let Some(abs_path) = abs_path.as_ref() {
- let (last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.update(cx, |thread, cx| {
- let last_read = thread.file_read_times.get(abs_path).copied();
+ let last_read_mtime = action_log.read_with(cx, |log, _| log.file_read_time(abs_path));
+ let (current_mtime, is_dirty, has_save_tool, has_restore_tool) = self.thread.read_with(cx, |thread, cx| {
let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime());
let dirty = buffer.read(cx).is_dirty();
let has_save = thread.has_tool(SaveFileTool::NAME);
let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME);
- (last_read, current, dirty, has_save, has_restore)
+ (current, dirty, has_save, has_restore)
})?;
// Check for unsaved changes first - these indicate modifications we don't know about
@@ -470,17 +470,6 @@ impl AgentTool for EditFileTool {
log.buffer_edited(buffer.clone(), cx);
});
- // Update the recorded read time after a successful edit so consecutive edits work
- if let Some(abs_path) = abs_path.as_ref() {
- if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
- buffer.file().and_then(|file| file.disk_state().mtime())
- }) {
- self.thread.update(cx, |thread, _| {
- thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
- })?;
- }
- }
-
let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let (new_text, unified_diff) = cx
.background_spawn({
@@ -2212,14 +2201,18 @@ mod tests {
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
// Initially, file_read_times should be empty
- let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty());
+ let is_empty = action_log.read_with(cx, |action_log, _| {
+ action_log
+ .file_read_time(path!("/root/test.txt").as_ref())
+ .is_none()
+ });
assert!(is_empty, "file_read_times should start empty");
// Create read tool
let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
project.clone(),
- action_log,
+ action_log.clone(),
+ true,
));
// Read the file to record the read time
@@ -2238,12 +2231,9 @@ mod tests {
.unwrap();
// Verify that file_read_times now contains an entry for the file
- let has_entry = thread.read_with(cx, |thread, _| {
- thread.file_read_times.len() == 1
- && thread
- .file_read_times
- .keys()
- .any(|path| path.ends_with("test.txt"))
+ let has_entry = action_log.read_with(cx, |log, _| {
+ log.file_read_time(path!("/root/test.txt").as_ref())
+ .is_some()
});
assert!(
has_entry,
@@ -2265,11 +2255,14 @@ mod tests {
.await
.unwrap();
- // Should still have exactly one entry
- let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1);
+ // Should still have an entry after re-reading
+ let has_entry = action_log.read_with(cx, |log, _| {
+ log.file_read_time(path!("/root/test.txt").as_ref())
+ .is_some()
+ });
assert!(
- has_one_entry,
- "file_read_times should still have one entry after re-reading"
+ has_entry,
+ "file_read_times should still have an entry after re-reading"
);
}
@@ -2309,11 +2302,7 @@ mod tests {
let languages = project.read_with(cx, |project, _| project.languages().clone());
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
- let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
let edit_tool = Arc::new(EditFileTool::new(
project.clone(),
thread.downgrade(),
@@ -2423,11 +2412,7 @@ mod tests {
let languages = project.read_with(cx, |project, _| project.languages().clone());
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
- let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
let edit_tool = Arc::new(EditFileTool::new(
project.clone(),
thread.downgrade(),
@@ -2534,11 +2519,7 @@ mod tests {
let languages = project.read_with(cx, |project, _| project.languages().clone());
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
- let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let read_tool = Arc::new(crate::ReadFileTool::new(project.clone(), action_log, true));
let edit_tool = Arc::new(EditFileTool::new(
project.clone(),
thread.downgrade(),
diff --git a/crates/agent/src/tools/read_file_tool.rs b/crates/agent/src/tools/read_file_tool.rs
index 8cfc16ddf6174a190ffe7cc11921dc204b05b79d..f7a75bc63a1c461b65c3a2e6f74f2c70e0ca15f6 100644
--- a/crates/agent/src/tools/read_file_tool.rs
+++ b/crates/agent/src/tools/read_file_tool.rs
@@ -2,7 +2,7 @@ use action_log::ActionLog;
use agent_client_protocol::{self as acp, ToolCallUpdateFields};
use anyhow::{Context as _, Result, anyhow};
use futures::FutureExt as _;
-use gpui::{App, Entity, SharedString, Task, WeakEntity};
+use gpui::{App, Entity, SharedString, Task};
use indoc::formatdoc;
use language::Point;
use language_model::{LanguageModelImage, LanguageModelToolResultContent};
@@ -21,7 +21,7 @@ use super::tool_permissions::{
ResolvedProjectPath, authorize_symlink_access, canonicalize_worktree_roots,
resolve_project_path,
};
-use crate::{AgentTool, Thread, ToolCallEventStream, ToolInput, outline};
+use crate::{AgentTool, ToolCallEventStream, ToolInput, outline};
/// Reads the content of the given file in the project.
///
@@ -56,21 +56,21 @@ pub struct ReadFileToolInput {
}
pub struct ReadFileTool {
- thread: WeakEntity,
project: Entity,
action_log: Entity,
+ update_agent_location: bool,
}
impl ReadFileTool {
pub fn new(
- thread: WeakEntity,
project: Entity,
action_log: Entity,
+ update_agent_location: bool,
) -> Self {
Self {
- thread,
project,
action_log,
+ update_agent_location,
}
}
}
@@ -119,7 +119,6 @@ impl AgentTool for ReadFileTool {
cx: &mut App,
) -> Task> {
let project = self.project.clone();
- let thread = self.thread.clone();
let action_log = self.action_log.clone();
cx.spawn(async move |cx| {
let input = input
@@ -257,20 +256,6 @@ impl AgentTool for ReadFileTool {
return Err(tool_content_err(format!("{file_path} not found")));
}
- // Record the file read time and mtime
- if let Some(mtime) = buffer.read_with(cx, |buffer, _| {
- buffer.file().and_then(|file| file.disk_state().mtime())
- }) {
- thread
- .update(cx, |thread, _| {
- thread.file_read_times.insert(abs_path.to_path_buf(), mtime);
- })
- .ok();
- }
-
-
- let update_agent_location = self.thread.read_with(cx, |thread, _cx| !thread.is_subagent()).unwrap_or_default();
-
let mut anchor = None;
// Check if specific line ranges are provided
@@ -330,7 +315,7 @@ impl AgentTool for ReadFileTool {
};
project.update(cx, |project, cx| {
- if update_agent_location {
+ if self.update_agent_location {
project.set_agent_location(
Some(AgentLocation {
buffer: buffer.downgrade(),
@@ -362,13 +347,10 @@ impl AgentTool for ReadFileTool {
#[cfg(test)]
mod test {
use super::*;
- use crate::{ContextServerRegistry, Templates, Thread};
use agent_client_protocol as acp;
use fs::Fs as _;
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
- use language_model::fake_provider::FakeLanguageModel;
use project::{FakeFs, Project};
- use prompt_store::ProjectContext;
use serde_json::json;
use settings::SettingsStore;
use std::path::PathBuf;
@@ -383,20 +365,7 @@ mod test {
fs.insert_tree(path!("/root"), json!({})).await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
let (event_stream, _) = ToolCallEventStream::test();
let result = cx
@@ -429,20 +398,7 @@ mod test {
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
@@ -476,20 +432,7 @@ mod test {
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(language::rust_lang());
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
@@ -569,20 +512,7 @@ mod test {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
@@ -614,20 +544,7 @@ mod test {
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
// start_line of 0 should be treated as 1
let result = cx
@@ -757,20 +674,7 @@ mod test {
let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
// Reading a file outside the project worktree should fail
let result = cx
@@ -965,20 +869,7 @@ mod test {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
+ let tool = Arc::new(ReadFileTool::new(project, action_log, true));
let (event_stream, mut event_rx) = ToolCallEventStream::test();
let read_task = cx.update(|cx| {
@@ -1084,24 +975,7 @@ mod test {
.await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log.clone(),
- ));
+ let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone(), true));
// Test reading allowed files in worktree1
let result = cx
@@ -1288,24 +1162,7 @@ mod test {
cx.executor().run_until_parked();
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
let (event_stream, mut event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| {
@@ -1364,24 +1221,7 @@ mod test {
cx.executor().run_until_parked();
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
let (event_stream, mut event_rx) = ToolCallEventStream::test();
let task = cx.update(|cx| {
@@ -1444,24 +1284,7 @@ mod test {
cx.executor().run_until_parked();
let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
+ let tool = Arc::new(ReadFileTool::new(project.clone(), action_log, true));
let (event_stream, mut event_rx) = ToolCallEventStream::test();
let result = cx
diff --git a/crates/agent/src/tools/spawn_agent_tool.rs b/crates/agent/src/tools/spawn_agent_tool.rs
index b75c41775258db49577024dca3eb1770937e52e8..162de68b86115056e9579d22a8623d675245cc91 100644
--- a/crates/agent/src/tools/spawn_agent_tool.rs
+++ b/crates/agent/src/tools/spawn_agent_tool.rs
@@ -161,29 +161,42 @@ impl AgentTool for SpawnAgentTool {
Ok((subagent, session_info))
})?;
- match subagent.send(input.message, cx).await {
- Ok(output) => {
- session_info.message_end_index =
- cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1)));
- event_stream.update_fields_with_meta(
- acp::ToolCallUpdateFields::new().content(vec![output.clone().into()]),
- Some(acp::Meta::from_iter([(
- SUBAGENT_SESSION_INFO_META_KEY.into(),
- serde_json::json!(&session_info),
- )])),
- );
+ let send_result = subagent.send(input.message, cx).await;
+
+ session_info.message_end_index =
+ cx.update(|cx| Some(subagent.num_entries(cx).saturating_sub(1)));
+
+ let meta = Some(acp::Meta::from_iter([(
+ SUBAGENT_SESSION_INFO_META_KEY.into(),
+ serde_json::json!(&session_info),
+ )]));
+
+ let (output, result) = match send_result {
+ Ok(output) => (
+ output.clone(),
Ok(SpawnAgentToolOutput::Success {
session_id: session_info.session_id.clone(),
session_info,
output,
- })
+ }),
+ ),
+ Err(e) => {
+ let error = e.to_string();
+ (
+ error.clone(),
+ Err(SpawnAgentToolOutput::Error {
+ session_id: Some(session_info.session_id.clone()),
+ error,
+ session_info: Some(session_info),
+ }),
+ )
}
- Err(e) => Err(SpawnAgentToolOutput::Error {
- session_id: Some(session_info.session_id.clone()),
- error: e.to_string(),
- session_info: Some(session_info),
- }),
- }
+ };
+ event_stream.update_fields_with_meta(
+ acp::ToolCallUpdateFields::new().content(vec![output.into()]),
+ meta,
+ );
+ result
})
}
diff --git a/crates/agent/src/tools/streaming_edit_file_tool.rs b/crates/agent/src/tools/streaming_edit_file_tool.rs
index 6b1c70931ce00842a7cf427c492b2512bc7a3750..74e91ee1d2607ad1f68a5d327cd0519699cce88b 100644
--- a/crates/agent/src/tools/streaming_edit_file_tool.rs
+++ b/crates/agent/src/tools/streaming_edit_file_tool.rs
@@ -73,7 +73,7 @@ pub struct StreamingEditFileToolInput {
///
/// `frontend/db.js`
///
- pub path: String,
+ pub path: PathBuf,
/// The mode of operation on the file. Possible values:
/// - 'write': Replace the entire contents of the file. If the file doesn't exist, it will be created. Requires 'content' field.
@@ -93,7 +93,7 @@ pub struct StreamingEditFileToolInput {
pub edits: Option>,
}
-#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
+#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum StreamingEditFileMode {
/// Overwrite the file with new content (replacing any existing content).
@@ -187,20 +187,23 @@ impl From for LanguageModelToolResultContent {
}
pub struct StreamingEditFileTool {
+ project: Entity,
thread: WeakEntity,
+ action_log: Entity,
language_registry: Arc,
- project: Entity,
}
impl StreamingEditFileTool {
pub fn new(
project: Entity,
thread: WeakEntity,
+ action_log: Entity,
language_registry: Arc,
) -> Self {
Self {
project,
thread,
+ action_log,
language_registry,
}
}
@@ -264,11 +267,11 @@ impl AgentTool for StreamingEditFileTool {
.read(cx)
.short_full_path_for_project_path(&project_path, cx)
})
- .unwrap_or(input.path)
+ .unwrap_or(input.path.to_string_lossy().into_owned())
.into(),
Err(raw_input) => {
- if let Some(input) =
- serde_json::from_value::(raw_input).ok()
+ if let Ok(input) =
+ serde_json::from_value::(raw_input)
{
let path = input.path.unwrap_or_default();
let path = path.trim();
@@ -311,24 +314,37 @@ impl AgentTool for StreamingEditFileTool {
partial = input.recv_partial().fuse() => {
let Some(partial_value) = partial else { break };
if let Ok(parsed) = serde_json::from_value::(partial_value) {
- if state.is_none() && let Some(path_str) = &parsed.path
- && let Some(display_description) = &parsed.display_description
- && let Some(mode) = parsed.mode.clone() {
- state = Some(
- EditSession::new(
- path_str,
- display_description,
- mode,
- &self,
- &event_stream,
- cx,
- )
- .await?,
- );
+ if state.is_none()
+ && let StreamingEditFileToolPartialInput {
+ path: Some(path),
+ display_description: Some(display_description),
+ mode: Some(mode),
+ ..
+ } = &parsed
+ {
+ match EditSession::new(
+ &PathBuf::from(path),
+ display_description,
+ *mode,
+ &self,
+ &event_stream,
+ cx,
+ )
+ .await
+ {
+ Ok(session) => state = Some(session),
+ Err(e) => {
+ log::error!("Failed to create edit session: {}", e);
+ return Err(e);
+ }
+ }
}
if let Some(state) = &mut state {
- state.process(parsed, &self, &event_stream, cx)?;
+ if let Err(e) = state.process(parsed, &self, &event_stream, cx) {
+ log::error!("Failed to process edit: {}", e);
+ return Err(e);
+ }
}
}
}
@@ -341,22 +357,39 @@ impl AgentTool for StreamingEditFileTool {
input
.recv()
.await
- .map_err(|e| StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}")))?;
+ .map_err(|e| {
+ let err = StreamingEditFileToolOutput::error(format!("Failed to receive tool input: {e}"));
+ log::error!("Failed to receive tool input: {e}");
+ err
+ })?;
let mut state = if let Some(state) = state {
state
} else {
- EditSession::new(
+ match EditSession::new(
&full_input.path,
&full_input.display_description,
- full_input.mode.clone(),
+ full_input.mode,
&self,
&event_stream,
cx,
)
- .await?
+ .await
+ {
+ Ok(session) => session,
+ Err(e) => {
+ log::error!("Failed to create edit session: {}", e);
+ return Err(e);
+ }
+ }
};
- state.finalize(full_input, &self, &event_stream, cx).await
+ match state.finalize(full_input, &self, &event_stream, cx).await {
+ Ok(output) => Ok(output),
+ Err(e) => {
+ log::error!("Failed to finalize edit: {}", e);
+ Err(e)
+ }
+ }
})
}
@@ -409,7 +442,7 @@ enum EditPipeline {
original_snapshot: text::BufferSnapshot,
},
Edit {
- edits: Vec,
+ current_edit: Option,
},
}
@@ -424,73 +457,51 @@ enum EditPipelineEntry {
reindenter: Reindenter,
original_snapshot: text::BufferSnapshot,
},
- Done,
}
impl EditPipeline {
- fn new(mode: StreamingEditFileMode, snapshot: text::BufferSnapshot) -> Self {
+ fn new(mode: StreamingEditFileMode, original_snapshot: text::BufferSnapshot) -> Self {
match mode {
StreamingEditFileMode::Write => Self::Write {
- content_written: false,
- streaming_diff: StreamingDiff::new(snapshot.text()),
+ streaming_diff: StreamingDiff::new(original_snapshot.text()),
line_diff: LineDiff::default(),
- original_snapshot: snapshot,
+ content_written: false,
+ original_snapshot,
},
- StreamingEditFileMode::Edit => Self::Edit { edits: Vec::new() },
- }
- }
-
- fn edits(&mut self) -> &mut [EditPipelineEntry] {
- match self {
- EditPipeline::Write { .. } => &mut [],
- EditPipeline::Edit { edits } => edits,
+ StreamingEditFileMode::Edit => Self::Edit { current_edit: None },
}
}
- fn ensure_resolving_old_text(
- &mut self,
- edit_index: usize,
- buffer: &Entity,
- cx: &mut AsyncApp,
- ) {
- match self {
- EditPipeline::Write { .. } => {}
- EditPipeline::Edit { edits } => {
- while edits.len() <= edit_index {
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot());
- edits.push(EditPipelineEntry::ResolvingOldText {
- matcher: StreamingFuzzyMatcher::new(snapshot),
- });
- }
- }
+ fn ensure_resolving_old_text(&mut self, buffer: &Entity, cx: &mut AsyncApp) {
+ if let Self::Edit { current_edit } = self
+ && current_edit.is_none()
+ {
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot());
+ *current_edit = Some(EditPipelineEntry::ResolvingOldText {
+ matcher: StreamingFuzzyMatcher::new(snapshot),
+ });
}
}
}
-/// Compute the `LineIndent` of the first line in a set of query lines.
-fn query_first_line_indent(query_lines: &[String]) -> text::LineIndent {
- let first_line = query_lines.first().map(|s| s.as_str()).unwrap_or("");
- text::LineIndent::from_iter(first_line.chars())
-}
-
impl EditSession {
async fn new(
- path_str: &str,
+ path: &PathBuf,
display_description: &str,
mode: StreamingEditFileMode,
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
) -> Result {
- let path = PathBuf::from(path_str);
let project_path = cx
- .update(|cx| resolve_path(mode.clone(), &path, &tool.project, cx))
+ .update(|cx| resolve_path(mode, &path, &tool.project, cx))
.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
let Some(abs_path) = cx.update(|cx| tool.project.read(cx).absolute_path(&project_path, cx))
else {
return Err(StreamingEditFileToolOutput::error(format!(
- "Worktree at '{path_str}' does not exist"
+ "Worktree at '{}' does not exist",
+ path.to_string_lossy()
)));
};
@@ -520,13 +531,8 @@ impl EditSession {
}
}) as Box);
- tool.thread
- .update(cx, |thread, cx| {
- thread
- .action_log()
- .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))
- })
- .ok();
+ tool.action_log
+ .update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let old_text = cx
@@ -555,69 +561,31 @@ impl EditSession {
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
) -> Result {
- let Self {
- buffer,
- old_text,
- diff,
- abs_path,
- parser,
- pipeline,
- ..
- } = self;
-
- let action_log = tool
- .thread
- .read_with(cx, |thread, _cx| thread.action_log().clone())
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
+ let old_text = self.old_text.clone();
match input.mode {
StreamingEditFileMode::Write => {
- action_log.update(cx, |log, cx| {
- log.buffer_created(buffer.clone(), cx);
- });
let content = input.content.ok_or_else(|| {
StreamingEditFileToolOutput::error("'content' field is required for write mode")
})?;
- let events = parser.finalize_content(&content);
- Self::process_events(
- &events,
- buffer,
- diff,
- pipeline,
- abs_path,
- tool,
- event_stream,
- cx,
- )?;
+ let events = self.parser.finalize_content(&content);
+ self.process_events(&events, tool, event_stream, cx)?;
+
+ tool.action_log.update(cx, |log, cx| {
+ log.buffer_created(self.buffer.clone(), cx);
+ });
}
StreamingEditFileMode::Edit => {
let edits = input.edits.ok_or_else(|| {
StreamingEditFileToolOutput::error("'edits' field is required for edit mode")
})?;
-
- let final_edits = edits
- .into_iter()
- .map(|e| Edit {
- old_text: e.old_text,
- new_text: e.new_text,
- })
- .collect::>();
- let events = parser.finalize_edits(&final_edits);
- Self::process_events(
- &events,
- buffer,
- diff,
- pipeline,
- abs_path,
- tool,
- event_stream,
- cx,
- )?;
+ let events = self.parser.finalize_edits(&edits);
+ self.process_events(&events, tool, event_stream, cx)?;
}
}
- let format_on_save_enabled = buffer.read_with(cx, |buffer, cx| {
+ let format_on_save_enabled = self.buffer.read_with(cx, |buffer, cx| {
let settings = language_settings::language_settings(
buffer.language().map(|l| l.name()),
buffer.file(),
@@ -627,13 +595,13 @@ impl EditSession {
});
if format_on_save_enabled {
- action_log.update(cx, |log, cx| {
- log.buffer_edited(buffer.clone(), cx);
+ tool.action_log.update(cx, |log, cx| {
+ log.buffer_edited(self.buffer.clone(), cx);
});
let format_task = tool.project.update(cx, |project, cx| {
project.format(
- HashSet::from_iter([buffer.clone()]),
+ HashSet::from_iter([self.buffer.clone()]),
LspFormatTarget::Buffers,
false,
FormatTrigger::Save,
@@ -648,9 +616,9 @@ impl EditSession {
};
}
- let save_task = tool
- .project
- .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
+ let save_task = tool.project.update(cx, |project, cx| {
+ project.save_buffer(self.buffer.clone(), cx)
+ });
futures::select! {
result = save_task.fuse() => { result.map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?; },
_ = event_stream.cancelled_by_user().fuse() => {
@@ -658,23 +626,11 @@ impl EditSession {
}
};
- action_log.update(cx, |log, cx| {
- log.buffer_edited(buffer.clone(), cx);
+ tool.action_log.update(cx, |log, cx| {
+ log.buffer_edited(self.buffer.clone(), cx);
});
- if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
- buffer.file().and_then(|file| file.disk_state().mtime())
- }) {
- tool.thread
- .update(cx, |thread, _| {
- thread
- .file_read_times
- .insert(abs_path.to_path_buf(), new_mtime);
- })
- .map_err(|e| StreamingEditFileToolOutput::error(e.to_string()))?;
- }
-
- let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ let new_snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let (new_text, unified_diff) = cx
.background_spawn({
let new_snapshot = new_snapshot.clone();
@@ -688,7 +644,7 @@ impl EditSession {
.await;
let output = StreamingEditFileToolOutput::Success {
- input_path: PathBuf::from(input.path),
+ input_path: input.path,
new_text,
old_text: old_text.clone(),
diff: unified_diff,
@@ -707,31 +663,13 @@ impl EditSession {
StreamingEditFileMode::Write => {
if let Some(content) = &partial.content {
let events = self.parser.push_content(content);
- Self::process_events(
- &events,
- &self.buffer,
- &self.diff,
- &mut self.pipeline,
- &self.abs_path,
- tool,
- event_stream,
- cx,
- )?;
+ self.process_events(&events, tool, event_stream, cx)?;
}
}
StreamingEditFileMode::Edit => {
if let Some(edits) = partial.edits {
let events = self.parser.push_edits(&edits);
- Self::process_events(
- &events,
- &self.buffer,
- &self.diff,
- &mut self.pipeline,
- &self.abs_path,
- tool,
- event_stream,
- cx,
- )?;
+ self.process_events(&events, tool, event_stream, cx)?;
}
}
}
@@ -739,52 +677,43 @@ impl EditSession {
}
fn process_events(
+ &mut self,
events: &[ToolEditEvent],
- buffer: &Entity,
- diff: &Entity,
- pipeline: &mut EditPipeline,
- abs_path: &PathBuf,
tool: &StreamingEditFileTool,
event_stream: &ToolCallEventStream,
cx: &mut AsyncApp,
) -> Result<(), StreamingEditFileToolOutput> {
- let action_log = tool
- .thread
- .read_with(cx, |thread, _cx| thread.action_log().clone())
- .ok();
-
for event in events {
match event {
ToolEditEvent::ContentChunk { chunk } => {
let EditPipeline::Write {
- original_snapshot,
- content_written,
streaming_diff,
line_diff,
- } = pipeline
+ content_written,
+ original_snapshot,
+ } = &mut self.pipeline
else {
continue;
};
- let (buffer_id, insert_at) = buffer.read_with(cx, |buffer, _cx| {
- let insert_at = if !*content_written && buffer.len() > 0 {
- 0..buffer.len()
- } else {
- let len = buffer.len();
- len..len
- };
- (buffer.remote_id(), insert_at)
- });
+ let (buffer_id, buffer_len) = self
+ .buffer
+ .read_with(cx, |buffer, _cx| (buffer.remote_id(), buffer.len()));
+ let edit_range = if *content_written {
+ buffer_len..buffer_len
+ } else {
+ 0..buffer_len
+ };
- let char_ops = streaming_diff.push_new(chunk);
agent_edit_buffer(
- buffer,
- [(insert_at, chunk.as_str())],
- action_log.as_ref(),
+ &self.buffer,
+ [(edit_range, chunk.as_str())],
+ &tool.action_log,
cx,
);
+ let char_ops = streaming_diff.push_new(chunk);
line_diff.push_char_operations(&char_ops, original_snapshot.as_rope());
- diff.update(cx, |diff, cx| {
+ self.diff.update(cx, |diff, cx| {
diff.update_pending(
line_diff.line_operations(),
original_snapshot.clone(),
@@ -794,7 +723,7 @@ impl EditSession {
cx.update(|cx| {
tool.set_agent_location(
- buffer.downgrade(),
+ self.buffer.downgrade(),
text::Anchor::max_for_buffer(buffer_id),
cx,
);
@@ -803,27 +732,27 @@ impl EditSession {
}
ToolEditEvent::OldTextChunk {
- edit_index,
- chunk,
- done: false,
+ chunk, done: false, ..
} => {
- pipeline.ensure_resolving_old_text(*edit_index, buffer, cx);
+ self.pipeline.ensure_resolving_old_text(&self.buffer, cx);
+ let EditPipeline::Edit { current_edit } = &mut self.pipeline else {
+ continue;
+ };
- if let EditPipelineEntry::ResolvingOldText { matcher } =
- &mut pipeline.edits()[*edit_index]
+ if let Some(EditPipelineEntry::ResolvingOldText { matcher }) = current_edit
+ && !chunk.is_empty()
{
- if !chunk.is_empty() {
- if let Some(match_range) = matcher.push(chunk, None) {
- let anchor_range = buffer.read_with(cx, |buffer, _cx| {
- buffer.anchor_range_between(match_range.clone())
- });
- diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
-
- cx.update(|cx| {
- let position = buffer.read(cx).anchor_before(match_range.end);
- tool.set_agent_location(buffer.downgrade(), position, cx);
- });
- }
+ if let Some(match_range) = matcher.push(chunk, None) {
+ let anchor_range = self.buffer.read_with(cx, |buffer, _cx| {
+ buffer.anchor_range_between(match_range.clone())
+ });
+ self.diff
+ .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
+
+ cx.update(|cx| {
+ let position = self.buffer.read(cx).anchor_before(match_range.end);
+ tool.set_agent_location(self.buffer.downgrade(), position, cx);
+ });
}
}
}
@@ -833,100 +762,81 @@ impl EditSession {
chunk,
done: true,
} => {
- pipeline.ensure_resolving_old_text(*edit_index, buffer, cx);
+ self.pipeline.ensure_resolving_old_text(&self.buffer, cx);
+ let EditPipeline::Edit { current_edit } = &mut self.pipeline else {
+ continue;
+ };
- let EditPipelineEntry::ResolvingOldText { matcher } =
- &mut pipeline.edits()[*edit_index]
- else {
+ let Some(EditPipelineEntry::ResolvingOldText { matcher }) = current_edit else {
continue;
};
if !chunk.is_empty() {
matcher.push(chunk, None);
}
- let matches = matcher.finish();
-
- if matches.is_empty() {
- return Err(StreamingEditFileToolOutput::error(format!(
- "Could not find matching text for edit at index {}. \
- The old_text did not match any content in the file. \
- Please read the file again to get the current content.",
- edit_index,
- )));
- }
- if matches.len() > 1 {
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
- let lines = matches
- .iter()
- .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
- .collect::>()
- .join(", ");
- return Err(StreamingEditFileToolOutput::error(format!(
- "Edit {} matched multiple locations in the file at lines: {}. \
- Please provide more context in old_text to uniquely \
- identify the location.",
- edit_index, lines
- )));
- }
-
- let range = matches.into_iter().next().expect("checked len above");
+ let range = extract_match(matcher.finish(), &self.buffer, edit_index, cx)?;
- let anchor_range = buffer
+ let anchor_range = self
+ .buffer
.read_with(cx, |buffer, _cx| buffer.anchor_range_between(range.clone()));
- diff.update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
+ self.diff
+ .update(cx, |diff, cx| diff.reveal_range(anchor_range, cx));
- let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ let snapshot = self.buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
let line = snapshot.offset_to_point(range.start).row;
event_stream.update_fields(
- ToolCallUpdateFields::new()
- .locations(vec![ToolCallLocation::new(abs_path).line(Some(line))]),
+ ToolCallUpdateFields::new().locations(vec![
+ ToolCallLocation::new(&self.abs_path).line(Some(line)),
+ ]),
);
- let EditPipelineEntry::ResolvingOldText { matcher } =
- &pipeline.edits()[*edit_index]
- else {
- continue;
- };
- let buffer_indent =
- snapshot.line_indent_for_row(snapshot.offset_to_point(range.start).row);
- let query_indent = query_first_line_indent(matcher.query_lines());
+ let buffer_indent = snapshot.line_indent_for_row(line);
+ let query_indent = text::LineIndent::from_iter(
+ matcher
+ .query_lines()
+ .first()
+ .map(|s| s.as_str())
+ .unwrap_or("")
+ .chars(),
+ );
let indent_delta = compute_indent_delta(buffer_indent, query_indent);
let old_text_in_buffer =
snapshot.text_for_range(range.clone()).collect::();
- let text_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.text_snapshot());
- pipeline.edits()[*edit_index] = EditPipelineEntry::StreamingNewText {
+ let text_snapshot = self
+ .buffer
+ .read_with(cx, |buffer, _cx| buffer.text_snapshot());
+ *current_edit = Some(EditPipelineEntry::StreamingNewText {
streaming_diff: StreamingDiff::new(old_text_in_buffer),
line_diff: LineDiff::default(),
edit_cursor: range.start,
reindenter: Reindenter::new(indent_delta),
original_snapshot: text_snapshot,
- };
+ });
cx.update(|cx| {
- let position = buffer.read(cx).anchor_before(range.end);
- tool.set_agent_location(buffer.downgrade(), position, cx);
+ let position = self.buffer.read(cx).anchor_before(range.end);
+ tool.set_agent_location(self.buffer.downgrade(), position, cx);
});
}
ToolEditEvent::NewTextChunk {
- edit_index,
- chunk,
- done: false,
+ chunk, done: false, ..
} => {
- if *edit_index >= pipeline.edits().len() {
+ let EditPipeline::Edit { current_edit } = &mut self.pipeline else {
continue;
- }
- let EditPipelineEntry::StreamingNewText {
+ };
+
+ let Some(EditPipelineEntry::StreamingNewText {
streaming_diff,
line_diff,
edit_cursor,
reindenter,
original_snapshot,
..
- } = &mut pipeline.edits()[*edit_index]
+ }) = current_edit
else {
continue;
};
@@ -937,16 +847,16 @@ impl EditSession {
}
let char_ops = streaming_diff.push_new(&reindented);
- Self::apply_char_operations(
+ apply_char_operations(
&char_ops,
- buffer,
+ &self.buffer,
original_snapshot,
edit_cursor,
- action_log.as_ref(),
+ &tool.action_log,
cx,
);
line_diff.push_char_operations(&char_ops, original_snapshot.as_rope());
- diff.update(cx, |diff, cx| {
+ self.diff.update(cx, |diff, cx| {
diff.update_pending(
line_diff.line_operations(),
original_snapshot.clone(),
@@ -956,29 +866,23 @@ impl EditSession {
let position = original_snapshot.anchor_before(*edit_cursor);
cx.update(|cx| {
- tool.set_agent_location(buffer.downgrade(), position, cx);
+ tool.set_agent_location(self.buffer.downgrade(), position, cx);
});
}
ToolEditEvent::NewTextChunk {
- edit_index,
- chunk,
- done: true,
+ chunk, done: true, ..
} => {
- if *edit_index >= pipeline.edits().len() {
+ let EditPipeline::Edit { current_edit } = &mut self.pipeline else {
continue;
- }
-
- let EditPipelineEntry::StreamingNewText {
+ };
+ let Some(EditPipelineEntry::StreamingNewText {
mut streaming_diff,
mut line_diff,
mut edit_cursor,
mut reindenter,
original_snapshot,
- } = std::mem::replace(
- &mut pipeline.edits()[*edit_index],
- EditPipelineEntry::Done,
- )
+ }) = current_edit.take()
else {
continue;
};
@@ -989,16 +893,16 @@ impl EditSession {
if !final_text.is_empty() {
let char_ops = streaming_diff.push_new(&final_text);
- Self::apply_char_operations(
+ apply_char_operations(
&char_ops,
- buffer,
+ &self.buffer,
&original_snapshot,
&mut edit_cursor,
- action_log.as_ref(),
+ &tool.action_log,
cx,
);
line_diff.push_char_operations(&char_ops, original_snapshot.as_rope());
- diff.update(cx, |diff, cx| {
+ self.diff.update(cx, |diff, cx| {
diff.update_pending(
line_diff.line_operations(),
original_snapshot.clone(),
@@ -1008,17 +912,17 @@ impl EditSession {
}
let remaining_ops = streaming_diff.finish();
- Self::apply_char_operations(
+ apply_char_operations(
&remaining_ops,
- buffer,
+ &self.buffer,
&original_snapshot,
&mut edit_cursor,
- action_log.as_ref(),
+ &tool.action_log,
cx,
);
line_diff.push_char_operations(&remaining_ops, original_snapshot.as_rope());
line_diff.finish(original_snapshot.as_rope());
- diff.update(cx, |diff, cx| {
+ self.diff.update(cx, |diff, cx| {
diff.update_pending(
line_diff.line_operations(),
original_snapshot.clone(),
@@ -1028,42 +932,73 @@ impl EditSession {
let position = original_snapshot.anchor_before(edit_cursor);
cx.update(|cx| {
- tool.set_agent_location(buffer.downgrade(), position, cx);
+ tool.set_agent_location(self.buffer.downgrade(), position, cx);
});
}
}
}
Ok(())
}
+}
- fn apply_char_operations(
- ops: &[CharOperation],
- buffer: &Entity,
- snapshot: &text::BufferSnapshot,
- edit_cursor: &mut usize,
- action_log: Option<&Entity>,
- cx: &mut AsyncApp,
- ) {
- for op in ops {
- match op {
- CharOperation::Insert { text } => {
- let anchor = snapshot.anchor_after(*edit_cursor);
- agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx);
- }
- CharOperation::Delete { bytes } => {
- let delete_end = *edit_cursor + bytes;
- let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
- agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
- *edit_cursor = delete_end;
- }
- CharOperation::Keep { bytes } => {
- *edit_cursor += bytes;
- }
+fn apply_char_operations(
+ ops: &[CharOperation],
+ buffer: &Entity,
+ snapshot: &text::BufferSnapshot,
+ edit_cursor: &mut usize,
+ action_log: &Entity,
+ cx: &mut AsyncApp,
+) {
+ for op in ops {
+ match op {
+ CharOperation::Insert { text } => {
+ let anchor = snapshot.anchor_after(*edit_cursor);
+ agent_edit_buffer(&buffer, [(anchor..anchor, text.as_str())], action_log, cx);
+ }
+ CharOperation::Delete { bytes } => {
+ let delete_end = *edit_cursor + bytes;
+ let anchor_range = snapshot.anchor_range_around(*edit_cursor..delete_end);
+ agent_edit_buffer(&buffer, [(anchor_range, "")], action_log, cx);
+ *edit_cursor = delete_end;
+ }
+ CharOperation::Keep { bytes } => {
+ *edit_cursor += bytes;
}
}
}
}
+fn extract_match(
+ matches: Vec>,
+ buffer: &Entity,
+ edit_index: &usize,
+ cx: &mut AsyncApp,
+) -> Result, StreamingEditFileToolOutput> {
+ match matches.len() {
+ 0 => Err(StreamingEditFileToolOutput::error(format!(
+ "Could not find matching text for edit at index {}. \
+ The old_text did not match any content in the file. \
+ Please read the file again to get the current content.",
+ edit_index,
+ ))),
+ 1 => Ok(matches.into_iter().next().unwrap()),
+ _ => {
+ let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot());
+ let lines = matches
+ .iter()
+ .map(|r| (snapshot.offset_to_point(r.start).row + 1).to_string())
+ .collect::>()
+ .join(", ");
+ Err(StreamingEditFileToolOutput::error(format!(
+ "Edit {} matched multiple locations in the file at lines: {}. \
+ Please provide more context in old_text to uniquely \
+ identify the location.",
+ edit_index, lines
+ )))
+ }
+ }
+}
+
/// Edits a buffer and reports the edit to the action log in the same effect
/// cycle. This ensures the action log's subscription handler sees the version
/// already updated by `buffer_edited`, so it does not misattribute the agent's
@@ -1071,7 +1006,7 @@ impl EditSession {
fn agent_edit_buffer(
buffer: &Entity,
edits: I,
- action_log: Option<&Entity>,
+ action_log: &Entity,
cx: &mut AsyncApp,
) where
I: IntoIterator- , T)>,
@@ -1082,9 +1017,7 @@ fn agent_edit_buffer(
buffer.update(cx, |buffer, cx| {
buffer.edit(edits, None, cx);
});
- if let Some(action_log) = action_log {
- action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
- }
+ action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
}
@@ -1094,8 +1027,10 @@ fn ensure_buffer_saved(
tool: &StreamingEditFileTool,
cx: &mut AsyncApp,
) -> Result<(), StreamingEditFileToolOutput> {
- let check_result = tool.thread.update(cx, |thread, cx| {
- let last_read = thread.file_read_times.get(abs_path).copied();
+ let last_read_mtime = tool
+ .action_log
+ .read_with(cx, |log, _| log.file_read_time(abs_path));
+ let check_result = tool.thread.read_with(cx, |thread, cx| {
let current = buffer
.read(cx)
.file()
@@ -1103,12 +1038,10 @@ fn ensure_buffer_saved(
let dirty = buffer.read(cx).is_dirty();
let has_save = thread.has_tool(SaveFileTool::NAME);
let has_restore = thread.has_tool(RestoreFileFromDiskTool::NAME);
- (last_read, current, dirty, has_save, has_restore)
+ (current, dirty, has_save, has_restore)
});
- let Ok((last_read_mtime, current_mtime, is_dirty, has_save_tool, has_restore_tool)) =
- check_result
- else {
+ let Ok((current_mtime, is_dirty, has_save_tool, has_restore_tool)) = check_result else {
return Ok(());
};
@@ -1225,42 +1158,17 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_create_file(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"dir": {}})).await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Create new file".into(),
- path: "root/dir/new_file.txt".into(),
- mode: StreamingEditFileMode::Write,
- content: Some("Hello, World!".into()),
- edits: None,
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Create new file".into(),
+ path: "root/dir/new_file.txt".into(),
+ mode: StreamingEditFileMode::Write,
+ content: Some("Hello, World!".into()),
+ edits: None,
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1276,43 +1184,18 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_overwrite_file(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"file.txt": "old content"}))
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "old content"})).await;
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Overwrite file".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Write,
- content: Some("new content".into()),
- edits: None,
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Overwrite file".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Write,
+ content: Some("new content".into()),
+ edits: None,
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1331,51 +1214,21 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_granular_edits(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Edit lines".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![Edit {
- old_text: "line 2".into(),
- new_text: "modified line 2".into(),
- }]),
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Edit lines".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![Edit {
+ old_text: "line 2".into(),
+ new_text: "modified line 2".into(),
+ }]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1390,57 +1243,30 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_multiple_edits(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"
- }),
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Edit multiple lines".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![
- Edit {
- old_text: "line 5".into(),
- new_text: "modified line 5".into(),
- },
- Edit {
- old_text: "line 1".into(),
- new_text: "modified line 1".into(),
- },
- ]),
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Edit multiple lines".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![
+ Edit {
+ old_text: "line 5".into(),
+ new_text: "modified line 5".into(),
+ },
+ Edit {
+ old_text: "line 1".into(),
+ new_text: "modified line 1".into(),
+ },
+ ]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1458,57 +1284,30 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_adjacent_edits(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"
- }),
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Edit adjacent lines".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![
- Edit {
- old_text: "line 2".into(),
- new_text: "modified line 2".into(),
- },
- Edit {
- old_text: "line 3".into(),
- new_text: "modified line 3".into(),
- },
- ]),
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Edit adjacent lines".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![
+ Edit {
+ old_text: "line 2".into(),
+ new_text: "modified line 2".into(),
+ },
+ Edit {
+ old_text: "line 3".into(),
+ new_text: "modified line 3".into(),
+ },
+ ]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1526,57 +1325,30 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_ascending_order_edits(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"
- }),
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Edit multiple lines in ascending order".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![
- Edit {
- old_text: "line 1".into(),
- new_text: "modified line 1".into(),
- },
- Edit {
- old_text: "line 5".into(),
- new_text: "modified line 5".into(),
- },
- ]),
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Edit multiple lines in ascending order".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![
+ Edit {
+ old_text: "line 1".into(),
+ new_text: "modified line 1".into(),
+ },
+ Edit {
+ old_text: "line 5".into(),
+ new_text: "modified line 5".into(),
+ },
+ ]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1594,45 +1366,20 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_nonexistent_file(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({})).await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await;
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Some edit".into(),
- path: "root/nonexistent_file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![Edit {
- old_text: "foo".into(),
- new_text: "bar".into(),
- }]),
- };
- Arc::new(StreamingEditFileTool::new(
- project,
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Some edit".into(),
+ path: "root/nonexistent_file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![Edit {
+ old_text: "foo".into(),
+ new_text: "bar".into(),
+ }]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1647,46 +1394,21 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_failed_match(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"file.txt": "hello world"}))
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello world"})).await;
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Edit file".into(),
- path: "root/file.txt".into(),
- mode: StreamingEditFileMode::Edit,
- content: None,
- edits: Some(vec![Edit {
- old_text: "nonexistent text that is not in the file".into(),
- new_text: "replacement".into(),
- }]),
- };
- Arc::new(StreamingEditFileTool::new(
- project,
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Edit file".into(),
+ path: "root/file.txt".into(),
+ mode: StreamingEditFileMode::Edit,
+ content: None,
+ edits: Some(vec![Edit {
+ old_text: "nonexistent text that is not in the file".into(),
+ new_text: "replacement".into(),
+ }]),
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -1704,42 +1426,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_early_buffer_open(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send partials simulating LLM streaming: description first, then path, then mode
sender.send_partial(json!({"display_description": "Edit lines"}));
@@ -1776,42 +1467,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_path_completeness_heuristic(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "hello world"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello world"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send partial with path but NO mode — path should NOT be treated as complete
sender.send_partial(json!({
@@ -1845,43 +1505,12 @@ mod tests {
#[gpui::test]
async fn test_streaming_cancellation_during_partials(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "hello world"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello world"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver, mut cancellation_tx) =
ToolCallEventStream::test_with_cancellation();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send a partial
sender.send_partial(json!({"display_description": "Edit"}));
@@ -1907,42 +1536,14 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_with_multiple_partials(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"
- }),
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Simulate fine-grained streaming of the JSON
sender.send_partial(json!({"display_description": "Edit multiple"}));
@@ -2003,36 +1604,10 @@ mod tests {
#[gpui::test]
async fn test_streaming_create_file_with_partials(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"dir": {}})).await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Stream partials for create mode
sender.send_partial(json!({"display_description": "Create new file"}));
@@ -2070,42 +1645,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_no_partials_direct_final(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send final immediately with no partials (simulates non-streaming path)
sender.send_final(json!({
@@ -2124,42 +1668,14 @@ mod tests {
#[gpui::test]
async fn test_streaming_incremental_edit_application(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"
- }),
+ let (tool, project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "line 1\nline 2\nline 3\nline 4\nline 5\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Stream description, path, mode
sender.send_partial(json!({"display_description": "Edit multiple lines"}));
@@ -2253,42 +1769,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_incremental_three_edits(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "aaa\nbbb\nccc\nddd\neee\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Setup: description + path + mode
sender.send_partial(json!({
@@ -2373,43 +1858,12 @@ mod tests {
}
#[gpui::test]
- async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ async fn test_streaming_edit_failure_mid_stream(cx: &mut TestAppContext) {
+ let (tool, project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Setup
sender.send_partial(json!({
@@ -2486,42 +1940,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_single_edit_no_incremental(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "hello world\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello world\n"})).await;
let (sender, input) = ToolInput::::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Setup + single edit that stays in-progress (no second edit to prove completion)
sender.send_partial(json!({
@@ -2565,44 +1988,12 @@ mod tests {
#[gpui::test]
async fn test_streaming_input_partials_then_final(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
let (sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
-
let (event_stream, _event_rx) = ToolCallEventStream::test();
- let task = cx.update(|cx| {
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(input, event_stream, cx)
- });
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send progressively more complete partial snapshots, as the LLM would
sender.send_partial(json!({
@@ -2642,44 +2033,12 @@ mod tests {
#[gpui::test]
async fn test_streaming_input_sender_dropped_before_final(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "hello world\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello world\n"})).await;
let (sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
-
let (event_stream, _event_rx) = ToolCallEventStream::test();
- let task = cx.update(|cx| {
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(input, event_stream, cx)
- });
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Send a partial then drop the sender without sending final
sender.send_partial(json!({
@@ -2698,41 +2057,14 @@ mod tests {
#[gpui::test]
async fn test_streaming_input_recv_drains_partials(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"dir": {}})).await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
// Create a channel and send multiple partials before a final, then use
// ToolInput::resolved-style immediate delivery to confirm recv() works
// when partials are already buffered.
let (sender, input): (ToolInputSender, ToolInput) =
ToolInput::test();
-
let (event_stream, _event_rx) = ToolCallEventStream::test();
- let task = cx.update(|cx| {
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(input, event_stream, cx)
- });
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Buffer several partials before sending the final
sender.send_partial(json!({"display_description": "Create"}));
@@ -2831,7 +2163,7 @@ mod tests {
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- cx.update(|cx| resolve_path(mode.clone(), &PathBuf::from(path), &project, cx))
+ cx.update(|cx| resolve_path(*mode, &PathBuf::from(path), &project, cx))
}
#[track_caller]
@@ -2846,8 +2178,8 @@ mod tests {
let fs = project::FakeFs::new(cx.executor());
fs.insert_tree("/root", json!({"src": {}})).await;
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+ let (tool, project, action_log, fs, thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
let rust_language = Arc::new(language::Language::new(
language::LanguageConfig {
@@ -2896,9 +2228,10 @@ mod tests {
project.register_buffer_with_language_servers(&buffer, cx)
});
- const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n";
- const FORMATTED_CONTENT: &str =
- "This file was formatted by the fake formatter in the test.\n";
+ const UNFORMATTED_CONTENT: &str = "fn main() {println!(\"Hello!\");}\n";
+ const FORMATTED_CONTENT: &str =
+ "This file was formatted by the fake formatter in the test.\n";
// Get the fake language server and set up formatting handler
let fake_language_server = fake_language_servers.next().await.unwrap();
@@ -2911,20 +2244,6 @@ mod tests {
}
});
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
-
// Test with format_on_save enabled
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
@@ -2940,13 +2259,7 @@ mod tests {
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry.clone(),
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
sender.send_partial(json!({
"display_description": "Create main function",
@@ -2997,13 +2310,14 @@ mod tests {
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
- let tool = Arc::new(StreamingEditFileTool::new(
+ let tool2 = Arc::new(StreamingEditFileTool::new(
project.clone(),
thread.downgrade(),
+ action_log.clone(),
language_registry,
));
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool2.run(input, event_stream, cx));
sender.send_partial(json!({
"display_description": "Update main function",
@@ -3038,7 +2352,6 @@ mod tests {
let fs = project::FakeFs::new(cx.executor());
fs.insert_tree("/root", json!({"src": {}})).await;
-
fs.save(
path!("/root/src/main.rs").as_ref(),
&"initial content".into(),
@@ -3046,22 +2359,9 @@ mod tests {
)
.await
.unwrap();
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
+ let (tool, project, action_log, fs, thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
+ let language_registry = project.read_with(cx, |p, _cx| p.languages().clone());
// Test with remove_trailing_whitespace_on_save enabled
cx.update(|cx| {
@@ -3081,20 +2381,14 @@ mod tests {
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Create main function".into(),
- path: "root/src/main.rs".into(),
- mode: StreamingEditFileMode::Write,
- content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()),
- edits: None,
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry.clone(),
- ))
- .run(
- ToolInput::resolved(input),
+ tool.clone().run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Create main function".into(),
+ path: "root/src/main.rs".into(),
+ mode: StreamingEditFileMode::Write,
+ content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()),
+ edits: None,
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -3126,22 +2420,23 @@ mod tests {
});
});
+ let tool2 = Arc::new(StreamingEditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ action_log.clone(),
+ language_registry,
+ ));
+
let result = cx
.update(|cx| {
- let input = StreamingEditFileToolInput {
- display_description: "Update main function".into(),
- path: "root/src/main.rs".into(),
- mode: StreamingEditFileMode::Write,
- content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()),
- edits: None,
- };
- Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ))
- .run(
- ToolInput::resolved(input),
+ tool2.run(
+ ToolInput::resolved(StreamingEditFileToolInput {
+ display_description: "Update main function".into(),
+ path: "root/src/main.rs".into(),
+ mode: StreamingEditFileMode::Write,
+ content: Some(CONTENT_WITH_TRAILING_WHITESPACE.into()),
+ edits: None,
+ }),
ToolCallEventStream::test().0,
cx,
)
@@ -3161,29 +2456,7 @@ mod tests {
#[gpui::test]
async fn test_streaming_authorize(cx: &mut TestAppContext) {
- init_test(cx);
- let fs = project::FakeFs::new(cx.executor());
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
- fs.insert_tree("/root", json!({})).await;
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(cx, json!({})).await;
// Test 1: Path with .zed component should require confirmation
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
@@ -3304,27 +2577,8 @@ mod tests {
fs.insert_tree("/outside", json!({})).await;
fs.insert_symlink("/root/link", PathBuf::from("/outside"))
.await;
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project,
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
cx.update(|cx| {
let mut settings = agent_settings::AgentSettings::get_global(cx).clone();
@@ -3378,38 +2632,17 @@ mod tests {
path!("/outside"),
json!({
"config.txt": "old content"
- }),
- )
- .await;
- fs.create_symlink(
- path!("/root/link_to_external").as_ref(),
- PathBuf::from("/outside"),
- )
- .await
- .unwrap();
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- cx.executor().run_until_parked();
-
- let language_registry = project.read_with(cx, |project, _| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ }),
+ )
+ .await;
+ fs.create_symlink(
+ path!("/root/link_to_external").as_ref(),
+ PathBuf::from("/outside"),
+ )
+ .await
+ .unwrap();
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let _authorize_task = cx.update(|cx| {
@@ -3454,29 +2687,8 @@ mod tests {
)
.await
.unwrap();
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- cx.executor().run_until_parked();
-
- let language_registry = project.read_with(cx, |project, _| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let authorize_task = cx.update(|cx| {
@@ -3531,29 +2743,8 @@ mod tests {
)
.await
.unwrap();
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- cx.executor().run_until_parked();
-
- let language_registry = project.read_with(cx, |project, _| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await;
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let result = cx
@@ -3582,26 +2773,8 @@ mod tests {
init_test(cx);
let fs = project::FakeFs::new(cx.executor());
fs.insert_tree("/project", json!({})).await;
- let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await;
let test_cases = vec![
(
@@ -3644,7 +2817,6 @@ mod tests {
async fn test_streaming_needs_confirmation_with_multiple_worktrees(cx: &mut TestAppContext) {
init_test(cx);
let fs = project::FakeFs::new(cx.executor());
-
fs.insert_tree(
"/workspace/frontend",
json!({
@@ -3672,36 +2844,16 @@ mod tests {
}),
)
.await;
-
- let project = Project::test(
- fs.clone(),
- [
+ let (tool, _project, _action_log, _fs, _thread) = setup_test_with_fs(
+ cx,
+ fs,
+ &[
path!("/workspace/frontend").as_ref(),
path!("/workspace/backend").as_ref(),
path!("/workspace/shared").as_ref(),
],
- cx,
)
.await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry.clone(),
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
let test_cases = vec![
("frontend/src/main.js", false, "File in first worktree"),
@@ -3756,26 +2908,8 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry.clone(),
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await;
let test_cases = vec![
("", false, "Empty path is treated as project root"),
@@ -3831,26 +2965,8 @@ mod tests {
}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry.clone(),
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await;
let modes = vec![StreamingEditFileMode::Edit, StreamingEditFileMode::Write];
@@ -3901,26 +3017,9 @@ mod tests {
async fn test_streaming_initial_title_with_partial_input(cx: &mut TestAppContext) {
init_test(cx);
let fs = project::FakeFs::new(cx.executor());
- let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let tool = Arc::new(StreamingEditFileTool::new(
- project,
- thread.downgrade(),
- language_registry,
- ));
+ fs.insert_tree("/project", json!({})).await;
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test_with_fs(cx, fs, &[path!("/project").as_ref()]).await;
cx.update(|cx| {
assert_eq!(
@@ -3975,33 +3074,15 @@ mod tests {
init_test(cx);
let fs = project::FakeFs::new(cx.executor());
fs.insert_tree("/", json!({"main.rs": ""})).await;
-
- let project = Project::test(fs.clone(), [path!("/").as_ref()], cx).await;
- let languages = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry.clone(),
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
+ let (tool, project, action_log, _fs, thread) =
+ setup_test_with_fs(cx, fs, &[path!("/").as_ref()]).await;
+ let language_registry = project.read_with(cx, |p, _cx| p.languages().clone());
// Ensure the diff is finalized after the edit completes.
{
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- languages.clone(),
- ));
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let edit = cx.update(|cx| {
- tool.run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Edit file".into(),
path: path!("/main.rs").into(),
@@ -4026,7 +3107,8 @@ mod tests {
let tool = Arc::new(StreamingEditFileTool::new(
project.clone(),
thread.downgrade(),
- languages.clone(),
+ action_log,
+ language_registry,
));
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
let edit = cx.update(|cx| {
@@ -4053,42 +3135,12 @@ mod tests {
#[gpui::test]
async fn test_streaming_consecutive_edits_work(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "test.txt": "original content"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let languages = project.read_with(cx, |project, _| project.languages().clone());
- let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
-
+ let (tool, project, action_log, _fs, _thread) =
+ setup_test(cx, json!({"test.txt": "original content"})).await;
let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
- let edit_tool = Arc::new(StreamingEditFileTool::new(
project.clone(),
- thread.downgrade(),
- languages,
+ action_log.clone(),
+ true,
));
// Read the file first
@@ -4109,7 +3161,7 @@ mod tests {
// First edit should work
let edit_result = cx
.update(|cx| {
- edit_tool.clone().run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "First edit".into(),
path: "root/test.txt".into(),
@@ -4134,7 +3186,7 @@ mod tests {
// Second edit should also work because the edit updated the recorded read time
let edit_result = cx
.update(|cx| {
- edit_tool.clone().run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Second edit".into(),
path: "root/test.txt".into(),
@@ -4159,42 +3211,12 @@ mod tests {
#[gpui::test]
async fn test_streaming_external_modification_detected(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "test.txt": "original content"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let languages = project.read_with(cx, |project, _| project.languages().clone());
- let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
-
+ let (tool, project, action_log, fs, _thread) =
+ setup_test(cx, json!({"test.txt": "original content"})).await;
let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
- let edit_tool = Arc::new(StreamingEditFileTool::new(
project.clone(),
- thread.downgrade(),
- languages,
+ action_log.clone(),
+ true,
));
// Read the file first
@@ -4243,7 +3265,7 @@ mod tests {
// Try to edit - should fail because file was modified externally
let result = cx
.update(|cx| {
- edit_tool.clone().run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Edit after external change".into(),
path: "root/test.txt".into(),
@@ -4262,52 +3284,22 @@ mod tests {
let StreamingEditFileToolOutput::Error { error } = result.unwrap_err() else {
panic!("expected error");
- };
- assert!(
- error.contains("has been modified since you last read it"),
- "Error should mention file modification, got: {}",
- error
- );
- }
-
- #[gpui::test]
- async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "test.txt": "original content"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model.clone()),
- cx,
- )
- });
- let languages = project.read_with(cx, |project, _| project.languages().clone());
- let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
+ };
+ assert!(
+ error.contains("has been modified since you last read it"),
+ "Error should mention file modification, got: {}",
+ error
+ );
+ }
+ #[gpui::test]
+ async fn test_streaming_dirty_buffer_detected(cx: &mut TestAppContext) {
+ let (tool, project, action_log, _fs, _thread) =
+ setup_test(cx, json!({"test.txt": "original content"})).await;
let read_tool = Arc::new(crate::ReadFileTool::new(
- thread.downgrade(),
- project.clone(),
- action_log,
- ));
- let edit_tool = Arc::new(StreamingEditFileTool::new(
project.clone(),
- thread.downgrade(),
- languages,
+ action_log.clone(),
+ true,
));
// Read the file first
@@ -4347,7 +3339,7 @@ mod tests {
// Try to edit - should fail because buffer has unsaved changes
let result = cx
.update(|cx| {
- edit_tool.clone().run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Edit with dirty buffer".into(),
path: "root/test.txt".into(),
@@ -4386,46 +3378,15 @@ mod tests {
#[gpui::test]
async fn test_streaming_overlapping_edits_resolved_sequentially(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
// Edit 1's replacement introduces text that contains edit 2's
// old_text as a substring. Because edits resolve sequentially
// against the current buffer, edit 2 finds a unique match in
// the modified buffer and succeeds.
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "aaa\nbbb\nccc\nddd\neee\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "aaa\nbbb\nccc\nddd\neee\n"})).await;
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Setup: resolve the buffer
sender.send_partial(json!({
@@ -4473,36 +3434,10 @@ mod tests {
#[gpui::test]
async fn test_streaming_create_content_streamed(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree("/root", json!({"dir": {}})).await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, project, _action_log, _fs, _thread) = setup_test(cx, json!({"dir": {}})).await;
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Transition to BufferResolved
sender.send_partial(json!({
@@ -4570,42 +3505,14 @@ mod tests {
#[gpui::test]
async fn test_streaming_overwrite_diff_revealed_during_streaming(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "old line 1\nold line 2\nold line 3\n"
- }),
+ let (tool, _project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, mut receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Transition to BufferResolved
sender.send_partial(json!({
@@ -4663,42 +3570,14 @@ mod tests {
#[gpui::test]
async fn test_streaming_overwrite_content_streamed(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "old line 1\nold line 2\nold line 3\n"
- }),
+ let (tool, project, _action_log, _fs, _thread) = setup_test(
+ cx,
+ json!({"file.txt": "old line 1\nold line 2\nold line 3\n"}),
)
.await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
// Transition to BufferResolved
sender.send_partial(json!({
@@ -4762,42 +3641,11 @@ mod tests {
#[gpui::test]
async fn test_streaming_edit_json_fixer_escape_corruption(cx: &mut TestAppContext) {
- init_test(cx);
-
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- "/root",
- json!({
- "file.txt": "hello\nworld\nfoo\n"
- }),
- )
- .await;
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let model = Arc::new(FakeLanguageModel::default());
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- Some(model),
- cx,
- )
- });
-
+ let (tool, _project, _action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "hello\nworld\nfoo\n"})).await;
let (sender, input) = ToolInput::<StreamingEditFileToolInput>::test();
let (event_stream, _receiver) = ToolCallEventStream::test();
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
-
- let task = cx.update(|cx| tool.run(input, event_stream, cx));
+ let task = cx.update(|cx| tool.clone().run(input, event_stream, cx));
sender.send_partial(json!({
"display_description": "Edit",
@@ -4847,47 +3695,17 @@ mod tests {
// reports changed buffers so that the Accept All / Reject All review UI appears.
#[gpui::test]
async fn test_streaming_edit_file_tool_registers_changed_buffers(cx: &mut TestAppContext) {
- init_test(cx);
+ let (tool, _project, action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "line 1\nline 2\nline 3\n"})).await;
cx.update(|cx| {
let mut settings = agent_settings::AgentSettings::get_global(cx).clone();
settings.tool_permissions.default = settings::ToolPermissionMode::Allow;
agent_settings::AgentSettings::override_global(settings, cx);
});
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- path!("/root"),
- json!({
- "file.txt": "line 1\nline 2\nline 3\n"
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- None,
- cx,
- )
- });
- let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
let (event_stream, _rx) = ToolCallEventStream::test();
-
let task = cx.update(|cx| {
- tool.run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Edit lines".to_string(),
path: "root/file.txt".into(),
@@ -4911,7 +3729,7 @@ mod tests {
let changed = action_log.read_with(cx, |log, cx| log.changed_buffers(cx));
assert!(
!changed.is_empty(),
- "action_log.changed_buffers() should be non-empty after streaming edit, \
+            "action_log.changed_buffers() should be non-empty after streaming edit, \
but no changed buffers were found \u{2014} Accept All / Reject All will not appear"
);
}
@@ -4921,47 +3739,17 @@ mod tests {
async fn test_streaming_edit_file_tool_write_mode_registers_changed_buffers(
cx: &mut TestAppContext,
) {
- init_test(cx);
+ let (tool, _project, action_log, _fs, _thread) =
+ setup_test(cx, json!({"file.txt": "original content"})).await;
cx.update(|cx| {
let mut settings = agent_settings::AgentSettings::get_global(cx).clone();
settings.tool_permissions.default = settings::ToolPermissionMode::Allow;
agent_settings::AgentSettings::override_global(settings, cx);
});
- let fs = project::FakeFs::new(cx.executor());
- fs.insert_tree(
- path!("/root"),
- json!({
- "file.txt": "original content"
- }),
- )
- .await;
-
- let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
- let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
- let context_server_registry =
- cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
- let thread = cx.new(|cx| {
- crate::Thread::new(
- project.clone(),
- cx.new(|_cx| ProjectContext::default()),
- context_server_registry,
- Templates::new(),
- None,
- cx,
- )
- });
- let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
-
- let tool = Arc::new(StreamingEditFileTool::new(
- project.clone(),
- thread.downgrade(),
- language_registry,
- ));
let (event_stream, _rx) = ToolCallEventStream::test();
-
let task = cx.update(|cx| {
- tool.run(
+ tool.clone().run(
ToolInput::resolved(StreamingEditFileToolInput {
display_description: "Overwrite file".to_string(),
path: "root/file.txt".into(),
@@ -4987,6 +3775,58 @@ mod tests {
);
}
+ async fn setup_test_with_fs(
+ cx: &mut TestAppContext,
+ fs: Arc,
+ worktree_paths: &[&std::path::Path],
+ ) -> (
+ Arc,
+ Entity,
+ Entity,
+ Arc,
+ Entity,
+ ) {
+ let project = Project::test(fs.clone(), worktree_paths.iter().copied(), cx).await;
+ let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
+ let context_server_registry =
+ cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
+ let model = Arc::new(FakeLanguageModel::default());
+ let thread = cx.new(|cx| {
+ crate::Thread::new(
+ project.clone(),
+ cx.new(|_cx| ProjectContext::default()),
+ context_server_registry,
+ Templates::new(),
+ Some(model),
+ cx,
+ )
+ });
+ let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
+ let tool = Arc::new(StreamingEditFileTool::new(
+ project.clone(),
+ thread.downgrade(),
+ action_log.clone(),
+ language_registry,
+ ));
+ (tool, project, action_log, fs, thread)
+ }
+
+ async fn setup_test(
+ cx: &mut TestAppContext,
+ initial_tree: serde_json::Value,
+ ) -> (
+ Arc,
+ Entity,
+ Entity,
+ Arc,
+ Entity,
+ ) {
+ init_test(cx);
+ let fs = project::FakeFs::new(cx.executor());
+        fs.insert_tree(path!("/root"), initial_tree).await;
+ setup_test_with_fs(cx, fs, &[path!("/root").as_ref()]).await
+ }
+
fn init_test(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
diff --git a/crates/agent_ui/Cargo.toml b/crates/agent_ui/Cargo.toml
index 2a31781054fd29b30a3c8119e87491edbfb1e658..3e46e14b53c46a2aec3ac9552246a10ffc2aeee9 100644
--- a/crates/agent_ui/Cargo.toml
+++ b/crates/agent_ui/Cargo.toml
@@ -58,6 +58,7 @@ feature_flags.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
+git.workspace = true
fuzzy.workspace = true
gpui.workspace = true
gpui_tokio.workspace = true
diff --git a/crates/agent_ui/src/agent_panel.rs b/crates/agent_ui/src/agent_panel.rs
index 7097e5be156eb33382a1a0f47c1b4256c84ce9b1..0f1cd3ebf0fdf1df939ccc6f2b0d1a40545bf082 100644
--- a/crates/agent_ui/src/agent_panel.rs
+++ b/crates/agent_ui/src/agent_panel.rs
@@ -1,6 +1,6 @@
use std::{
ops::Range,
- path::Path,
+ path::{Path, PathBuf},
rc::Rc,
sync::{
Arc,
@@ -22,15 +22,18 @@ use project::{
use serde::{Deserialize, Serialize};
use settings::{LanguageModelProviderSetting, LanguageModelSelection};
+use feature_flags::{AgentGitWorktreesFeatureFlag, AgentV2FeatureFlag, FeatureFlagAppExt as _};
use zed_actions::agent::{OpenClaudeAgentOnboardingModal, ReauthenticateAgent, ReviewBranchDiff};
+use crate::ManageProfiles;
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
use crate::{
AddContextServer, AgentDiffPane, ConnectionView, CopyThreadToClipboard, Follow,
InlineAssistant, LoadThreadFromClipboard, NewTextThread, NewThread, OpenActiveThreadAsMarkdown,
- OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu,
- ToggleNewThreadMenu, ToggleOptionsMenu,
+ OpenAgentDiff, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, StartThreadIn,
+ ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
+ connection_view::{AcpThreadViewEvent, ThreadView},
slash_command::SlashCommandCompletionProvider,
text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
ui::EndTrialUpsell,
@@ -42,7 +45,6 @@ use crate::{
ExpandMessageEditor, ThreadHistory, ThreadHistoryEvent,
text_thread_history::{TextThreadHistory, TextThreadHistoryEvent},
};
-use crate::{ManageProfiles, connection_view::ThreadView};
use agent_settings::AgentSettings;
use ai_onboarding::AgentPanelOnboarding;
use anyhow::{Result, anyhow};
@@ -54,6 +56,7 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use extension::ExtensionEvents;
use extension_host::ExtensionStore;
use fs::Fs;
+use git::repository::validate_worktree_directory;
use gpui::{
Action, Animation, AnimationExt, AnyElement, App, AsyncWindowContext, ClipboardItem, Corner,
DismissEvent, Entity, EventEmitter, ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels,
@@ -61,6 +64,7 @@ use gpui::{
};
use language::LanguageRegistry;
use language_model::{ConfigurationError, LanguageModelRegistry};
+use project::project_settings::ProjectSettings;
use project::{Project, ProjectPath, Worktree};
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
use rules_library::{RulesLibrary, open_rules_library};
@@ -68,8 +72,8 @@ use search::{BufferSearchBar, buffer_search};
use settings::{Settings, update_settings_file};
use theme::ThemeSettings;
use ui::{
- Callout, ContextMenu, ContextMenuEntry, KeyBinding, PopoverMenu, PopoverMenuHandle, Tab,
- Tooltip, prelude::*, utils::WithRemSize,
+ Button, Callout, ContextMenu, ContextMenuEntry, DocumentationSide, KeyBinding, PopoverMenu,
+ PopoverMenuHandle, SpinnerLabel, Tab, Tooltip, prelude::*, utils::WithRemSize,
};
use util::ResultExt as _;
use workspace::{
@@ -123,6 +127,8 @@ struct SerializedAgentPanel {
selected_agent: Option,
#[serde(default)]
last_active_thread: Option,
+ #[serde(default)]
+ start_thread_in: Option,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
@@ -324,6 +330,13 @@ pub fn init(cx: &mut App) {
cx,
);
});
+ })
+ .register_action(|workspace, action: &StartThreadIn, _window, cx| {
+ if let Some(panel) = workspace.panel::(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.set_start_thread_in(action, cx);
+ });
+ }
});
},
)
@@ -371,6 +384,10 @@ pub enum AgentType {
}
impl AgentType {
+ pub fn is_native(&self) -> bool {
+ matches!(self, Self::NativeAgent)
+ }
+
fn label(&self) -> SharedString {
match self {
Self::NativeAgent | Self::TextThread => "Zed Agent".into(),
@@ -395,6 +412,29 @@ impl From for AgentType {
}
}
+impl StartThreadIn {
+ fn label(&self) -> SharedString {
+ match self {
+ Self::LocalProject => "Local Project".into(),
+ Self::NewWorktree => "New Worktree".into(),
+ }
+ }
+
+ fn icon(&self) -> IconName {
+ match self {
+ Self::LocalProject => IconName::Screen,
+ Self::NewWorktree => IconName::GitBranchPlus,
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+#[allow(dead_code)]
+pub enum WorktreeCreationStatus {
+ Creating,
+ Error(SharedString),
+}
+
impl ActiveView {
pub fn which_font_size_used(&self) -> WhichFontSize {
match self {
@@ -515,6 +555,7 @@ pub struct AgentPanel {
previous_view: Option,
_active_view_observation: Option,
new_thread_menu_handle: PopoverMenuHandle,
+ start_thread_in_menu_handle: PopoverMenuHandle,
agent_panel_menu_handle: PopoverMenuHandle,
agent_navigation_menu_handle: PopoverMenuHandle,
agent_navigation_menu: Option>,
@@ -525,6 +566,10 @@ pub struct AgentPanel {
pending_serialization: Option>>,
onboarding: Entity,
selected_agent: AgentType,
+ start_thread_in: StartThreadIn,
+ worktree_creation_status: Option,
+ _thread_view_subscription: Option,
+ _worktree_creation_task: Option>,
show_trust_workspace_message: bool,
last_configuration_error_telemetry: Option,
on_boarding_upsell_dismissed: AtomicBool,
@@ -538,6 +583,7 @@ impl AgentPanel {
let width = self.width;
let selected_agent = self.selected_agent.clone();
+ let start_thread_in = Some(self.start_thread_in);
let last_active_thread = self.active_agent_thread(cx).map(|thread| {
let thread = thread.read(cx);
@@ -561,6 +607,7 @@ impl AgentPanel {
width,
selected_agent: Some(selected_agent),
last_active_thread,
+ start_thread_in,
},
)
.await?;
@@ -605,6 +652,37 @@ impl AgentPanel {
})?
.await?;
+ let last_active_thread = if let Some(thread_info) = serialized_panel
+ .as_ref()
+ .and_then(|p| p.last_active_thread.clone())
+ {
+ if thread_info.agent_type.is_native() {
+ let session_id = acp::SessionId::new(thread_info.session_id.clone());
+ let load_result = cx.update(|_window, cx| {
+ let thread_store = ThreadStore::global(cx);
+ thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
+ });
+ let thread_exists = if let Ok(task) = load_result {
+ task.await.ok().flatten().is_some()
+ } else {
+ false
+ };
+ if thread_exists {
+ Some(thread_info)
+ } else {
+ log::warn!(
+ "last active thread {} not found in database, skipping restoration",
+ thread_info.session_id
+ );
+ None
+ }
+ } else {
+ Some(thread_info)
+ }
+ } else {
+ None
+ };
+
let panel = workspace.update_in(cx, |workspace, window, cx| {
let panel =
cx.new(|cx| Self::new(workspace, text_thread_store, prompt_store, window, cx));
@@ -615,44 +693,45 @@ impl AgentPanel {
if let Some(selected_agent) = serialized_panel.selected_agent.clone() {
panel.selected_agent = selected_agent;
}
+ if let Some(start_thread_in) = serialized_panel.start_thread_in {
+ let is_worktree_flag_enabled =
+ cx.has_flag::();
+ let is_valid = match &start_thread_in {
+ StartThreadIn::LocalProject => true,
+ StartThreadIn::NewWorktree => {
+ let project = panel.project.read(cx);
+ is_worktree_flag_enabled && !project.is_via_collab()
+ }
+ };
+ if is_valid {
+ panel.start_thread_in = start_thread_in;
+ } else {
+ log::info!(
+ "deserialized start_thread_in {:?} is no longer valid, falling back to LocalProject",
+ start_thread_in,
+ );
+ }
+ }
cx.notify();
});
}
- panel
- })?;
-
- if let Some(thread_info) = serialized_panel.and_then(|p| p.last_active_thread) {
- let session_id = acp::SessionId::new(thread_info.session_id.clone());
- let load_task = panel.update(cx, |panel, cx| {
- let thread_store = panel.thread_store.clone();
- thread_store.update(cx, |store, cx| store.load_thread(session_id, cx))
- });
- let thread_exists = load_task
- .await
- .map(|thread: Option| thread.is_some())
- .unwrap_or(false);
-
- if thread_exists {
- panel.update_in(cx, |panel, window, cx| {
- panel.selected_agent = thread_info.agent_type.clone();
- let session_info = AgentSessionInfo {
- session_id: acp::SessionId::new(thread_info.session_id),
- cwd: thread_info.cwd,
- title: thread_info.title.map(SharedString::from),
- updated_at: None,
- meta: None,
- };
+ if let Some(thread_info) = last_active_thread {
+ let agent_type = thread_info.agent_type.clone();
+ let session_info = AgentSessionInfo {
+ session_id: acp::SessionId::new(thread_info.session_id),
+ cwd: thread_info.cwd,
+ title: thread_info.title.map(SharedString::from),
+ updated_at: None,
+ meta: None,
+ };
+ panel.update(cx, |panel, cx| {
+ panel.selected_agent = agent_type;
panel.load_agent_thread(session_info, window, cx);
- })?;
- } else {
- log::error!(
- "could not restore last active thread: \
- no thread found in database with ID {:?}",
- thread_info.session_id
- );
+ });
}
- }
+ panel
+ })?;
Ok(panel)
})
@@ -800,6 +879,7 @@ impl AgentPanel {
previous_view: None,
_active_view_observation: None,
new_thread_menu_handle: PopoverMenuHandle::default(),
+ start_thread_in_menu_handle: PopoverMenuHandle::default(),
agent_panel_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu_handle: PopoverMenuHandle::default(),
agent_navigation_menu: None,
@@ -813,6 +893,10 @@ impl AgentPanel {
text_thread_history,
thread_store,
selected_agent: AgentType::default(),
+ start_thread_in: StartThreadIn::default(),
+ worktree_creation_status: None,
+ _thread_view_subscription: None,
+ _worktree_creation_task: None,
show_trust_workspace_message: false,
last_configuration_error_telemetry: None,
on_boarding_upsell_dismissed: AtomicBool::new(OnboardingUpsell::dismissed()),
@@ -1044,7 +1128,7 @@ impl AgentPanel {
let server = ext_agent.server(fs, thread_store);
this.update_in(cx, |agent_panel, window, cx| {
- agent_panel._external_thread(
+ agent_panel.create_external_thread(
server,
resume_thread,
initial_content,
@@ -1618,15 +1702,28 @@ impl AgentPanel {
self.active_view = new_view;
}
+ // Subscribe to the active ThreadView's events (e.g. FirstSendRequested)
+ // so the panel can intercept the first send for worktree creation.
+ // Re-subscribe whenever the ConnectionView changes, since the inner
+ // ThreadView may have been replaced (e.g. navigating between threads).
self._active_view_observation = match &self.active_view {
ActiveView::AgentThread { server_view } => {
- Some(cx.observe(server_view, |this, _, cx| {
- cx.emit(AgentPanelEvent::ActiveViewChanged);
- this.serialize(cx);
- cx.notify();
- }))
+ self._thread_view_subscription =
+ Self::subscribe_to_active_thread_view(server_view, window, cx);
+ Some(
+ cx.observe_in(server_view, window, |this, server_view, window, cx| {
+ this._thread_view_subscription =
+ Self::subscribe_to_active_thread_view(&server_view, window, cx);
+ cx.emit(AgentPanelEvent::ActiveViewChanged);
+ this.serialize(cx);
+ cx.notify();
+ }),
+ )
+ }
+ _ => {
+ self._thread_view_subscription = None;
+ None
}
- _ => None,
};
let is_in_agent_history = matches!(
@@ -1740,6 +1837,56 @@ impl AgentPanel {
self.selected_agent.clone()
}
+ fn subscribe_to_active_thread_view(
+ server_view: &Entity,
+ window: &mut Window,
+ cx: &mut Context,
+ ) -> Option {
+ server_view.read(cx).active_thread().cloned().map(|tv| {
+ cx.subscribe_in(
+ &tv,
+ window,
+ |this, view, event: &AcpThreadViewEvent, window, cx| match event {
+ AcpThreadViewEvent::FirstSendRequested { content } => {
+ this.handle_first_send_requested(view.clone(), content.clone(), window, cx);
+ }
+ },
+ )
+ })
+ }
+
+ pub fn start_thread_in(&self) -> &StartThreadIn {
+ &self.start_thread_in
+ }
+
+ fn set_start_thread_in(&mut self, action: &StartThreadIn, cx: &mut Context) {
+ if matches!(action, StartThreadIn::NewWorktree)
+ && !cx.has_flag::()
+ {
+ return;
+ }
+
+ let new_target = match *action {
+ StartThreadIn::LocalProject => StartThreadIn::LocalProject,
+ StartThreadIn::NewWorktree => {
+ if !self.project_has_git_repository(cx) {
+ log::error!(
+ "set_start_thread_in: cannot use NewWorktree without a git repository"
+ );
+ return;
+ }
+ if self.project.read(cx).is_via_collab() {
+ log::error!("set_start_thread_in: cannot use NewWorktree in a collab project");
+ return;
+ }
+ StartThreadIn::NewWorktree
+ }
+ };
+ self.start_thread_in = new_target;
+ self.serialize(cx);
+ cx.notify();
+ }
+
fn selected_external_agent(&self) -> Option {
match &self.selected_agent {
AgentType::NativeAgent => Some(ExternalAgent::NativeAgent),
@@ -1830,7 +1977,7 @@ impl AgentPanel {
self.external_thread(Some(agent), Some(thread), None, window, cx);
}
- fn _external_thread(
+ pub(crate) fn create_external_thread(
&mut self,
server: Rc,
resume_thread: Option,
@@ -1869,135 +2016,641 @@ impl AgentPanel {
self.set_active_view(ActiveView::AgentThread { server_view }, true, window, cx);
}
-}
-impl Focusable for AgentPanel {
- fn focus_handle(&self, cx: &App) -> FocusHandle {
- match &self.active_view {
- ActiveView::Uninitialized => self.focus_handle.clone(),
- ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx),
- ActiveView::History { kind } => match kind {
- HistoryKind::AgentThreads => self.acp_history.focus_handle(cx),
- HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx),
- },
- ActiveView::TextThread {
- text_thread_editor, ..
- } => text_thread_editor.focus_handle(cx),
- ActiveView::Configuration => {
- if let Some(configuration) = self.configuration.as_ref() {
- configuration.focus_handle(cx)
- } else {
- self.focus_handle.clone()
- }
- }
- }
+ fn active_thread_has_messages(&self, cx: &App) -> bool {
+ self.active_agent_thread(cx)
+ .is_some_and(|thread| !thread.read(cx).entries().is_empty())
}
-}
-fn agent_panel_dock_position(cx: &App) -> DockPosition {
- AgentSettings::get_global(cx).dock.into()
-}
+ fn handle_first_send_requested(
+ &mut self,
+ thread_view: Entity,
+ content: Vec,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ if self.start_thread_in == StartThreadIn::NewWorktree {
+ self.handle_worktree_creation_requested(content, window, cx);
+ } else {
+ cx.defer_in(window, move |_this, window, cx| {
+ thread_view.update(cx, |thread_view, cx| {
+ let editor = thread_view.message_editor.clone();
+ thread_view.send_impl(editor, window, cx);
+ });
+ });
+ }
+ }
-pub enum AgentPanelEvent {
- ActiveViewChanged,
-}
+ /// Partitions the project's visible worktrees into git-backed repositories
+ /// and plain (non-git) paths. Git repos will have worktrees created for
+ /// them; non-git paths are carried over to the new workspace as-is.
+ ///
+ /// When multiple worktrees map to the same repository, the most specific
+ /// match wins (deepest work directory path), with a deterministic
+ /// tie-break on entity id. Each repository appears at most once.
+ fn classify_worktrees(
+ &self,
+ cx: &App,
+ ) -> (Vec>, Vec) {
+ let project = &self.project;
+ let repositories = project.read(cx).repositories(cx).clone();
+ let mut git_repos: Vec> = Vec::new();
+ let mut non_git_paths: Vec = Vec::new();
+ let mut seen_repo_ids = std::collections::HashSet::new();
+
+ for worktree in project.read(cx).visible_worktrees(cx) {
+ let wt_path = worktree.read(cx).abs_path();
+
+ let matching_repo = repositories
+ .iter()
+ .filter_map(|(id, repo)| {
+ let work_dir = repo.read(cx).work_directory_abs_path.clone();
+ if wt_path.starts_with(work_dir.as_ref())
+ || work_dir.starts_with(wt_path.as_ref())
+ {
+ Some((*id, repo.clone(), work_dir.as_ref().components().count()))
+ } else {
+ None
+ }
+ })
+ .max_by(
+ |(left_id, _left_repo, left_depth), (right_id, _right_repo, right_depth)| {
+ left_depth
+ .cmp(right_depth)
+ .then_with(|| left_id.cmp(right_id))
+ },
+ );
-impl EventEmitter for AgentPanel {}
-impl EventEmitter for AgentPanel {}
+ if let Some((id, repo, _)) = matching_repo {
+ if seen_repo_ids.insert(id) {
+ git_repos.push(repo);
+ }
+ } else {
+ non_git_paths.push(wt_path.to_path_buf());
+ }
+ }
-impl Panel for AgentPanel {
- fn persistent_name() -> &'static str {
- "AgentPanel"
+ (git_repos, non_git_paths)
}
- fn panel_key() -> &'static str {
- AGENT_PANEL_KEY
- }
+ /// Kicks off an async git-worktree creation for each repository. Returns:
+ ///
+ /// - `creation_infos`: a vec of `(repo, new_path, receiver)` tuples—the
+ /// receiver resolves once the git worktree command finishes.
+ /// - `path_remapping`: `(old_work_dir, new_worktree_path)` pairs used
+ /// later to remap open editor tabs into the new workspace.
+ fn start_worktree_creations(
+ git_repos: &[Entity],
+ branch_name: &str,
+ worktree_directory_setting: &str,
+ cx: &mut Context,
+ ) -> Result<(
+ Vec<(
+ Entity,
+ PathBuf,
+ futures::channel::oneshot::Receiver>,
+ )>,
+ Vec<(PathBuf, PathBuf)>,
+ )> {
+ let mut creation_infos = Vec::new();
+ let mut path_remapping = Vec::new();
+
+ for repo in git_repos {
+ let (work_dir, new_path, receiver) = repo.update(cx, |repo, _cx| {
+ let original_repo = repo.original_repo_abs_path.clone();
+ let directory =
+ validate_worktree_directory(&original_repo, worktree_directory_setting)?;
+ let new_path = directory.join(branch_name);
+ let receiver = repo.create_worktree(branch_name.to_string(), directory, None);
+ let work_dir = repo.work_directory_abs_path.clone();
+ anyhow::Ok((work_dir, new_path, receiver))
+ })?;
+ path_remapping.push((work_dir.to_path_buf(), new_path.clone()));
+ creation_infos.push((repo.clone(), new_path, receiver));
+ }
- fn position(&self, _window: &Window, cx: &App) -> DockPosition {
- agent_panel_dock_position(cx)
+ Ok((creation_infos, path_remapping))
}
- fn position_is_valid(&self, position: DockPosition) -> bool {
- position != DockPosition::Bottom
- }
+ /// Waits for every in-flight worktree creation to complete. If any
+ /// creation fails, all successfully-created worktrees are rolled back
+ /// (removed) so the project isn't left in a half-migrated state.
+ async fn await_and_rollback_on_failure(
+ creation_infos: Vec<(
+ Entity,
+ PathBuf,
+ futures::channel::oneshot::Receiver>,
+ )>,
+ cx: &mut AsyncWindowContext,
+ ) -> Result> {
+ let mut created_paths: Vec = Vec::new();
+ let mut repos_and_paths: Vec<(Entity, PathBuf)> =
+ Vec::new();
+ let mut first_error: Option = None;
+
+ for (repo, new_path, receiver) in creation_infos {
+ match receiver.await {
+ Ok(Ok(())) => {
+ created_paths.push(new_path.clone());
+ repos_and_paths.push((repo, new_path));
+ }
+ Ok(Err(err)) => {
+ if first_error.is_none() {
+ first_error = Some(err);
+ }
+ }
+ Err(_canceled) => {
+ if first_error.is_none() {
+ first_error = Some(anyhow!("Worktree creation was canceled"));
+ }
+ }
+ }
+ }
- fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) {
- settings::update_settings_file(self.fs.clone(), cx, move |settings, _| {
- settings
- .agent
- .get_or_insert_default()
- .set_dock(position.into());
- });
- }
+ let Some(err) = first_error else {
+ return Ok(created_paths);
+ };
- fn size(&self, window: &Window, cx: &App) -> Pixels {
- let settings = AgentSettings::get_global(cx);
- match self.position(window, cx) {
- DockPosition::Left | DockPosition::Right => {
- self.width.unwrap_or(settings.default_width)
+ // Rollback all successfully created worktrees
+ let mut rollback_receivers = Vec::new();
+ for (rollback_repo, rollback_path) in &repos_and_paths {
+ if let Ok(receiver) = cx.update(|_, cx| {
+ rollback_repo.update(cx, |repo, _cx| {
+ repo.remove_worktree(rollback_path.clone(), true)
+ })
+ }) {
+ rollback_receivers.push((rollback_path.clone(), receiver));
}
- DockPosition::Bottom => self.height.unwrap_or(settings.default_height),
}
- }
-
- fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) {
- match self.position(window, cx) {
- DockPosition::Left | DockPosition::Right => self.width = size,
- DockPosition::Bottom => self.height = size,
+ let mut rollback_failures: Vec = Vec::new();
+ for (path, receiver) in rollback_receivers {
+ match receiver.await {
+ Ok(Ok(())) => {}
+ Ok(Err(rollback_err)) => {
+ log::error!(
+ "failed to rollback worktree at {}: {rollback_err}",
+ path.display()
+ );
+ rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+ }
+ Err(rollback_err) => {
+ log::error!(
+ "failed to rollback worktree at {}: {rollback_err}",
+ path.display()
+ );
+ rollback_failures.push(format!("{}: {rollback_err}", path.display()));
+ }
+ }
}
- self.serialize(cx);
- cx.notify();
+ let mut error_message = format!("Failed to create worktree: {err}");
+ if !rollback_failures.is_empty() {
+ error_message.push_str("\n\nFailed to clean up: ");
+ error_message.push_str(&rollback_failures.join(", "));
+ }
+ Err(anyhow!(error_message))
}
- fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) {
- if active && matches!(self.active_view, ActiveView::Uninitialized) {
+ fn set_worktree_creation_error(
+ &mut self,
+ message: SharedString,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.worktree_creation_status = Some(WorktreeCreationStatus::Error(message));
+ if matches!(self.active_view, ActiveView::Uninitialized) {
let selected_agent = self.selected_agent.clone();
self.new_agent_thread(selected_agent, window, cx);
}
+ cx.notify();
}
- fn remote_id() -> Option {
- Some(proto::PanelId::AssistantPanel)
- }
-
- fn icon(&self, _window: &Window, cx: &App) -> Option {
- (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant)
- }
+ fn handle_worktree_creation_requested(
+ &mut self,
+ content: Vec,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ if matches!(
+ self.worktree_creation_status,
+ Some(WorktreeCreationStatus::Creating)
+ ) {
+ return;
+ }
- fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
- Some("Agent Panel")
- }
+ self.worktree_creation_status = Some(WorktreeCreationStatus::Creating);
+ cx.notify();
- fn toggle_action(&self) -> Box {
- Box::new(ToggleFocus)
- }
+ let (git_repos, non_git_paths) = self.classify_worktrees(cx);
- fn activation_priority(&self) -> u32 {
- 3
- }
+ if git_repos.is_empty() {
+ self.set_worktree_creation_error(
+ "No git repositories found in the project".into(),
+ window,
+ cx,
+ );
+ return;
+ }
- fn enabled(&self, cx: &App) -> bool {
- AgentSettings::get_global(cx).enabled(cx)
- }
+ // Kick off branch listing as early as possible so it can run
+ // concurrently with the remaining synchronous setup work.
+ let branch_receivers: Vec<_> = git_repos
+ .iter()
+ .map(|repo| repo.update(cx, |repo, _cx| repo.branches()))
+ .collect();
+
+ let worktree_directory_setting = ProjectSettings::get_global(cx)
+ .git
+ .worktree_directory
+ .clone();
+
+ let (dock_structure, open_file_paths) = self
+ .workspace
+ .upgrade()
+ .map(|workspace| {
+ let dock_structure = workspace.read(cx).capture_dock_state(window, cx);
+ let open_file_paths = workspace.read(cx).open_item_abs_paths(cx);
+ (dock_structure, open_file_paths)
+ })
+ .unwrap_or_default();
- fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
- self.zoomed
- }
+ let workspace = self.workspace.clone();
+ let window_handle = window
+ .window_handle()
+ .downcast::();
+
+ let task = cx.spawn_in(window, async move |this, cx| {
+ // Await the branch listings we kicked off earlier.
+ let mut existing_branches = Vec::new();
+ for result in futures::future::join_all(branch_receivers).await {
+ match result {
+ Ok(Ok(branches)) => {
+ for branch in branches {
+ existing_branches.push(branch.name().to_string());
+ }
+ }
+ Ok(Err(err)) => {
+ Err::<(), _>(err).log_err();
+ }
+ Err(_) => {}
+ }
+ }
- fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context) {
- self.zoomed = zoomed;
- cx.notify();
- }
-}
+ let existing_branch_refs: Vec<&str> =
+ existing_branches.iter().map(|s| s.as_str()).collect();
+ let mut rng = rand::rng();
+ let branch_name =
+ match crate::branch_names::generate_branch_name(&existing_branch_refs, &mut rng) {
+ Some(name) => name,
+ None => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ "Failed to generate a branch name: all typewriter names are taken"
+ .into(),
+ window,
+ cx,
+ );
+ })?;
+ return anyhow::Ok(());
+ }
+ };
-impl AgentPanel {
- fn render_title_view(&self, _window: &mut Window, cx: &Context) -> AnyElement {
- const LOADING_SUMMARY_PLACEHOLDER: &str = "Loading Summary…";
+ let (creation_infos, path_remapping) = match this.update_in(cx, |_this, _window, cx| {
+ Self::start_worktree_creations(
+ &git_repos,
+ &branch_name,
+ &worktree_directory_setting,
+ cx,
+ )
+ }) {
+ Ok(Ok(result)) => result,
+ Ok(Err(err)) | Err(err) => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ format!("Failed to validate worktree directory: {err}").into(),
+ window,
+ cx,
+ );
+ })
+ .log_err();
+ return anyhow::Ok(());
+ }
+ };
- let content = match &self.active_view {
- ActiveView::AgentThread { server_view } => {
- let is_generating_title = server_view
+ let created_paths = match Self::await_and_rollback_on_failure(creation_infos, cx).await
+ {
+ Ok(paths) => paths,
+ Err(err) => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(format!("{err}").into(), window, cx);
+ })?;
+ return anyhow::Ok(());
+ }
+ };
+
+ let mut all_paths = created_paths;
+ let has_non_git = !non_git_paths.is_empty();
+ all_paths.extend(non_git_paths.iter().cloned());
+
+ let app_state = match workspace.upgrade() {
+ Some(workspace) => cx.update(|_, cx| workspace.read(cx).app_state().clone())?,
+ None => {
+ this.update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ "Workspace no longer available".into(),
+ window,
+ cx,
+ );
+ })?;
+ return anyhow::Ok(());
+ }
+ };
+
+ let this_for_error = this.clone();
+ if let Err(err) = Self::setup_new_workspace(
+ this,
+ all_paths,
+ app_state,
+ window_handle,
+ dock_structure,
+ open_file_paths,
+ path_remapping,
+ non_git_paths,
+ has_non_git,
+ content,
+ cx,
+ )
+ .await
+ {
+ this_for_error
+ .update_in(cx, |this, window, cx| {
+ this.set_worktree_creation_error(
+ format!("Failed to set up workspace: {err}").into(),
+ window,
+ cx,
+ );
+ })
+ .log_err();
+ }
+ anyhow::Ok(())
+ });
+
+ self._worktree_creation_task = Some(cx.foreground_executor().spawn(async move {
+ task.await.log_err();
+ }));
+ }
+
+ /// Finishes setting up a freshly created workspace (opened for a new git
+ /// worktree) after `Workspace::new_local` has been kicked off: waits for
+ /// panels to load, re-opens the files the user had open (remapped into the
+ /// new worktree roots), and starts an agent thread seeded with `content`.
+ ///
+ /// NOTE(review): this patch hunk has had its generic type parameters
+ /// stripped (e.g. `WeakEntity`, `Vec`, `Option>`), so the exact element
+ /// types of several parameters cannot be read from this text — confirm
+ /// against the original commit.
+ async fn setup_new_workspace(
+ this: WeakEntity,
+ all_paths: Vec,
+ app_state: Arc,
+ window_handle: Option>,
+ dock_structure: workspace::DockStructure,
+ open_file_paths: Vec,
+ path_remapping: Vec<(PathBuf, PathBuf)>,
+ non_git_paths: Vec,
+ has_non_git: bool,
+ content: Vec,
+ cx: &mut AsyncWindowContext,
+ ) -> Result<()> {
+ // Init callback: restore the saved dock layout on the new workspace.
+ let init: Option<
+ Box) + Send>,
+ > = Some(Box::new(move |workspace, window, cx| {
+ workspace.set_dock_structure(dock_structure, window, cx);
+ }));
+
+ let (new_window_handle, _) = cx
+ .update(|_window, cx| {
+ Workspace::new_local(all_paths, app_state, window_handle, None, init, false, cx)
+ })?
+ .await?;
+
+ // The new workspace is expected to be the most recently added one.
+ let new_workspace = new_window_handle.update(cx, |multi_workspace, _window, _cx| {
+ let workspaces = multi_workspace.workspaces();
+ workspaces.last().cloned()
+ })?;
+
+ let Some(new_workspace) = new_workspace else {
+ anyhow::bail!("New workspace was not added to MultiWorkspace");
+ };
+
+ // Wait for panel loading to complete before interacting with panels below.
+ let panels_task = new_window_handle.update(cx, |_, _, cx| {
+ new_workspace.update(cx, |workspace, _cx| workspace.take_panels_task())
+ })?;
+ if let Some(task) = panels_task {
+ task.await.log_err();
+ }
+
+ // Seed the agent thread with the provided content and submit immediately.
+ let initial_content = AgentInitialContent::ContentBlock {
+ blocks: content,
+ auto_submit: true,
+ };
+
+ new_window_handle.update(cx, |_multi_workspace, window, cx| {
+ new_workspace.update(cx, |workspace, cx| {
+ if has_non_git {
+ // Inform the user that non-git folders were carried over verbatim.
+ let toast_id = workspace::notifications::NotificationId::unique::();
+ workspace.show_toast(
+ workspace::Toast::new(
+ toast_id,
+ "Some project folders are not git repositories. \
+ They were included as-is without creating a worktree.",
+ ),
+ cx,
+ );
+ }
+
+ // Translate each previously-open file path into the new worktree:
+ // pick the deepest (most specific) old root that prefixes the path,
+ // and rejoin the relative remainder onto the matching new root.
+ let remapped_paths: Vec = open_file_paths
+ .iter()
+ .filter_map(|original_path| {
+ let best_match = path_remapping
+ .iter()
+ .filter_map(|(old_root, new_root)| {
+ original_path.strip_prefix(old_root).ok().map(|relative| {
+ (old_root.components().count(), new_root.join(relative))
+ })
+ })
+ .max_by_key(|(depth, _)| *depth);
+
+ if let Some((_, remapped_path)) = best_match {
+ return Some(remapped_path);
+ }
+
+ // Files under non-git roots keep their original path unchanged.
+ for non_git in &non_git_paths {
+ if original_path.starts_with(non_git) {
+ return Some(original_path.clone());
+ }
+ }
+ None
+ })
+ .collect();
+
+ if !remapped_paths.is_empty() {
+ workspace
+ .open_paths(
+ remapped_paths,
+ workspace::OpenOptions::default(),
+ None,
+ window,
+ cx,
+ )
+ .detach();
+ }
+
+ // Focus the agent panel and start the external thread with the seed
+ // content. (The turbofish type arguments were stripped by the patch
+ // corruption; presumably `AgentPanel` — confirm against the original.)
+ workspace.focus_panel::(window, cx);
+ if let Some(panel) = workspace.panel::(cx) {
+ panel.update(cx, |panel, cx| {
+ panel.external_thread(None, None, Some(initial_content), window, cx);
+ });
+ }
+ });
+ })?;
+
+ // Bring the new workspace to the foreground.
+ new_window_handle.update(cx, |multi_workspace, _window, cx| {
+ multi_workspace.activate(new_workspace.clone(), cx);
+ })?;
+
+ // Clear the "Creating worktree…" banner on the originating panel.
+ this.update_in(cx, |this, _window, cx| {
+ this.worktree_creation_status = None;
+ cx.notify();
+ })?;
+
+ anyhow::Ok(())
+ }
+}
+
+impl Focusable for AgentPanel {
+ /// Routes focus to whichever view is currently active in the panel,
+ /// falling back to the panel's own focus handle when there is no
+ /// focusable child (uninitialized, or configuration not yet built).
+ fn focus_handle(&self, cx: &App) -> FocusHandle {
+ match &self.active_view {
+ ActiveView::Uninitialized => self.focus_handle.clone(),
+ ActiveView::AgentThread { server_view, .. } => server_view.focus_handle(cx),
+ ActiveView::History { kind } => match kind {
+ HistoryKind::AgentThreads => self.acp_history.focus_handle(cx),
+ HistoryKind::TextThreads => self.text_thread_history.focus_handle(cx),
+ },
+ ActiveView::TextThread {
+ text_thread_editor, ..
+ } => text_thread_editor.focus_handle(cx),
+ ActiveView::Configuration => {
+ // Configuration view is created lazily; fall back if absent.
+ if let Some(configuration) = self.configuration.as_ref() {
+ configuration.focus_handle(cx)
+ } else {
+ self.focus_handle.clone()
+ }
+ }
+ }
+ }
+}
+
+/// Reads the agent panel's dock position from global `AgentSettings`.
+fn agent_panel_dock_position(cx: &App) -> DockPosition {
+ AgentSettings::get_global(cx).dock.into()
+}
+
+/// Events emitted by [`AgentPanel`] for interested subscribers.
+pub enum AgentPanelEvent {
+ // Fired whenever the panel switches its active view.
+ ActiveViewChanged,
+}
+
+// NOTE(review): the event type parameters on these two `EventEmitter` impls
+// were stripped by the patch corruption (two distinct impls suggest two
+// distinct event types, presumably `AgentPanelEvent` plus another) — confirm
+// against the original commit.
+impl EventEmitter for AgentPanel {}
+impl EventEmitter for AgentPanel {}
+
+impl Panel for AgentPanel {
+ fn persistent_name() -> &'static str {
+ "AgentPanel"
+ }
+
+ fn panel_key() -> &'static str {
+ AGENT_PANEL_KEY
+ }
+
+ // Dock position comes from settings, not per-panel state.
+ fn position(&self, _window: &Window, cx: &App) -> DockPosition {
+ agent_panel_dock_position(cx)
+ }
+
+ // The agent panel may dock left or right, but never at the bottom.
+ fn position_is_valid(&self, position: DockPosition) -> bool {
+ position != DockPosition::Bottom
+ }
+
+ // Persists the chosen dock side back into the user's settings file.
+ fn set_position(&mut self, position: DockPosition, _: &mut Window, cx: &mut Context) {
+ settings::update_settings_file(self.fs.clone(), cx, move |settings, _| {
+ settings
+ .agent
+ .get_or_insert_default()
+ .set_dock(position.into());
+ });
+ }
+
+ // Uses the user-resized dimension when present, otherwise the settings default.
+ fn size(&self, window: &Window, cx: &App) -> Pixels {
+ let settings = AgentSettings::get_global(cx);
+ match self.position(window, cx) {
+ DockPosition::Left | DockPosition::Right => {
+ self.width.unwrap_or(settings.default_width)
+ }
+ DockPosition::Bottom => self.height.unwrap_or(settings.default_height),
+ }
+ }
+
+ fn set_size(&mut self, size: Option, window: &mut Window, cx: &mut Context) {
+ // Which dimension the size applies to depends on the dock side.
+ match self.position(window, cx) {
+ DockPosition::Left | DockPosition::Right => self.width = size,
+ DockPosition::Bottom => self.height = size,
+ }
+ self.serialize(cx);
+ cx.notify();
+ }
+
+ // Lazily starts a thread the first time the panel is activated, but not
+ // while a worktree is still being created (setup_new_workspace will take
+ // over once creation finishes).
+ fn set_active(&mut self, active: bool, window: &mut Window, cx: &mut Context) {
+ if active
+ && matches!(self.active_view, ActiveView::Uninitialized)
+ && !matches!(
+ self.worktree_creation_status,
+ Some(WorktreeCreationStatus::Creating)
+ )
+ {
+ let selected_agent = self.selected_agent.clone();
+ self.new_agent_thread(selected_agent, window, cx);
+ }
+ }
+
+ fn remote_id() -> Option {
+ Some(proto::PanelId::AssistantPanel)
+ }
+
+ // Only show the dock button when the panel is enabled and the setting allows it.
+ fn icon(&self, _window: &Window, cx: &App) -> Option {
+ (self.enabled(cx) && AgentSettings::get_global(cx).button).then_some(IconName::ZedAssistant)
+ }
+
+ fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
+ Some("Agent Panel")
+ }
+
+ fn toggle_action(&self) -> Box {
+ Box::new(ToggleFocus)
+ }
+
+ fn activation_priority(&self) -> u32 {
+ 3
+ }
+
+ fn enabled(&self, cx: &App) -> bool {
+ AgentSettings::get_global(cx).enabled(cx)
+ }
+
+ fn is_zoomed(&self, _window: &Window, _cx: &App) -> bool {
+ self.zoomed
+ }
+
+ fn set_zoomed(&mut self, zoomed: bool, _window: &mut Window, cx: &mut Context) {
+ self.zoomed = zoomed;
+ cx.notify();
+ }
+}
+
+impl AgentPanel {
+ fn render_title_view(&self, _window: &mut Window, cx: &Context) -> AnyElement {
+ const LOADING_SUMMARY_PLACEHOLDER: &str = "Loading Summary…";
+
+ let content = match &self.active_view {
+ ActiveView::AgentThread { server_view } => {
+ let is_generating_title = server_view
.read(cx)
.as_native_thread(cx)
.map_or(false, |t| t.read(cx).is_generating_title());
@@ -2331,6 +2984,99 @@ impl AgentPanel {
})
}
+ /// True when the project has discovered at least one git repository.
+ fn project_has_git_repository(&self, cx: &App) -> bool {
+ !self.project.read(cx).repositories(cx).is_empty()
+ }
+
+ fn render_start_thread_in_selector(&self, cx: &mut Context) -> impl IntoElement {
+ let has_git_repo = self.project_has_git_repository(cx);
+ let is_via_collab = self.project.read(cx).is_via_collab();
+
+ let is_creating = matches!(
+ self.worktree_creation_status,
+ Some(WorktreeCreationStatus::Creating)
+ );
+
+ let current_target = self.start_thread_in;
+ let trigger_label = self.start_thread_in.label();
+
+ let icon = if self.start_thread_in_menu_handle.is_deployed() {
+ IconName::ChevronUp
+ } else {
+ IconName::ChevronDown
+ };
+
+ let trigger_button = Button::new("thread-target-trigger", trigger_label)
+ .label_size(LabelSize::Small)
+ .color(Color::Muted)
+ .icon(icon)
+ .icon_size(IconSize::XSmall)
+ .icon_position(IconPosition::End)
+ .icon_color(Color::Muted)
+ .disabled(is_creating);
+
+ let dock_position = AgentSettings::get_global(cx).dock;
+ let documentation_side = match dock_position {
+ settings::DockPosition::Left => DocumentationSide::Right,
+ settings::DockPosition::Bottom | settings::DockPosition::Right => {
+ DocumentationSide::Left
+ }
+ };
+
+ PopoverMenu::new("thread-target-selector")
+ .trigger(trigger_button)
+ .anchor(gpui::Corner::BottomRight)
+ .with_handle(self.start_thread_in_menu_handle.clone())
+ .menu(move |window, cx| {
+ let current_target = current_target;
+ Some(ContextMenu::build(window, cx, move |menu, _window, _cx| {
+ let is_local_selected = current_target == StartThreadIn::LocalProject;
+ let is_new_worktree_selected = current_target == StartThreadIn::NewWorktree;
+
+ let new_worktree_disabled = !has_git_repo || is_via_collab;
+
+ menu.header("Start Thread In…")
+ .item(
+ ContextMenuEntry::new("Local Project")
+ .icon(StartThreadIn::LocalProject.icon())
+ .icon_color(Color::Muted)
+ .toggleable(IconPosition::End, is_local_selected)
+ .handler(|window, cx| {
+ window
+ .dispatch_action(Box::new(StartThreadIn::LocalProject), cx);
+ }),
+ )
+ .item({
+ let entry = ContextMenuEntry::new("New Worktree")
+ .icon(StartThreadIn::NewWorktree.icon())
+ .icon_color(Color::Muted)
+ .toggleable(IconPosition::End, is_new_worktree_selected)
+ .disabled(new_worktree_disabled)
+ .handler(|window, cx| {
+ window
+ .dispatch_action(Box::new(StartThreadIn::NewWorktree), cx);
+ });
+
+ if new_worktree_disabled {
+ entry.documentation_aside(documentation_side, move |_| {
+ let reason = if !has_git_repo {
+ "No git repository found in this project."
+ } else {
+ "Not available for remote/collab projects yet."
+ };
+ Label::new(reason)
+ .color(Color::Muted)
+ .size(LabelSize::Small)
+ .into_any_element()
+ })
+ } else {
+ entry
+ }
+ })
+ }))
+ })
+ }
+
fn render_toolbar(&self, window: &mut Window, cx: &mut Context) -> impl IntoElement {
let agent_server_store = self.project.read(cx).agent_server_store().clone();
let focus_handle = self.focus_handle(cx);
@@ -2718,6 +3464,7 @@ impl AgentPanel {
};
let show_history_menu = self.history_kind_for_selected_agent(cx).is_some();
+ let has_v2_flag = cx.has_flag::();
h_flex()
.id("agent-panel-toolbar")
@@ -2748,6 +3495,12 @@ impl AgentPanel {
.gap(DynamicSpacing::Base02.rems(cx))
.pl(DynamicSpacing::Base04.rems(cx))
.pr(DynamicSpacing::Base06.rems(cx))
+ .when(
+ has_v2_flag
+ && cx.has_flag::()
+ && !self.active_thread_has_messages(cx),
+ |this| this.child(self.render_start_thread_in_selector(cx)),
+ )
.child(new_thread_menu)
.when(show_history_menu, |this| {
this.child(self.render_recent_entries_menu(
@@ -2760,6 +3513,51 @@ impl AgentPanel {
)
}
+ fn render_worktree_creation_status(&self, cx: &mut Context) -> Option {
+ let status = self.worktree_creation_status.as_ref()?;
+ match status {
+ WorktreeCreationStatus::Creating => Some(
+ h_flex()
+ .w_full()
+ .px(DynamicSpacing::Base06.rems(cx))
+ .py(DynamicSpacing::Base02.rems(cx))
+ .gap_2()
+ .bg(cx.theme().colors().surface_background)
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(SpinnerLabel::new().size(LabelSize::Small))
+ .child(
+ Label::new("Creating worktree…")
+ .color(Color::Muted)
+ .size(LabelSize::Small),
+ )
+ .into_any_element(),
+ ),
+ WorktreeCreationStatus::Error(message) => Some(
+ h_flex()
+ .w_full()
+ .px(DynamicSpacing::Base06.rems(cx))
+ .py(DynamicSpacing::Base02.rems(cx))
+ .gap_2()
+ .bg(cx.theme().colors().surface_background)
+ .border_b_1()
+ .border_color(cx.theme().colors().border)
+ .child(
+ Icon::new(IconName::Warning)
+ .size(IconSize::Small)
+ .color(Color::Warning),
+ )
+ .child(
+ Label::new(message.clone())
+ .color(Color::Warning)
+ .size(LabelSize::Small)
+ .truncate(),
+ )
+ .into_any_element(),
+ ),
+ }
+ }
+
fn should_render_trial_end_upsell(&self, cx: &mut Context) -> bool {
if TrialEndUpsell::dismissed() {
return false;
@@ -3191,6 +3989,7 @@ impl Render for AgentPanel {
}
}))
.child(self.render_toolbar(window, cx))
+ .children(self.render_worktree_creation_status(cx))
.children(self.render_workspace_trust_message(cx))
.children(self.render_onboarding(window, cx))
.map(|parent| {
@@ -3456,7 +4255,7 @@ impl AgentPanel {
name: server.name(),
};
- self._external_thread(
+ self.create_external_thread(
server, None, None, workspace, project, ext_agent, window, cx,
);
}
@@ -3468,6 +4267,61 @@ impl AgentPanel {
pub fn active_thread_view_for_tests(&self) -> Option<&Entity> {
self.active_thread_view()
}
+
+ /// Sets the start_thread_in value directly, bypassing validation.
+ ///
+ /// This is a test-only helper for visual tests that need to show specific
+ /// start_thread_in states without requiring a real git repository.
+ pub fn set_start_thread_in_for_tests(&mut self, target: StartThreadIn, cx: &mut Context) {
+ self.start_thread_in = target;
+ // Trigger a re-render so the toolbar selector reflects the new target.
+ cx.notify();
+ }
+
+ /// Returns the current worktree creation status, if any.
+ ///
+ /// This is a test-only helper for visual tests.
+ pub fn worktree_creation_status_for_tests(&self) -> Option<&WorktreeCreationStatus> {
+ self.worktree_creation_status.as_ref()
+ }
+
+ /// Sets the worktree creation status directly.
+ ///
+ /// This is a test-only helper for visual tests that need to show the
+ /// "Creating worktree…" spinner or error banners.
+ pub fn set_worktree_creation_status_for_tests(
+ &mut self,
+ status: Option,
+ cx: &mut Context,
+ ) {
+ self.worktree_creation_status = status;
+ // Re-render so the banner appears/disappears immediately.
+ cx.notify();
+ }
+
+ /// Opens the history view.
+ ///
+ /// This is a test-only helper that exposes the private `open_history()`
+ /// method for visual tests.
+ pub fn open_history_for_tests(&mut self, window: &mut Window, cx: &mut Context) {
+ self.open_history(window, cx);
+ }
+
+ /// Opens the start_thread_in selector popover menu.
+ ///
+ /// This is a test-only helper for visual tests.
+ pub fn open_start_thread_in_menu_for_tests(
+ &mut self,
+ window: &mut Window,
+ cx: &mut Context,
+ ) {
+ self.start_thread_in_menu_handle.show(window, cx);
+ }
+
+ /// Dismisses the start_thread_in dropdown menu.
+ ///
+ /// This is a test-only helper for visual tests.
+ pub fn close_start_thread_in_menu_for_tests(&mut self, cx: &mut Context) {
+ self.start_thread_in_menu_handle.hide(cx);
+ }
}
#[cfg(test)]
@@ -3479,6 +4333,7 @@ mod tests {
use fs::FakeFs;
use gpui::{TestAppContext, VisualTestContext};
use project::Project;
+ use serde_json::json;
use workspace::MultiWorkspace;
#[gpui::test]
@@ -3581,9 +4436,7 @@ mod tests {
.expect("panel B load should succeed");
cx.run_until_parked();
- // Workspace A should restore width and agent type, but the thread
- // should NOT be restored because the stub agent never persisted it
- // to the database (the load-side validation skips missing threads).
+ // Workspace A should restore its thread, width, and agent type
loaded_a.read_with(cx, |panel, _cx| {
assert_eq!(
panel.width,
@@ -3594,6 +4447,10 @@ mod tests {
panel.selected_agent, agent_type_a,
"workspace A agent type should be restored"
);
+ assert!(
+ panel.active_thread_view().is_some(),
+ "workspace A should have its active thread restored"
+ );
});
// Workspace B should restore its own width and agent type, with no thread
@@ -3663,4 +4520,383 @@ mod tests {
cx.run_until_parked();
}
+
+ // Verifies the default LocalProject thread target: starting a thread must
+ // not spawn a new workspace, must leave the target unchanged, and must not
+ // touch worktree creation state.
+ #[gpui::test]
+ async fn test_thread_target_local_project(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ });
+
+ // A fake project containing a git repository on branch "main".
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/project",
+ json!({
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+ fs.set_branch_name(Path::new("/project/.git"), Some("main"));
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ // Wait for the project to discover the git repository.
+ cx.run_until_parked();
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+ let panel =
+ cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx));
+ workspace.add_panel(panel.clone(), window, cx);
+ panel
+ });
+
+ cx.run_until_parked();
+
+ // Default thread target should be LocalProject.
+ panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::LocalProject,
+ "default thread target should be LocalProject"
+ );
+ });
+
+ // Start a new thread with the default LocalProject target.
+ // Use StubAgentServer so the thread connects immediately in tests.
+ panel.update_in(cx, |panel, window, cx| {
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::default_response()),
+ window,
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ // MultiWorkspace should still have exactly one workspace (no worktree created).
+ multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ assert_eq!(
+ multi_workspace.workspaces().len(),
+ 1,
+ "LocalProject should not create a new workspace"
+ );
+ })
+ .unwrap();
+
+ // The thread should be active in the panel.
+ panel.read_with(cx, |panel, cx| {
+ assert!(
+ panel.active_agent_thread(cx).is_some(),
+ "a thread should be running in the current workspace"
+ );
+ });
+
+ // The thread target should still be LocalProject (unchanged).
+ panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::LocalProject,
+ "thread target should remain LocalProject"
+ );
+ });
+
+ // No worktree creation status should be set.
+ panel.read_with(cx, |panel, _cx| {
+ assert!(
+ panel.worktree_creation_status.is_none(),
+ "no worktree creation should have occurred"
+ );
+ });
+ }
+
+ // Verifies that a NewWorktree thread target survives panel serialization
+ // and is restored by `AgentPanel::load` while both feature flags are on.
+ #[gpui::test]
+ async fn test_thread_target_serialization_round_trip(cx: &mut TestAppContext) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(
+ true,
+ vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()],
+ );
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ });
+
+ // A fake project containing a git repository on branch "main".
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/project",
+ json!({
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+ fs.set_branch_name(Path::new("/project/.git"), Some("main"));
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ // Wait for the project to discover the git repository.
+ cx.run_until_parked();
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+ let panel =
+ cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx));
+ workspace.add_panel(panel.clone(), window, cx);
+ panel
+ });
+
+ cx.run_until_parked();
+
+ // Default should be LocalProject.
+ panel.read_with(cx, |panel, _cx| {
+ assert_eq!(*panel.start_thread_in(), StartThreadIn::LocalProject);
+ });
+
+ // Change thread target to NewWorktree.
+ panel.update(cx, |panel, cx| {
+ panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx);
+ });
+
+ panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::NewWorktree,
+ "thread target should be NewWorktree after set_thread_target"
+ );
+ });
+
+ // Let serialization complete.
+ cx.run_until_parked();
+
+ // Load a fresh panel from the serialized data.
+ let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap());
+ let async_cx = cx.update(|window, cx| window.to_async(cx));
+ let loaded_panel =
+ AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx)
+ .await
+ .expect("panel load should succeed");
+ cx.run_until_parked();
+
+ loaded_panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::NewWorktree,
+ "thread target should survive serialization round-trip"
+ );
+ });
+ }
+
+ // Verifies that a serialized NewWorktree target falls back to LocalProject
+ // on load when the "agent-git-worktrees" flag has since been disabled.
+ #[gpui::test]
+ async fn test_thread_target_deserialization_falls_back_when_worktree_flag_disabled(
+ cx: &mut TestAppContext,
+ ) {
+ init_test(cx);
+ cx.update(|cx| {
+ cx.update_flags(
+ true,
+ vec!["agent-v2".to_string(), "agent-git-worktrees".to_string()],
+ );
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ });
+
+ // A fake project containing a git repository on branch "main".
+ let fs = FakeFs::new(cx.executor());
+ fs.insert_tree(
+ "/project",
+ json!({
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+ fs.set_branch_name(Path::new("/project/.git"), Some("main"));
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ workspace.update(cx, |workspace, _cx| {
+ workspace.set_random_database_id();
+ });
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ // Wait for the project to discover the git repository.
+ cx.run_until_parked();
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+ let panel =
+ cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx));
+ workspace.add_panel(panel.clone(), window, cx);
+ panel
+ });
+
+ cx.run_until_parked();
+
+ panel.update(cx, |panel, cx| {
+ panel.set_start_thread_in(&StartThreadIn::NewWorktree, cx);
+ });
+
+ panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::NewWorktree,
+ "thread target should be NewWorktree before reload"
+ );
+ });
+
+ // Let serialization complete.
+ cx.run_until_parked();
+
+ // Disable worktree flag and reload panel from serialized data.
+ cx.update(|_, cx| {
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ });
+
+ let prompt_builder = Arc::new(prompt_store::PromptBuilder::new(None).unwrap());
+ let async_cx = cx.update(|window, cx| window.to_async(cx));
+ let loaded_panel =
+ AgentPanel::load(workspace.downgrade(), prompt_builder.clone(), async_cx)
+ .await
+ .expect("panel load should succeed");
+ cx.run_until_parked();
+
+ loaded_panel.read_with(cx, |panel, _cx| {
+ assert_eq!(
+ *panel.start_thread_in(),
+ StartThreadIn::LocalProject,
+ "thread target should fall back to LocalProject when worktree flag is disabled"
+ );
+ });
+ }
+
+ // Verifies that Panel::set_active does NOT auto-start a thread while a
+ // worktree is being created, and that the panel can still leave the
+ // Uninitialized state once creation status is cleared.
+ #[gpui::test]
+ async fn test_set_active_blocked_during_worktree_creation(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor());
+ cx.update(|cx| {
+ cx.update_flags(true, vec!["agent-v2".to_string()]);
+ agent::ThreadStore::init_global(cx);
+ language_model::LanguageModelRegistry::test(cx);
+ ::set_global(fs.clone(), cx);
+ });
+
+ fs.insert_tree(
+ "/project",
+ json!({
+ ".git": {},
+ "src": {
+ "main.rs": "fn main() {}"
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), [Path::new("/project")], cx).await;
+
+ let multi_workspace =
+ cx.add_window(|window, cx| MultiWorkspace::test_new(project.clone(), window, cx));
+
+ let workspace = multi_workspace
+ .read_with(cx, |multi_workspace, _cx| {
+ multi_workspace.workspace().clone()
+ })
+ .unwrap();
+
+ let cx = &mut VisualTestContext::from_window(multi_workspace.into(), cx);
+
+ let panel = workspace.update_in(cx, |workspace, window, cx| {
+ let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
+ let panel =
+ cx.new(|cx| AgentPanel::new(workspace, text_thread_store, None, window, cx));
+ workspace.add_panel(panel.clone(), window, cx);
+ panel
+ });
+
+ cx.run_until_parked();
+
+ // Simulate worktree creation in progress and reset to Uninitialized
+ panel.update_in(cx, |panel, window, cx| {
+ panel.worktree_creation_status = Some(WorktreeCreationStatus::Creating);
+ panel.active_view = ActiveView::Uninitialized;
+ Panel::set_active(panel, true, window, cx);
+ assert!(
+ matches!(panel.active_view, ActiveView::Uninitialized),
+ "set_active should not create a thread while worktree is being created"
+ );
+ });
+
+ // Clear the creation status and use open_external_thread_with_server
+ // (which bypasses new_agent_thread) to verify the panel can transition
+ // out of Uninitialized. We can't call set_active directly because
+ // new_agent_thread requires full agent server infrastructure.
+ panel.update_in(cx, |panel, window, cx| {
+ panel.worktree_creation_status = None;
+ panel.active_view = ActiveView::Uninitialized;
+ panel.open_external_thread_with_server(
+ Rc::new(StubAgentServer::default_response()),
+ window,
+ cx,
+ );
+ });
+
+ cx.run_until_parked();
+
+ panel.read_with(cx, |panel, _cx| {
+ assert!(
+ !matches!(panel.active_view, ActiveView::Uninitialized),
+ "panel should transition out of Uninitialized once worktree creation is cleared"
+ );
+ });
+ }
}
diff --git a/crates/agent_ui/src/agent_ui.rs b/crates/agent_ui/src/agent_ui.rs
index ad778ca496f7815d0155f98187c8fad3e81365eb..5ae2d677ba6dd4622127b39938f2bf005e7fcab9 100644
--- a/crates/agent_ui/src/agent_ui.rs
+++ b/crates/agent_ui/src/agent_ui.rs
@@ -3,6 +3,7 @@ mod agent_diff;
mod agent_model_selector;
mod agent_panel;
mod agent_registry_ui;
+mod branch_names;
mod buffer_codegen;
mod completion_provider;
mod config_options;
@@ -55,7 +56,9 @@ use std::any::TypeId;
use workspace::Workspace;
use crate::agent_configuration::{ConfigureContextServerModal, ManageProfilesModal};
-pub use crate::agent_panel::{AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate};
+pub use crate::agent_panel::{
+ AgentPanel, AgentPanelEvent, ConcreteAssistantPanelDelegate, WorktreeCreationStatus,
+};
use crate::agent_registry_ui::AgentRegistryPage;
pub use crate::inline_assistant::InlineAssistant;
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
@@ -222,6 +225,18 @@ impl ExternalAgent {
}
}
+/// Sets where new threads will run.
+///
+/// Doubles as a gpui `Action` (dispatched by the toolbar selector) and is
+/// serialized with a snake_case `kind` tag for persistence.
+#[derive(
+ Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize, JsonSchema, Action,
+)]
+#[action(namespace = agent)]
+#[serde(rename_all = "snake_case", tag = "kind")]
+pub enum StartThreadIn {
+ // Run the thread directly in the currently open project (the default).
+ #[default]
+ LocalProject,
+ // Create a fresh git worktree and run the thread in a new workspace there.
+ NewWorktree,
+}
+
/// Content to initialize new external agent with.
pub enum AgentInitialContent {
ThreadSummary(acp_thread::AgentSessionInfo),
diff --git a/crates/agent_ui/src/branch_names.rs b/crates/agent_ui/src/branch_names.rs
new file mode 100644
index 0000000000000000000000000000000000000000..74e3dbc76b729309403606dfbecc8ea87f271913
--- /dev/null
+++ b/crates/agent_ui/src/branch_names.rs
@@ -0,0 +1,847 @@
+use collections::HashSet;
+use rand::Rng;
+
+/// Names of historical typewriter brands, for use in auto-generated branch names.
+/// (Hyphens and parens have been dropped so that the branch names are one-word.)
+///
+/// Thanks to https://typewriterdatabase.com/alph.0.brands for the names!
+const TYPEWRITER_NAMES: &[&str] = &[
+ "abeille",
+ "acme",
+ "addo",
+ "adler",
+ "adlerette",
+ "adlerita",
+ "admiral",
+ "agamli",
+ "agar",
+ "agidel",
+ "agil",
+ "aguia",
+ "aguila",
+ "ahram",
+ "aigle",
+ "ajax",
+ "aktiv",
+ "ala",
+ "alba",
+ "albus",
+ "alexander",
+ "alexis",
+ "alfa",
+ "allen",
+ "alonso",
+ "alpina",
+ "amata",
+ "amaya",
+ "amka",
+ "anavi",
+ "anderson",
+ "andina",
+ "antares",
+ "apex",
+ "apsco",
+ "aquila",
+ "archo",
+ "ardita",
+ "argyle",
+ "aristocrat",
+ "aristokrat",
+ "arlington",
+ "armstrong",
+ "arpha",
+ "artus",
+ "astoria",
+ "atlantia",
+ "atlantic",
+ "atlas",
+ "augusta",
+ "aurora",
+ "austro",
+ "automatic",
+ "avanti",
+ "avona",
+ "azzurra",
+ "bajnok",
+ "baldwin",
+ "balkan",
+ "baltica",
+ "baltimore",
+ "barlock",
+ "barr",
+ "barrat",
+ "bartholomew",
+ "bashkiriya",
+ "bavaria",
+ "beaucourt",
+ "beko",
+ "belka",
+ "bennett",
+ "bennington",
+ "berni",
+ "bianca",
+ "bijou",
+ "bing",
+ "bisei",
+ "biser",
+ "bluebird",
+ "bolida",
+ "borgo",
+ "boston",
+ "boyce",
+ "bradford",
+ "brandenburg",
+ "brigitte",
+ "briton",
+ "brooks",
+ "brosette",
+ "buddy",
+ "burns",
+ "burroughs",
+ "byron",
+ "calanda",
+ "caligraph",
+ "cappel",
+ "cardinal",
+ "carissima",
+ "carlem",
+ "carlton",
+ "carmen",
+ "cawena",
+ "cella",
+ "celtic",
+ "century",
+ "champignon",
+ "cherryland",
+ "chevron",
+ "chicago",
+ "cicero",
+ "cifra",
+ "citizen",
+ "claudia",
+ "cleveland",
+ "clover",
+ "coffman",
+ "cole",
+ "columbia",
+ "commercial",
+ "companion",
+ "concentra",
+ "concord",
+ "concordia",
+ "conover",
+ "constanta",
+ "consul",
+ "conta",
+ "contenta",
+ "contimat",
+ "contina",
+ "continento",
+ "cornelia",
+ "coronado",
+ "cosmopolita",
+ "courier",
+ "craftamatic",
+ "crandall",
+ "crown",
+ "culema",
+ "dactyle",
+ "dankers",
+ "dart",
+ "daugherty",
+ "davis",
+ "dayton",
+ "dea",
+ "delmar",
+ "densmore",
+ "depantio",
+ "diadema",
+ "dial",
+ "diamant",
+ "diana",
+ "dictatype",
+ "diplomat",
+ "diskret",
+ "dolfus",
+ "dollar",
+ "domus",
+ "drake",
+ "draper",
+ "duplex",
+ "durabel",
+ "dynacord",
+ "eagle",
+ "eclipse",
+ "edelmann",
+ "edelweiss",
+ "edison",
+ "edita",
+ "edland",
+ "efka",
+ "eldorado",
+ "electa",
+ "electromatic",
+ "elektro",
+ "elgin",
+ "elliot",
+ "emerson",
+ "emka",
+ "emona",
+ "empire",
+ "engadine",
+ "engler",
+ "erfurt",
+ "erika",
+ "esko",
+ "essex",
+ "eureka",
+ "europa",
+ "everest",
+ "everlux",
+ "excelsior",
+ "express",
+ "fabers",
+ "facit",
+ "fairbanks",
+ "faktotum",
+ "famos",
+ "federal",
+ "felio",
+ "fidat",
+ "filius",
+ "fips",
+ "fish",
+ "fitch",
+ "fleet",
+ "florida",
+ "flott",
+ "flyer",
+ "flying",
+ "fontana",
+ "ford",
+ "forto",
+ "fortuna",
+ "fox",
+ "framo",
+ "franconia",
+ "franklin",
+ "friden",
+ "frolio",
+ "furstenberg",
+ "galesburg",
+ "galiette",
+ "gallia",
+ "garbell",
+ "gardner",
+ "geka",
+ "generation",
+ "genia",
+ "geniatus",
+ "gerda",
+ "gisela",
+ "glashutte",
+ "gloria",
+ "godrej",
+ "gossen",
+ "gourland",
+ "grandjean",
+ "granta",
+ "granville",
+ "graphic",
+ "gritzner",
+ "groma",
+ "guhl",
+ "guidonia",
+ "gundka",
+ "hacabo",
+ "haddad",
+ "halberg",
+ "halda",
+ "hall",
+ "hammond",
+ "hammonia",
+ "hanford",
+ "hansa",
+ "harmony",
+ "harris",
+ "hartford",
+ "hassia",
+ "hatch",
+ "heady",
+ "hebronia",
+ "hebros",
+ "hega",
+ "helios",
+ "helma",
+ "herald",
+ "hercules",
+ "hermes",
+ "herold",
+ "heros",
+ "hesperia",
+ "hogar",
+ "hooven",
+ "hopkins",
+ "horton",
+ "hugin",
+ "hungaria",
+ "hurtu",
+ "iberia",
+ "idea",
+ "ideal",
+ "imperia",
+ "impo",
+ "industria",
+ "industrio",
+ "ingersoll",
+ "international",
+ "invicta",
+ "irene",
+ "iris",
+ "iskra",
+ "ivitsa",
+ "ivriah",
+ "jackson",
+ "janalif",
+ "janos",
+ "jolux",
+ "juki",
+ "junior",
+ "juventa",
+ "juwel",
+ "kamkap",
+ "kamo",
+ "kanzler",
+ "kappel",
+ "karli",
+ "karstadt",
+ "keaton",
+ "kenbar",
+ "keystone",
+ "kim",
+ "klein",
+ "kneist",
+ "knoch",
+ "koh",
+ "kolibri",
+ "kolumbus",
+ "komet",
+ "kondor",
+ "koniger",
+ "konryu",
+ "kontor",
+ "kosmopolit",
+ "krypton",
+ "lambert",
+ "lasalle",
+ "lectra",
+ "leframa",
+ "lemair",
+ "lemco",
+ "liberty",
+ "libia",
+ "liga",
+ "lignose",
+ "lilliput",
+ "lindeteves",
+ "linowriter",
+ "listvitsa",
+ "ludolf",
+ "lutece",
+ "luxa",
+ "lyubava",
+ "mafra",
+ "magnavox",
+ "maher",
+ "majestic",
+ "majitouch",
+ "manhattan",
+ "mapuua",
+ "marathon",
+ "marburger",
+ "maritsa",
+ "maruzen",
+ "maskelyne",
+ "masspro",
+ "matous",
+ "mccall",
+ "mccool",
+ "mcloughlin",
+ "mead",
+ "mechno",
+ "mehano",
+ "meiselbach",
+ "melbi",
+ "melior",
+ "melotyp",
+ "mentor",
+ "mepas",
+ "mercedesia",
+ "mercurius",
+ "mercury",
+ "merkur",
+ "merritt",
+ "merz",
+ "messa",
+ "meteco",
+ "meteor",
+ "micron",
+ "mignon",
+ "mikro",
+ "minerva",
+ "mirian",
+ "mirina",
+ "mitex",
+ "molle",
+ "monac",
+ "monarch",
+ "mondiale",
+ "monica",
+ "monofix",
+ "monopol",
+ "monpti",
+ "monta",
+ "montana",
+ "montgomery",
+ "moon",
+ "morgan",
+ "morris",
+ "morse",
+ "moya",
+ "moyer",
+ "munson",
+ "musicwriter",
+ "nadex",
+ "nakajima",
+ "neckermann",
+ "neubert",
+ "neya",
+ "ninety",
+ "nisa",
+ "noiseless",
+ "noor",
+ "nora",
+ "nord",
+ "norden",
+ "norica",
+ "norma",
+ "norman",
+ "north",
+ "nototyp",
+ "nova",
+ "novalevi",
+ "odell",
+ "odhner",
+ "odo",
+ "odoma",
+ "ohio",
+ "ohtani",
+ "oliva",
+ "oliver",
+ "olivetti",
+ "olympia",
+ "omega",
+ "optima",
+ "orbis",
+ "orel",
+ "orga",
+ "oriette",
+ "orion",
+ "orn",
+ "orplid",
+ "pacior",
+ "pagina",
+ "parisienne",
+ "passat",
+ "pearl",
+ "peerless",
+ "perfect",
+ "perfecta",
+ "perkeo",
+ "perkins",
+ "perlita",
+ "pettypet",
+ "phoenix",
+ "piccola",
+ "picht",
+ "pinnock",
+ "pionier",
+ "plurotyp",
+ "plutarch",
+ "pneumatic",
+ "pocket",
+ "polyglott",
+ "polygraph",
+ "pontiac",
+ "portable",
+ "portex",
+ "pozzi",
+ "premier",
+ "presto",
+ "primavera",
+ "progress",
+ "protos",
+ "pterotype",
+ "pullman",
+ "pulsatta",
+ "quick",
+ "racer",
+ "radio",
+ "rally",
+ "rand",
+ "readers",
+ "reed",
+ "referent",
+ "reff",
+ "regent",
+ "regia",
+ "regina",
+ "rekord",
+ "reliable",
+ "reliance",
+ "remagg",
+ "rembrandt",
+ "remer",
+ "remington",
+ "remsho",
+ "remstar",
+ "remtor",
+ "reporters",
+ "resko",
+ "rex",
+ "rexpel",
+ "rheinita",
+ "rheinmetall",
+ "rival",
+ "roberts",
+ "robotron",
+ "rocher",
+ "rochester",
+ "roebuck",
+ "rofa",
+ "roland",
+ "rooy",
+ "rover",
+ "roxy",
+ "roy",
+ "royal",
+ "rundstatler",
+ "sabaudia",
+ "sabb",
+ "saleem",
+ "salter",
+ "sampo",
+ "sarafan",
+ "saturn",
+ "saxonia",
+ "schade",
+ "schapiro",
+ "schreibi",
+ "scripta",
+ "sears",
+ "secor",
+ "selectric",
+ "selekta",
+ "senator",
+ "sense",
+ "senta",
+ "serd",
+ "shilling",
+ "shimade",
+ "shimer",
+ "sholes",
+ "shuang",
+ "siegfried",
+ "siemag",
+ "silma",
+ "silver",
+ "simplex",
+ "simtype",
+ "singer",
+ "smith",
+ "soemtron",
+ "sonja",
+ "speedwriter",
+ "sphinx",
+ "starlet",
+ "stearns",
+ "steel",
+ "stella",
+ "steno",
+ "sterling",
+ "stoewer",
+ "stolzenberg",
+ "stott",
+ "strangfeld",
+ "sture",
+ "stylotyp",
+ "sun",
+ "superba",
+ "superia",
+ "supermetall",
+ "surety",
+ "swintec",
+ "swissa",
+ "talbos",
+ "talleres",
+ "tatrapoint",
+ "taurus",
+ "taylorix",
+ "tell",
+ "tempotype",
+ "tippco",
+ "titania",
+ "tops",
+ "towa",
+ "toyo",
+ "tradition",
+ "transatlantic",
+ "traveller",
+ "trebla",
+ "triumph",
+ "turia",
+ "typatune",
+ "typen",
+ "typorium",
+ "ugro",
+ "ultima",
+ "unda",
+ "underwood",
+ "unica",
+ "unitype",
+ "ursula",
+ "utax",
+ "varityper",
+ "vasanta",
+ "vendex",
+ "venus",
+ "victor",
+ "victoria",
+ "video",
+ "viking",
+ "vira",
+ "virotyp",
+ "visigraph",
+ "vittoria",
+ "volcan",
+ "vornado",
+ "voss",
+ "vultur",
+ "waltons",
+ "wanamaker",
+ "wanderer",
+ "ward",
+ "warner",
+ "waterloo",
+ "waverley",
+ "wayne",
+ "webster",
+ "wedgefield",
+ "welco",
+ "wellington",
+ "wellon",
+ "weltblick",
+ "westphalia",
+ "wiedmer",
+ "williams",
+ "wilson",
+ "winkel",
+ "winsor",
+ "wizard",
+ "woodstock",
+ "woodwards",
+ "yatran",
+ "yost",
+ "zenit",
+ "zentronik",
+ "zeta",
+ "zeya",
+];
+
+/// Picks a typewriter name that isn't already taken by an existing branch.
+///
+/// Each entry in `existing_branches` is expected to be a full branch name
+/// like `"olivetti-a3f9b2c1"`. The prefix before the last `'-'` is treated
+/// as the taken typewriter name. Branches without a `'-'` are ignored.
+///
+/// Returns `None` when every name in the pool is already taken.
+pub fn pick_typewriter_name(
+ existing_branches: &[&str],
+ rng: &mut impl Rng,
+) -> Option<&'static str> {
+ let disallowed: HashSet<&str> = existing_branches
+ .iter()
+ .filter_map(|branch| branch.rsplit_once('-').map(|(prefix, _)| prefix))
+ .collect();
+
+ let available: Vec<&'static str> = TYPEWRITER_NAMES
+ .iter()
+ .copied()
+ .filter(|name| !disallowed.contains(name))
+ .collect();
+
+ if available.is_empty() {
+ return None;
+ }
+
+ let index = rng.random_range(0..available.len());
+ Some(available[index])
+}
+
+/// Generates a branch name like `"olivetti-a3f9b2c1"` by picking a typewriter
+/// name that isn't already taken and appending an 8-character alphanumeric hash.
+///
+/// Returns `None` when every typewriter name in the pool is already taken.
+pub fn generate_branch_name(existing_branches: &[&str], rng: &mut impl Rng) -> Option {
+ let typewriter_name = pick_typewriter_name(existing_branches, rng)?;
+ let hash: String = (0..8)
+ .map(|_| {
+ let idx: u8 = rng.random_range(0..36);
+ if idx < 10 {
+ (b'0' + idx) as char
+ } else {
+ (b'a' + idx - 10) as char
+ }
+ })
+ .collect();
+ Some(format!("{typewriter_name}-{hash}"))
+}
+
#[cfg(test)]
mod tests {
    use super::*;
    use rand::rngs::StdRng;

    #[gpui::test(iterations = 10)]
    fn test_pick_typewriter_name_with_no_disallowed(mut rng: StdRng) {
        // With no existing branches, any pool entry is a valid pick.
        let name = pick_typewriter_name(&[], &mut rng);
        assert!(name.is_some());
        assert!(TYPEWRITER_NAMES.contains(&name.unwrap()));
    }

    #[gpui::test(iterations = 10)]
    fn test_pick_typewriter_name_excludes_taken_names(mut rng: StdRng) {
        let branch_names = &["olivetti-abc12345", "selectric-def67890"];
        let name = pick_typewriter_name(branch_names, &mut rng).unwrap();
        assert_ne!(name, "olivetti");
        assert_ne!(name, "selectric");
    }

    #[gpui::test]
    fn test_pick_typewriter_name_all_taken(mut rng: StdRng) {
        // Exhaust the entire pool; the picker must give up rather than reuse.
        let branch_names: Vec<String> = TYPEWRITER_NAMES
            .iter()
            .map(|name| format!("{name}-00000000"))
            .collect();
        let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect();
        let name = pick_typewriter_name(&branch_name_refs, &mut rng);
        assert!(name.is_none());
    }

    #[gpui::test(iterations = 10)]
    fn test_pick_typewriter_name_ignores_branches_without_hyphen(mut rng: StdRng) {
        // Branch names with no '-' carry no typewriter prefix and are skipped.
        let branch_names = &["main", "develop", "feature"];
        let name = pick_typewriter_name(branch_names, &mut rng);
        assert!(name.is_some());
        assert!(TYPEWRITER_NAMES.contains(&name.unwrap()));
    }

    #[gpui::test(iterations = 10)]
    fn test_generate_branch_name_format(mut rng: StdRng) {
        let branch_name = generate_branch_name(&[], &mut rng).unwrap();
        let (prefix, suffix) = branch_name.rsplit_once('-').unwrap();
        assert!(TYPEWRITER_NAMES.contains(&prefix));
        assert_eq!(suffix.len(), 8);
        assert!(suffix.chars().all(|c| c.is_ascii_alphanumeric()));
    }

    #[gpui::test]
    fn test_generate_branch_name_returns_none_when_exhausted(mut rng: StdRng) {
        let branch_names: Vec<String> = TYPEWRITER_NAMES
            .iter()
            .map(|name| format!("{name}-00000000"))
            .collect();
        let branch_name_refs: Vec<&str> = branch_names.iter().map(|s| s.as_str()).collect();
        let result = generate_branch_name(&branch_name_refs, &mut rng);
        assert!(result.is_none());
    }

    #[gpui::test(iterations = 100)]
    fn test_generate_branch_name_never_reuses_taken_prefix(mut rng: StdRng) {
        let existing = &["olivetti-123abc", "selectric-def456"];
        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
        assert_ne!(prefix, "olivetti");
        assert_ne!(prefix, "selectric");
    }

    #[gpui::test(iterations = 100)]
    fn test_generate_branch_name_avoids_multiple_taken_prefixes(mut rng: StdRng) {
        let existing = &[
            "olivetti-aaa11111",
            "selectric-bbb22222",
            "corona-ccc33333",
            "remington-ddd44444",
            "underwood-eee55555",
        ];
        let taken_prefixes: HashSet<&str> = existing
            .iter()
            .filter_map(|b| b.rsplit_once('-').map(|(prefix, _)| prefix))
            .collect();
        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
        assert!(
            !taken_prefixes.contains(prefix),
            "generated prefix {prefix:?} collides with an existing branch"
        );
    }

    #[gpui::test(iterations = 100)]
    fn test_generate_branch_name_with_varied_hash_suffixes(mut rng: StdRng) {
        // Multiple branches sharing one prefix must still exclude that prefix.
        let existing = &[
            "olivetti-aaaaaaaa",
            "olivetti-bbbbbbbb",
            "olivetti-cccccccc",
        ];
        let branch_name = generate_branch_name(existing, &mut rng).unwrap();
        let (prefix, _) = branch_name.rsplit_once('-').unwrap();
        assert_ne!(
            prefix, "olivetti",
            "should avoid olivetti regardless of how many variants exist"
        );
    }

    #[test]
    fn test_typewriter_names_are_valid() {
        // No duplicates: every entry must be unique.
        let mut seen = HashSet::default();
        for &name in TYPEWRITER_NAMES {
            assert!(
                seen.insert(name),
                "duplicate entry in TYPEWRITER_NAMES: {name:?}"
            );
        }

        // Sorted ascending: each adjacent pair must be in order.
        for window in TYPEWRITER_NAMES.windows(2) {
            assert!(
                window[0] <= window[1],
                "TYPEWRITER_NAMES is not sorted: {0:?} appears before {1:?}",
                window[0],
                window[1],
            );
        }

        // No hyphens: '-' is the separator between name and hash suffix,
        // so a hyphenated name would break prefix extraction.
        for &name in TYPEWRITER_NAMES {
            assert!(
                !name.contains('-'),
                "TYPEWRITER_NAMES entry contains a hyphen: {name:?}"
            );
        }

        // Lowercase only (non-alphabetic characters are allowed).
        for &name in TYPEWRITER_NAMES {
            assert!(
                name.chars().all(|c| c.is_lowercase() || !c.is_alphabetic()),
                "TYPEWRITER_NAMES entry is not lowercase: {name:?}"
            );
        }
    }
}
diff --git a/crates/agent_ui/src/connection_view.rs b/crates/agent_ui/src/connection_view.rs
index 93bf7c98098530b23522c60f987f9e341ebc69ca..07e34ccd56f0bd867135fe62894a5a3ff388c85e 100644
--- a/crates/agent_ui/src/connection_view.rs
+++ b/crates/agent_ui/src/connection_view.rs
@@ -26,10 +26,10 @@ use fs::Fs;
use futures::FutureExt as _;
use gpui::{
Action, Animation, AnimationExt, AnyView, App, ClickEvent, ClipboardItem, CursorStyle,
- ElementId, Empty, Entity, FocusHandle, Focusable, Hsla, ListOffset, ListState, ObjectFit,
- PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle, WeakEntity, Window,
- WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient, list, point,
- pulsating_between,
+ ElementId, Empty, Entity, EventEmitter, FocusHandle, Focusable, Hsla, ListOffset, ListState,
+ ObjectFit, PlatformDisplay, ScrollHandle, SharedString, Subscription, Task, TextStyle,
+ WeakEntity, Window, WindowHandle, div, ease_in_out, img, linear_color_stop, linear_gradient,
+ list, point, pulsating_between,
};
use language::Buffer;
use language_model::LanguageModelRegistry;
@@ -295,6 +295,12 @@ impl Conversation {
}
}
+pub enum AcpServerViewEvent {
+ ActiveThreadChanged,
+}
+
+impl EventEmitter for ConnectionView {}
+
pub struct ConnectionView {
agent: Rc,
agent_server_store: Entity,
@@ -386,6 +392,7 @@ impl ConnectionView {
if let Some(view) = self.active_thread() {
view.focus_handle(cx).focus(window, cx);
}
+ cx.emit(AcpServerViewEvent::ActiveThreadChanged);
cx.notify();
}
}
@@ -524,6 +531,7 @@ impl ConnectionView {
}
self.server_state = state;
+ cx.emit(AcpServerViewEvent::ActiveThreadChanged);
cx.notify();
}
@@ -728,6 +736,14 @@ impl ConnectionView {
}
let id = current.read(cx).thread.read(cx).session_id().clone();
+ let session_list = if connection.supports_session_history() {
+ connection.session_list(cx)
+ } else {
+ None
+ };
+ this.history.update(cx, |history, cx| {
+ history.set_session_list(session_list, cx);
+ });
this.set_server_state(
ServerState::Connected(ConnectedServerState {
connection,
@@ -829,18 +845,14 @@ impl ConnectionView {
);
});
+ if let Some(scroll_position) = thread.read(cx).ui_scroll_position() {
+ list_state.scroll_to(scroll_position);
+ }
+
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
let connection = thread.read(cx).connection().clone();
let session_id = thread.read(cx).session_id().clone();
- let session_list = if connection.supports_session_history() {
- connection.session_list(cx)
- } else {
- None
- };
- self.history.update(cx, |history, cx| {
- history.set_session_list(session_list, cx);
- });
// Check for config options first
// Config options take precedence over legacy mode/model selectors
@@ -2835,6 +2847,33 @@ pub(crate) mod tests {
});
}
+ #[gpui::test]
+ async fn test_new_thread_creation_triggers_session_list_refresh(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let session = AgentSessionInfo::new(SessionId::new("history-session"));
+ let (thread_view, history, cx) = setup_thread_view_with_history(
+ StubAgentServer::new(SessionHistoryConnection::new(vec![session.clone()])),
+ cx,
+ )
+ .await;
+
+ history.read_with(cx, |history, _cx| {
+ assert!(
+ history.has_session_list(),
+ "session list should be attached after thread creation"
+ );
+ });
+
+ active_thread(&thread_view, cx).read_with(cx, |view, _cx| {
+ assert_eq!(view.recent_history_entries.len(), 1);
+ assert_eq!(
+ view.recent_history_entries[0].session_id,
+ session.session_id
+ );
+ });
+ }
+
#[gpui::test]
async fn test_resume_without_history_adds_notice(cx: &mut TestAppContext) {
init_test(cx);
@@ -3482,6 +3521,18 @@ pub(crate) mod tests {
agent: impl AgentServer + 'static,
cx: &mut TestAppContext,
) -> (Entity, &mut VisualTestContext) {
+ let (thread_view, _history, cx) = setup_thread_view_with_history(agent, cx).await;
+ (thread_view, cx)
+ }
+
+ async fn setup_thread_view_with_history(
+ agent: impl AgentServer + 'static,
+ cx: &mut TestAppContext,
+ ) -> (
+ Entity,
+ Entity,
+ &mut VisualTestContext,
+ ) {
let fs = FakeFs::new(cx.executor());
let project = Project::test(fs, [], cx).await;
let (multi_workspace, cx) =
@@ -3501,14 +3552,14 @@ pub(crate) mod tests {
project,
Some(thread_store),
None,
- history,
+ history.clone(),
window,
cx,
)
})
});
cx.run_until_parked();
- (thread_view, cx)
+ (thread_view, history, cx)
}
fn add_to_workspace(thread_view: Entity, cx: &mut VisualTestContext) {
@@ -3648,6 +3699,102 @@ pub(crate) mod tests {
) -> Task> {
Task::ready(Ok(AgentSessionListResponse::new(self.sessions.clone())))
}
+
+ fn into_any(self: Rc) -> Rc {
+ self
+ }
+ }
+
+ #[derive(Clone)]
+ struct SessionHistoryConnection {
+ sessions: Vec,
+ }
+
+ impl SessionHistoryConnection {
+ fn new(sessions: Vec) -> Self {
+ Self { sessions }
+ }
+ }
+
+ fn build_test_thread(
+ connection: Rc,
+ project: Entity,
+ name: &'static str,
+ session_id: SessionId,
+ cx: &mut App,
+ ) -> Entity {
+ let action_log = cx.new(|_| ActionLog::new(project.clone()));
+ cx.new(|cx| {
+ AcpThread::new(
+ None,
+ name,
+ connection,
+ project,
+ action_log,
+ session_id,
+ watch::Receiver::constant(
+ acp::PromptCapabilities::new()
+ .image(true)
+ .audio(true)
+ .embedded_context(true),
+ ),
+ cx,
+ )
+ })
+ }
+
+ impl AgentConnection for SessionHistoryConnection {
+ fn telemetry_id(&self) -> SharedString {
+ "history-connection".into()
+ }
+
+ fn new_session(
+ self: Rc,
+ project: Entity,
+ _cwd: &Path,
+ cx: &mut App,
+ ) -> Task>> {
+ let thread = build_test_thread(
+ self,
+ project,
+ "SessionHistoryConnection",
+ SessionId::new("history-session"),
+ cx,
+ );
+ Task::ready(Ok(thread))
+ }
+
+ fn supports_load_session(&self) -> bool {
+ true
+ }
+
+ fn session_list(&self, _cx: &mut App) -> Option> {
+ Some(Rc::new(StubSessionList::new(self.sessions.clone())))
+ }
+
+ fn auth_methods(&self) -> &[acp::AuthMethod] {
+ &[]
+ }
+
+ fn authenticate(
+ &self,
+ _method_id: acp::AuthMethodId,
+ _cx: &mut App,
+ ) -> Task> {
+ Task::ready(Ok(()))
+ }
+
+ fn prompt(
+ &self,
+ _id: Option,
+ _params: acp::PromptRequest,
+ _cx: &mut App,
+ ) -> Task> {
+ Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
+ }
+
+ fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {}
+
fn into_any(self: Rc) -> Rc {
self
}
@@ -3667,24 +3814,13 @@ pub(crate) mod tests {
_cwd: &Path,
cx: &mut gpui::App,
) -> Task>> {
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let thread = cx.new(|cx| {
- AcpThread::new(
- None,
- "ResumeOnlyAgentConnection",
- self.clone(),
- project,
- action_log,
- SessionId::new("new-session"),
- watch::Receiver::constant(
- acp::PromptCapabilities::new()
- .image(true)
- .audio(true)
- .embedded_context(true),
- ),
- cx,
- )
- });
+ let thread = build_test_thread(
+ self,
+ project,
+ "ResumeOnlyAgentConnection",
+ SessionId::new("new-session"),
+ cx,
+ );
Task::ready(Ok(thread))
}
@@ -3699,24 +3835,13 @@ pub(crate) mod tests {
_cwd: &Path,
cx: &mut App,
) -> Task>> {
- let action_log = cx.new(|_| ActionLog::new(project.clone()));
- let thread = cx.new(|cx| {
- AcpThread::new(
- None,
- "ResumeOnlyAgentConnection",
- self.clone(),
- project,
- action_log,
- session.session_id,
- watch::Receiver::constant(
- acp::PromptCapabilities::new()
- .image(true)
- .audio(true)
- .embedded_context(true),
- ),
- cx,
- )
- });
+ let thread = build_test_thread(
+ self,
+ project,
+ "ResumeOnlyAgentConnection",
+ session.session_id,
+ cx,
+ );
Task::ready(Ok(thread))
}
diff --git a/crates/agent_ui/src/connection_view/thread_view.rs b/crates/agent_ui/src/connection_view/thread_view.rs
index 2544305bc8f8666b897d11285ffa7711f3af8794..8a1a7d2ea5b0f01ba559e83051861b9d6324985f 100644
--- a/crates/agent_ui/src/connection_view/thread_view.rs
+++ b/crates/agent_ui/src/connection_view/thread_view.rs
@@ -1,6 +1,8 @@
use acp_thread::ContentBlock;
use cloud_api_types::{SubmitAgentThreadFeedbackBody, SubmitAgentThreadFeedbackCommentsBody};
use editor::actions::OpenExcerpts;
+
+use crate::StartThreadIn;
use gpui::{Corner, List};
use language_model::{LanguageModelEffortLevel, Speed};
use settings::update_settings_file;
@@ -191,6 +193,12 @@ impl DiffStats {
}
}
+pub enum AcpThreadViewEvent {
+ FirstSendRequested { content: Vec },
+}
+
+impl EventEmitter